13. Generative Adversarial Network

13.1. Parameters

[1]:
import torch
import numpy as np

np.random.seed(37)
torch.manual_seed(37)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
n_gpu = 1 # number of GPUs
n_c = 3 # number of image channels (RGB)
n_z = 100 # size of latent vector
n_gf = 64 # size of feature maps in the generator
n_df = 64 # size of feature maps in the discriminator
batch_size = 64 # batch size
beta1 = 0.5 # beta1 for Adam
lr = 0.0002 # learning rate
n_iters = 25 # number of training epochs
out_folder = './output' # output folder
image_size = 64 # image size
data_root = './output/cifar'
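
The learning rate of 0.0002 and beta1 of 0.5 are the standard DCGAN settings. The training loop below saves image grids, and the last section saves checkpoints, under out_folder, assuming the directory already exists. A minimal sketch (assuming the folders may not have been created yet) that makes both paths up front:

[ ]:
import os

os.makedirs(out_folder, exist_ok=True)  # sample grids and checkpoints land here
os.makedirs(data_root, exist_ok=True)   # CIFAR-10 download root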

13.2. Data

[2]:
from torchvision import datasets, transforms
from torch.utils.data import DataLoader

def get_dataloaders(image_size=64, batch_size=4):
    def clean_up(path):
        # Jupyter drops .ipynb_checkpoints folders inside the data directory;
        # remove them so ImageFolder does not mistake them for a class folder
        import os
        from shutil import rmtree

        ipynb_checkpoints = f'{path}/.ipynb_checkpoints'
        if os.path.exists(ipynb_checkpoints):
            rmtree(ipynb_checkpoints)

    def get_dataloader(phase):
        path = f'./shapes/{phase}'
        clean_up(path)

        transform = transforms.Compose([
            transforms.Resize(image_size),
            transforms.CenterCrop(image_size),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ])

        image_folder = datasets.ImageFolder(path, transform=transform)

        return DataLoader(image_folder, batch_size=batch_size, shuffle=True, num_workers=4)

    return {phase: get_dataloader(phase) for phase in ['train', 'test', 'valid']}

def get_cifar(data_root, image_size=64, batch_size=4, num_workers=4):
    transform = transforms.Compose([
        transforms.Resize(image_size),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    ])
    dataset = datasets.CIFAR10(root=data_root, download=True, transform=transform)
    dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers)
    return dataloader

# dataloaders = get_dataloaders(image_size=image_size, batch_size=batch_size)  # alternative: local 'shapes' dataset
dataloader = get_cifar(data_root, image_size, batch_size)
Files already downloaded and verified
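
Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)) shifts each channel from [0, 1] to [-1, 1], matching the (-1, 1) range of the generator's final Tanh. A quick sanity check (a sketch; the shape assumes the batch_size of 64 set above):

[ ]:
batch, _ = next(iter(dataloader))
print(batch.shape)                             # torch.Size([64, 3, 64, 64])
print(batch.min().item(), batch.max().item())  # roughly -1.0 and 1.0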

13.3. Generator network

[3]:
import torch.nn as nn

def weights_init(m):
    # DCGAN initialization: conv weights ~ N(0, 0.02),
    # batch-norm scale ~ N(1, 0.02) with zero bias
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        m.weight.data.normal_(0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        m.weight.data.normal_(1.0, 0.02)
        m.bias.data.fill_(0)


class Generator(nn.Module):
    def __init__(self, ngpu, nz, ngf, nc):
        super(Generator, self).__init__()
        self.ngpu = ngpu
        self.main = nn.Sequential(
            # input is Z, going into a convolution
            nn.ConvTranspose2d(     nz, ngf * 8, 4, 1, 0, bias=False),
            nn.BatchNorm2d(ngf * 8),
            nn.ReLU(True),
            # state size. (ngf*8) x 4 x 4
            nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            # state size. (ngf*4) x 8 x 8
            nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            # state size. (ngf*2) x 16 x 16
            nn.ConvTranspose2d(ngf * 2,     ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            # state size. (ngf) x 32 x 32
            nn.ConvTranspose2d(    ngf,      nc, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. (nc) x 64 x 64
        )

    def forward(self, input):
        if input.is_cuda and self.ngpu > 1:
            output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
        else:
            output = self.main(input)
        return output


netG = Generator(n_gpu, n_z, n_gf, n_c).to(device)
netG.apply(weights_init)
[3]:
Generator(
  (main): Sequential(
    (0): ConvTranspose2d(100, 512, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): ReLU(inplace=True)
    (3): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (5): ReLU(inplace=True)
    (6): ConvTranspose2d(256, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (7): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (8): ReLU(inplace=True)
    (9): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (10): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (11): ReLU(inplace=True)
    (12): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (13): Tanh()
  )
)
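
Each ConvTranspose2d grows the spatial size as out = (in - 1) * stride - 2 * padding + kernel: the first layer maps the 1x1 latent to (1 - 1) * 1 - 0 + 4 = 4, and every following kernel-4, stride-2, padding-1 layer doubles it, 4 -> 8 -> 16 -> 32 -> 64. A quick shape check (a sketch, reusing netG, n_z, and device from above):

[ ]:
z = torch.randn(16, n_z, 1, 1, device=device)  # a batch of 16 latent vectors
with torch.no_grad():
    print(netG(z).shape)  # torch.Size([16, 3, 64, 64])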

13.4. Discriminator network

[4]:
class Discriminator(nn.Module):
    def __init__(self, ngpu, ndf, nc):
        super(Discriminator, self).__init__()
        self.ngpu = ngpu
        self.main = nn.Sequential(
            # input is (nc) x 64 x 64
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf) x 32 x 32
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*2) x 16 x 16
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*4) x 8 x 8
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 8),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*8) x 4 x 4
            nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
        )

    def forward(self, input):
        if input.is_cuda and self.ngpu > 1:
            output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
        else:
            output = self.main(input)

        return output.view(-1, 1).squeeze(1)


netD = Discriminator(n_gpu, n_df, n_c).to(device)
netD.apply(weights_init)
[4]:
Discriminator(
  (main): Sequential(
    (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (1): LeakyReLU(negative_slope=0.2, inplace=True)
    (2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (3): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (4): LeakyReLU(negative_slope=0.2, inplace=True)
    (5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (6): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (7): LeakyReLU(negative_slope=0.2, inplace=True)
    (8): Conv2d(256, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (9): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (10): LeakyReLU(negative_slope=0.2, inplace=True)
    (11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (12): Sigmoid()
  )
)
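
The discriminator mirrors the generator: each Conv2d shrinks its input as out = (in + 2 * padding - kernel) // stride + 1, so 64 -> 32 -> 16 -> 8 -> 4, and the final kernel-4, stride-1, no-padding convolution collapses the 4x4 map to a single value per image, squashed to a probability by the Sigmoid. A matching shape check (a sketch, reusing netD from above):

[ ]:
x = torch.randn(16, n_c, image_size, image_size, device=device)  # 16 image-shaped inputs
with torch.no_grad():
    print(netD(x).shape)  # torch.Size([16]): one probability per image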

13.5. Loss function and optimizer

[5]:
import torch.optim as optim

criterion = nn.BCELoss()

fixed_noise = torch.randn(batch_size, n_z, 1, 1, device=device)
real_label = 1
fake_label = 0

# setup optimizer
optimizerD = optim.Adam(netD.parameters(), lr=lr, betas=(beta1, 0.999))
optimizerG = optim.Adam(netG.parameters(), lr=lr, betas=(beta1, 0.999))
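
With real_label = 1, nn.BCELoss reduces to -log(D(x)) on real images; with fake_label = 0, it reduces to -log(1 - D(G(z))) on fakes. A tiny worked example (a sketch with a hypothetical discriminator output of 0.9):

[ ]:
p = torch.tensor([0.9])  # hypothetical D output for one real image
y = torch.tensor([1.0])  # real label
print(criterion(p, y).item())                # ~0.1054
print(-torch.log(torch.tensor(0.9)).item())  # same value: -log(0.9)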

13.6. Training
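
The loop below alternates the two halves of the GAN minimax game

    min_G max_D V(D, G) = E_{x ~ p_data}[log D(x)] + E_{z ~ p_z}[log(1 - D(G(z)))]

first maximizing the discriminator's objective on a real batch plus a fake batch, then updating the generator against the non-saturating target max_G log D(G(z)); this is why the code flips the fake labels to real_label for the generator step.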

[6]:
import torchvision.utils as vutils
from collections import namedtuple

RESULT = namedtuple('RESULT', 'epoch i D G Dx DGz1 DGz2')
results = []
images = []
iters = 0

for epoch in range(n_iters):
    for i, data in enumerate(dataloader, 0):
        ############################
        # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
        ###########################
        # train with real
        netD.zero_grad()
        real_cpu = data[0].to(device)
        b_size = real_cpu.size(0)  # may be smaller than batch_size on the last batch
        label = torch.full((b_size,), real_label, dtype=torch.float, device=device)

        output = netD(real_cpu)
        errD_real = criterion(output, label)
        errD_real.backward()
        D_x = output.mean().item()

        # train with fake
        noise = torch.randn(b_size, n_z, 1, 1, device=device)
        fake = netG(noise)
        label.fill_(fake_label)
        output = netD(fake.detach())
        errD_fake = criterion(output, label)
        errD_fake.backward()
        D_G_z1 = output.mean().item()
        errD = errD_real + errD_fake
        optimizerD.step()

        ############################
        # (2) Update G network: maximize log(D(G(z)))
        ###########################
        netG.zero_grad()
        label.fill_(real_label)  # fake labels are real for generator cost
        output = netD(fake)
        errG = criterion(output, label)
        errG.backward()
        D_G_z2 = output.mean().item()
        optimizerG.step()

        result = RESULT(epoch, i, errD.item(), errG.item(), D_x, D_G_z1, D_G_z2)
        results.append(result)

        if i % 50 == 0 and i != 0:
            s_iter = f'[{epoch}/{n_iters}][{i}/{len(dataloader)}]'
            s_loss = f'Loss_D {errD.item():.4f} Loss_G: {errG.item():.4f}'
            f_loss = f'D(x): {D_x:.4f} D(G(z)): {D_G_z1:.4f} / {D_G_z2:.4f}'
            print(f'{s_iter} {s_loss} {f_loss}')

        if i % 100 == 0:
            s_epoch = str(epoch).zfill(3)
            f_path = f'{out_folder}/real_samples_epoch_{s_epoch}.png'
            vutils.save_image(real_cpu, f_path, normalize=True)

            f_path = f'{out_folder}/fake_samples_epoch_{s_epoch}.png'

            with torch.no_grad():
                fake = netG(fixed_noise)
            vutils.save_image(fake.detach(), f_path, normalize=True)


        if (iters % 500 == 0) or ((epoch == n_iters - 1) and (i == len(dataloader) - 1)):
            with torch.no_grad():
                fake = netG(fixed_noise).detach().cpu()
            images.append(vutils.make_grid(fake, padding=2, normalize=True))

        iters += 1
[0/25][50/782] Loss_D 0.7834 Loss_G: 15.7248 D(x): 0.9287 D(G(z)): 0.4678 / 0.0000
[0/25][100/782] Loss_D 0.4271 Loss_G: 5.3906 D(x): 0.9323 D(G(z)): 0.2665 / 0.0080
[0/25][150/782] Loss_D 0.3438 Loss_G: 4.1836 D(x): 0.8240 D(G(z)): 0.0964 / 0.0353
[0/25][200/782] Loss_D 0.2067 Loss_G: 5.6477 D(x): 0.9412 D(G(z)): 0.1287 / 0.0053
[0/25][250/782] Loss_D 0.2138 Loss_G: 5.7386 D(x): 0.9344 D(G(z)): 0.1209 / 0.0044
[0/25][300/782] Loss_D 0.1902 Loss_G: 4.8060 D(x): 0.8888 D(G(z)): 0.0156 / 0.0154
[0/25][350/782] Loss_D 0.1072 Loss_G: 7.5507 D(x): 0.9394 D(G(z)): 0.0237 / 0.0060
[0/25][400/782] Loss_D 0.1532 Loss_G: 5.0835 D(x): 0.9484 D(G(z)): 0.0876 / 0.0111
[0/25][450/782] Loss_D 0.6046 Loss_G: 3.0891 D(x): 0.6569 D(G(z)): 0.0137 / 0.0736
[0/25][500/782] Loss_D 0.4401 Loss_G: 5.5217 D(x): 0.8637 D(G(z)): 0.2194 / 0.0053
[0/25][550/782] Loss_D 0.7421 Loss_G: 6.5020 D(x): 0.9843 D(G(z)): 0.4203 / 0.0037
[0/25][600/782] Loss_D 0.3080 Loss_G: 4.7863 D(x): 0.8827 D(G(z)): 0.1266 / 0.0159
[0/25][650/782] Loss_D 0.2596 Loss_G: 4.2760 D(x): 0.9353 D(G(z)): 0.1616 / 0.0187
[0/25][700/782] Loss_D 0.7165 Loss_G: 8.0837 D(x): 0.9550 D(G(z)): 0.4491 / 0.0004
[0/25][750/782] Loss_D 1.4665 Loss_G: 9.3171 D(x): 0.9717 D(G(z)): 0.6945 / 0.0002
[1/25][50/782] Loss_D 0.6382 Loss_G: 3.0363 D(x): 0.7563 D(G(z)): 0.2311 / 0.0650
[1/25][100/782] Loss_D 0.4246 Loss_G: 3.4614 D(x): 0.8657 D(G(z)): 0.2114 / 0.0612
[1/25][150/782] Loss_D 0.6902 Loss_G: 6.1063 D(x): 0.9367 D(G(z)): 0.4202 / 0.0041
[1/25][200/782] Loss_D 0.4456 Loss_G: 3.4427 D(x): 0.7696 D(G(z)): 0.1046 / 0.0577
[1/25][250/782] Loss_D 0.2298 Loss_G: 2.7385 D(x): 0.9193 D(G(z)): 0.1258 / 0.0819
[1/25][300/782] Loss_D 0.4711 Loss_G: 4.7522 D(x): 0.7041 D(G(z)): 0.0397 / 0.0172
[1/25][350/782] Loss_D 0.6086 Loss_G: 5.5933 D(x): 0.9591 D(G(z)): 0.4010 / 0.0056
[1/25][400/782] Loss_D 0.5073 Loss_G: 4.3764 D(x): 0.9838 D(G(z)): 0.3308 / 0.0241
[1/25][450/782] Loss_D 1.6894 Loss_G: 9.4357 D(x): 0.8774 D(G(z)): 0.7249 / 0.0002
[1/25][500/782] Loss_D 0.1665 Loss_G: 3.7841 D(x): 0.9124 D(G(z)): 0.0581 / 0.0434
[1/25][550/782] Loss_D 0.6058 Loss_G: 1.4416 D(x): 0.6373 D(G(z)): 0.0615 / 0.2800
[1/25][600/782] Loss_D 2.1562 Loss_G: 8.0170 D(x): 0.9590 D(G(z)): 0.8363 / 0.0007
[1/25][650/782] Loss_D 1.0446 Loss_G: 0.9181 D(x): 0.4662 D(G(z)): 0.0974 / 0.4707
[1/25][700/782] Loss_D 0.3369 Loss_G: 4.2627 D(x): 0.7539 D(G(z)): 0.0151 / 0.0234
[1/25][750/782] Loss_D 0.4951 Loss_G: 2.3422 D(x): 0.7628 D(G(z)): 0.1241 / 0.1420
[2/25][50/782] Loss_D 1.7842 Loss_G: 6.8725 D(x): 0.9742 D(G(z)): 0.7521 / 0.0018
[2/25][100/782] Loss_D 0.8502 Loss_G: 2.9008 D(x): 0.6482 D(G(z)): 0.2478 / 0.0828
[2/25][150/782] Loss_D 0.9343 Loss_G: 5.2947 D(x): 0.9023 D(G(z)): 0.5148 / 0.0068
[2/25][200/782] Loss_D 1.1300 Loss_G: 5.4461 D(x): 0.8801 D(G(z)): 0.5454 / 0.0073
[2/25][250/782] Loss_D 0.8607 Loss_G: 2.7530 D(x): 0.5140 D(G(z)): 0.0087 / 0.1163
[2/25][300/782] Loss_D 0.6639 Loss_G: 2.9879 D(x): 0.6521 D(G(z)): 0.1018 / 0.0957
[2/25][350/782] Loss_D 0.4694 Loss_G: 2.5834 D(x): 0.7714 D(G(z)): 0.1381 / 0.1039
[2/25][400/782] Loss_D 0.2773 Loss_G: 4.1966 D(x): 0.8335 D(G(z)): 0.0669 / 0.0335
[2/25][450/782] Loss_D 0.6883 Loss_G: 1.6746 D(x): 0.6910 D(G(z)): 0.2092 / 0.2262
[2/25][500/782] Loss_D 0.3422 Loss_G: 3.7816 D(x): 0.9185 D(G(z)): 0.1987 / 0.0322
[2/25][550/782] Loss_D 1.9842 Loss_G: 6.7170 D(x): 0.9543 D(G(z)): 0.7967 / 0.0034
[2/25][600/782] Loss_D 0.4712 Loss_G: 2.6178 D(x): 0.7329 D(G(z)): 0.0946 / 0.1093
[2/25][650/782] Loss_D 0.7656 Loss_G: 1.4454 D(x): 0.5847 D(G(z)): 0.0990 / 0.3102
[2/25][700/782] Loss_D 1.0424 Loss_G: 4.5118 D(x): 0.9036 D(G(z)): 0.5086 / 0.0188
[2/25][750/782] Loss_D 0.4275 Loss_G: 3.6798 D(x): 0.7718 D(G(z)): 0.0963 / 0.0390
[3/25][50/782] Loss_D 0.6632 Loss_G: 4.0165 D(x): 0.9461 D(G(z)): 0.4023 / 0.0273
[3/25][100/782] Loss_D 1.0716 Loss_G: 3.5090 D(x): 0.7807 D(G(z)): 0.4790 / 0.0474
[3/25][150/782] Loss_D 0.7347 Loss_G: 1.6191 D(x): 0.5927 D(G(z)): 0.0917 / 0.2504
[3/25][200/782] Loss_D 0.4516 Loss_G: 2.2081 D(x): 0.8864 D(G(z)): 0.2394 / 0.1563
[3/25][250/782] Loss_D 0.6102 Loss_G: 3.8919 D(x): 0.9163 D(G(z)): 0.3603 / 0.0293
[3/25][300/782] Loss_D 0.6649 Loss_G: 4.5004 D(x): 0.9577 D(G(z)): 0.4236 / 0.0152
[3/25][350/782] Loss_D 0.7462 Loss_G: 3.9795 D(x): 0.9376 D(G(z)): 0.4419 / 0.0288
[3/25][400/782] Loss_D 0.9484 Loss_G: 1.8836 D(x): 0.5117 D(G(z)): 0.1323 / 0.2050
[3/25][450/782] Loss_D 1.4884 Loss_G: 6.1669 D(x): 0.9702 D(G(z)): 0.7024 / 0.0037
[3/25][500/782] Loss_D 0.7755 Loss_G: 3.7367 D(x): 0.8899 D(G(z)): 0.4016 / 0.0308
[3/25][550/782] Loss_D 0.5314 Loss_G: 2.4791 D(x): 0.7577 D(G(z)): 0.1842 / 0.1115
[3/25][600/782] Loss_D 0.4666 Loss_G: 3.1325 D(x): 0.8619 D(G(z)): 0.2361 / 0.0673
[3/25][650/782] Loss_D 0.6703 Loss_G: 2.4378 D(x): 0.6888 D(G(z)): 0.2012 / 0.1253
[3/25][700/782] Loss_D 0.7299 Loss_G: 1.6210 D(x): 0.5456 D(G(z)): 0.0396 / 0.2587
[3/25][750/782] Loss_D 0.6318 Loss_G: 3.3232 D(x): 0.9062 D(G(z)): 0.3766 / 0.0467
[4/25][50/782] Loss_D 0.5869 Loss_G: 2.7591 D(x): 0.7839 D(G(z)): 0.2448 / 0.0884
[4/25][100/782] Loss_D 0.4530 Loss_G: 2.9521 D(x): 0.7842 D(G(z)): 0.1593 / 0.0727
[4/25][150/782] Loss_D 0.4725 Loss_G: 2.8858 D(x): 0.8716 D(G(z)): 0.2533 / 0.0719
[4/25][200/782] Loss_D 1.1878 Loss_G: 2.2200 D(x): 0.3965 D(G(z)): 0.0171 / 0.1627
[4/25][250/782] Loss_D 0.6807 Loss_G: 2.3422 D(x): 0.6014 D(G(z)): 0.0590 / 0.1293
[4/25][300/782] Loss_D 0.3175 Loss_G: 3.6563 D(x): 0.8467 D(G(z)): 0.1109 / 0.0395
[4/25][350/782] Loss_D 0.4606 Loss_G: 2.9883 D(x): 0.8387 D(G(z)): 0.2113 / 0.0637
[4/25][400/782] Loss_D 0.4089 Loss_G: 3.6320 D(x): 0.9372 D(G(z)): 0.2611 / 0.0379
[4/25][450/782] Loss_D 0.2978 Loss_G: 4.4109 D(x): 0.9427 D(G(z)): 0.2010 / 0.0155
[4/25][500/782] Loss_D 0.3835 Loss_G: 3.7706 D(x): 0.9383 D(G(z)): 0.2491 / 0.0314
[4/25][550/782] Loss_D 1.1661 Loss_G: 3.4594 D(x): 0.4153 D(G(z)): 0.0046 / 0.0543
[4/25][600/782] Loss_D 0.1664 Loss_G: 4.0682 D(x): 0.9333 D(G(z)): 0.0885 / 0.0241
[4/25][650/782] Loss_D 0.5433 Loss_G: 2.5226 D(x): 0.7809 D(G(z)): 0.2169 / 0.1025
[4/25][700/782] Loss_D 0.3219 Loss_G: 4.1177 D(x): 0.9231 D(G(z)): 0.1948 / 0.0253
[4/25][750/782] Loss_D 0.3230 Loss_G: 3.2808 D(x): 0.8937 D(G(z)): 0.1692 / 0.0517
[5/25][50/782] Loss_D 1.0511 Loss_G: 2.6656 D(x): 0.4296 D(G(z)): 0.0141 / 0.1016
[5/25][100/782] Loss_D 0.0991 Loss_G: 3.3821 D(x): 0.9643 D(G(z)): 0.0585 / 0.0458
[5/25][150/782] Loss_D 0.1011 Loss_G: 3.8977 D(x): 0.9839 D(G(z)): 0.0783 / 0.0307
[5/25][200/782] Loss_D 0.5857 Loss_G: 3.7562 D(x): 0.6394 D(G(z)): 0.0290 / 0.0396
[5/25][250/782] Loss_D 0.1734 Loss_G: 3.7030 D(x): 0.9124 D(G(z)): 0.0727 / 0.0369
[5/25][300/782] Loss_D 1.6813 Loss_G: 2.0502 D(x): 0.2505 D(G(z)): 0.0095 / 0.1846
[5/25][350/782] Loss_D 0.1265 Loss_G: 3.4406 D(x): 0.9272 D(G(z)): 0.0462 / 0.0392
[5/25][400/782] Loss_D 6.1522 Loss_G: 1.4817 D(x): 0.0042 D(G(z)): 0.0000 / 0.2669
[5/25][450/782] Loss_D 0.6658 Loss_G: 4.6075 D(x): 0.9676 D(G(z)): 0.3993 / 0.0158
[5/25][500/782] Loss_D 0.3513 Loss_G: 3.3515 D(x): 0.8492 D(G(z)): 0.1455 / 0.0546
[5/25][550/782] Loss_D 0.1261 Loss_G: 5.4038 D(x): 0.9828 D(G(z)): 0.0997 / 0.0066
[5/25][600/782] Loss_D 0.0284 Loss_G: 5.1358 D(x): 0.9923 D(G(z)): 0.0196 / 0.0085
[5/25][650/782] Loss_D 0.2197 Loss_G: 3.1694 D(x): 0.8336 D(G(z)): 0.0179 / 0.0659
[5/25][700/782] Loss_D 0.2793 Loss_G: 2.4749 D(x): 0.9100 D(G(z)): 0.1453 / 0.1408
[5/25][750/782] Loss_D 0.3403 Loss_G: 2.9249 D(x): 0.9704 D(G(z)): 0.2331 / 0.0777
[6/25][50/782] Loss_D 0.6414 Loss_G: 2.4279 D(x): 0.7573 D(G(z)): 0.2531 / 0.1194
[6/25][100/782] Loss_D 1.8957 Loss_G: 0.8063 D(x): 0.2084 D(G(z)): 0.0175 / 0.4967
[6/25][150/782] Loss_D 0.1432 Loss_G: 3.4046 D(x): 0.8843 D(G(z)): 0.0112 / 0.0477
[6/25][200/782] Loss_D 0.8540 Loss_G: 1.8126 D(x): 0.6009 D(G(z)): 0.2125 / 0.2005
[6/25][250/782] Loss_D 0.8944 Loss_G: 3.5156 D(x): 0.8774 D(G(z)): 0.4764 / 0.0449
[6/25][300/782] Loss_D 0.0763 Loss_G: 4.2608 D(x): 0.9491 D(G(z)): 0.0217 / 0.0217
[6/25][350/782] Loss_D 0.2623 Loss_G: 6.6695 D(x): 0.9774 D(G(z)): 0.1984 / 0.0018
[6/25][400/782] Loss_D 0.1227 Loss_G: 3.5579 D(x): 0.9526 D(G(z)): 0.0684 / 0.0353
[6/25][450/782] Loss_D 0.8186 Loss_G: 2.7789 D(x): 0.7954 D(G(z)): 0.3833 / 0.0854
[6/25][500/782] Loss_D 1.1695 Loss_G: 3.0927 D(x): 0.8876 D(G(z)): 0.5750 / 0.0694
[6/25][550/782] Loss_D 0.1699 Loss_G: 3.3492 D(x): 0.9334 D(G(z)): 0.0893 / 0.0508
[6/25][600/782] Loss_D 0.1275 Loss_G: 4.5166 D(x): 0.9167 D(G(z)): 0.0356 / 0.0190
[6/25][650/782] Loss_D 0.9575 Loss_G: 1.8803 D(x): 0.5637 D(G(z)): 0.2577 / 0.1916
[6/25][700/782] Loss_D 1.0913 Loss_G: 3.4504 D(x): 0.8814 D(G(z)): 0.5606 / 0.0455
[6/25][750/782] Loss_D 0.6791 Loss_G: 6.1872 D(x): 0.9782 D(G(z)): 0.4186 / 0.0036
[7/25][50/782] Loss_D 0.0370 Loss_G: 4.4206 D(x): 0.9783 D(G(z)): 0.0147 / 0.0161
[7/25][100/782] Loss_D 0.9807 Loss_G: 2.3161 D(x): 0.4682 D(G(z)): 0.0546 / 0.1333
[7/25][150/782] Loss_D 0.9362 Loss_G: 2.4235 D(x): 0.4699 D(G(z)): 0.0343 / 0.1350
[7/25][200/782] Loss_D 0.1806 Loss_G: 3.5593 D(x): 0.9400 D(G(z)): 0.1055 / 0.0392
[7/25][250/782] Loss_D 0.3208 Loss_G: 3.3814 D(x): 0.8541 D(G(z)): 0.1312 / 0.0490
[7/25][300/782] Loss_D 0.0915 Loss_G: 4.4662 D(x): 0.9566 D(G(z)): 0.0438 / 0.0163
[7/25][350/782] Loss_D 0.1190 Loss_G: 5.8998 D(x): 0.9025 D(G(z)): 0.0121 / 0.0046
[7/25][400/782] Loss_D 0.3323 Loss_G: 3.5362 D(x): 0.8811 D(G(z)): 0.1627 / 0.0434
[7/25][450/782] Loss_D 0.2230 Loss_G: 2.0106 D(x): 0.8979 D(G(z)): 0.0999 / 0.1810
[7/25][500/782] Loss_D 0.0557 Loss_G: 4.3187 D(x): 0.9745 D(G(z)): 0.0287 / 0.0183
[7/25][550/782] Loss_D 1.2668 Loss_G: 3.4001 D(x): 0.7895 D(G(z)): 0.5711 / 0.0463
[7/25][600/782] Loss_D 0.3011 Loss_G: 6.2022 D(x): 0.9970 D(G(z)): 0.2356 / 0.0030
[7/25][650/782] Loss_D 1.2279 Loss_G: 3.3099 D(x): 0.7172 D(G(z)): 0.5223 / 0.0520
[7/25][700/782] Loss_D 0.1256 Loss_G: 4.0823 D(x): 0.9676 D(G(z)): 0.0819 / 0.0267
[7/25][750/782] Loss_D 0.1673 Loss_G: 3.2711 D(x): 0.9100 D(G(z)): 0.0616 / 0.0534
[8/25][50/782] Loss_D 0.0420 Loss_G: 4.6339 D(x): 0.9774 D(G(z)): 0.0186 / 0.0147
[8/25][100/782] Loss_D 0.0837 Loss_G: 5.6516 D(x): 0.9252 D(G(z)): 0.0021 / 0.0052
[8/25][150/782] Loss_D 0.8059 Loss_G: 1.6140 D(x): 0.6019 D(G(z)): 0.1435 / 0.3100
[8/25][200/782] Loss_D 0.4669 Loss_G: 2.9765 D(x): 0.7083 D(G(z)): 0.0694 / 0.0850
[8/25][250/782] Loss_D 1.3632 Loss_G: 1.2597 D(x): 0.3412 D(G(z)): 0.0119 / 0.3349
[8/25][300/782] Loss_D 0.1502 Loss_G: 6.1929 D(x): 0.9976 D(G(z)): 0.1315 / 0.0027
[8/25][350/782] Loss_D 0.5929 Loss_G: 1.5850 D(x): 0.7698 D(G(z)): 0.2402 / 0.2373
[8/25][400/782] Loss_D 1.1660 Loss_G: 0.9682 D(x): 0.3790 D(G(z)): 0.0169 / 0.4468
[8/25][450/782] Loss_D 1.1737 Loss_G: 5.2161 D(x): 0.9932 D(G(z)): 0.6199 / 0.0096
[8/25][500/782] Loss_D 0.8662 Loss_G: 4.6275 D(x): 0.9275 D(G(z)): 0.4852 / 0.0168
[8/25][550/782] Loss_D 0.3842 Loss_G: 2.0045 D(x): 0.7963 D(G(z)): 0.1217 / 0.1814
[8/25][600/782] Loss_D 0.5976 Loss_G: 3.9504 D(x): 0.8913 D(G(z)): 0.3332 / 0.0287
[8/25][650/782] Loss_D 0.0237 Loss_G: 5.2939 D(x): 0.9828 D(G(z)): 0.0062 / 0.0078
[8/25][700/782] Loss_D 0.0297 Loss_G: 5.4265 D(x): 0.9841 D(G(z)): 0.0133 / 0.0066
[8/25][750/782] Loss_D 0.4214 Loss_G: 3.0348 D(x): 0.8034 D(G(z)): 0.1556 / 0.0685
[9/25][50/782] Loss_D 0.0447 Loss_G: 4.6524 D(x): 0.9864 D(G(z)): 0.0299 / 0.0142
[9/25][100/782] Loss_D 1.3072 Loss_G: 0.9871 D(x): 0.3933 D(G(z)): 0.1340 / 0.4223
[9/25][150/782] Loss_D 0.2969 Loss_G: 2.3855 D(x): 0.8564 D(G(z)): 0.1192 / 0.1295
[9/25][200/782] Loss_D 0.7744 Loss_G: 3.3468 D(x): 0.9009 D(G(z)): 0.4482 / 0.0493
[9/25][250/782] Loss_D 0.0482 Loss_G: 4.2668 D(x): 0.9784 D(G(z)): 0.0252 / 0.0209
[9/25][300/782] Loss_D 1.1020 Loss_G: 1.6177 D(x): 0.6193 D(G(z)): 0.3955 / 0.2420
[9/25][350/782] Loss_D 0.5407 Loss_G: 2.6003 D(x): 0.8205 D(G(z)): 0.2568 / 0.0870
[9/25][400/782] Loss_D 0.0882 Loss_G: 4.8674 D(x): 0.9268 D(G(z)): 0.0074 / 0.0107
[9/25][450/782] Loss_D 4.2005 Loss_G: 0.0015 D(x): 0.0286 D(G(z)): 0.0008 / 0.9985
[9/25][500/782] Loss_D 0.2555 Loss_G: 2.7405 D(x): 0.8624 D(G(z)): 0.0792 / 0.0957
[9/25][550/782] Loss_D 0.5079 Loss_G: 3.6012 D(x): 0.8837 D(G(z)): 0.2803 / 0.0355
[9/25][600/782] Loss_D 0.2615 Loss_G: 3.7985 D(x): 0.9162 D(G(z)): 0.1400 / 0.0307
[9/25][650/782] Loss_D 0.0212 Loss_G: 5.6326 D(x): 0.9851 D(G(z)): 0.0060 / 0.0050
[9/25][700/782] Loss_D 0.6515 Loss_G: 2.6710 D(x): 0.7764 D(G(z)): 0.2861 / 0.0905
[9/25][750/782] Loss_D 0.2682 Loss_G: 4.8963 D(x): 0.9801 D(G(z)): 0.2000 / 0.0104
[10/25][50/782] Loss_D 0.9245 Loss_G: 1.7341 D(x): 0.6046 D(G(z)): 0.2419 / 0.2329
[10/25][100/782] Loss_D 0.4506 Loss_G: 2.6127 D(x): 0.7815 D(G(z)): 0.1470 / 0.1015
[10/25][150/782] Loss_D 0.6538 Loss_G: 2.2270 D(x): 0.7023 D(G(z)): 0.1900 / 0.1547
[10/25][200/782] Loss_D 0.9543 Loss_G: 1.5560 D(x): 0.6615 D(G(z)): 0.3327 / 0.2521
[10/25][250/782] Loss_D 0.1074 Loss_G: 3.5261 D(x): 0.9628 D(G(z)): 0.0623 / 0.0432
[10/25][300/782] Loss_D 0.0795 Loss_G: 4.1646 D(x): 0.9468 D(G(z)): 0.0206 / 0.0249
[10/25][350/782] Loss_D 0.0480 Loss_G: 5.9392 D(x): 0.9589 D(G(z)): 0.0053 / 0.0044
[10/25][400/782] Loss_D 0.0063 Loss_G: 6.1719 D(x): 0.9975 D(G(z)): 0.0038 / 0.0036
[10/25][450/782] Loss_D 2.5055 Loss_G: 2.6493 D(x): 0.1609 D(G(z)): 0.0047 / 0.2117
[10/25][500/782] Loss_D 0.4496 Loss_G: 2.3673 D(x): 0.8132 D(G(z)): 0.1805 / 0.1305
[10/25][550/782] Loss_D 0.6912 Loss_G: 4.9578 D(x): 0.9292 D(G(z)): 0.3971 / 0.0111
[10/25][600/782] Loss_D 0.4626 Loss_G: 2.8946 D(x): 0.7895 D(G(z)): 0.1683 / 0.0839
[10/25][650/782] Loss_D 0.7364 Loss_G: 2.2725 D(x): 0.5803 D(G(z)): 0.0516 / 0.1543
[10/25][700/782] Loss_D 0.4676 Loss_G: 4.1460 D(x): 0.8459 D(G(z)): 0.2147 / 0.0264
[10/25][750/782] Loss_D 0.4504 Loss_G: 1.4417 D(x): 0.7458 D(G(z)): 0.1065 / 0.2798
[11/25][50/782] Loss_D 0.8905 Loss_G: 1.2604 D(x): 0.6084 D(G(z)): 0.2436 / 0.3134
[11/25][100/782] Loss_D 0.4074 Loss_G: 3.1855 D(x): 0.9149 D(G(z)): 0.2454 / 0.0574
[11/25][150/782] Loss_D 1.0705 Loss_G: 1.3106 D(x): 0.4699 D(G(z)): 0.1609 / 0.3054
[11/25][200/782] Loss_D 0.4235 Loss_G: 2.6661 D(x): 0.7308 D(G(z)): 0.0833 / 0.0864
[11/25][250/782] Loss_D 0.0798 Loss_G: 4.3766 D(x): 0.9498 D(G(z)): 0.0249 / 0.0187
[11/25][300/782] Loss_D 0.0101 Loss_G: 6.3352 D(x): 0.9921 D(G(z)): 0.0020 / 0.0031
[11/25][350/782] Loss_D 0.7885 Loss_G: 2.4356 D(x): 0.8011 D(G(z)): 0.3770 / 0.1072
[11/25][400/782] Loss_D 0.9588 Loss_G: 1.6773 D(x): 0.6588 D(G(z)): 0.3565 / 0.2170
[11/25][450/782] Loss_D 0.0457 Loss_G: 4.8730 D(x): 0.9680 D(G(z)): 0.0125 / 0.0129
[11/25][500/782] Loss_D 0.9351 Loss_G: 2.9391 D(x): 0.7262 D(G(z)): 0.3932 / 0.0748
[11/25][550/782] Loss_D 0.2743 Loss_G: 3.2473 D(x): 0.8958 D(G(z)): 0.1356 / 0.0578
[11/25][600/782] Loss_D 0.4896 Loss_G: 3.1020 D(x): 0.8775 D(G(z)): 0.2653 / 0.0599
[11/25][650/782] Loss_D 0.0533 Loss_G: 4.0916 D(x): 0.9811 D(G(z)): 0.0308 / 0.0260
[11/25][700/782] Loss_D 0.5871 Loss_G: 2.6649 D(x): 0.7972 D(G(z)): 0.2508 / 0.0979
[11/25][750/782] Loss_D 0.8476 Loss_G: 2.9815 D(x): 0.7443 D(G(z)): 0.3588 / 0.0746
[12/25][50/782] Loss_D 0.0598 Loss_G: 4.1849 D(x): 0.9743 D(G(z)): 0.0325 / 0.0209
[12/25][100/782] Loss_D 0.0416 Loss_G: 4.9185 D(x): 0.9708 D(G(z)): 0.0109 / 0.0127
[12/25][150/782] Loss_D 0.9905 Loss_G: 1.0574 D(x): 0.4616 D(G(z)): 0.0824 / 0.4063
[12/25][200/782] Loss_D 0.6353 Loss_G: 2.8862 D(x): 0.7644 D(G(z)): 0.2594 / 0.0729
[12/25][250/782] Loss_D 0.5639 Loss_G: 2.5309 D(x): 0.7504 D(G(z)): 0.2008 / 0.1006
[12/25][300/782] Loss_D 0.9474 Loss_G: 4.0552 D(x): 0.8197 D(G(z)): 0.4576 / 0.0263
[12/25][350/782] Loss_D 0.1257 Loss_G: 3.5964 D(x): 0.9171 D(G(z)): 0.0353 / 0.0389
[12/25][400/782] Loss_D 0.7247 Loss_G: 2.1387 D(x): 0.6517 D(G(z)): 0.1821 / 0.1610
[12/25][450/782] Loss_D 0.6868 Loss_G: 2.1138 D(x): 0.6239 D(G(z)): 0.1269 / 0.1673
[12/25][500/782] Loss_D 0.0584 Loss_G: 4.3623 D(x): 0.9569 D(G(z)): 0.0130 / 0.0203
[12/25][550/782] Loss_D 0.8797 Loss_G: 1.5697 D(x): 0.6370 D(G(z)): 0.2439 / 0.2460
[12/25][600/782] Loss_D 7.5710 Loss_G: 2.4674 D(x): 0.0009 D(G(z)): 0.0001 / 0.1276
[12/25][650/782] Loss_D 0.9782 Loss_G: 5.7926 D(x): 0.9519 D(G(z)): 0.5529 / 0.0043
[12/25][700/782] Loss_D 0.6807 Loss_G: 2.1282 D(x): 0.6616 D(G(z)): 0.1727 / 0.1749
[12/25][750/782] Loss_D 0.6323 Loss_G: 2.3704 D(x): 0.7030 D(G(z)): 0.1984 / 0.1226
[13/25][50/782] Loss_D 1.0368 Loss_G: 2.7112 D(x): 0.5151 D(G(z)): 0.1691 / 0.0971
[13/25][100/782] Loss_D 0.2770 Loss_G: 5.8978 D(x): 0.7826 D(G(z)): 0.0016 / 0.0045
[13/25][150/782] Loss_D 0.4838 Loss_G: 2.5277 D(x): 0.8444 D(G(z)): 0.2408 / 0.1027
[13/25][200/782] Loss_D 0.5211 Loss_G: 6.1884 D(x): 0.9972 D(G(z)): 0.3586 / 0.0033
[13/25][250/782] Loss_D 0.0469 Loss_G: 4.8081 D(x): 0.9855 D(G(z)): 0.0312 / 0.0128
[13/25][300/782] Loss_D 0.0776 Loss_G: 5.4189 D(x): 0.9877 D(G(z)): 0.0614 / 0.0061
[13/25][350/782] Loss_D 0.6767 Loss_G: 2.1970 D(x): 0.6357 D(G(z)): 0.1544 / 0.1457
[13/25][400/782] Loss_D 1.0212 Loss_G: 1.8238 D(x): 0.5974 D(G(z)): 0.2626 / 0.2514
[13/25][450/782] Loss_D 0.4763 Loss_G: 3.6136 D(x): 0.8988 D(G(z)): 0.2602 / 0.0468
[13/25][500/782] Loss_D 1.2395 Loss_G: 5.9642 D(x): 0.9952 D(G(z)): 0.6262 / 0.0043
[13/25][550/782] Loss_D 0.0623 Loss_G: 4.6587 D(x): 0.9810 D(G(z)): 0.0404 / 0.0141
[13/25][600/782] Loss_D 0.5365 Loss_G: 2.9783 D(x): 0.8365 D(G(z)): 0.2708 / 0.0678
[13/25][650/782] Loss_D 0.2531 Loss_G: 3.5508 D(x): 0.9189 D(G(z)): 0.1404 / 0.0418
[13/25][700/782] Loss_D 0.0448 Loss_G: 5.0289 D(x): 0.9982 D(G(z)): 0.0413 / 0.0090
[13/25][750/782] Loss_D 0.8919 Loss_G: 2.0974 D(x): 0.6869 D(G(z)): 0.3303 / 0.1491
[14/25][50/782] Loss_D 1.0443 Loss_G: 0.3851 D(x): 0.4256 D(G(z)): 0.0371 / 0.7058
[14/25][100/782] Loss_D 0.4612 Loss_G: 3.0151 D(x): 0.9191 D(G(z)): 0.2825 / 0.0650
[14/25][150/782] Loss_D 0.0562 Loss_G: 4.0934 D(x): 0.9755 D(G(z)): 0.0300 / 0.0264
[14/25][200/782] Loss_D 0.0049 Loss_G: 8.7068 D(x): 0.9955 D(G(z)): 0.0003 / 0.0003
[14/25][250/782] Loss_D 0.7704 Loss_G: 2.2456 D(x): 0.5524 D(G(z)): 0.0426 / 0.1532
[14/25][300/782] Loss_D 1.2417 Loss_G: 5.4829 D(x): 0.9759 D(G(z)): 0.6279 / 0.0068
[14/25][350/782] Loss_D 1.2918 Loss_G: 3.5468 D(x): 0.7943 D(G(z)): 0.5866 / 0.0428
[14/25][400/782] Loss_D 0.5118 Loss_G: 3.6140 D(x): 0.8822 D(G(z)): 0.2947 / 0.0361
[14/25][450/782] Loss_D 1.4008 Loss_G: 1.4631 D(x): 0.3078 D(G(z)): 0.0521 / 0.2791
[14/25][500/782] Loss_D 0.0391 Loss_G: 5.3991 D(x): 0.9686 D(G(z)): 0.0067 / 0.0077
[14/25][550/782] Loss_D 0.0154 Loss_G: 5.2375 D(x): 0.9951 D(G(z)): 0.0104 / 0.0080
[14/25][600/782] Loss_D 0.6185 Loss_G: 2.4703 D(x): 0.7577 D(G(z)): 0.2456 / 0.1083
[14/25][650/782] Loss_D 0.7204 Loss_G: 4.3873 D(x): 0.9332 D(G(z)): 0.4298 / 0.0178
[14/25][700/782] Loss_D 0.6526 Loss_G: 3.2116 D(x): 0.8768 D(G(z)): 0.3687 / 0.0528
[14/25][750/782] Loss_D 0.4965 Loss_G: 2.5433 D(x): 0.8045 D(G(z)): 0.2056 / 0.1070
[15/25][50/782] Loss_D 2.8338 Loss_G: 8.8114 D(x): 0.9933 D(G(z)): 0.9004 / 0.0003
[15/25][100/782] Loss_D 0.7220 Loss_G: 2.1041 D(x): 0.7197 D(G(z)): 0.2670 / 0.1578
[15/25][150/782] Loss_D 0.5481 Loss_G: 1.8587 D(x): 0.6881 D(G(z)): 0.1127 / 0.1915
[15/25][200/782] Loss_D 0.1658 Loss_G: 4.2430 D(x): 0.8791 D(G(z)): 0.0253 / 0.0288
[15/25][250/782] Loss_D 0.1563 Loss_G: 3.2045 D(x): 0.9144 D(G(z)): 0.0587 / 0.0567
[15/25][300/782] Loss_D 0.7528 Loss_G: 1.2340 D(x): 0.6329 D(G(z)): 0.1851 / 0.3387
[15/25][350/782] Loss_D 0.7421 Loss_G: 2.3607 D(x): 0.7524 D(G(z)): 0.3138 / 0.1264
[15/25][400/782] Loss_D 0.5206 Loss_G: 2.4389 D(x): 0.7333 D(G(z)): 0.1509 / 0.1131
[15/25][450/782] Loss_D 0.0279 Loss_G: 7.0473 D(x): 0.9745 D(G(z)): 0.0016 / 0.0014
[15/25][500/782] Loss_D 0.0063 Loss_G: 6.9873 D(x): 0.9954 D(G(z)): 0.0017 / 0.0016
[15/25][550/782] Loss_D 0.3320 Loss_G: 3.7679 D(x): 0.9446 D(G(z)): 0.2199 / 0.0324
[15/25][600/782] Loss_D 1.3902 Loss_G: 0.8891 D(x): 0.3377 D(G(z)): 0.0581 / 0.4712
[15/25][650/782] Loss_D 0.1105 Loss_G: 4.5017 D(x): 0.9907 D(G(z)): 0.0899 / 0.0177
[15/25][700/782] Loss_D 0.6418 Loss_G: 3.1570 D(x): 0.9576 D(G(z)): 0.3968 / 0.0592
[15/25][750/782] Loss_D 0.0113 Loss_G: 5.7220 D(x): 0.9960 D(G(z)): 0.0072 / 0.0055
[16/25][50/782] Loss_D 0.6471 Loss_G: 2.3364 D(x): 0.7472 D(G(z)): 0.2410 / 0.1309
[16/25][100/782] Loss_D 0.6096 Loss_G: 2.0560 D(x): 0.6224 D(G(z)): 0.0604 / 0.1637
[16/25][150/782] Loss_D 0.5783 Loss_G: 3.7573 D(x): 0.9112 D(G(z)): 0.3505 / 0.0320
[16/25][200/782] Loss_D 4.6998 Loss_G: 1.4176 D(x): 0.0170 D(G(z)): 0.0016 / 0.2941
[16/25][250/782] Loss_D 0.5604 Loss_G: 3.3540 D(x): 0.8991 D(G(z)): 0.3251 / 0.0450
[16/25][300/782] Loss_D 0.7839 Loss_G: 1.4070 D(x): 0.5527 D(G(z)): 0.0951 / 0.2933
[16/25][350/782] Loss_D 0.1370 Loss_G: 4.4457 D(x): 0.9786 D(G(z)): 0.1041 / 0.0168
[16/25][400/782] Loss_D 0.4606 Loss_G: 2.6231 D(x): 0.8568 D(G(z)): 0.2324 / 0.0972
[16/25][450/782] Loss_D 0.1300 Loss_G: 3.8260 D(x): 0.9169 D(G(z)): 0.0370 / 0.0323
[16/25][500/782] Loss_D 0.0423 Loss_G: 5.0355 D(x): 0.9633 D(G(z)): 0.0042 / 0.0099
[16/25][550/782] Loss_D 0.0367 Loss_G: 5.3109 D(x): 0.9934 D(G(z)): 0.0292 / 0.0074
[16/25][600/782] Loss_D 0.0169 Loss_G: 6.5044 D(x): 0.9862 D(G(z)): 0.0027 / 0.0026
[16/25][650/782] Loss_D 0.0053 Loss_G: 6.3846 D(x): 0.9973 D(G(z)): 0.0026 / 0.0025
[16/25][700/782] Loss_D 0.0034 Loss_G: 6.6401 D(x): 0.9989 D(G(z)): 0.0022 / 0.0020
[16/25][750/782] Loss_D 0.0089 Loss_G: 6.0763 D(x): 0.9955 D(G(z)): 0.0043 / 0.0030
[17/25][50/782] Loss_D 0.0048 Loss_G: 6.1288 D(x): 0.9991 D(G(z)): 0.0039 / 0.0025
[17/25][100/782] Loss_D 0.0057 Loss_G: 6.0259 D(x): 0.9998 D(G(z)): 0.0055 / 0.0029
[17/25][150/782] Loss_D 0.0076 Loss_G: 6.6150 D(x): 0.9979 D(G(z)): 0.0055 / 0.0017
[17/25][200/782] Loss_D 0.0076 Loss_G: 6.5694 D(x): 0.9985 D(G(z)): 0.0061 / 0.0017
[17/25][250/782] Loss_D 0.0103 Loss_G: 7.1226 D(x): 0.9989 D(G(z)): 0.0092 / 0.0011
[17/25][300/782] Loss_D 0.0033 Loss_G: 6.8271 D(x): 0.9996 D(G(z)): 0.0029 / 0.0013
[17/25][350/782] Loss_D 0.0026 Loss_G: 7.4169 D(x): 0.9997 D(G(z)): 0.0023 / 0.0009
[17/25][400/782] Loss_D 0.0020 Loss_G: 7.1720 D(x): 0.9994 D(G(z)): 0.0013 / 0.0008
[17/25][450/782] Loss_D 0.0016 Loss_G: 7.9112 D(x): 0.9992 D(G(z)): 0.0009 / 0.0004
[17/25][500/782] Loss_D 0.0021 Loss_G: 7.8129 D(x): 0.9985 D(G(z)): 0.0006 / 0.0005
[17/25][550/782] Loss_D 0.0072 Loss_G: 6.5582 D(x): 0.9990 D(G(z)): 0.0061 / 0.0015
[17/25][600/782] Loss_D 0.0008 Loss_G: 7.6831 D(x): 0.9998 D(G(z)): 0.0006 / 0.0005
[17/25][650/782] Loss_D 0.0007 Loss_G: 8.9271 D(x): 0.9995 D(G(z)): 0.0002 / 0.0002
[17/25][700/782] Loss_D 0.0002 Loss_G: 9.5442 D(x): 0.9999 D(G(z)): 0.0002 / 0.0001
[17/25][750/782] Loss_D 0.0010 Loss_G: 9.5904 D(x): 0.9991 D(G(z)): 0.0001 / 0.0001
[18/25][50/782] Loss_D 0.0027 Loss_G: 7.0911 D(x): 0.9997 D(G(z)): 0.0024 / 0.0011
[18/25][100/782] Loss_D 0.0005 Loss_G: 11.2623 D(x): 0.9995 D(G(z)): 0.0000 / 0.0000
[18/25][150/782] Loss_D 0.0000 Loss_G: 11.4600 D(x): 1.0000 D(G(z)): 0.0000 / 0.0000
[18/25][200/782] Loss_D 0.0003 Loss_G: 8.6411 D(x): 0.9999 D(G(z)): 0.0002 / 0.0002
[18/25][250/782] Loss_D 0.0001 Loss_G: 13.7842 D(x): 0.9999 D(G(z)): 0.0000 / 0.0000
[18/25][300/782] Loss_D 0.0137 Loss_G: 7.3127 D(x): 1.0000 D(G(z)): 0.0135 / 0.0007
[18/25][350/782] Loss_D 0.0003 Loss_G: 9.0741 D(x): 0.9998 D(G(z)): 0.0001 / 0.0001
[18/25][400/782] Loss_D 0.0122 Loss_G: 6.9928 D(x): 0.9966 D(G(z)): 0.0087 / 0.0012
[18/25][450/782] Loss_D 0.0496 Loss_G: 9.3902 D(x): 0.9939 D(G(z)): 0.0416 / 0.0002
[18/25][500/782] Loss_D 0.0137 Loss_G: 9.8691 D(x): 0.9909 D(G(z)): 0.0042 / 0.0003
[18/25][550/782] Loss_D 0.0406 Loss_G: 8.1716 D(x): 0.9972 D(G(z)): 0.0362 / 0.0005
[18/25][600/782] Loss_D 0.0110 Loss_G: 7.7297 D(x): 0.9990 D(G(z)): 0.0099 / 0.0008
[18/25][650/782] Loss_D 0.1799 Loss_G: 10.9061 D(x): 0.9987 D(G(z)): 0.1118 / 0.0002
[18/25][700/782] Loss_D 0.1018 Loss_G: 8.3347 D(x): 0.9540 D(G(z)): 0.0003 / 0.0006
[18/25][750/782] Loss_D 10.9013 Loss_G: 8.5192 D(x): 0.0003 D(G(z)): 0.0000 / 0.0074
[19/25][50/782] Loss_D 0.2945 Loss_G: 7.4652 D(x): 0.9693 D(G(z)): 0.1850 / 0.0012
[19/25][100/782] Loss_D 0.0630 Loss_G: 7.3468 D(x): 0.9952 D(G(z)): 0.0524 / 0.0014
[19/25][150/782] Loss_D 0.0723 Loss_G: 4.9565 D(x): 0.9584 D(G(z)): 0.0222 / 0.0119
[19/25][200/782] Loss_D 0.2456 Loss_G: 7.7924 D(x): 0.8256 D(G(z)): 0.0003 / 0.0015
[19/25][250/782] Loss_D 0.3179 Loss_G: 6.3104 D(x): 0.9786 D(G(z)): 0.2184 / 0.0037
[19/25][300/782] Loss_D 0.0668 Loss_G: 5.9980 D(x): 0.9859 D(G(z)): 0.0499 / 0.0041
[19/25][350/782] Loss_D 0.0746 Loss_G: 4.6333 D(x): 0.9565 D(G(z)): 0.0254 / 0.0181
[19/25][400/782] Loss_D 0.0181 Loss_G: 6.6617 D(x): 0.9902 D(G(z)): 0.0080 / 0.0028
[19/25][450/782] Loss_D 0.0032 Loss_G: 7.3127 D(x): 0.9984 D(G(z)): 0.0015 / 0.0011
[19/25][500/782] Loss_D 0.0506 Loss_G: 10.9246 D(x): 0.9558 D(G(z)): 0.0013 / 0.0001
[19/25][550/782] Loss_D 0.2767 Loss_G: 3.7003 D(x): 0.9002 D(G(z)): 0.1255 / 0.0418
[19/25][600/782] Loss_D 0.0907 Loss_G: 4.4226 D(x): 0.9410 D(G(z)): 0.0206 / 0.0220
[19/25][650/782] Loss_D 0.0764 Loss_G: 6.8746 D(x): 0.9876 D(G(z)): 0.0542 / 0.0037
[19/25][700/782] Loss_D 0.1184 Loss_G: 5.4722 D(x): 0.9645 D(G(z)): 0.0673 / 0.0078
[19/25][750/782] Loss_D 0.0607 Loss_G: 4.9716 D(x): 0.9666 D(G(z)): 0.0225 / 0.0110
[20/25][50/782] Loss_D 0.1465 Loss_G: 10.3587 D(x): 0.9929 D(G(z)): 0.1104 / 0.0001
[20/25][100/782] Loss_D 0.0383 Loss_G: 5.3983 D(x): 0.9928 D(G(z)): 0.0295 / 0.0084
[20/25][150/782] Loss_D 0.0193 Loss_G: 5.8721 D(x): 0.9915 D(G(z)): 0.0106 / 0.0047
[20/25][200/782] Loss_D 0.0369 Loss_G: 6.4084 D(x): 0.9966 D(G(z)): 0.0321 / 0.0026
[20/25][250/782] Loss_D 0.1427 Loss_G: 5.7938 D(x): 0.9043 D(G(z)): 0.0222 / 0.0082
[20/25][300/782] Loss_D 0.2397 Loss_G: 4.6791 D(x): 0.8543 D(G(z)): 0.0136 / 0.0212
[20/25][350/782] Loss_D 0.0621 Loss_G: 5.7603 D(x): 0.9871 D(G(z)): 0.0452 / 0.0062
[20/25][400/782] Loss_D 0.4881 Loss_G: 5.3915 D(x): 0.9969 D(G(z)): 0.3013 / 0.0122
[20/25][450/782] Loss_D 0.0683 Loss_G: 5.2579 D(x): 0.9692 D(G(z)): 0.0342 / 0.0094
[20/25][500/782] Loss_D 0.2125 Loss_G: 9.6402 D(x): 0.8419 D(G(z)): 0.0002 / 0.0001
[20/25][550/782] Loss_D 0.0289 Loss_G: 5.8903 D(x): 0.9822 D(G(z)): 0.0097 / 0.0075
[20/25][600/782] Loss_D 0.1424 Loss_G: 3.6984 D(x): 0.9250 D(G(z)): 0.0549 / 0.0397
[20/25][650/782] Loss_D 0.1027 Loss_G: 4.9997 D(x): 0.9689 D(G(z)): 0.0637 / 0.0124
[20/25][700/782] Loss_D 0.1531 Loss_G: 3.5543 D(x): 0.8989 D(G(z)): 0.0298 / 0.0526
[20/25][750/782] Loss_D 0.0328 Loss_G: 4.7151 D(x): 0.9859 D(G(z)): 0.0175 / 0.0150
[21/25][50/782] Loss_D 0.0382 Loss_G: 5.5601 D(x): 0.9825 D(G(z)): 0.0193 / 0.0084
[21/25][100/782] Loss_D 4.5161 Loss_G: 0.1684 D(x): 0.0323 D(G(z)): 0.0001 / 0.8668
[21/25][150/782] Loss_D 0.1309 Loss_G: 4.6155 D(x): 0.9465 D(G(z)): 0.0651 / 0.0173
[21/25][200/782] Loss_D 0.0234 Loss_G: 6.2571 D(x): 0.9818 D(G(z)): 0.0046 / 0.0031
[21/25][250/782] Loss_D 0.3610 Loss_G: 3.3042 D(x): 0.7769 D(G(z)): 0.0253 / 0.0705
[21/25][300/782] Loss_D 0.0287 Loss_G: 5.8048 D(x): 0.9747 D(G(z)): 0.0024 / 0.0046
[21/25][350/782] Loss_D 0.1591 Loss_G: 3.9839 D(x): 0.9601 D(G(z)): 0.1037 / 0.0299
[21/25][400/782] Loss_D 0.1334 Loss_G: 4.1704 D(x): 0.9038 D(G(z)): 0.0233 / 0.0264
[21/25][450/782] Loss_D 0.0845 Loss_G: 4.6322 D(x): 0.9461 D(G(z)): 0.0242 / 0.0196
[21/25][500/782] Loss_D 0.8870 Loss_G: 4.5116 D(x): 0.5200 D(G(z)): 0.0065 / 0.0301
[21/25][550/782] Loss_D 0.2062 Loss_G: 5.6796 D(x): 0.9783 D(G(z)): 0.1454 / 0.0065
[21/25][600/782] Loss_D 1.1910 Loss_G: 6.4619 D(x): 0.9846 D(G(z)): 0.5830 / 0.0044
[21/25][650/782] Loss_D 0.1141 Loss_G: 4.2848 D(x): 0.9327 D(G(z)): 0.0370 / 0.0218
[21/25][700/782] Loss_D 0.1020 Loss_G: 3.7425 D(x): 0.9216 D(G(z)): 0.0145 / 0.0386
[21/25][750/782] Loss_D 0.0244 Loss_G: 5.3808 D(x): 0.9964 D(G(z)): 0.0203 / 0.0083
[22/25][50/782] Loss_D 0.0182 Loss_G: 5.4579 D(x): 0.9934 D(G(z)): 0.0114 / 0.0068
[22/25][100/782] Loss_D 0.0082 Loss_G: 7.3942 D(x): 0.9929 D(G(z)): 0.0008 / 0.0010
[22/25][150/782] Loss_D 0.0620 Loss_G: 5.6863 D(x): 0.9582 D(G(z)): 0.0165 / 0.0069
[22/25][200/782] Loss_D 0.0267 Loss_G: 5.1148 D(x): 0.9951 D(G(z)): 0.0210 / 0.0113
[22/25][250/782] Loss_D 0.3422 Loss_G: 2.7569 D(x): 0.8017 D(G(z)): 0.0706 / 0.0989
[22/25][300/782] Loss_D 0.0896 Loss_G: 5.9563 D(x): 0.9414 D(G(z)): 0.0245 / 0.0055
[22/25][350/782] Loss_D 0.1432 Loss_G: 4.2021 D(x): 0.9013 D(G(z)): 0.0246 / 0.0267
[22/25][400/782] Loss_D 0.2143 Loss_G: 7.7975 D(x): 0.9958 D(G(z)): 0.1676 / 0.0007
[22/25][450/782] Loss_D 0.3073 Loss_G: 3.8885 D(x): 0.8105 D(G(z)): 0.0553 / 0.0405
[22/25][500/782] Loss_D 0.3127 Loss_G: 3.3018 D(x): 0.8921 D(G(z)): 0.1416 / 0.0649
[22/25][550/782] Loss_D 0.0605 Loss_G: 4.3164 D(x): 0.9913 D(G(z)): 0.0491 / 0.0193
[22/25][600/782] Loss_D 0.0328 Loss_G: 4.6500 D(x): 0.9928 D(G(z)): 0.0250 / 0.0139
[22/25][650/782] Loss_D 0.3176 Loss_G: 3.6946 D(x): 0.8474 D(G(z)): 0.1091 / 0.0427
[22/25][700/782] Loss_D 0.2083 Loss_G: 2.9641 D(x): 0.9257 D(G(z)): 0.1112 / 0.0783
[22/25][750/782] Loss_D 0.2745 Loss_G: 2.7967 D(x): 0.8512 D(G(z)): 0.0799 / 0.0974
[23/25][50/782] Loss_D 0.1032 Loss_G: 4.4621 D(x): 0.9338 D(G(z)): 0.0291 / 0.0193
[23/25][100/782] Loss_D 0.2001 Loss_G: 4.5253 D(x): 0.9311 D(G(z)): 0.1020 / 0.0219
[23/25][150/782] Loss_D 0.8856 Loss_G: 6.5326 D(x): 0.9633 D(G(z)): 0.4404 / 0.0039
[23/25][200/782] Loss_D 0.0675 Loss_G: 4.4328 D(x): 0.9869 D(G(z)): 0.0509 / 0.0184
[23/25][250/782] Loss_D 0.0263 Loss_G: 5.4504 D(x): 0.9856 D(G(z)): 0.0110 / 0.0081
[23/25][300/782] Loss_D 0.1661 Loss_G: 4.7940 D(x): 0.9680 D(G(z)): 0.1162 / 0.0143
[23/25][350/782] Loss_D 0.0978 Loss_G: 3.9687 D(x): 0.9514 D(G(z)): 0.0432 / 0.0300
[23/25][400/782] Loss_D 0.4275 Loss_G: 2.7146 D(x): 0.7338 D(G(z)): 0.0293 / 0.1011
[23/25][450/782] Loss_D 0.1470 Loss_G: 4.0145 D(x): 0.8867 D(G(z)): 0.0146 / 0.0314
[23/25][500/782] Loss_D 0.0408 Loss_G: 5.3538 D(x): 0.9946 D(G(z)): 0.0332 / 0.0085
[23/25][550/782] Loss_D 0.0404 Loss_G: 5.4267 D(x): 0.9884 D(G(z)): 0.0269 / 0.0083
[23/25][600/782] Loss_D 0.5927 Loss_G: 5.2480 D(x): 0.6238 D(G(z)): 0.0051 / 0.0186
[23/25][650/782] Loss_D 0.0971 Loss_G: 5.0581 D(x): 0.9332 D(G(z)): 0.0214 / 0.0106
[23/25][700/782] Loss_D 0.2573 Loss_G: 4.2253 D(x): 0.8213 D(G(z)): 0.0195 / 0.0283
[23/25][750/782] Loss_D 0.0374 Loss_G: 5.0644 D(x): 0.9821 D(G(z)): 0.0160 / 0.0105
[24/25][50/782] Loss_D 0.0080 Loss_G: 8.7325 D(x): 0.9923 D(G(z)): 0.0003 / 0.0003
[24/25][100/782] Loss_D 0.0128 Loss_G: 6.3000 D(x): 0.9980 D(G(z)): 0.0107 / 0.0032
[24/25][150/782] Loss_D 0.0014 Loss_G: 8.6238 D(x): 0.9990 D(G(z)): 0.0003 / 0.0003
[24/25][200/782] Loss_D 0.3359 Loss_G: 5.3243 D(x): 0.8233 D(G(z)): 0.0083 / 0.0177
[24/25][250/782] Loss_D 0.1343 Loss_G: 4.4410 D(x): 0.9650 D(G(z)): 0.0821 / 0.0201
[24/25][300/782] Loss_D 0.0596 Loss_G: 6.2632 D(x): 0.9490 D(G(z)): 0.0051 / 0.0033
[24/25][350/782] Loss_D 0.0159 Loss_G: 6.4369 D(x): 0.9952 D(G(z)): 0.0109 / 0.0034
[24/25][400/782] Loss_D 0.2918 Loss_G: 6.1881 D(x): 0.9918 D(G(z)): 0.2209 / 0.0035
[24/25][450/782] Loss_D 0.3634 Loss_G: 4.4417 D(x): 0.7729 D(G(z)): 0.0219 / 0.0227
[24/25][500/782] Loss_D 0.2780 Loss_G: 2.7401 D(x): 0.8505 D(G(z)): 0.0851 / 0.1071
[24/25][550/782] Loss_D 0.0414 Loss_G: 5.5519 D(x): 0.9772 D(G(z)): 0.0168 / 0.0082
[24/25][600/782] Loss_D 0.1559 Loss_G: 4.7665 D(x): 0.9126 D(G(z)): 0.0431 / 0.0189
[24/25][650/782] Loss_D 0.0488 Loss_G: 5.0526 D(x): 0.9780 D(G(z)): 0.0252 / 0.0132
[24/25][700/782] Loss_D 0.0135 Loss_G: 5.2277 D(x): 0.9981 D(G(z)): 0.0115 / 0.0108
[24/25][750/782] Loss_D 0.0136 Loss_G: 6.5464 D(x): 0.9893 D(G(z)): 0.0023 / 0.0027

13.7. Visualization

13.7.1. Visualize loss

[7]:
%matplotlib inline
import matplotlib.pyplot as plt

y1 = [r.D for r in results]
y2 = [r.G for r in results]
x = list(range(len(y1)))

fig, ax1 = plt.subplots(figsize=(15, 5))
ax1.set_xlabel('Iteration')
color = 'tab:red'
ax1.plot(x, y1, color=color)
ax1.set_ylabel('Discriminator Loss', color=color)
ax1.tick_params(axis='y', labelcolor=color)

ax2 = ax1.twinx()
color = 'tab:blue'
ax2.plot(x, y2, color=color)
ax2.set_ylabel('Generator Loss', color=color)
ax2.tick_params(axis='y', labelcolor=color)
_images/gan_14_0.png

13.7.2. Visualize generator’s progression

[12]:
import matplotlib
import matplotlib.animation as animation
from IPython.display import HTML

matplotlib.rcParams['animation.embed_limit'] = 2**128

fig = plt.figure(figsize=(8, 8))
plt.axis('off')
ims = [[plt.imshow(np.transpose(img, (1, 2, 0)), animated=True)] for img in images]
ani = animation.ArtistAnimation(fig, ims, interval=1000, repeat_delay=1000, blit=True)

HTML(ani.to_jshtml())
[12]:
_images/gan_16_1.png

13.7.3. Visualize real vs fake

[9]:
# Grab a batch of real images from the dataloader
real_batch = next(iter(dataloader))

# Plot the real images
plt.figure(figsize=(15, 15))
plt.subplot(1, 2, 1)
plt.axis("off")
plt.title("Real Images")
plt.imshow(np.transpose(vutils.make_grid(real_batch[0].to(device)[:64], padding=5, normalize=True).cpu(), (1, 2, 0)))

# Plot the fake images from the last epoch
plt.subplot(1, 2, 2)
plt.axis("off")
plt.title("Fake Images")
plt.imshow(np.transpose(images[-1], (1, 2, 0)))
plt.show()
_images/gan_18_0.png

13.8. Saving

[10]:
s_epoch = str(epoch).zfill(3)

g_path = f'{out_folder}/netG_epoch_{s_epoch}.pth'
d_path = f'{out_folder}/netD_epoch_{s_epoch}.pth'

torch.save(netG.state_dict(), g_path)
torch.save(netD.state_dict(), d_path)
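
To reuse the checkpoints later, rebuild the networks and load the saved state dicts. A minimal reloading sketch (netG2 is a hypothetical fresh instance; assumes the same Generator definition and hyperparameters as above):

[ ]:
netG2 = Generator(n_gpu, n_z, n_gf, n_c).to(device)
netG2.load_state_dict(torch.load(g_path, map_location=device))
netG2.eval()  # switch batch-norm layers to inference mode

with torch.no_grad():
    samples = netG2(torch.randn(16, n_z, 1, 1, device=device))
print(samples.shape)  # torch.Size([16, 3, 64, 64])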