Make sure the generator training step gets a full batch of noise even if the data loader just ran out.

FeepingCreature 2017-02-06 19:46:58 +01:00
parent 040d553d2a
commit 2a163090e8
1 changed file with 3 additions and 0 deletions

@@ -203,6 +203,9 @@ for epoch in range(opt.niter):
         for p in netD.parameters():
             p.requires_grad = False # to avoid computation
         netG.zero_grad()
+        # in case our last batch was the tail batch of the dataloader,
+        # make sure we feed a full batch of noise
+        noise.data.resize_(opt.batchSize, nz, 1, 1)
         noise.data.normal_(0, 1)
         fake = netG(noise)
         errG = netD(fake)
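
Why the resize is needed: earlier in the loop (not shown in this hunk) the discriminator step presumably resizes noise to match the real batch drawn from the dataloader, so after the dataset's tail batch the tensor can end up smaller than opt.batchSize. The sketch below is a minimal reproduction of that situation with illustrative values (batch size 64, tail batch of 40, nz = 100); the script uses noise.data because noise is wrapped in an autograd Variable, but a plain tensor shows the same behavior.

import torch

# illustrative values; in the script they come from opt.batchSize and nz
batch_size, nz = 64, 100

# suppose the dataloader's final batch held only 40 samples and the
# discriminator step resized noise down to match that real batch
noise = torch.FloatTensor(40, nz, 1, 1).normal_(0, 1)
print(noise.size())  # torch.Size([40, 100, 1, 1])

# the generator step does not depend on real data, so restore the full
# batch shape in place before drawing fresh noise, as this commit does
noise.resize_(batch_size, nz, 1, 1).normal_(0, 1)
print(noise.size())  # torch.Size([64, 100, 1, 1])

Note that resize_ changes the shape in place, growing the underlying storage if needed, and may expose uninitialized memory; that is harmless here because the following normal_(0, 1) overwrites every element.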