Make sure the generator training step gets a full batch of noise even if the data loader just ran out.
This commit is contained in:
parent
040d553d2a
commit
2a163090e8
3
main.py
3
main.py
|
@@ -203,6 +203,9 @@ for epoch in range(opt.niter):
         for p in netD.parameters():
             p.requires_grad = False  # to avoid computation
         netG.zero_grad()
+        # in case our last batch was the tail batch of the dataloader,
+        # make sure we feed a full batch of noise
+        noise.data.resize_(opt.batchSize, nz, 1, 1)
         noise.data.normal_(0, 1)
         fake = netG(noise)
         errG = netD(fake)
Loading…
Reference in New Issue