Fix Capacitron training (#2086)

pull/2135/head
Victor Shepardson 2022-11-01 12:52:06 +01:00 committed by GitHub
parent 5ccef6e665
commit 5307a2229b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 4 additions and 4 deletions

View File

@@ -344,7 +344,7 @@ class BaseTTS(BaseTrainerModel):
loader = DataLoader(
dataset,
batch_size=config.eval_batch_size if is_eval else config.batch_size,
shuffle=False, # shuffle is done in the dataset.
shuffle=True, # if there is no other sampler
collate_fn=dataset.collate_fn,
drop_last=False, # setting this False might cause issues in AMP training.
sampler=sampler,

View File

@@ -38,9 +38,9 @@ class CapacitronOptimizer:
self.param_groups = self.primary_optimizer.param_groups
self.primary_optimizer.step()
def zero_grad(self):
    """Clear the accumulated gradients on both managed optimizers."""
    for wrapped in (self.primary_optimizer, self.secondary_optimizer):
        wrapped.zero_grad()
def zero_grad(self, set_to_none=False):
    """Reset gradients on both managed optimizers.

    ``set_to_none`` is forwarded unchanged, letting each wrapped optimizer
    either zero its gradient tensors in place or release them entirely.
    """
    for wrapped in (self.primary_optimizer, self.secondary_optimizer):
        wrapped.zero_grad(set_to_none)
def load_state_dict(self, state_dict):
self.primary_optimizer.load_state_dict(state_dict[0])