optional lr schedule

pull/10/head
Eren Golge 2018-11-03 19:47:28 +01:00
parent 0b6a9995fc
commit bb04a1c6e0
2 changed files with 10 additions and 4 deletions

@@ -27,11 +27,15 @@
 "embedding_size": 256,
 "text_cleaner": "english_cleaners",
 "epochs": 1000,
-"lr": 0.0015,
+// "lr": 0.0015,
+"lr": 0.0001,
+"lr_decay": false,
+"warmup_steps": 4000,
-"batch_size":32,
+"batch_size": 32,
 "eval_batch_size":32,
-"r": 1,
+"r": 5,
 "wd": 0.000001,
 "checkpoint": true,
 "save_step": 5000,

@@ -58,7 +58,8 @@ def train(model, criterion, criterion_st, data_loader, optimizer, optimizer_st,
 epoch * len(data_loader) + 1
 # setup lr
-scheduler.step()
+if c.lr_decay:
+    scheduler.step()
 optimizer.zero_grad()
 optimizer_st.zero_grad()
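
The hunk above makes the per-iteration scheduler.step() conditional on the new c.lr_decay flag. Below is a stripped-down, self-contained sketch of the loop this sits in, with toy tensors and linear layers standing in for the real model, losses, and the second optimizer (optimizer_st, presumably for the stop-token network); only the `if c.lr_decay:` gate is taken from the diff.

```python
import torch

class Config:                     # minimal stand-in for the loaded config `c`
    lr_decay = False
    lr = 0.0001

c = Config()
model = torch.nn.Linear(4, 1)     # stand-in for the acoustic model
stopnet = torch.nn.Linear(4, 1)   # stand-in for the stop-token network
optimizer = torch.optim.Adam(model.parameters(), lr=c.lr)
optimizer_st = torch.optim.Adam(stopnet.parameters(), lr=c.lr)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1000)
data_loader = [(torch.randn(8, 4), torch.randn(8, 1)) for _ in range(3)]

for inputs, targets in data_loader:
    if c.lr_decay:                # the gate added by this commit
        scheduler.step()
    optimizer.zero_grad()
    optimizer_st.zero_grad()
    loss = torch.nn.functional.mse_loss(model(inputs), targets)
    loss_st = torch.nn.functional.mse_loss(stopnet(inputs), targets)
    (loss + loss_st).backward()
    optimizer.step()
    optimizer_st.step()
```
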
@@ -92,6 +93,7 @@ def train(model, criterion, criterion_st, data_loader, optimizer, optimizer_st,
 # backpass and check the grad norm for spec losses
 loss.backward(retain_graph=True)
+# custom weight decay
 for group in optimizer.param_groups:
     for param in group['params']:
         param.data = param.data.add(-c.wd * group['lr'], param.data)
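
The loop above applies weight decay by hand after the backward pass: every parameter is shrunk by wd * lr * w, i.e. a decoupled (AdamW-style) decay term rather than an L2 penalty added to the loss. The two-argument tensor.add(scalar, tensor) form it uses is deprecated in recent PyTorch releases; a functionally equivalent sketch using torch.no_grad() and the keyword alpha form instead of .data (the module and values are only placeholders):

```python
import torch

model = torch.nn.Linear(4, 4)                      # stand-in module
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
wd = 0.000001                                      # "wd" from the config

# ... forward pass, loss.backward(), optimizer.step() would go here ...

# decoupled weight decay: w <- w - lr * wd * w, applied directly to the
# weights instead of being folded into the gradient
with torch.no_grad():
    for group in optimizer.param_groups:
        for param in group["params"]:
            param.add_(param, alpha=-wd * group["lr"])
```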