Mirror of https://github.com/mozilla/TTS.git
bug fix
Parent: 395da52dec
Commit: 8ef8ddb915
train.py
@@ -144,7 +144,7 @@ def train(model, criterion, data_loader, optimizer, epoch):
         progbar_display['total_loss'] = loss.item()
         progbar_display['linear_loss'] = linear_loss.item()
         progbar_display['mel_loss'] = mel_loss.item()
-        progbar_display['grad_norm'] = grad_norm
+        progbar_display['grad_norm'] = grad_norm.item()
 
         # update
         progbar.update(num_iter+1, values=list(progbar_display.items()))
@@ -113,7 +113,7 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
 def check_update(model, grad_clip, grad_top):
     r'''Check model gradient against unexpected jumps and failures'''
     skip_flag = False
-    grad_norm = torch.nn.utils.clip_grad_norm(model.parameters(), grad_clip)
+    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
     if np.isinf(grad_norm):
         print(" | > Gradient is INF !!")
         skip_flag = True
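
Both hunks touch the same PyTorch API point: torch.nn.utils.clip_grad_norm is deprecated in favour of the in-place clip_grad_norm_, and the total norm it returns is a 0-dim tensor in recent PyTorch releases, so .item() is needed before the value is stored in the progress-bar display dict. A minimal, self-contained sketch of that pattern (not taken from the repository; the Linear model and max_norm value are placeholders):

import torch
import torch.nn as nn

model = nn.Linear(4, 2)                      # placeholder model
loss = model(torch.randn(8, 4)).sum()        # dummy forward pass
loss.backward()

# In-place clipping; the underscore variant replaces the deprecated
# torch.nn.utils.clip_grad_norm.
grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)

# .item() turns the returned 0-dim tensor into a plain Python float,
# which is what a logging/progress-bar dict expects.
print("grad_norm:", grad_norm.item())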