Update example files so that tr_loss is not affected by args.gradient_accumulation_steps

This commit is contained in:
Mathieu Prouveur 2019-04-24 14:07:00 +02:00
parent c36cca075a
commit ed8fad7390
2 changed files with 2 additions and 2 deletions


@@ -845,7 +845,7 @@ def main():
else:
loss.backward()
-        tr_loss += loss.item()
+        tr_loss += loss.item() * args.gradient_accumulation_steps
nb_tr_examples += input_ids.size(0)
nb_tr_steps += 1
if (step + 1) % args.gradient_accumulation_steps == 0:


@@ -452,7 +452,7 @@ def main():
loss = loss * args.loss_scale
if args.gradient_accumulation_steps > 1:
loss = loss / args.gradient_accumulation_steps
-        tr_loss += loss.item()
+        tr_loss += loss.item() * args.gradient_accumulation_steps
nb_tr_examples += input_ids.size(0)
nb_tr_steps += 1
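
For context, here is a minimal sketch of the bookkeeping pattern both hunks fix (placeholder model, data, and hyperparameters; not the actual example scripts). Because the loss is divided by gradient_accumulation_steps before backward(), loss.item() is scaled down by the same factor, so the running tr_loss has to be multiplied back to reflect the true per-batch loss:

import torch
import torch.nn as nn

model = nn.Linear(10, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
loss_fn = nn.MSELoss()
gradient_accumulation_steps = 4

# Fake data: 8 mini-batches of 16 examples each (illustrative only).
dataloader = [(torch.randn(16, 10), torch.randn(16, 1)) for _ in range(8)]

tr_loss, nb_tr_steps = 0.0, 0
for step, (inputs, targets) in enumerate(dataloader):
    loss = loss_fn(model(inputs), targets)
    if gradient_accumulation_steps > 1:
        # Scale down so the gradients accumulated over several mini-batches
        # sum to the gradient of one full effective batch.
        loss = loss / gradient_accumulation_steps
    loss.backward()
    # Undo the scaling when logging, so tr_loss is unaffected by
    # gradient_accumulation_steps (the point of this commit).
    tr_loss += loss.item() * gradient_accumulation_steps
    nb_tr_steps += 1
    if (step + 1) % gradient_accumulation_steps == 0:
        optimizer.step()
        optimizer.zero_grad()

print(f"mean training loss: {tr_loss / nb_tr_steps:.4f}")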