Fix bug in examples: avoid double-wrapping the model in DataParallel during eval

Andrey Kulagin 2020-04-17 17:33:24 +03:00 committed by Julien Chaumond
parent 7f23af1684
commit b1ff0b2ae7
6 changed files with 6 additions and 6 deletions

@@ -255,7 +255,7 @@ def evaluate(args, model, tokenizer, prefix=""):
     eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
     # multi-gpu eval
-    if args.n_gpu > 1:
+    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
         model = torch.nn.DataParallel(model)
     # Eval!
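
For context, a minimal runnable sketch of the failure mode this guard prevents, identical in all six example scripts. TinyModel and the driver lines below are hypothetical stand-ins, not code from the repository; the point is only that the model typically reaches evaluate() already wrapped (the training loop wraps it once when args.n_gpu > 1), so an unconditional second wrap nests DataParallel inside DataParallel and buries the bare model one attribute level deeper than callers expect:

import torch

class TinyModel(torch.nn.Module):
    # Hypothetical stand-in for the Transformers model used in the examples.
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(4, 2)

    def forward(self, x):
        return self.linear(x)

model = TinyModel()

# First wrap: done once (e.g. by the training loop) for multi-GPU runs.
model = torch.nn.DataParallel(model)

# Old behavior: evaluate() wrapped unconditionally, nesting the wrappers,
# so .module is another DataParallel rather than the bare model.
nested = torch.nn.DataParallel(model)
assert isinstance(nested.module, torch.nn.DataParallel)

# Fixed behavior: wrap only when not already wrapped, as in this commit.
if not isinstance(model, torch.nn.DataParallel):
    model = torch.nn.DataParallel(model)
assert isinstance(model.module, TinyModel)  # exactly one wrapper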

@@ -278,7 +278,7 @@ def evaluate(args, model, tokenizer, criterion, prefix=""):
     )
     # multi-gpu eval
-    if args.n_gpu > 1:
+    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
         model = torch.nn.DataParallel(model)
     # Eval!

@@ -253,7 +253,7 @@ def evaluate(args, model, tokenizer, labels, pad_token_label_id, mode, prefix=""
     eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
     # multi-gpu evaluate
-    if args.n_gpu > 1:
+    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
         model = torch.nn.DataParallel(model)
     # Eval!

@@ -427,7 +427,7 @@ def evaluate(args, model: PreTrainedModel, tokenizer: PreTrainedTokenizer, prefi
     )
     # multi-gpu evaluate
-    if args.n_gpu > 1:
+    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
         model = torch.nn.DataParallel(model)
     # Eval!

@@ -256,7 +256,7 @@ def evaluate(args, model, tokenizer, prefix="", test=False):
     eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
     # multi-gpu evaluate
-    if args.n_gpu > 1:
+    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
         model = torch.nn.DataParallel(model)
     # Eval!

@@ -266,7 +266,7 @@ def evaluate(args, model, tokenizer, prefix=""):
     eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
     # multi-gpu eval
-    if args.n_gpu > 1:
+    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
         model = torch.nn.DataParallel(model)
     # Eval!