fix bug with attention_mask as optional input argument

patrickvonplaten 2020-03-08 22:27:00 +01:00 committed by Patrick von Platen
parent 629aac92ec
commit a5751f7578
1 changed file with 1 addition and 1 deletion

@@ -313,7 +313,7 @@ class BartHeadTests(unittest.TestCase):
         config, input_ids, batch_size = self._get_config_and_data(output_past=True)
         attention_mask = input_ids.ne(1)
         lm_model = BartForConditionalGeneration(config).eval().to(torch_device).half()
-        lm_model.generate(input_ids, attention_mask)
+        lm_model.generate(input_ids, attention_mask=attention_mask)

     def test_prepare_bart_decoder_inputs(self):
         config, *_ = self._get_config_and_data(output_past=False)
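
For context, the one-line change matters because generate() accepted other positional parameters ahead of attention_mask, so the old positional call bound the mask tensor to the wrong parameter instead of the attention mask. The following is a minimal sketch of that pitfall, not the real GenerationMixin.generate(): the signature, the max_length ordering, and the type guard are all illustrative assumptions.

import torch

# Hypothetical stand-in for generate(); assumes, as in the transformers API
# around this commit, that attention_mask is NOT the second positional
# parameter.
def generate(input_ids, max_length=None, attention_mask=None):
    # Guard that surfaces the bug: a tensor passed positionally lands in
    # max_length instead of attention_mask.
    if max_length is not None and not isinstance(max_length, int):
        raise TypeError(f"max_length must be an int, got {type(max_length).__name__}")
    return input_ids  # decoding loop elided in this sketch

input_ids = torch.tensor([[0, 4, 5, 2]])
attention_mask = input_ids.ne(1)  # mask out pad token id 1, as in the test above

# Old call from the test: the mask silently binds to max_length.
# generate(input_ids, attention_mask)  # -> TypeError in this sketch

# Fixed call from this commit: bind the mask explicitly by keyword.
generate(input_ids, attention_mask=attention_mask)

Passing the mask by keyword, as the fixed test does, also keeps the call robust if positional parameters are later reordered.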