diff --git a/src/transformers/models/longt5/modeling_longt5.py b/src/transformers/models/longt5/modeling_longt5.py
index abd1cb7786..3a8fb9f66a 100644
--- a/src/transformers/models/longt5/modeling_longt5.py
+++ b/src/transformers/models/longt5/modeling_longt5.py
@@ -1966,13 +1966,12 @@ class LongT5ForConditionalGeneration(LongT5PreTrainedModel):
         ... )

         >>> # Let's try a very long input.
-        >>> input_ids = tokenizer(
-        ...     "summarize: " + 100 * "studies have shown that owning a dog is good for you ", return_tensors="pt"
-        ... ).input_ids  # Batch size 1
+        >>> inputs = tokenizer(100 * "studies have shown that owning a dog is good for you ", return_tensors="pt")
+        >>> input_ids = inputs.input_ids

         >>> outputs = model.generate(input_ids)
         >>> print(tokenizer.decode(outputs[0], skip_special_tokens=True))
-        abstractthe aim of this article is to summarize the studies have shown that owning a dog
+        abstractthe aim of this article is to provide an overview of the literature on the role of dog
         ```"""
         use_cache = use_cache if use_cache is not None else self.config.use_cache
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
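
For reference, a minimal standalone sketch of the updated doctest as it would run outside the docstring. The checkpoint name is an assumption (it sits in context lines not shown in this hunk); substitute whichever LongT5 summarization checkpoint the surrounding docstring actually loads.

```python
from transformers import AutoTokenizer, LongT5ForConditionalGeneration

# Assumed checkpoint; the hunk above only shows the tail of the from_pretrained(...) call.
checkpoint = "Stancld/longt5-tglobal-large-16384-pubmed-3k_steps"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = LongT5ForConditionalGeneration.from_pretrained(checkpoint)

# Build a very long input by repeating a short sentence, as in the doctest.
inputs = tokenizer(100 * "studies have shown that owning a dog is good for you ", return_tensors="pt")
outputs = model.generate(inputs.input_ids)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```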