From 4a00613c2429f51fa50dfbc118576399fc00ab98 Mon Sep 17 00:00:00 2001
From: Lysandre Debut
Date: Thu, 8 Oct 2020 14:36:00 +0200
Subject: [PATCH] Fix RobertaForCausalLM docs (#7642)

* Fix RobertaForCausalLM docs

* Apply review suggestion

Co-authored-by: sgugger
Co-authored-by: sgugger
---
 src/transformers/modeling_roberta.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/transformers/modeling_roberta.py b/src/transformers/modeling_roberta.py
index 3e4ead7c3e..6e258ba7d4 100644
--- a/src/transformers/modeling_roberta.py
+++ b/src/transformers/modeling_roberta.py
@@ -758,13 +758,13 @@ class RobertaForCausalLM(RobertaPreTrainedModel):

         Example::

-            >>> from transformers import RobertaTokenizer, RobertaLMHeadModel, RobertaConfig
+            >>> from transformers import RobertaTokenizer, RobertaForCausalLM, RobertaConfig
             >>> import torch

             >>> tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
-            >>> config = RobertaConfig.from_pretrained("roberta-base")
+            >>> config = RobertaConfig.from_pretrained("roberta-base", return_dict=True)
             >>> config.is_decoder = True
-            >>> model = RobertaLMHeadModel.from_pretrained('roberta-base', config=config, return_dict=True)
+            >>> model = RobertaForCausalLM.from_pretrained('roberta-base', config=config)

             >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
             >>> outputs = model(**inputs)
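
For reference, here is the corrected docstring example assembled as a standalone script. This is a minimal sketch assuming a transformers release from this era (~3.x), where return_dict=True was still opt-in; the final labels/loss lines are an illustrative addition, not part of the patch:

# Corrected usage from the patch: RobertaForCausalLM (not RobertaLMHeadModel),
# with return_dict=True set on the config rather than in from_pretrained().
import torch
from transformers import RobertaTokenizer, RobertaForCausalLM, RobertaConfig

tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
config = RobertaConfig.from_pretrained("roberta-base", return_dict=True)
config.is_decoder = True  # required so the model runs as a causal (left-to-right) LM
model = RobertaForCausalLM.from_pretrained("roberta-base", config=config)

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
outputs = model(**inputs)
prediction_logits = outputs.logits  # shape: (batch, seq_len, vocab_size)

# Illustrative addition (not in the patch): passing labels yields a causal LM loss.
outputs = model(**inputs, labels=inputs["input_ids"])
loss = outputs.loss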