Fix RobertaForCausalLM docs (#7642)

* Fix RobertaForCausalLM docs

* Apply review suggestion

Co-authored-by: sgugger <sylvain.gugger@gmail.com>

Co-authored-by: sgugger <sylvain.gugger@gmail.com>
This commit is contained in:
Lysandre Debut 2020-10-08 14:36:00 +02:00 committed by GitHub
parent 55cb2ee62e
commit 4a00613c24
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 3 additions and 3 deletions

View File

@@ -758,13 +758,13 @@ class RobertaForCausalLM(RobertaPreTrainedModel):
Example::
-            >>> from transformers import RobertaTokenizer, RobertaLMHeadModel, RobertaConfig
+            >>> from transformers import RobertaTokenizer, RobertaForCausalLM, RobertaConfig
>>> import torch
>>> tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
-            >>> config = RobertaConfig.from_pretrained("roberta-base")
+            >>> config = RobertaConfig.from_pretrained("roberta-base", return_dict=True)
>>> config.is_decoder = True
-            >>> model = RobertaLMHeadModel.from_pretrained('roberta-base', config=config, return_dict=True)
+            >>> model = RobertaForCausalLM.from_pretrained('roberta-base', config=config)
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)