fix typo in modeling_encoder_decoder.py (#9297)

* Update modeling_encoder_decoder.py

Fixed typo.

* typo

Co-authored-by: Suraj Patil <surajp815@gmail.com>
Daniele Sartiano 2020-12-24 14:38:08 +01:00 committed by GitHub
parent f3a3b91d6f
commit 71963a6633
1 changed file with 1 addition and 1 deletion

@@ -30,7 +30,7 @@ logger = logging.get_logger(__name__)
 _CONFIG_FOR_DOC = "EncoderDecoderConfig"
 ENCODER_DECODER_START_DOCSTRING = r"""
-    This class can be used to initialize a sequence-tsequencece model with any pretrained autoencoding model as the
+    This class can be used to initialize a sequence-to-sequence model with any pretrained autoencoding model as the
     encoder and any pretrained autoregressive model as the decoder. The encoder is loaded via
     :meth:`~transformers.AutoModel.from_pretrained` function and the decoder is loaded via
     :meth:`~transformers.AutoModelForCausalLM.from_pretrained` function. Cross-attention layers are automatically added
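
For context (not part of this commit), a minimal sketch of the pattern the corrected docstring describes, pairing a pretrained autoencoding encoder with a pretrained autoregressive decoder. The checkpoint names below are illustrative choices, and the snippet assumes a transformers release where EncoderDecoderModel is available:

from transformers import EncoderDecoderModel

# Illustrative checkpoints: any autoencoding model can serve as the encoder
# and any autoregressive model as the decoder. Internally the encoder is
# loaded via AutoModel.from_pretrained and the decoder via
# AutoModelForCausalLM.from_pretrained, with cross-attention layers added
# to the decoder.
model = EncoderDecoderModel.from_encoder_decoder_pretrained(
    "bert-base-uncased",  # encoder
    "bert-base-uncased",  # decoder
)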