diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py
index 82434100c4..62c6f9950d 100644
--- a/src/transformers/modeling_utils.py
+++ b/src/transformers/modeling_utils.py
@@ -1281,7 +1281,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin):
         if self.config.is_encoder_decoder:
            # do not return first token
            return decoded[:, 1:]
-# return decoded
+        return decoded
 
    # force one of token_ids to be generated by setting prob of all other tokens to 0.
    def _force_token_ids_generation(self, scores, token_ids):
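For context, this one-line hunk restores the final return of the generation method: with `return decoded` commented out, the non-encoder-decoder branch fell through and the caller received None. Below is a minimal stand-alone sketch of that return logic; `finalize_generation` is a hypothetical helper used only for illustration, not the transformers API, and assumes only torch.

    import torch

    def finalize_generation(decoded: torch.Tensor, is_encoder_decoder: bool) -> torch.Tensor:
        # Hypothetical helper mirroring the tail of the generate() logic touched above.
        if is_encoder_decoder:
            # Encoder-decoder models: drop the decoder start token that seeded decoding.
            return decoded[:, 1:]
        # Decoder-only models: return the full sequence (the line the patch restores).
        return decoded

    decoded = torch.tensor([[0, 5, 6, 7]])
    print(finalize_generation(decoded, is_encoder_decoder=True))   # tensor([[5, 6, 7]])
    print(finalize_generation(decoded, is_encoder_decoder=False))  # tensor([[0, 5, 6, 7]])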