[`Nllb-Moe`] Fix nllb moe accelerate issue (#23758)

fix nllb moe accelerate issue
This commit is contained in:
Younes Belkada 2023-05-25 22:37:33 +02:00 committed by GitHub
parent d685e330b5
commit f67dac97bd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 1 addition and 1 deletion

View File

@ -856,7 +856,7 @@ class NllbMoePreTrainedModel(PreTrainedModel):
config_class = NllbMoeConfig
base_model_prefix = "model"
supports_gradient_checkpointing = True
_no_split_modules = ["NllbMoeAttention"]
_no_split_modules = ["NllbMoeEncoderLayer", "NllbMoeDecoderLayer"]
def _init_weights(self, module):
"""Initialize the weights"""