`auto_find_batch_size` isn't yet supported with DeepSpeed/FSDP. Raise error accordingly. (#29058)

Update trainer.py
Sourab Mangrulkar 2024-02-16 18:11:09 +05:30 committed by GitHub
parent b262808656
commit 4c18ddb5cf
1 changed file with 5 additions and 0 deletions


@@ -4136,6 +4136,11 @@ class Trainer:
             wrapper = "DeepSpeed" if self.is_deepspeed_enabled else "FSDP"
             raise ValueError(f"{wrapper} can't be used with `save_only_model` along with `load_best_model_at_end`.")
 
+        # `auto_find_batch_size` isn't yet supported with DeepSpeed/FSDP
+        if (self.is_deepspeed_enabled or self.is_fsdp_enabled) and self.args.auto_find_batch_size:
+            wrapper = "DeepSpeed" if self.is_deepspeed_enabled else "FSDP"
+            raise NotImplementedError(f"`{wrapper}` doesn't support `auto_find_batch_size`.")
+
     def propagate_args_to_deepspeed(self, auto_find_batch_size=False):
         """
         Sets values in the deepspeed plugin based on the Trainer args
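
For context, a minimal sketch (not part of the commit) of how the new guard surfaces to users; "out" and "ds_config.json" are placeholder paths, and the config path must point at a real DeepSpeed config file:

    from transformers import TrainingArguments

    args = TrainingArguments(
        output_dir="out",                # placeholder output directory
        auto_find_batch_size=True,       # batch-size search that retries with a smaller batch on OOM
        deepspeed="ds_config.json",      # placeholder path to an existing DeepSpeed config
    )
    # With DeepSpeed (or FSDP) enabled alongside `auto_find_batch_size`, the Trainer
    # now fails fast with:
    # NotImplementedError: `DeepSpeed` doesn't support `auto_find_batch_size`.

Previously this combination was not rejected up front; the explicit NotImplementedError makes the limitation visible before training starts.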