set the default cache_enabled to True, aligned with the default value in pytorch cpu/cuda amp autocast (#20289)
Signed-off-by: Wang, Yi A <yi.a.wang@intel.com>
This commit is contained in:
parent
07b8f249cd
commit
8b8b23a8cd
|
@ -2476,7 +2476,7 @@ class Trainer:
|
|||
"""
|
||||
return self.ctx_manager_torchdynamo
|
||||
|
||||
def autocast_smart_context_manager(self, cache_enabled: Optional[bool] = None):
|
||||
def autocast_smart_context_manager(self, cache_enabled: Optional[bool] = True):
|
||||
"""
|
||||
A helper wrapper that creates an appropriate context manager for `autocast` while feeding it the desired
|
||||
arguments, depending on the situation.
|
||||
|
|
Loading…
Reference in New Issue