Prevent `WandbCallback` crash with third-party models (#30477)

* Use the EAFP principle to prevent crashes with third-party models

* Remove leftover debugging code

* Add info-level logger message
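
The bullets above describe a switch from LBYL ("look before you leap") type checks to EAFP ("easier to ask forgiveness than permission"): instead of testing the model's type before reading `model.num_parameters()`, the callback now attempts the call and downgrades a failure to an info-level log. Below is a minimal, self-contained sketch of that pattern; `HubModel`, `ThirdPartyModel`, and the `log_params_*` helpers are hypothetical stand-ins, not actual transformers classes.

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class HubModel:
    """Hypothetical stand-in for a base class matched by the old isinstance() guard."""


class ThirdPartyModel(HubModel):
    """Passes the type check, but never defines num_parameters()."""


def log_params_lbyl(config, model):
    # LBYL: the isinstance() check passes for ThirdPartyModel, yet the
    # attribute lookup below still raises AttributeError and crashes the caller.
    if isinstance(model, HubModel):
        config["model/num_parameters"] = model.num_parameters()


def log_params_eafp(config, model):
    # EAFP: attempt the call and downgrade AttributeError to an info log.
    try:
        config["model/num_parameters"] = model.num_parameters()
    except AttributeError:
        logger.info("Could not log the number of model parameters in Weights & Biases.")


config = {}
log_params_eafp(config, ThirdPartyModel())    # logs an info message, no crash
# log_params_lbyl(config, ThirdPartyModel())  # would raise AttributeError
```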
Tom Aarsen 2024-04-25 12:49:06 +02:00 committed by GitHub
parent aca4a1037f
commit ce5ae5a434
1 changed file with 3 additions and 8 deletions

@@ -786,15 +786,10 @@ class WandbCallback(TrainerCallback):
 
             self._wandb.run._label(code="transformers_trainer")
             # add number of model parameters to wandb config
-            if any(
-                (
-                    isinstance(model, PreTrainedModel),
-                    isinstance(model, PushToHubMixin),
-                    (is_tf_available() and isinstance(model, TFPreTrainedModel)),
-                    (is_torch_available() and isinstance(model, torch.nn.Module)),
-                )
-            ):
+            try:
                 self._wandb.config["model/num_parameters"] = model.num_parameters()
+            except AttributeError:
+                logger.info("Could not log the number of model parameters in Weights & Biases.")
 
             # log the initial model and architecture to an artifact
             with tempfile.TemporaryDirectory() as temp_dir:
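
Design note on the change: the removed `if any(...)` guard could pass for a third-party class (for example, one inheriting `PushToHubMixin`) that never implements `num_parameters()`, so the call still raised `AttributeError` and crashed the callback's setup; conversely, a model that does implement `num_parameters()` but matches none of the four checks was silently skipped. The EAFP form covers both cases: any model exposing the method gets logged, and everything else produces only the info-level message.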