Remove logging of GPU count etc. (#12569)
Successfully logging this requires PyTorch. For the purposes of this script we are not using PyTorch.
This commit is contained in:
parent
d7e156bd1a
commit
122d7dc34f
|
@@ -430,10 +430,6 @@ if __name__ == "__main__":
    # Log on each process the small summary:
    logger = logging.getLogger(__name__)
    logger.warning(
        f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}"
        + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}"
    )

    # Set the verbosity to info of the Transformers logger (on main process only):
    logger.info(f"Training/evaluation parameters {training_args}")
|
Loading…
Reference in New Issue