From 6bab83683bf46352b59ab01cf596804dfdf7d973 Mon Sep 17 00:00:00 2001
From: Stas Bekman
Date: Mon, 1 Feb 2021 00:08:12 -0800
Subject: [PATCH] fix logger format for non-main process (#9911)

---
 examples/seq2seq/finetune_trainer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/seq2seq/finetune_trainer.py b/examples/seq2seq/finetune_trainer.py
index 73123063d..89dd80395 100755
--- a/examples/seq2seq/finetune_trainer.py
+++ b/examples/seq2seq/finetune_trainer.py
@@ -175,11 +175,11 @@ def main():
         bool(training_args.parallel_mode == ParallelMode.DISTRIBUTED),
         training_args.fp16,
     )
+    transformers.utils.logging.enable_default_handler()
+    transformers.utils.logging.enable_explicit_format()
     # Set the verbosity to info of the Transformers logger (on main process only):
     if is_main_process(training_args.local_rank):
         transformers.utils.logging.set_verbosity_info()
-    transformers.utils.logging.enable_default_handler()
-    transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed