From 6da129cb3152d93c425aab08a92d68c99e09d252 Mon Sep 17 00:00:00 2001
From: Lysandre Debut
Date: Tue, 25 May 2021 10:06:19 +0200
Subject: [PATCH] Enable memory metrics in tests that need it (#11859)

---
 tests/test_trainer.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/test_trainer.py b/tests/test_trainer.py
index ea343027b..abc31f1d4 100644
--- a/tests/test_trainer.py
+++ b/tests/test_trainer.py
@@ -1102,7 +1102,7 @@ class TrainerIntegrationTest(TestCasePlus, TrainerIntegrationCommon):
     def test_mem_metrics(self):
 
         # with mem metrics enabled
-        trainer = get_regression_trainer()
+        trainer = get_regression_trainer(skip_memory_metrics=False)
         self.check_mem_metrics(trainer, self.assertIn)
 
         # with mem metrics disabled
@@ -1123,7 +1123,7 @@ class TrainerIntegrationTest(TestCasePlus, TrainerIntegrationCommon):
         b = torch.ones(1000, bs) - 0.001
 
         # 1. with mem metrics enabled
-        trainer = get_regression_trainer(a=a, b=b, eval_len=16)
+        trainer = get_regression_trainer(a=a, b=b, eval_len=16, skip_memory_metrics=False)
         metrics = trainer.evaluate()
         del trainer
         gc.collect()
@@ -1144,7 +1144,7 @@ class TrainerIntegrationTest(TestCasePlus, TrainerIntegrationCommon):
         self.assertLess(fp32_eval, 5_000)
 
         # 2. with mem metrics disabled
-        trainer = get_regression_trainer(a=a, b=b, eval_len=16, fp16_full_eval=True)
+        trainer = get_regression_trainer(a=a, b=b, eval_len=16, fp16_full_eval=True, skip_memory_metrics=False)
         metrics = trainer.evaluate()
         fp16_init = metrics["init_mem_gpu_alloc_delta"]
         fp16_eval = metrics["eval_mem_gpu_alloc_delta"]