From fa36eba77d3d2fa9b58eb091577777d13af977c7 Mon Sep 17 00:00:00 2001
From: Oguz Ulgen
Date: Tue, 13 Aug 2024 02:49:43 +0000
Subject: [PATCH] Turn off remote caching in unit tests unless explicitly on
 (#133258)

Summary: This PR turns off remote caching in unit tests unless the unit test
explicitly turns it on.

Test Plan: existing tests

Differential Revision: D61152154

Pull Request resolved: https://github.com/pytorch/pytorch/pull/133258
Approved by: https://github.com/masnesral
---
 torch/_inductor/compile_fx.py                | 4 ++++
 torch/_inductor/runtime/triton_heuristics.py | 2 ++
 torch/_utils_internal.py                     | 4 ++++
 3 files changed, 10 insertions(+)

diff --git a/torch/_inductor/compile_fx.py b/torch/_inductor/compile_fx.py
index 8a4fce29329..ef49546c38e 100644
--- a/torch/_inductor/compile_fx.py
+++ b/torch/_inductor/compile_fx.py
@@ -419,6 +419,10 @@ def should_use_remote_fx_graph_cache():
         return config.fx_graph_remote_cache
     if not config.is_fbcode():
         return False
+
+    if torch._utils_internal.is_fb_unit_test():
+        return False
+
     try:
         from torch._inductor.fb.remote_cache import REMOTE_CACHE_VERSION
     except ModuleNotFoundError:
diff --git a/torch/_inductor/runtime/triton_heuristics.py b/torch/_inductor/runtime/triton_heuristics.py
index 575e004b03a..41ad6fb2ec3 100644
--- a/torch/_inductor/runtime/triton_heuristics.py
+++ b/torch/_inductor/runtime/triton_heuristics.py
@@ -1026,6 +1026,8 @@ def should_use_remote_autotune_cache(inductor_meta):
         return inductor_meta.get("autotune_remote_cache")
     if not inductor_meta.get("is_fbcode"):
         return False
+    if torch._utils_internal.is_fb_unit_test():
+        return False
     if inductor_meta.get("is_hip"):
         return False

diff --git a/torch/_utils_internal.py b/torch/_utils_internal.py
index 1493124596e..16748e1a168 100644
--- a/torch/_utils_internal.py
+++ b/torch/_utils_internal.py
@@ -184,6 +184,10 @@ def justknobs_getval_int(name: str) -> int:
     return 0


+def is_fb_unit_test() -> bool:
+    return False
+
+
 @functools.lru_cache(None)
 def max_clock_rate():
     if not torch.version.hip: