[Profiler] Disable Dynamo-Sensitive Profiler Tests (#138762)

Summary: During compilation, a profiler context gets ignored, so we should temporarily turn off the tests that are failing due to dynamo. Once profiler integration with dynamo is introduced, we can reintroduce these tests.

Test Plan: Make sure CI is passing again

Differential Revision: D64867447

Pull Request resolved: https://github.com/pytorch/pytorch/pull/138762
Approved by: https://github.com/davidberard98
This commit is contained in:
Shivam Raikundalia 2024-10-25 00:25:49 +00:00 committed by PyTorch MergeBot
parent 1d98a526dd
commit 2f4af0f4e6
2 changed files with 3 additions and 0 deletions

View file

@ -121,6 +121,7 @@ class TestExecutionTrace(TestCase):
@unittest.skipIf(not kineto_available(), "Kineto is required")
@skipIfHpu
@skipIfTorchDynamo("profiler gets ignored if dynamo activated")
def test_execution_trace_with_kineto(self, device):
trace_called_num = 0

View file

@ -337,6 +337,7 @@ class TestProfiler(TestCase):
)
@serialTest()
@parametrize("work_in_main_thread", [True, False])
@skipIfTorchDynamo("profiler gets ignored if dynamo activated")
def test_source_multithreaded(self, name, thread_spec, work_in_main_thread):
"""Test various threading configurations.
@ -1452,6 +1453,7 @@ class TestProfiler(TestCase):
@patch.dict(os.environ, {"KINETO_USE_DAEMON": "1"})
@patch.dict(os.environ, {"KINETO_DAEMON_INIT_DELAY_S": "1"})
@skipIfTorchDynamo("profiler gets ignored if dynamo activated")
def test_kineto_profiler_with_environment_variable(self):
script = """
import torch