mirror of
https://github.com/saymrwulf/pytorch.git
synced 2026-05-14 20:57:59 +00:00
This allows infra/trainers to get detailed stats about communication efficiencies without knowing anything about what model or distributed training paradigms have been used. This is helpful because the infra/trainer package usually prefers to be as model/algorithm agnostic as possible; therefore, we cannot assume that infra/trainer can have access to all collectives used by the model authors. This commit adds an `OnCompletion` hook to `ProcessGroupNCCL` which will be fired on every work completion event. Pull Request resolved: https://github.com/pytorch/pytorch/pull/106988 Approved by: https://github.com/kumpera, https://github.com/H-Huang ghstack dependencies: #107140, #107141, #107160 |
| | | |
|---|---|---|
| .. | ||
| _composable | ||
| _shard | ||
| _spmd | ||
| _tensor | ||
| _tools | ||
| algorithms | ||
| bin | ||
| checkpoint | ||
| elastic | ||
| fsdp | ||
| launcher | ||
| nn/jit | ||
| optim | ||
| pipeline/sync | ||
| rpc | ||
| tensor/parallel | ||
| argparse_util_test.py | ||
| test_c10d_common.py | ||
| test_c10d_gloo.py | ||
| test_c10d_logger.py | ||
| test_c10d_nccl.py | ||
| test_c10d_object_collectives.py | ||
| test_c10d_pypg.py | ||
| test_c10d_spawn.py | ||
| test_c10d_spawn_gloo.py | ||
| test_c10d_spawn_nccl.py | ||
| test_c10d_spawn_ucc.py | ||
| test_c10d_ucc.py | ||
| test_collective_utils.py | ||
| test_data_parallel.py | ||
| test_distributed_spawn.py | ||
| test_dynamo_distributed.py | ||
| test_fake_pg.py | ||
| test_functional_api.py | ||
| test_inductor_collectives.py | ||
| test_launcher.py | ||
| test_multi_threaded_pg.py | ||
| test_nccl.py | ||
| test_pg_wrapper.py | ||
| test_store.py | ||