Remove extraneous logging (#68830)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/68830

No logical changes, removing a logging statement that was accidentally committed.

cc pietern mrshenli pritamdamania87 zhaojuanmao satgera rohan-varma gqchen aazzolini osalpekar jiayisuse SciPioneer H-Huang jjlilley mrzzd

Test Plan: Imported from OSS

Reviewed By: ejguan

Differential Revision: D32628711

Pulled By: H-Huang

fbshipit-source-id: 070190b92f97c8e38d8bb03124c13cb061fc9ec1
This commit is contained in:
Howard Huang 2021-11-24 07:14:01 -08:00 committed by Facebook GitHub Bot
parent 7d8a79b6f3
commit be7e159e71
3 changed files with 3 additions and 4 deletions

View file

@@ -71,9 +71,6 @@ int syncCallCount(
int totalCallCount = store.add(activeCallCountKey, activeCalls);
int totalProcessCount = store.add(processCountKey, 1);
-  VLOG(1) << processCountKey << " " << totalCallCount << " "
-          << totalProcessCount;
// The last worker will need to set the ready key
if (totalProcessCount == worldSize) {
store.set(readyKey, std::vector<uint8_t>());

View file

@@ -397,6 +397,7 @@ void TensorPipeAgent::startImpl() {
int lowestPriority = std::numeric_limits<int>::max();
std::string lowestPriorityTransport;
// Register transports
for (auto& key : TensorPipeTransportRegistry()->Keys()) {
int64_t priority = -1;
if (opts_.transports.has_value()) {
@@ -427,6 +428,7 @@ void TensorPipeAgent::startImpl() {
priority, std::move(key), std::move(reg->transport));
}
// Register channels
for (auto& key : TensorPipeChannelRegistry()->Keys()) {
int64_t priority = -1;
if (opts_.channels.has_value()) {

View file

@@ -163,7 +163,7 @@ if is_available():
# finishing handshaking. To avoid that issue, we make it global to
# keep it alive.
global rendezvous_iterator
-        rendezvous_iterator = torch.distributed.rendezvous(
+        rendezvous_iterator = dist.rendezvous(
rpc_backend_options.init_method, rank=rank, world_size=world_size
)
store, _, _ = next(rendezvous_iterator)