Exclude more files in torch/csrc/distributed when USE_DISTRIBUTED=0 (#28621)

Summary:
Changelog:
- Guard the inclusion of certain torch/csrc/distributed files in caffe2/CMakeLists.txt when USE_DISTRIBUTED=0
Pull Request resolved: https://github.com/pytorch/pytorch/pull/28621

Test Plan:
- Builds should be successful
- Tests should pass

Differential Revision: D18145330

Pulled By: ezyang

fbshipit-source-id: 7167a356b03ae783e6b0120f2ad3552db2b3ed86
This commit is contained in:
vishwakftw 2019-10-28 07:59:53 -07:00 committed by Facebook Github Bot
parent 4cf7277d62
commit aea94de067
3 changed files with 56 additions and 46 deletions

View file

@@ -481,37 +481,41 @@ if (NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
if (NOT INTERN_BUILD_MOBILE)
list(APPEND TORCH_SRCS
${TORCH_SRC_DIR}/csrc/api/src/jit.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_container.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_context.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/engine/dist_engine.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/recvrpc_backward.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/sendrpc_backward.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/autograd_metadata.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_req.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_req.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/rpc_with_autograd.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/utils.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/future_message.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/message.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/python_remote_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/rpc_agent.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/request_callback.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/rref_proto.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/script_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/script_remote_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/script_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/types.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/utils.cpp
${TORCH_SRC_DIR}/csrc/jit/export.cpp
${TORCH_SRC_DIR}/csrc/jit/import_legacy.cpp
${TORCH_SRC_DIR}/csrc/jit/netdef_converter.cpp
${TORCH_SRC_DIR}/csrc/jit/fuser/cpu/fused_kernel.cpp
${TORCH_SRC_DIR}/csrc/utils/byte_order.cpp
)
if (USE_DISTRIBUTED)
list(APPEND TORCH_SRCS
${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_container.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_context.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/engine/dist_engine.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/recvrpc_backward.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/sendrpc_backward.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/autograd_metadata.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_req.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_req.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/rpc_with_autograd.cpp
${TORCH_SRC_DIR}/csrc/distributed/autograd/utils.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/future_message.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/message.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/python_remote_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/rpc_agent.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/request_callback.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/rref_proto.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/script_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/script_remote_call.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/script_resp.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/types.cpp
${TORCH_SRC_DIR}/csrc/distributed/rpc/utils.cpp
)
endif()
endif()
if (USE_CUDA)

View file

@@ -1,27 +1,29 @@
set(DIST_AUTOGRAD_TEST_DIR "${TORCH_ROOT}/test/cpp/dist_autograd")
set(DIST_AUTOGRAD_TEST_SOURCES
${TORCH_ROOT}/test/cpp/common/main.cpp
${DIST_AUTOGRAD_TEST_DIR}/test_dist_autograd.cpp
)
if (USE_DISTRIBUTED)
set(DIST_AUTOGRAD_TEST_DIR "${TORCH_ROOT}/test/cpp/dist_autograd")
set(DIST_AUTOGRAD_TEST_SOURCES
${TORCH_ROOT}/test/cpp/common/main.cpp
${DIST_AUTOGRAD_TEST_DIR}/test_dist_autograd.cpp
)
add_executable(test_dist_autograd ${DIST_AUTOGRAD_TEST_SOURCES})
target_include_directories(test_dist_autograd PRIVATE ${ATen_CPU_INCLUDE})
target_link_libraries(test_dist_autograd PRIVATE torch gtest)
add_executable(test_dist_autograd ${DIST_AUTOGRAD_TEST_SOURCES})
target_include_directories(test_dist_autograd PRIVATE ${ATen_CPU_INCLUDE})
target_link_libraries(test_dist_autograd PRIVATE torch gtest)
if (USE_CUDA)
target_link_libraries(test_dist_autograd PRIVATE
${CUDA_LIBRARIES}
${CUDA_NVRTC_LIB}
${CUDA_CUDA_LIB}
${TORCH_CUDA_LIBRARIES})
if (USE_CUDA)
target_link_libraries(test_dist_autograd PRIVATE
${CUDA_LIBRARIES}
${CUDA_NVRTC_LIB}
${CUDA_CUDA_LIB}
${TORCH_CUDA_LIBRARIES})
target_compile_definitions(test_dist_autograd PRIVATE "USE_CUDA")
endif()
target_compile_definitions(test_dist_autograd PRIVATE "USE_CUDA")
endif()
if (INSTALL_TEST)
install(TARGETS test_dist_autograd DESTINATION bin)
# Install PDB files for MSVC builds
if (MSVC AND BUILD_SHARED_LIBS)
install(FILES $<TARGET_PDB_FILE:test_dist_autograd> DESTINATION bin OPTIONAL)
if (INSTALL_TEST)
install(TARGETS test_dist_autograd DESTINATION bin)
# Install PDB files for MSVC builds
if (MSVC AND BUILD_SHARED_LIBS)
install(FILES $<TARGET_PDB_FILE:test_dist_autograd> DESTINATION bin OPTIONAL)
endif()
endif()
endif()

View file

@@ -5,7 +5,9 @@
#include <torch/csrc/autograd/functions/tensor.h>
#include <torch/csrc/autograd/generated/python_functions.h>
#include <torch/csrc/autograd/python_cpp_function.h>
#ifdef USE_DISTRIBUTED
#include <torch/csrc/distributed/autograd/functions/sendrpc_backward.h>
#endif
#include <torch/csrc/jit/python_tracer.h>
#include <torch/csrc/utils/pybind.h>
#include <torch/csrc/utils/tuple_parser.h>
@@ -103,9 +105,11 @@ void THPAutograd_initFunctions()
static PyTypeObject CopyBackwardsClass;
addClass<CopyBackwards, NoCtor>(module, CopyBackwardsClass, "CopyBackwards");
#ifdef USE_DISTRIBUTED
static PyTypeObject SendRpcBackwardClass;
addClass<torch::distributed::autograd::SendRpcBackward, NoCtor>(
module, SendRpcBackwardClass, "SendRpcBackward");
#endif
static PyTypeObject CopySlicesClass;
addClass<CopySlices, NoCtor>(module, CopySlicesClass, "CopySlices");