Separate RTLD_GLOBAL from _load_global_deps() (#36682)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/36682

For fb internal builds we need to separate whether to use global deps library from loading with RTLD_GLOBAL.

Test Plan: CI -- this should be a no-op for existing builds

Reviewed By: ezyang

Differential Revision: D21051427

fbshipit-source-id: 83bb703d6ceb0265a4c58166749312a44172e78c
This commit is contained in:
Alexander Fix 2020-04-22 19:05:54 -07:00 committed by Facebook GitHub Bot
parent d0291df7d9
commit ca665c682c
2 changed files with 12 additions and 2 deletions

View file

@@ -19,7 +19,7 @@ if sys.version_info < (3,):
from ._utils import _import_dotted_name
from ._utils_internal import get_file_path, prepare_multiprocessing_environment, \
-    USE_RTLD_GLOBAL_WITH_LIBTORCH
+    USE_RTLD_GLOBAL_WITH_LIBTORCH, USE_GLOBAL_DEPS
from .version import __version__
from ._six import string_classes as _string_classes
@@ -130,8 +130,13 @@ else:
     # C++ symbols from libtorch clobbering C++ symbols from other
     # libraries, leading to mysterious segfaults.
     #
+    # If building in an environment where libtorch_global_deps isn't available
+    # like parts of fbsource, but where RTLD_GLOBAL causes segfaults, you will
+    # want USE_RTLD_GLOBAL_WITH_LIBTORCH = False and USE_GLOBAL_DEPS = False
+    #
     # See Note [Global dependencies]
-    _load_global_deps()
+    if USE_GLOBAL_DEPS:
+        _load_global_deps()
from torch._C import *
__all__ += [name for name in dir(_C)

View file

@@ -57,4 +57,9 @@ def get_source_lines_and_file(obj, error_msg=None):
 TEST_MASTER_ADDR = '127.0.0.1'
 TEST_MASTER_PORT = 29500
+# USE_GLOBAL_DEPS controls whether __init__.py tries to load
+# libtorch_global_deps, see Note [Global dependencies]
+USE_GLOBAL_DEPS = True
+# USE_RTLD_GLOBAL_WITH_LIBTORCH controls whether __init__.py tries to load
+# _C.so with RTLD_GLOBAL during the call to dlopen.
 USE_RTLD_GLOBAL_WITH_LIBTORCH = False