diff --git a/aten/src/ATen/cudnn/Handles.cpp b/aten/src/ATen/cudnn/Handles.cpp
index 04a81d5a603..36f0beec55c 100644
--- a/aten/src/ATen/cudnn/Handles.cpp
+++ b/aten/src/ATen/cudnn/Handles.cpp
@@ -19,7 +19,16 @@ struct Handle {
   }
   ~Handle() {
     if (handle) {
+// This is because of something dumb in the ordering of destruction:
+// sometimes at exit, the CUDA context (or something) has already been
+// destroyed by the time this destructor runs. It happens in the
+// fbcode setting. @colesbury and I decided to not destroy the handle
+// as a workaround.
+//   - @soumith
+#ifdef NO_CUDNN_DESTROY_HANDLE
+#else
       cudnnDestroy(handle);
+#endif
     }
   }
 };
diff --git a/setup.py b/setup.py
index 8387f4d1a66..a6c8e939458 100644
--- a/setup.py
+++ b/setup.py
@@ -379,8 +379,8 @@ class build_deps(PytorchCommand):
 
         # Use copies instead of symbolic links.
         # Windows has very poor support for them.
-        sym_files = ['tools/shared/cwrap_common.py']
-        orig_files = ['aten/src/ATen/common_with_cwrap.py']
+        sym_files = ['tools/shared/cwrap_common.py', 'tools/shared/_utils_internal.py']
+        orig_files = ['aten/src/ATen/common_with_cwrap.py', 'torch/_utils_internal.py']
         for sym_file, orig_file in zip(sym_files, orig_files):
             if os.path.exists(sym_file):
                 os.remove(sym_file)
diff --git a/test/common.py b/test/common.py
index 859ca5aa98f..c87704fc259 100644
--- a/test/common.py
+++ b/test/common.py
@@ -27,6 +27,7 @@ import errno
 
 import torch
 import torch.cuda
+from torch._utils_internal import get_writable_path
 from torch._six import string_classes
 import torch.backends.cudnn
 import torch.backends.mkl
@@ -505,7 +506,7 @@ def download_file(url, binary=True):
     from urllib import request, error
 
     filename = os.path.basename(urlsplit(url)[2])
-    data_dir = os.path.join(os.path.dirname(__file__), 'data')
+    data_dir = get_writable_path(os.path.join(os.path.dirname(__file__), 'data'))
     path = os.path.join(data_dir, filename)
 
     if os.path.exists(path):
diff --git a/test/test_distributed.py b/test/test_distributed.py
index 543202a8076..5f50165f0b8 100644
--- a/test/test_distributed.py
+++ b/test/test_distributed.py
@@ -17,11 +17,12 @@ import torch.nn.functional as F
 from torch.autograd import Variable
 from common import TestCase
 
+from torch._utils_internal import TEST_MASTER_ADDR as MASTER_ADDR
+
 BACKEND = os.environ['BACKEND']
 TEMP_DIR = os.environ['TEMP_DIR']
 INIT_METHOD = os.getenv('INIT_METHOD', 'env://')
 MASTER_PORT = '29500'
-MASTER_ADDR = '127.0.0.1'
 
 DEFAULT_TIMEOUT = 15
 CUSTOMIZED_TIMEOUT = {'test_DistributedDataParallel': 25}
diff --git a/test/test_torch.py b/test/test_torch.py
index 4890907403e..e3020f53790 100644
--- a/test/test_torch.py
+++ b/test/test_torch.py
@@ -13,6 +13,7 @@ import unittest
 import warnings
 import pickle
 import gzip
+from torch._utils_internal import get_file_path, get_file_path_2
 from torch.utils.dlpack import from_dlpack, to_dlpack
 from torch._utils import _rebuild_tensor
 from itertools import product, combinations
@@ -6656,7 +6657,10 @@ class TestTorch(TestCase):
             return module
 
         with filecontext_lambda() as checkpoint:
-            fname = os.path.join(os.path.dirname(__file__), 'data/network1.py')
+            try:
+                fname = get_file_path_2(os.path.dirname(__file__), 'data', 'network1.py')
+            except IOError:
+                fname = get_file_path_2(os.path.dirname(__file__), 'data', 'network1.pyc')
             module = import_module(tmpmodule_name, fname)
             torch.save(module.Net(), checkpoint)
 
@@ -6669,7 +6673,10 @@ class TestTorch(TestCase):
                 self.assertEquals(len(w), 0)
 
             # Replace the module with a different source
-            fname = os.path.join(os.path.dirname(__file__), 'data/network2.py')
+            try:
+                fname = get_file_path_2(os.path.dirname(__file__), 'data', 'network2.py')
+            except IOError:
+                fname = get_file_path_2(os.path.dirname(__file__), 'data', 'network2.pyc')
             module = import_module(tmpmodule_name, fname)
             checkpoint.seek(0)
             with warnings.catch_warnings(record=True) as w:
diff --git a/test/test_utils.py b/test/test_utils.py
index cde57e6f922..af936525627 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -530,6 +530,7 @@ class TestLuaReader(TestCase):
         return input, target.sub(1)
 
 
+@unittest.skipIf('SKIP_TEST_BOTTLENECK' in os.environ.keys(), 'SKIP_TEST_BOTTLENECK is set')
class TestBottleneck(TestCase):
     def _run(self, command):
         """Returns (return-code, stdout, stderr)"""
diff --git a/tools/nnwrap/__init__.py b/tools/nnwrap/__init__.py
index 2a574f72992..d6457a58c48 100644
--- a/tools/nnwrap/__init__.py
+++ b/tools/nnwrap/__init__.py
@@ -1,5 +1 @@
-from .generate_wrappers import generate_wrappers, wrap_function
-try:
-    from .generate_wrappers import import_module
-except ImportError:
-    pass
+from .generate_wrappers import generate_wrappers, wrap_function, import_module
diff --git a/tools/nnwrap/generate_wrappers.py b/tools/nnwrap/generate_wrappers.py
index 1edbca928c1..db4caf6bef6 100644
--- a/tools/nnwrap/generate_wrappers.py
+++ b/tools/nnwrap/generate_wrappers.py
@@ -3,17 +3,16 @@ import sys
 from string import Template, ascii_lowercase
 from ..cwrap import cwrap
 from ..cwrap.plugins import NNExtension, NullableArguments, AutoGPU
+from ..shared import import_module
 
-BASE_PATH = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
-WRAPPER_PATH = os.path.join(BASE_PATH, 'torch', 'csrc', 'nn')
-THNN_UTILS_PATH = os.path.join(BASE_PATH, 'torch', '_thnn', 'utils.py')
+from ..shared._utils_internal import get_file_path
 
+THNN_H_PATH = get_file_path('torch', 'lib', 'THNN.h')
+THCUNN_H_PATH = get_file_path('torch', 'lib', 'THCUNN.h')
-try:
-    from torch._thnn import utils as thnn_utils
-except ImportError:
-    from ..shared import import_module
-    thnn_utils = import_module('torch._thnn.utils', THNN_UTILS_PATH)
+THNN_UTILS_PATH = get_file_path('torch', '_thnn', 'utils.py')
+
+thnn_utils = import_module('torch._thnn.utils', THNN_UTILS_PATH)
 
 FUNCTION_TEMPLATE = Template("""\
 [[
@@ -105,7 +104,7 @@ def generate_wrappers(nn_root=None, install_dir=None, template_path=None):
 def wrap_nn(thnn_h_path, install_dir, template_path):
     wrapper = '#include
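
A note on the NO_CUDNN_DESTROY_HANDLE guard in the first hunk: the patch does not show where that macro gets defined, so a build that wants the "never destroy the handle" behavior has to define it at compile time (the empty #ifdef/#else branch is equivalent to a single #ifndef guard). A minimal, hypothetical setuptools sketch of how a build could opt in; the extension name and source list below are placeholders, not PyTorch's actual build wiring:

    # Hypothetical build snippet: opting into the leak-on-exit workaround
    # by defining NO_CUDNN_DESTROY_HANDLE for the C++ sources.
    from setuptools import setup, Extension

    ext = Extension(
        'example_cudnn_ext',                            # placeholder name
        sources=['aten/src/ATen/cudnn/Handles.cpp'],    # placeholder list
        define_macros=[('NO_CUDNN_DESTROY_HANDLE', None)],  # skip cudnnDestroy at exit
    )

    setup(name='example-cudnn-ext', ext_modules=[ext])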
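The new torch/_utils_internal.py that the rest of the patch imports from (and that setup.py now copies to tools/shared/_utils_internal.py) is not part of the hunks above. Here is a sketch of what an open-source version of it might look like, inferred from the call sites in this diff. Only the names, the IOError contract of get_file_path_2, and the 127.0.0.1 default come from the patch; the function bodies are assumptions:

    # torch/_utils_internal.py: hypothetical open-source sketch; an
    # internal (fbcode) build can ship a different implementation.
    import os
    import tempfile

    # Default master address used by test_distributed.py.
    TEST_MASTER_ADDR = '127.0.0.1'


    def get_file_path(*path_components):
        # Resolve a path against the repository root, assuming this file
        # lives at torch/_utils_internal.py (two levels below the root).
        root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        return os.path.join(root, *path_components)


    def get_file_path_2(*path_components):
        # Join the components and raise IOError when the result does not
        # exist, so callers can fall back (network1.py -> network1.pyc).
        path = os.path.join(*path_components)
        if not os.path.exists(path):
            raise IOError('{} does not exist'.format(path))
        return path


    def get_writable_path(path):
        # Prefer the requested directory; fall back to a fresh temp dir
        # when the source tree is not writable (e.g. packaged test runs).
        if os.access(path, os.W_OK):
            return path
        return tempfile.mkdtemp(suffix=os.path.basename(path))

With get_file_path_2 behaving this way, the try/except IOError blocks in test_torch.py read naturally: a packaged test run may ship only byte-compiled network1.pyc/network2.pyc files, so the lookup falls back when the .py source is absent.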
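The tools/nnwrap changes lean on the same module for header paths and make import_module an unconditional re-export instead of a guarded one. Its job, per the call site in generate_wrappers.py, is to load torch._thnn.utils from an explicit file path before torch itself is importable. A sketch of such a helper using only the Python 3 importlib API; the shared helper in this tree presumably also handled Python 2, which this sketch does not:

    import sys


    def import_module(name, path):
        # Load a module from an explicit source-file path and register it
        # in sys.modules under `name`, e.g.
        # import_module('torch._thnn.utils', THNN_UTILS_PATH).
        import importlib.util
        spec = importlib.util.spec_from_file_location(name, path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        sys.modules[name] = module
        return module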