Refactor hotpatch_vars and apply it to libtorch (#14976)
author: peter <peterghost86@gmail.com>
Mon, 17 Dec 2018 05:50:43 +0000 (21:50 -0800)
committer: Facebook Github Bot <facebook-github-bot@users.noreply.github.com>
Mon, 17 Dec 2018 05:53:31 +0000 (21:53 -0800)
Summary:
Fixes #14801.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/14976

Differential Revision: D13485381

Pulled By: soumith

fbshipit-source-id: 0af3c2e1b90988d56f6f85632328d1e4b788ffd2

setup.py
tools/build_libtorch.py
tools/setup_helpers/env.py

index 79ea3b7..4d327ba 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -145,29 +145,11 @@ import json
 import glob
 import importlib
 
-from tools.setup_helpers.env import check_env_flag, check_negative_env_flag
-
-
-def hotpatch_var(var, prefix='USE_'):
-    if check_env_flag('NO_' + var):
-        os.environ[prefix + var] = '0'
-    elif check_negative_env_flag('NO_' + var):
-        os.environ[prefix + var] = '1'
-    elif check_env_flag('WITH_' + var):
-        os.environ[prefix + var] = '1'
-    elif check_negative_env_flag('WITH_' + var):
-        os.environ[prefix + var] = '0'
-
-# Before we run the setup_helpers, let's look for NO_* and WITH_*
-# variables and hotpatch environment with the USE_* equivalent
-use_env_vars = ['CUDA', 'CUDNN', 'FBGEMM', 'MIOPEN', 'MKLDNN', 'NNPACK', 'DISTRIBUTED',
-                'OPENCV', 'TENSORRT', 'QNNPACK', 'FFMPEG', 'SYSTEM_NCCL',
-                'GLOO_IBVERBS']
-list(map(hotpatch_var, use_env_vars))
-
-# Also hotpatch a few with BUILD_* equivalent
-build_env_vars = ['BINARY', 'TEST', 'CAFFE2_OPS']
-[hotpatch_var(v, 'BUILD_') for v in build_env_vars]
+from tools.setup_helpers.env import (check_env_flag, check_negative_env_flag,
+                                     hotpatch_build_env_vars)
+
+
+hotpatch_build_env_vars()
 
 from tools.setup_helpers.cuda import USE_CUDA, CUDA_HOME, CUDA_VERSION
 from tools.setup_helpers.build import (BUILD_BINARY, BUILD_TEST,
index 6122bea..bc0baf7 100644 (file)
@@ -4,9 +4,13 @@ import shlex
 import subprocess
 import sys
 
+from setup_helpers.env import check_env_flag, hotpatch_build_env_vars
+
+
+hotpatch_build_env_vars()
+
 from setup_helpers.cuda import USE_CUDA
 from setup_helpers.dist_check import USE_DISTRIBUTED, USE_GLOO_IBVERBS, IS_LINUX
-from setup_helpers.env import check_env_flag
 
 if __name__ == '__main__':
     # Placeholder for future interface. For now just gives a nice -h.
index 0059125..8f88c42 100644 (file)
@@ -27,3 +27,27 @@ def gather_paths(env_vars):
 
 def lib_paths_from_base(base_path):
     return [os.path.join(base_path, s) for s in ['lib/x64', 'lib', 'lib64']]
+
+
+def hotpatch_var(var, prefix='USE_'):
+    if check_env_flag('NO_' + var):
+        os.environ[prefix + var] = '0'
+    elif check_negative_env_flag('NO_' + var):
+        os.environ[prefix + var] = '1'
+    elif check_env_flag('WITH_' + var):
+        os.environ[prefix + var] = '1'
+    elif check_negative_env_flag('WITH_' + var):
+        os.environ[prefix + var] = '0'
+
+
+def hotpatch_build_env_vars():
+    # Before we run the setup_helpers, let's look for NO_* and WITH_*
+    # variables and hotpatch environment with the USE_* equivalent
+    use_env_vars = ['CUDA', 'CUDNN', 'FBGEMM', 'MIOPEN', 'MKLDNN', 'NNPACK', 'DISTRIBUTED',
+                    'OPENCV', 'TENSORRT', 'QNNPACK', 'FFMPEG', 'SYSTEM_NCCL',
+                    'GLOO_IBVERBS']
+    list(map(hotpatch_var, use_env_vars))
+
+    # Also hotpatch a few with BUILD_* equivalent
+    build_env_vars = ['BINARY', 'TEST', 'CAFFE2_OPS']
+    [hotpatch_var(v, 'BUILD_') for v in build_env_vars]