moved try/except to contrib/tensorrt/__init__.py to guard the whole TRT import; raise original...
author    jjsjann123 <jiej@nvidia.com>
Wed, 7 Mar 2018 22:47:20 +0000 (14:47 -0800)
committer jjsjann123 <jiej@nvidia.com>
Wed, 7 Mar 2018 22:47:20 +0000 (14:47 -0800)
tensorflow/contrib/tensorrt/__init__.py
tensorflow/contrib/tensorrt/python/__init__.py

diff --git a/tensorflow/contrib/tensorrt/__init__.py b/tensorflow/contrib/tensorrt/__init__.py
index fd551d7..faedaf2 100644
--- a/tensorflow/contrib/tensorrt/__init__.py
+++ b/tensorflow/contrib/tensorrt/__init__.py
@@ -19,5 +19,15 @@ from __future__ import division
 from __future__ import print_function
 
 # pylint: disable=unused-import,wildcard-import
-from tensorflow.contrib.tensorrt.python import *
+try:
+  from tensorflow.contrib.tensorrt.python import *
+except Exception as e:
+  no_trt_message = (
+      '**** Failed to initialize TensorRT. This is either because the TensorRT'
+      ' installation path is not in LD_LIBRARY_PATH, or because you do not have it'
+      ' installed. If not installed, please go to'
+      ' https://developer.nvidia.com/tensorrt to download and install'
+      ' TensorRT ****')
+  print(no_trt_message)
+  raise e
 # pylint: enable=unused-import,wildcard-import
diff --git a/tensorflow/contrib/tensorrt/python/__init__.py b/tensorflow/contrib/tensorrt/python/__init__.py
index 658c0c7..0b2321b 100644
--- a/tensorflow/contrib/tensorrt/python/__init__.py
+++ b/tensorflow/contrib/tensorrt/python/__init__.py
@@ -19,16 +19,7 @@ from __future__ import division
 from __future__ import print_function
 
 # pylint: disable=unused-import,line-too-long
-try:
-  from tensorflow.contrib.tensorrt.python.ops import trt_engine_op
-  from tensorflow.contrib.tensorrt.python.trt_convert import calib_graph_to_infer_graph
-  from tensorflow.contrib.tensorrt.python.trt_convert import create_inference_graph
-except:
-  no_trt_message = (
-      '**** Failed to initialize TensorRT. This is either because the TensorRT'
-      ' installation path is not in LD_LIBRARY_PATH, or because you do not have it'
-      ' installed. If not installed, please go to'
-      ' https://developer.nvidia.com/tensorrt to download and install'
-      ' TensorRT ****')
-  raise RuntimeError(no_trt_message)
+from tensorflow.contrib.tensorrt.python.ops import trt_engine_op
+from tensorflow.contrib.tensorrt.python.trt_convert import calib_graph_to_infer_graph
+from tensorflow.contrib.tensorrt.python.trt_convert import create_inference_graph
 # pylint: enable=unused-import,line-too-long
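
With the guard now in contrib/tensorrt/__init__.py, a failed TensorRT load prints the help message and re-raises the original exception, so callers can catch it and fall back. A minimal usage sketch (not part of this commit): the maybe_optimize helper and the 'logits' output node name are hypothetical, and the create_inference_graph arguments assume the contrib-era signature.

# Sketch: fall back to the unmodified graph when the TRT import fails.
try:
  from tensorflow.contrib import tensorrt as trt  # prints the message and re-raises on failure
except Exception:
  trt = None

def maybe_optimize(graph_def):
  # Hypothetical helper: only run the TRT conversion if the import succeeded.
  if trt is None:
    return graph_def
  return trt.create_inference_graph(
      input_graph_def=graph_def,
      outputs=['logits'],        # hypothetical output node name
      max_batch_size=1,
      precision_mode='FP32')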