     else:
       print("GoogleCuda is disabled")
 
+  def testIsMklEnabled(self):
+    # This test doesn't assert anything.
+    # It ensures the py wrapper function is generated correctly.
+    if test_util.IsMklEnabled():
+      print("MKL is enabled")
+    else:
+      print("MKL is disabled")
+
   def testAssertProtoEqualsStr(self):
     graph_str = "node { name: 'w1' op: 'params' }"
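The new test only exercises the generated py wrapper; the same
test_util.IsMklEnabled() check is what lets build-dependent expectations
branch, as in this minimal sketch (the expected_matmul_kernel helper is
illustrative, not part of the change):

# Minimal sketch, assuming only that test_util.IsMklEnabled() reports the
# build flag: derive the kernel class name the build registers for MatMul.
from tensorflow.python.framework import test_util

def expected_matmul_kernel(dtype):
  # dtype is a C++ type name as it appears in the kernel template,
  # e.g. 'float' or 'double'.
  prefix = 'Mkl' if test_util.IsMklEnabled() else ''
  return prefix + 'MatMulOp<CPUDevice, %s, false >' % dtype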
 from google.protobuf import text_format
 from tensorflow.core.framework import graph_pb2
+from tensorflow.python.framework import test_util
 from tensorflow.python.platform import gfile
 from tensorflow.python.platform import test
 from tensorflow.python.tools import selective_registration_header_lib
     ops_and_kernels = selective_registration_header_lib.get_ops_and_kernels(
         'rawproto', self.WriteGraphFiles(graphs), default_ops)
+    matmul_prefix = ''
+    if test_util.IsMklEnabled():
+      matmul_prefix = 'Mkl'
+
     self.assertListEqual(
         [
             ('BiasAdd', 'BiasOp<CPUDevice, float>'),  #
-            ('MatMul', 'MatMulOp<CPUDevice, double, false >'),  #
-            ('MatMul', 'MatMulOp<CPUDevice, float, false >'),  #
+            ('MatMul',
+             matmul_prefix + 'MatMulOp<CPUDevice, double, false >'),  #
+            ('MatMul', matmul_prefix + 'MatMulOp<CPUDevice, float, false >'),  #
             ('NoOp', 'NoOp'),  #
             ('Reshape', 'ReshapeOp'),  #
             ('_Recv', 'RecvOp'),  #
     self.assertListEqual(
         [
             ('BiasAdd', 'BiasOp<CPUDevice, float>'),  #
-            ('MatMul', 'MatMulOp<CPUDevice, double, false >'),  #
-            ('MatMul', 'MatMulOp<CPUDevice, float, false >'),  #
+            ('MatMul',
+             matmul_prefix + 'MatMulOp<CPUDevice, double, false >'),  #
+            ('MatMul', matmul_prefix + 'MatMulOp<CPUDevice, float, false >'),  #
             ('NoOp', 'NoOp'),  #
             ('Reshape', 'ReshapeOp'),  #
             ('_Recv', 'RecvOp'),  #
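The Mkl prefix in both expected lists mirrors what get_ops_and_kernels
reports on an MKL build. A hedged sketch of driving the generator directly,
assuming a serialized GraphDef on disk (the path and the op:kernel pairs in
default_ops are illustrative; the 'rawproto' format string comes from the
test itself):

# Sketch: list the (op, kernel) pairs the selective-registration header
# generator finds in serialized GraphDefs, plus the caller's default ops.
from tensorflow.python.tools import selective_registration_header_lib

default_ops = 'NoOp:NoOp,_Recv:RecvOp'  # illustrative op:kernel pairs
ops_and_kernels = selective_registration_header_lib.get_ops_and_kernels(
    'rawproto', ['/tmp/graph.pb'], default_ops)  # path is illustrative
for op, kernel in ops_and_kernels:
  print(op, kernel)

On an MKL build the MatMul entries come back as MklMatMulOp<...>, which is
exactly what matmul_prefix accounts for above.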