#include "../../../src/runtime/dso_library.cc"
#include "../../../src/runtime/file_util.cc"
#include "../../../src/runtime/library_module.cc"
+#include "../../../src/runtime/metadata_module.cc"
#include "../../../src/runtime/module.cc"
#include "../../../src/runtime/ndarray.cc"
#include "../../../src/runtime/object.cc"
output_name=outputs[0]
)
+def _convert_expand_dims(builder, name, inputs, outputs, args, attrs):
+ if attrs.axis >= 0:
+ axes = list(range(attrs.axis, attrs.axis+attrs.num_newaxis))
+ else:
+ axes = list(range(attrs.axis-attrs.num_newaxis+1, attrs.axis+1))
+
+ builder.add_expand_dims(
+ name=name,
+ input_name=inputs[0],
+ output_name=outputs[0],
+ axes=axes
+ )
+
+def _convert_relu(builder, name, inputs, outputs, args, attrs):
+ builder.add_activation(
+ name=name,
+ non_linearity='RELU',
+ input_name=inputs[0],
+ output_name=outputs[0]
+ )
+
def _convert_softmax(builder, name, inputs, outputs, args, attrs):
builder.add_softmax_nd(
name=name,
'add' : _convert_add,
'multiply' : _convert_multiply,
'clip' : _convert_clip,
+ 'expand_dims' : _convert_expand_dims,
+ 'nn.relu' : _convert_relu,
'nn.batch_flatten' : _convert_batch_flatten,
'nn.softmax' : _convert_softmax,
'nn.conv2d' : _convert_conv2d,
@tvm._ffi.register_func("relay.ext.coremlcompiler")
def coreml_compiler(func):
    """Compile a partitioned Relay function into a CoreML runtime module.

    Registered as the BYOC external compiler for the CoreML target; the
    Relay build pipeline invokes it once per partitioned function.
    """
    assert isinstance(func, tvm.relay.function.Function)
    work_dir = os.getcwd()
    symbol = str(func.attrs.global_symbol)

    # Emit a CoreML model for this subgraph.
    codegen = CodegenCoreML(symbol, func)
    codegen.visit(func.body)

    # Drop any stale compiled model before regenerating it.
    compiled_path = "{}/{}.mlmodelc".format(work_dir, symbol)
    if os.path.exists(compiled_path):
        shutil.rmtree(compiled_path)
    codegen.compile(work_dir)

    ctx = tvm.cpu(0)
    return coreml_runtime.create(work_dir, ctx).module
} else if (dtype == DataType::Float(32)) {
dataType = MLMultiArrayDataTypeFloat32;
size *= sizeof(float);
+ } else if (dtype == DataType::Int(32)) {
+ dataType = MLMultiArrayDataTypeInt32;
+ size *= sizeof(int);
} else {
LOG(FATAL) << "unsupported data type " << dtype;
return;
} else if (data_desc.dataType == MLMultiArrayDataTypeFloat32) {
dtype = DataType::Float(32);
size *= sizeof(float);
+ } else if (data_desc.dataType == MLMultiArrayDataTypeInt32) {
+ dtype = DataType::Int(32);
+ size *= sizeof(int);
} else {
LOG(FATAL) << "unexpected data type " << data_desc.dataType;
}
PackedFunc CoreMLRuntime::GetFunction(const std::string& name,
const ObjectPtr<Object>& sptr_to_self) {
// Return member functions during query.
- if (name == "invoke") {
+ if (name == "invoke" || name == "run") {
return PackedFunc(
[sptr_to_self, this](TVMArgs args, TVMRetValue* rv) { GetModel("main").Invoke(); });
} else if (name == "set_input") {
return PackedFunc([sptr_to_self, this](TVMArgs args, TVMRetValue* rv) {
*rv = GetModel("main").GetNumOutputs();
});
- } else {
+ } else if (model_map_.count(name) != 0) {
// Return the packedfunc which executes the subgraph.
return PackedFunc([sptr_to_self, name, this](TVMArgs args, TVMRetValue* rv) {
CoreMLModel& model = GetModel(name);
}
*rv = out;
});
+ } else {
+ return PackedFunc();
}
}
assert tvm.ir.structural_equal(mod, expected, map_free_vars=True)
-@mock.patch('tvm.contrib.coreml_runtime.create')
-@mock.patch('tvm.contrib.xcode.compile_coreml')
-def test_construct_model(m1, m2):
- mod = _create_graph_annotated()
-
- fcompile = tvm._ffi.get_global_func("relay.ext.coremlcompiler")
-
- for var, func in mod.functions.items():
- if func.attrs and 'Compiler' in func.attrs and \
- func.attrs['Compiler'] == 'coremlcompiler':
- fcompile(tvm.IRModule.from_expr(func.body))
-
-
@pytest.mark.skipif(not _has_xcode(), reason="Xcode is not available")
def test_compile_and_run():
ctx=tvm.cpu()
tvm.testing.assert_allclose(out.asnumpy(), expected, rtol=tol, atol=tol)
@mock.patch('tvm.contrib.coreml_runtime.create')
@mock.patch('tvm.contrib.xcode.compile_coreml')
def _construct_model(func, m1, m2):
    """Partition `func` for the CoreML backend and run codegen on each
    offloaded subgraph, with the xcode compile and runtime-create entry
    points mocked out so no actual CoreML toolchain is required.
    """
    mod = tvm.IRModule()
    mod["main"] = func
    mod = transform.AnnotateTarget("coremlcompiler")(mod)
    mod = transform.PartitionGraph()(mod)

    fcompile = tvm._ffi.get_global_func("relay.ext.coremlcompiler")

    # Run the external compiler over every partition marked for CoreML.
    for _, subfunc in mod.functions.items():
        attrs = subfunc.attrs
        if attrs and 'Compiler' in attrs and attrs['Compiler'] == 'coremlcompiler':
            fcompile(subfunc)
+
def test_add():
    """Elementwise add is partitioned and compiled for CoreML."""
    dims = (10, 10)
    inp = relay.var('x', shape=dims)
    out = inp + inp
    _construct_model(relay.Function([inp], out))
+
+
def test_multiply():
    """Elementwise multiply is partitioned and compiled for CoreML."""
    dims = (10, 10)
    inp = relay.var('x', shape=dims)
    out = inp * inp
    _construct_model(relay.Function([inp], out))
+
+
def test_clip():
    """clip with a [0, 1] range is partitioned and compiled for CoreML."""
    dims = (10, 10)
    inp = relay.var('x', shape=dims)
    out = relay.clip(inp, a_min=0.0, a_max=1.0)
    _construct_model(relay.Function([inp], out))
+
+
def test_batch_flatten():
    """nn.batch_flatten is partitioned and compiled for CoreML."""
    dims = (10, 10, 10)
    inp = relay.var('x', shape=dims)
    out = relay.nn.batch_flatten(inp)
    _construct_model(relay.Function([inp], out))
+
+
def test_expand_dims():
    """expand_dims with a positive and a negative axis both compile."""
    dims = (10, 10)
    inp = relay.var('x', shape=dims)
    # Same two cases as before: prepend an axis, then append one.
    for ax in (0, -1):
        out = relay.expand_dims(inp, axis=ax)
        _construct_model(relay.Function([inp], out))
+
+
def test_relu():
    """nn.relu is partitioned and compiled for CoreML."""
    dims = (10, 10)
    inp = relay.var('x', shape=dims)
    out = relay.nn.relu(inp)
    _construct_model(relay.Function([inp], out))
+
+
def test_softmax():
    """nn.softmax over axis 1 is partitioned and compiled for CoreML."""
    dims = (10, 10)
    inp = relay.var('x', shape=dims)
    out = relay.nn.softmax(inp, axis=1)
    _construct_model(relay.Function([inp], out))
+
+
def test_conv2d():
    """nn.conv2d with constant weights is partitioned and compiled."""
    inp = relay.var('x', shape=(1, 3, 224, 224))
    weight = relay.const(np.zeros((16, 3, 3, 3), dtype='float32'))
    out = relay.nn.conv2d(inp, weight, strides=[2, 2],
                          padding=[1, 1, 1, 1], kernel_size=[3, 3])
    _construct_model(relay.Function([inp], out))
+
+
def test_global_avg_pool2d():
    """nn.global_avg_pool2d is partitioned and compiled for CoreML."""
    dims = (10, 10, 10, 10)
    inp = relay.var('x', shape=dims)
    out = relay.nn.global_avg_pool2d(inp)
    _construct_model(relay.Function([inp], out))
+
+
if __name__ == "__main__":
    # Run every test case in order when invoked as a script.
    for case in (
            test_annotate,
            test_compile_and_run,
            test_add,
            test_multiply,
            test_clip,
            test_expand_dims,
            test_relu,
            test_batch_flatten,
            test_softmax,
            test_conv2d,
            test_global_avg_pool2d,
    ):
        case()