Make the nnstreamer build process include Caffe2.
Signed-off-by: Hyoung Joo Ahn <hello.ahn@samsung.com>
--- /dev/null
+/usr/lib/nnstreamer/filters/libnnstreamer_filter_caffe2.so
)
endif
+if get_option('enable-caffe2')
+ filter_sub_caffe2_sources = [
+ 'tensor_filter_caffe2.c',
+ 'tensor_filter_caffe2_core.cc'
+ ]
+
+ # Resolve sources to absolute paths so they work from any subdir.
+ nnstreamer_filter_caffe2_sources = []
+ foreach s : filter_sub_caffe2_sources
+ nnstreamer_filter_caffe2_sources += join_paths(meson.current_source_dir(), s)
+ endforeach
+
+ nnstreamer_filter_caffe2_deps = [caffe2_dep, protobuf_dep, glib_dep, gst_dep, nnstreamer_dep]
+
+ # Runtime-loadable subplugin, installed into the filter plugin directory.
+ shared_library('nnstreamer_filter_caffe2',
+ nnstreamer_filter_caffe2_sources,
+ cpp_args: ['-Wno-sign-compare'],
+ dependencies: nnstreamer_filter_caffe2_deps,
+ install: true,
+ install_dir: filter_subplugin_install_dir
+ )
+
+ # Static archive of the same sources for link-time integration.
+ static_library('nnstreamer_filter_caffe2',
+ nnstreamer_filter_caffe2_sources,
+ cpp_args: ['-Wno-sign-compare'],
+ dependencies: nnstreamer_filter_caffe2_deps,
+ install: true,
+ install_dir: nnstreamer_libdir
+ )
+endif
+
filter_sub_python_sources = [
'tensor_filter_python.c',
'tensor_filter_python_core.cc'
/** @todo : Check the integrity of filter->data and filter->model_file, nnfw */
cf2 = *private_data;
if (g_strcmp0 (prop->model_file,
- caffe2_core_getModelPath (cf2->caffe2_private_data)) != 0 ||
+ caffe2_core_getPredModelPath (cf2->caffe2_private_data)) != 0 ||
g_strcmp0 (prop->model_file_sub,
caffe2_core_getInitModelPath (cf2->caffe2_private_data)) != 0) {
caffe2_close (prop, private_data);
cf2->caffe2_private_data = caffe2_core_new (prop->model_file,
prop->model_file_sub);
if (cf2->caffe2_private_data) {
- if (caffe2_core_init (cf2->caffe2_private_data)) {
- g_printerr ("failed to initialize the object: Caffe2");
+ if (caffe2_core_init (cf2->caffe2_private_data, prop)) {
+ g_critical ("failed to initialize the object: Caffe2");
g_free (cf2);
return -2;
}
return 0;
} else {
- g_printerr ("failed to create the object: Caffe2");
+ g_critical ("failed to create the object: Caffe2");
g_free (cf2);
return -1;
}
return caffe2_core_getOutputDim (cf2->caffe2_private_data, info);
}
+/**
+ * @brief The optional destroyNotify callback for GstTensorFilterFramework.
+ *        Delegates to the core (C++) implementation.
+ * @param[in] data The data element being destroyed by the pipeline.
+ */
+static void
+caffe2_destroyNotify (void *data)
+{
+ caffe2_core_destroyNotify (data);
+}
+
static gchar filter_subplugin_caffe2[] = "caffe2";
static GstTensorFilterFramework NNS_support_caffe2 = {
.name = filter_subplugin_caffe2,
.allow_in_place = FALSE, /** @todo: support this to optimize performance later. */
.allocate_in_invoke = TRUE,
+ .destroyNotify = caffe2_destroyNotify,
.invoke_NN = caffe2_run,
.getInputDimension = caffe2_getInputDim,
.getOutputDimension = caffe2_getOutputDim,
ReinitializeTensor (\
inputTensor,\
{\
- inputTensorMeta.info[i].dimension[0],\
- inputTensorMeta.info[i].dimension[1],\
+ inputTensorMeta.info[i].dimension[3],\
inputTensorMeta.info[i].dimension[2],\
- inputTensorMeta.info[i].dimension[3]\
+ inputTensorMeta.info[i].dimension[1],\
+ inputTensorMeta.info[i].dimension[0]\
},\
at::dtype<type> ().device (CPU)\
);\
gint64 start_time = g_get_real_time ();
#endif
if (!g_file_test (init_model_path, G_FILE_TEST_IS_REGULAR)) {
- g_critical ("the file of init_model_path is not valid\n");
+ g_critical ("the file of init_model_path is not valid: %s\n", init_model_path);
return -1;
}
if (!g_file_test (pred_model_path, G_FILE_TEST_IS_REGULAR)) {
- g_critical ("the file of pred_model_path is not valid\n");
+ g_critical ("the file of pred_model_path is not valid: %s\n", pred_model_path);
return -1;
}
-
CAFFE_ENFORCE (ReadProtoFromFile (init_model_path, &initNet));
CAFFE_ENFORCE (ReadProtoFromFile (pred_model_path, &predictNet));
+ /* set device type as CPU. If it is required, GPU/CUDA will be added as an option */
+ predictNet.mutable_device_option()->set_device_type(PROTO_CPU);
+ initNet.mutable_device_option()->set_device_type(PROTO_CPU);
+
+ for(int i = 0; i < predictNet.op_size(); ++i){
+ predictNet.mutable_op(i)->mutable_device_option()->set_device_type(PROTO_CPU);
+ }
+ for(int i = 0; i < initNet.op_size(); ++i){
+ initNet.mutable_op(i)->mutable_device_option()->set_device_type(PROTO_CPU);
+ }
+
CAFFE_ENFORCE (workSpace.RunNetOnce (initNet));
CAFFE_ENFORCE (workSpace.CreateNet (predictNet));
#if (DBG)
for (i = 0; i < outputTensorMeta.num_tensors; i++) {
const auto& out = workSpace.GetBlob (outputTensorMeta.info[i].name)
->Get<Tensor> ();
+
switch (outputTensorMeta.info[i].type){
case _NNS_INT32:
output[i].data = out.data<int32_t>();
Caffe2Core *c = (Caffe2Core *) caffe2;
return c->run (input, output);
}
+
+/**
+ * @brief the destroy notify method for caffe2.
+ * @param[in] data : the data element destroyed at the pipeline
+ * @note The output tensor data points into memory owned by the caffe2
+ *       workspace blob, so there is nothing to free here — hence the
+ *       intentionally empty body.
+ */
+void
+caffe2_core_destroyNotify (void * data)
+{
+ /* do nothing */
+}
int caffe2_core_getOutputDim (void * caffe2, GstTensorsInfo * info);
int caffe2_core_run (void * caffe2, const GstTensorMemory * input,
GstTensorMemory * output);
+ void caffe2_core_destroyNotify (void * data);
#ifdef __cplusplus
}
$(NNSTREAMER_EXT_HOME)/tensor_filter/tensor_filter_pytorch.c \
$(NNSTREAMER_EXT_HOME)/tensor_filter/tensor_filter_pytorch_core.cc
+# filter caffe2
+NNSTREAMER_FILTER_CAFFE2_SRCS := \
+ $(NNSTREAMER_EXT_HOME)/tensor_filter/tensor_filter_caffe2.c \
+ $(NNSTREAMER_EXT_HOME)/tensor_filter/tensor_filter_caffe2_core.cc
+
# decoder boundingbox
NNSTREAMER_DECODER_BB_SRCS := \
$(NNSTREAMER_EXT_HOME)/tensor_decoder/tensordec-boundingbox.c
endif
endif
+# Caffe2
+if get_option('enable-caffe2')
+ # required: false makes the found() check below meaningful; with
+ # required: true, dependency() aborts on failure and the else branch
+ # is dead code whose message is never shown.
+ caffe2_dep = dependency('caffe2', required: false)
+
+ if caffe2_dep.found()
+ add_project_arguments('-DENABLE_CAFFE2=1', language: ['c', 'cpp'])
+ else
+ error('Cannot find caffe2')
+ endif
+endif
+
# Python
have_python2 = false
have_python3 = false
option('enable-tensorflow-lite', type: 'boolean', value: true)
option('enable-tensorflow', type: 'boolean', value: true)
option('enable-tensorflow-mem-optmz', type: 'boolean', value: true)
+option('enable-caffe2', type: 'boolean', value: true)
option('enable-pytorch', type: 'boolean', value: true)
option('enable-pytorch-use-gpu', type: 'boolean', value: false) # default value, can be specified at run time
option('enable-movidius-ncsdk2', type: 'boolean', value: false)
%define enable_tf false
%endif
-meson --buildtype=plain --prefix=%{_prefix} --sysconfdir=%{_sysconfdir} --libdir=%{_libdir} --bindir=%{nnstexampledir} --includedir=%{_includedir} -Dinstall-example=true -Denable-tensorflow=%{enable_tf} -Denable-pytorch=false %{api} -Denable-env-var=false -Denable-symbolic-link=false -Denable-tizen=true build
+meson --buildtype=plain --prefix=%{_prefix} --sysconfdir=%{_sysconfdir} --libdir=%{_libdir} --bindir=%{nnstexampledir} --includedir=%{_includedir} -Dinstall-example=true -Denable-tensorflow=%{enable_tf} -Denable-pytorch=false -Denable-caffe2=false %{api} -Denable-env-var=false -Denable-symbolic-link=false -Denable-tizen=true build
ninja -C build %{?_smp_mflags}