--- /dev/null
+/**
+ * GStreamer Tensor_Filter, caffe2 Module
+ * Copyright (C) 2019 Hyoung Joo Ahn <hello.ahn@samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ */
+/**
+ * @file tensor_filter_caffe2.c
+ * @date 27 May 2019
+ * @brief Caffe2 for tensor_filter gstreamer plugin
+ * @see http://github.com/nnsuite/nnstreamer
+ * @author HyoungJoo Ahn <hello.ahn@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ * This is the per-NN-framework plugin (caffe2) for tensor_filter.
+ * Fill in "GstTensorFilterFramework" for tensor_filter.h/c
+ *
+ */
+
+#include <glib.h>
+#include <string.h>
+
+#include "tensor_filter_caffe2_core.h"
+
+void init_filter_caffe2 (void) __attribute__ ((constructor));
+void fini_filter_caffe2 (void) __attribute__ ((destructor));
+
+/**
+ * @brief internal data of caffe2
+ */
+struct _Caffe2_data
+{
+  void *caffe2_private_data; /**< opaque handle to the C++ Caffe2Core object, created by caffe2_core_new () */
+};
+typedef struct _Caffe2_data caffe2_data;
+
+
+/**
+ * @brief Free privateData and move on.
+ * @param prop property of tensor_filter instance (unused)
+ * @param private_data : caffe2 plugin's private data; reset to NULL on return
+ */
+static void
+caffe2_close (const GstTensorFilterProperties * prop, void **private_data)
+{
+  caffe2_data *cf2 = *private_data;
+
+  if (cf2 == NULL)
+    return; /** nothing to close; guards against double-close */
+
+  caffe2_core_delete (cf2->caffe2_private_data);
+  g_free (cf2);
+  *private_data = NULL;
+}
+
+/**
+ * @brief Load caffe2 modelfile
+ * @param prop property of tensor_filter instance
+ * @param private_data : caffe2 plugin's private data
+ * @return 0 if successfully loaded. 1 if skipped (already loaded).
+ *        -1 if the object construction is failed.
+ *        -2 if the object initialization if failed
+ */
+static int
+caffe2_loadModelFile (const GstTensorFilterProperties * prop,
+    void **private_data)
+{
+  caffe2_data *cf2;
+  if (*private_data != NULL) {
+    /** @todo : Check the integrity of filter->data and filter->model_file, nnfw */
+    cf2 = *private_data;
+    /** caffe2_core_getPredModelPath () is the declared API (there is no
+     * caffe2_core_getModelPath in the core header) */
+    if (g_strcmp0 (prop->model_file,
+            caffe2_core_getPredModelPath (cf2->caffe2_private_data)) != 0 ||
+        g_strcmp0 (prop->model_file_sub,
+            caffe2_core_getInitModelPath (cf2->caffe2_private_data)) != 0) {
+      caffe2_close (prop, private_data); /** different model: reload below */
+    } else {
+      return 1; /** same model already loaded */
+    }
+  }
+  cf2 = g_new0 (caffe2_data, 1); /** initialize cf2 Fill Zero! */
+  *private_data = cf2;
+  cf2->caffe2_private_data = caffe2_core_new (prop->model_file,
+      prop->model_file_sub);
+  if (cf2->caffe2_private_data == NULL) {
+    g_printerr ("failed to create the object: Caffe2");
+    g_free (cf2);
+    *private_data = NULL; /** do not leave a dangling pointer behind */
+    return -1;
+  }
+  /** the core API takes prop so it can copy input/output meta */
+  if (caffe2_core_init (cf2->caffe2_private_data, prop)) {
+    g_printerr ("failed to initialize the object: Caffe2");
+    caffe2_core_delete (cf2->caffe2_private_data); /** avoid leaking the core */
+    g_free (cf2);
+    *private_data = NULL;
+    return -2;
+  }
+  return 0;
+}
+
+/**
+ * @brief The open callback for GstTensorFilterFramework. Called before anything else
+ * @param prop property of tensor_filter instance
+ * @param private_data : caffe2 plugin's private data
+ * @return 0 on success (see caffe2_loadModelFile for the error codes)
+ */
+static int
+caffe2_open (const GstTensorFilterProperties * prop, void **private_data)
+{
+  int status = caffe2_loadModelFile (prop, private_data);
+
+  g_assert (status == 0); /** This must be called only once */
+  return status;
+}
+
+/**
+ * @brief The mandatory callback for GstTensorFilterFramework
+ * @param prop property of tensor_filter instance
+ * @param private_data : caffe2 plugin's private data
+ * @param[in] input The array of input tensors
+ * @param[out] output The array of output tensors
+ * @return 0 if OK. non-zero if error.
+ */
+static int
+caffe2_run (const GstTensorFilterProperties * prop, void **private_data,
+    const GstTensorMemory * input, GstTensorMemory * output)
+{
+  caffe2_data *cf2 = (caffe2_data *) *private_data;
+  int status;
+
+  g_assert (cf2 != NULL);
+  status = caffe2_core_run (cf2->caffe2_private_data, input, output);
+  g_assert (status == 0);
+  return status;
+}
+
+/**
+ * @brief The optional callback for GstTensorFilterFramework
+ * @param prop property of tensor_filter instance
+ * @param private_data : caffe2 plugin's private data
+ * @param[out] info The dimensions and types of input tensors
+ */
+static int
+caffe2_getInputDim (const GstTensorFilterProperties * prop, void **private_data,
+    GstTensorsInfo * info)
+{
+  caffe2_data *cf2 = (caffe2_data *) *private_data;
+
+  g_assert (cf2 != NULL);
+  return caffe2_core_getInputDim (cf2->caffe2_private_data, info);
+}
+
+/**
+ * @brief The optional callback for GstTensorFilterFramework
+ * @param prop property of tensor_filter instance
+ * @param private_data : caffe2 plugin's private data
+ * @param[out] info The dimensions and types of output tensors
+ */
+static int
+caffe2_getOutputDim (const GstTensorFilterProperties * prop,
+    void **private_data, GstTensorsInfo * info)
+{
+  caffe2_data *cf2 = (caffe2_data *) *private_data;
+
+  g_assert (cf2 != NULL);
+  return caffe2_core_getOutputDim (cf2->caffe2_private_data, info);
+}
+
+static gchar filter_subplugin_caffe2[] = "caffe2";
+
+/** @brief The callback table registered to tensor_filter as the "caffe2" subplugin */
+static GstTensorFilterFramework NNS_support_caffe2 = {
+  .name = filter_subplugin_caffe2,
+  .allow_in_place = FALSE,      /** @todo: support this to optimize performance later. */
+  .allocate_in_invoke = TRUE,
+  .invoke_NN = caffe2_run,
+  .getInputDimension = caffe2_getInputDim,
+  .getOutputDimension = caffe2_getOutputDim,
+  .open = caffe2_open,
+  .close = caffe2_close,
+};
+
+/** @brief Initialize this object for tensor_filter subplugin runtime register */
+void
+init_filter_caffe2 (void)
+{
+  /** runs at library load (constructor attribute); registers the callback table */
+  nnstreamer_filter_probe (&NNS_support_caffe2);
+}
+
+/** @brief Destruct the subplugin */
+void
+fini_filter_caffe2 (void)
+{
+  /** runs at library unload (destructor attribute); unregisters by name */
+  nnstreamer_filter_exit (NNS_support_caffe2.name);
+}
--- /dev/null
+/**
+ * Copyright (C) 2019 Samsung Electronics Co., Ltd. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ */
+/**
+ * @file tensor_filter_caffe2_core.cc
+ * @author HyoungJoo Ahn <hello.ahn@samsung.com>
+ * @date 31/5/2019
+ * @brief connection with caffe2 libraries.
+ *
+ * @bug No known bugs.
+ */
+
+#include <unistd.h>
+#include <algorithm>
+
+#include <nnstreamer_plugin_api.h>
+#include "tensor_filter_caffe2_core.h"
+
+/**
+ * @brief Macro for debug mode.
+ */
+#ifndef DBG
+#define DBG FALSE
+#endif
+
+std::map <char*, Tensor*> Caffe2Core::inputTensorMap;
+
+/**
+ * @brief Caffe2Core creator
+ * @param _model_path : the path to the caffe2 prediction model file
+ * @param _model_path_sub : the path to the caffe2 init model file
+ * @note only the paths are stored here; the models are loaded in init ()
+ * @return Nothing
+ */
+Caffe2Core::Caffe2Core (const char * _model_path, const char *_model_path_sub)
+    : init_model_path (_model_path_sub), pred_model_path (_model_path)
+{
+  gst_tensors_info_init (&inputTensorMeta);
+  gst_tensors_info_init (&outputTensorMeta);
+}
+
+/**
+ * @brief Caffe2Core Destructor
+ * @return Nothing
+ * @note releases the copied tensor meta; the workspace and nets are torn
+ *       down by their own member destructors.
+ */
+Caffe2Core::~Caffe2Core ()
+{
+  gst_tensors_info_free (&inputTensorMeta);
+  gst_tensors_info_free (&outputTensorMeta);
+}
+
+/**
+ * @brief initialize the object with caffe2 model
+ * @param prop tensor_filter properties; input_meta/output_meta are deep-copied
+ * @return 0 if OK. non-zero if error.
+ *        -1 if the model is not loaded.
+ *        -2 if the initialization of input tensor is failed.
+ */
+int
+Caffe2Core::init (const GstTensorFilterProperties * prop)
+{
+  if (loadModels ()) {
+    g_critical ("Failed to load model\n");
+    return -1;
+  }
+
+  /** keep private copies so this object does not depend on prop's lifetime */
+  gst_tensors_info_copy (&inputTensorMeta, &prop->input_meta);
+  gst_tensors_info_copy (&outputTensorMeta, &prop->output_meta);
+
+  if (initInputTensor ()) {
+    g_critical ("Failed to initialize input tensor\n");
+    return -2;
+  }
+  return 0;
+}
+
+/**
+ * @brief Reinitialize `inputTensor` with the 4-d dimension of the i-th input
+ *        meta entry and the given element type, on CPU.
+ * @note expands only inside initInputTensor (); relies on its locals
+ *       `inputTensor` and `i`.
+ */
+#define initializeTensor(type)\
+do {\
+  ReinitializeTensor (\
+      inputTensor,\
+      {\
+        inputTensorMeta.info[i].dimension[0],\
+        inputTensorMeta.info[i].dimension[1],\
+        inputTensorMeta.info[i].dimension[2],\
+        inputTensorMeta.info[i].dimension[3]\
+      },\
+      at::dtype<type> ().device (CPU)\
+  );\
+} while (0);
+
+/**
+ * @brief initialize the input tensors by the input meta info
+ * @return 0 if OK. -1 if an unsupported tensor type is configured.
+ * @note caffe2 offers no uint32/uint64 tensor type, so those are rejected.
+ */
+int
+Caffe2Core::initInputTensor ()
+{
+  unsigned int i; /** unsigned: num_tensors is unsigned; avoids sign-compare */
+
+  inputTensorMap.clear ();
+  for (i = 0; i < inputTensorMeta.num_tensors; i++) {
+    Tensor *inputTensor = workSpace.CreateBlob (inputTensorMeta.info[i].name)
+        ->GetMutable<Tensor> ();
+
+    switch (inputTensorMeta.info[i].type){
+      case _NNS_INT32:
+        initializeTensor (int32_t);
+        break;
+      case _NNS_UINT32:
+        g_critical ("invalid data type is used");
+        return -1;
+      case _NNS_INT16:
+        initializeTensor (int16_t);
+        break;
+      case _NNS_UINT16:
+        initializeTensor (uint16_t);
+        break;
+      case _NNS_INT8:
+        initializeTensor (int8_t);
+        break;
+      case _NNS_UINT8:
+        initializeTensor (uint8_t);
+        break;
+      case _NNS_FLOAT64:
+        initializeTensor (double);
+        break;
+      case _NNS_FLOAT32:
+        initializeTensor (float);
+        break;
+      case _NNS_INT64:
+        initializeTensor (int64_t);
+        break;
+      case _NNS_UINT64:
+        g_critical ("invalid data type is used");
+        return -1;
+      default:
+        g_critical ("invalid data type is used");
+        return -1;
+    }
+
+    inputTensorMap.insert (
+        std::make_pair (inputTensorMeta.info[i].name, inputTensor)
+    );
+  }
+  return 0;
+}
+
+/**
+ * @brief get the prediction model path
+ * @return the pred_model_path given at construction (not owned by the caller).
+ */
+const char *
+Caffe2Core::getPredModelPath ()
+{
+  return pred_model_path;
+}
+
+/**
+ * @brief get the init model path
+ * @return the init_model_path given at construction (not owned by the caller).
+ */
+const char *
+Caffe2Core::getInitModelPath ()
+{
+  return init_model_path;
+}
+
+/**
+ * @brief load the caffe2 model
+ * @note both init_model_path and pred_model_path must be regular files.
+ * @return 0 if OK. non-zero if error.
+ */
+int
+Caffe2Core::loadModels ()
+{
+#if (DBG)
+  gint64 start_time = g_get_real_time ();
+#endif
+  if (!g_file_test (init_model_path, G_FILE_TEST_IS_REGULAR)) {
+    g_critical ("the file of init_model_path is not valid\n");
+    return -1;
+  }
+  if (!g_file_test (pred_model_path, G_FILE_TEST_IS_REGULAR)) {
+    g_critical ("the file of pred_model_path is not valid\n");
+    return -1;
+  }
+
+  /** NOTE(review): CAFFE_ENFORCE throws on failure rather than returning;
+   *  callers above this layer should be prepared for a C++ exception. */
+  CAFFE_ENFORCE (ReadProtoFromFile (init_model_path, &initNet));
+  CAFFE_ENFORCE (ReadProtoFromFile (pred_model_path, &predictNet));
+
+  /** run the init net once to populate the workspace, then build the predict net */
+  CAFFE_ENFORCE (workSpace.RunNetOnce (initNet));
+  CAFFE_ENFORCE (workSpace.CreateNet (predictNet));
+#if (DBG)
+  gint64 stop_time = g_get_real_time ();
+  g_message ("Model is loaded: %" G_GINT64_FORMAT, (stop_time - start_time));
+#endif
+  return 0;
+}
+
+/**
+ * @brief return the tensor info of the input tensors.
+ * @param[out] info Structure for tensor info; receives a deep copy of the
+ *             whole input meta (presumably released by the caller via
+ *             gst_tensors_info_free — verify against callers).
+ * @return 0 if OK. non-zero if error.
+ */
+int
+Caffe2Core::getInputTensorDim (GstTensorsInfo * info)
+{
+  gst_tensors_info_copy (info, &inputTensorMeta);
+  return 0;
+}
+
+/**
+ * @brief return the tensor info of the output tensors.
+ * @param[out] info Structure for tensor info; receives a deep copy of the
+ *             whole output meta (presumably released by the caller via
+ *             gst_tensors_info_free — verify against callers).
+ * @return 0 if OK. non-zero if error.
+ */
+int
+Caffe2Core::getOutputTensorDim (GstTensorsInfo * info)
+{
+  gst_tensors_info_copy (info, &outputTensorMeta);
+  return 0;
+}
+
+/**
+ * @brief run the model with the input.
+ * @param[in] input : The array of input tensors
+ * @param[out] output : The array of output tensors
+ * @return 0 if OK. non-zero if error.
+ * @note output[i].data points at memory owned by the caffe2 workspace; it
+ *       remains valid until the next run and must not be freed by the caller.
+ */
+int
+Caffe2Core::run (const GstTensorMemory * input, GstTensorMemory * output)
+{
+  unsigned int i; /** unsigned: num_tensors is unsigned; avoids sign-compare */
+#if (DBG)
+  gint64 start_time = g_get_real_time ();
+#endif
+  for (i = 0; i < inputTensorMeta.num_tensors; i++){
+    /** NOTE(review): assumes every input name was registered by
+     *  initInputTensor (); find () would return end () otherwise. */
+    Tensor *inputTensor = inputTensorMap.
+      find(inputTensorMeta.info[i].name)->second;
+    /** share the caller's buffer with the element type of the INPUT meta.
+     * (the previous code pre-shared everything as float and then switched on
+     * the OUTPUT meta, which is wrong for non-float inputs and whenever the
+     * input and output types differ) */
+    switch (inputTensorMeta.info[i].type){
+      case _NNS_INT32:
+        inputTensor->ShareExternalPointer ((int32_t*) input[i].data);
+        break;
+      case _NNS_UINT32:
+        g_critical ("invalid data type is used");
+        return -1;
+      case _NNS_INT16:
+        inputTensor->ShareExternalPointer ((int16_t*) input[i].data);
+        break;
+      case _NNS_UINT16:
+        inputTensor->ShareExternalPointer ((uint16_t*) input[i].data);
+        break;
+      case _NNS_INT8:
+        inputTensor->ShareExternalPointer ((int8_t*) input[i].data);
+        break;
+      case _NNS_UINT8:
+        inputTensor->ShareExternalPointer ((uint8_t*) input[i].data);
+        break;
+      case _NNS_FLOAT64:
+        inputTensor->ShareExternalPointer ((double*) input[i].data);
+        break;
+      case _NNS_FLOAT32:
+        inputTensor->ShareExternalPointer ((float*) input[i].data);
+        break;
+      case _NNS_INT64:
+        inputTensor->ShareExternalPointer ((int64_t*) input[i].data);
+        break;
+      case _NNS_UINT64:
+        g_critical ("invalid data type is used");
+        return -1;
+      default:
+        g_critical ("invalid data type is used");
+        return -1;
+    }
+  }
+
+  workSpace.RunNet (predictNet.name ());
+
+  for (i = 0; i < outputTensorMeta.num_tensors; i++) {
+    const auto& out = workSpace.GetBlob (outputTensorMeta.info[i].name)
+        ->Get<Tensor> ();
+    /** expose the workspace-owned result buffer with the OUTPUT meta type */
+    switch (outputTensorMeta.info[i].type){
+      case _NNS_INT32:
+        output[i].data = out.data<int32_t>();
+        break;
+      case _NNS_UINT32:
+        g_critical ("invalid data type is used");
+        return -1;
+      case _NNS_INT16:
+        output[i].data = out.data<int16_t>();
+        break;
+      case _NNS_UINT16:
+        output[i].data = out.data<uint16_t>();
+        break;
+      case _NNS_INT8:
+        output[i].data = out.data<int8_t>();
+        break;
+      case _NNS_UINT8:
+        output[i].data = out.data<uint8_t>();
+        break;
+      case _NNS_FLOAT64:
+        output[i].data = out.data<double>();
+        break;
+      case _NNS_FLOAT32:
+        output[i].data = out.data<float>();
+        break;
+      case _NNS_INT64:
+        output[i].data = out.data<int64_t>();
+        break;
+      case _NNS_UINT64:
+        g_critical ("invalid data type is used");
+        return -1;
+      default:
+        g_critical ("invalid data type is used");
+        return -1;
+    }
+  }
+
+#if (DBG)
+  gint64 stop_time = g_get_real_time ();
+  g_message ("Run() is finished: %" G_GINT64_FORMAT,
+      (stop_time - start_time));
+#endif
+
+  return 0;
+}
+
+/**
+ * @brief call the creator of Caffe2Core class.
+ * @param _model_path : the path to the caffe2 prediction model file
+ * @param _model_path_sub : the path to the caffe2 init model file
+ * @return Caffe2Core class (caller owns it; release with caffe2_core_delete)
+ */
+void *
+caffe2_core_new (const char *_model_path, const char *_model_path_sub)
+{
+  return new Caffe2Core (_model_path, _model_path_sub);
+}
+
+/**
+ * @brief delete the Caffe2Core class.
+ * @param caffe2 : the class object
+ * @return Nothing
+ */
+void
+caffe2_core_delete (void * caffe2)
+{
+  delete static_cast<Caffe2Core *> (caffe2);
+}
+
+/**
+ * @brief initialize the object with caffe2 model
+ * @param caffe2 : the class object
+ * @param prop : tensor_filter properties (meta info is copied inside)
+ * @return 0 if OK. non-zero if error.
+ */
+int
+caffe2_core_init (void * caffe2, const GstTensorFilterProperties * prop)
+{
+  return static_cast<Caffe2Core *> (caffe2)->init (prop);
+}
+
+/**
+ * @brief get the init model path
+ * @param caffe2 : the class object
+ * @return the init model path.
+ */
+const char *
+caffe2_core_getInitModelPath (void * caffe2)
+{
+  return static_cast<Caffe2Core *> (caffe2)->getInitModelPath ();
+}
+
+/**
+ * @brief get the prediction model path
+ * @param caffe2 : the class object
+ * @return the prediction model path.
+ */
+const char *
+caffe2_core_getPredModelPath (void * caffe2)
+{
+  return static_cast<Caffe2Core *> (caffe2)->getPredModelPath ();
+}
+
+/**
+ * @brief get the Dimension of Input Tensor of model
+ * @param caffe2 : the class object
+ * @param[out] info Structure for tensor info.
+ * @return 0 if OK. non-zero if error.
+ */
+int
+caffe2_core_getInputDim (void * caffe2, GstTensorsInfo * info)
+{
+  return static_cast<Caffe2Core *> (caffe2)->getInputTensorDim (info);
+}
+
+/**
+ * @brief get the Dimension of Output Tensor of model
+ * @param caffe2 : the class object
+ * @param[out] info Structure for tensor info.
+ * @return 0 if OK. non-zero if error.
+ */
+int
+caffe2_core_getOutputDim (void * caffe2, GstTensorsInfo * info)
+{
+  return static_cast<Caffe2Core *> (caffe2)->getOutputTensorDim (info);
+}
+
+/**
+ * @brief run the model
+ * @param caffe2 : the class object
+ * @param[in] input : The array of input tensors
+ * @param[out] output : The array of output tensors
+ * @return 0 if OK. non-zero if error.
+ */
+int
+caffe2_core_run (void * caffe2, const GstTensorMemory * input,
+    GstTensorMemory * output)
+{
+  return static_cast<Caffe2Core *> (caffe2)->run (input, output);
+}
--- /dev/null
+/**
+ * Copyright (C) 2019 Samsung Electronics Co., Ltd. All rights reserved.
+ * Copyright (C) 2019 HyoungJoo Ahn <hello.ahn@samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ */
+/**
+ * @file tensor_filter_caffe2_core.h
+ * @author HyoungJoo Ahn <hello.ahn@samsung.com>
+ * @date 05/31/2019
+ * @brief connection with caffe2 libraries.
+ *
+ * @bug No known bugs.
+ */
+#ifndef TENSOR_FILTER_CAFFE2_CORE_H
+#define TENSOR_FILTER_CAFFE2_CORE_H
+
+#include "nnstreamer_plugin_api_filter.h"
+
+#ifdef __cplusplus
+#include <iostream>
+
+#include "caffe2/core/workspace.h"
+#include "caffe2/core/init.h"
+
+using namespace caffe2;
+
+/**
+ * @brief Wrapper of a caffe2 model: loads the init/predict nets and runs
+ *        inference through a caffe2 Workspace.
+ */
+class Caffe2Core
+{
+public:
+  Caffe2Core (const char * _model_path, const char * _model_path_sub);
+  ~Caffe2Core ();
+
+  int init (const GstTensorFilterProperties * prop);
+  int loadModels ();
+  const char* getPredModelPath ();
+  const char* getInitModelPath ();
+  int getInputTensorDim (GstTensorsInfo * info);
+  int getOutputTensorDim (GstTensorsInfo * info);
+  int run (const GstTensorMemory * input, GstTensorMemory * output);
+
+private:
+
+  const char *init_model_path; /**< path of the init net (weights) */
+  const char *pred_model_path; /**< path of the predict net (graph) */
+
+  GstTensorsInfo inputTensorMeta; /**< The tensor info of input tensors */
+  GstTensorsInfo outputTensorMeta; /**< The tensor info of output tensors */
+
+  Workspace workSpace;
+  NetDef initNet, predictNet;
+  /** NOTE(review): static — shared by every Caffe2Core instance; confirm only
+   *  one instance is alive at a time or instances will clobber each other. */
+  static std::map <char*, Tensor*> inputTensorMap;
+
+  int initInputTensor ();
+};
+
+/**
+ * @brief the definition of functions to be used at C files.
+ */
+extern "C"
+{
+#endif
+
+ void *caffe2_core_new (const char *_model_path, const char *_model_path_sub);
+ void caffe2_core_delete (void * caffe2);
+ int caffe2_core_init (void * caffe2, const GstTensorFilterProperties * prop);
+ const char *caffe2_core_getInitModelPath (void * caffe2);
+ const char *caffe2_core_getPredModelPath (void * caffe2);
+ int caffe2_core_getInputDim (void * caffe2, GstTensorsInfo * info);
+ int caffe2_core_getOutputDim (void * caffe2, GstTensorsInfo * info);
+ int caffe2_core_run (void * caffe2, const GstTensorMemory * input,
+ GstTensorMemory * output);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* TENSOR_FILTER_CAFFE2_CORE_H */