[CAPI] Add ml_train_model_get_input|output_dims
authorJihoon Lee <jhoon.it.lee@samsung.com>
Wed, 30 Jun 2021 10:09:34 +0000 (19:09 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Tue, 13 Jul 2021 03:50:17 +0000 (12:50 +0900)
This commit contains capi proposal to
`ml_train_model_get_input_dimensions` and
`ml_train_model_get_output_dimensions`

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
api/capi/include/nntrainer.h
api/capi/meson.build
api/ccapi/include/model.h
jni/Android.mk
jni/prepare_ml-api.sh [moved from jni/prepare_ml-api-common.sh with 56% similarity]
meson.build
packaging/nntrainer.spec

index c0f0f02..632bc97 100644 (file)
 #include <stddef.h>
 
 #include <ml-api-common.h>
+#include <nnstreamer.h>
 #include <nntrainer-api-common.h>
 
 #ifdef __cplusplus
 extern "C" {
 #endif /* __cplusplus */
+
 /**
  * @addtogroup CAPI_ML_NNTRAINER_TRAIN_MODULE
  * @{
@@ -216,6 +218,36 @@ int ml_train_model_set_dataset(ml_train_model_h model,
                                ml_train_dataset_h dataset);
 
 /**
+ * @brief Gets the input dimensions information of the model.
+ * @details Use this function to get input dimensions information of the model.
+ * Destroy @a info with ml_tensors_info_destroy() after use.
+ * @a model must be compiled before calling this function.
+ *
+ * @param[in] model The NNTrainer model handle.
+ * @param[out] info The tensors information handle.
+ * @return @c 0 on success. Otherwise a negative error value.
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter.
+ */
+int ml_train_model_get_input_dimensions(ml_train_model_h model,
+                                        ml_tensors_info_h *info);
+
+/**
+ * @brief Gets the output dimensions information of the model.
+ * @details Use this function to get output dimensions information of the model.
+ * Destroy @a info with ml_tensors_info_destroy() after use.
+ * @a model must be compiled before calling this function.
+ *
+ * @param[in] model The NNTrainer model handle.
+ * @param[out] info The tensors information handle.
+ * @return @c 0 on success. Otherwise a negative error value.
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter.
+ */
+int ml_train_model_get_output_dimensions(ml_train_model_h model,
+                                         ml_tensors_info_h *info);
+
+/**
  * @brief Creates a neural network layer.
  * @details Use this function to create neural network layer.
  * @since_tizen 6.0
index 4710fda..6e52d8d 100644 (file)
@@ -4,6 +4,10 @@ capi_inc += include_directories('..')
 capi_src = []
 capi_src += meson.current_source_dir() / 'src' / 'nntrainer.cpp'
 
+if not nnstreamer_capi_dep.found()
+  error('nnstreamer capi dependency not found for tizen')
+endif
+
 if get_option('enable-tizen') and get_option('enable-tizen-feature-check')
   capi_src += meson.current_source_dir() / 'src' / 'nntrainer-capi-tizen-feature-check.cpp'
 endif
@@ -13,7 +17,8 @@ capi_headers += meson.current_source_dir() / 'include' / 'nntrainer.h'
 capi_headers += meson.current_source_dir() / '..' / 'nntrainer-api-common.h'
 
 capi_deps = [
-  nntrainer_ccapi_dep
+  nntrainer_ccapi_dep,
+  nnstreamer_capi_dep,
 ]
 
 if get_option('enable-tizen')
index 73b4b94..3f91857 100644 (file)
@@ -144,6 +144,19 @@ public:
    */
   virtual int getLayer(const char *name, std::shared_ptr<Layer> *layer) = 0;
 
+  /// @todo uncomment this by opening TensorDim
+  /**
+   * @brief     get input dimension of a model
+   * @retval std::vector<TensorDim> input dimension
+   */
+  // virtual std::vector<TensorDim> getInputDimension() = 0;
+
+  /**
+   * @brief     get output dimension of a model
+   * @retval std::vector<TensorDim> output dimension
+   */
+  // virtual std::vector<TensorDim> getOutputDimension() = 0;
+
   /**
    * @brief     Summarize the model
    * @param out std::ostream to get the model summary
index 3495174..4b6c065 100644 (file)
@@ -105,13 +105,20 @@ ifndef ML_API_COMMON_ROOT
 ifneq ($(MAKECMDGOALS),clean)
 
 ML_API_COMMON_ROOT := $(NDK_INCLUDES_OUT)/ml_api_common
-$(info $(shell ($(NNTRAINER_JNI_ROOT)/prepare_ml-api-common.sh $(ML_API_COMMON_ROOT))))
+$(info $(shell ($(NNTRAINER_JNI_ROOT)/prepare_ml-api.sh $(ML_API_COMMON_ROOT))))
 
 endif #MAKECMDGOALS
 endif #ML_API_COMMON_ROOT
 
 ML_API_COMMON_INCLUDES := $(ML_API_COMMON_ROOT)/include
 
+LOCAL_MODULE := ml-api-inference
+LOCAL_SRC_FILES := $(ML_API_COMMON_ROOT)/lib/arm64-v8a/libnnstreamer-native.so
+LOCAL_EXPORT_C_INCLUDES := $(ML_API_COMMON_ROOT)/include
+LOCAL_EXPORT_CFLAGS += -DUSE_BLAS=1
+
+include $(PREBUILT_SHARED_LIBRARY)
+
 include $(CLEAR_VARS)
 
 NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
@@ -270,7 +277,7 @@ CAPI_NNTRAINER_INCLUDES := $(NNTRAINER_ROOT)/nntrainer \
                       $(NNTRAINER_ROOT)/api/ccapi/include \
                       $(NNTRAINER_ROOT)/api/capi/include
 
-LOCAL_SHARED_LIBRARIES := ccapi-nntrainer
+LOCAL_SHARED_LIBRARIES := ccapi-nntrainer ml-api-inference
 
 LOCAL_ARM_NEON      := true
 LOCAL_CFLAGS        += -pthread -fexceptions
similarity index 56%
rename from jni/prepare_ml-api-common.sh
rename to jni/prepare_ml-api.sh
index 4858d5a..045d4cc 100755 (executable)
 
 TARGET=$1
 # Note: zip name can be nnstreamer-native-*.zip but this file is heavier to download
-FILE_PREFIX=nnstreamer-single-native
+FILE_PREFIX=nnstreamer-lite-native
 ZIP_NAME_REGEX=${FILE_PREFIX}-*.zip
 ZIP_NAME=${FILE_PREFIX}.zip
 URL="http://nnstreamer.mooo.com/nnstreamer/ci/daily-build/build_result/latest/android"
 
-echo "PREPARING ml_api_common at ${TARGET}"
+echo "PREPARING ml_api at ${TARGET}"
 
 [ ! -d ${TARGET} ] && mkdir -p ${TARGET}
 
 pushd ${TARGET}
 
-function _download_ml_api_common {
+function _download_ml_api {
   [ -f $ZIP_NAME ] && echo "${ZIP_NAME} exists, skip downloading" && return 0
-  echo "[ml_api_common] downloading ${ZIP_NAME}\n"
+  echo "[ml_api] downloading ${ZIP_NAME}\n"
   if ! wget -r -l1 -nH --cut-dirs=6 ${URL} -A ${ZIP_NAME_REGEX} -O ${ZIP_NAME} ; then
-    echo "[ml_api_common] Download failed, please check url\n"
+    echo "[ml_api] Download failed, please check url\n"
     exit $?
   fi
-  echo "[ml_api_common] Finish downloading ml_api_common\n"
+  echo "[ml_api] Finish downloading ml_api\n"
 }
 
-function _extract_ml_api_common {
-  echo "[ml_api_common] unzip ml_api_common\n"
+function _extract_ml_api {
+  echo "[ml_api] unzip ml_api\n"
   unzip -q ${ZIP_NAME} -d ${FILE_PREFIX}
   rm -f ${ZIP_NAME}
 }
 
-function _cleanup_ml_api_common {
-  echo "[ml_api_common] cleanup ml_api_common \n"
+function _cleanup_ml_api {
+  echo "[ml_api] cleanup ml_api \n"
   # move include to the target location
   mv ${FILE_PREFIX}/main/jni/nnstreamer/include .
-  # remove all directories/files other than include
+  mv ${FILE_PREFIX}/main/jni/nnstreamer/lib .
+  # remove all untarred directories/files
   rm -rf ${FILE_PREFIX}
-  # cleanup all files other than ml_api_common and tizen_error
-  find include ! \( -name 'ml-api-common.h' -o -name 'tizen_error.h' \) -type f -exec rm -f {} +
+  # cleanup all files other than ml_api and tizen_error
+  find include ! \( -name '*.h' \) -type f -exec rm -f {} +
+  find lib ! \( -name 'libnnstreamer-native.so' \) -type f -exec rm -f {} +
 }
 
-[ ! -d "${FILE_PREFIX}" ] && _download_ml_api_common && _extract_ml_api_common \
-  && _cleanup_ml_api_common
+[ ! -d "${FILE_PREFIX}" ] && _download_ml_api && _extract_ml_api \
+  && _cleanup_ml_api
 
 popd
index 11006c0..391b63d 100644 (file)
@@ -163,7 +163,7 @@ if nnstreamer_capi_dep.found()
   endif
 endif
 
-ml_api_common_dep = dependency(get_option('capi-ml-common-actual'), required:true)
+ml_api_common_dep = dependency(get_option('capi-ml-common-actual'), required: true)
 
 if get_option('enable-nnstreamer-backbone')
   add_project_arguments('-DENABLE_NNSTREAMER_BACKBONE=1', language:['c','cpp'])
index 8ea8b11..8d44e5c 100644 (file)
@@ -69,6 +69,7 @@ BuildRequires:        python3
 BuildRequires: python3-numpy
 
 BuildRequires: %{capi_machine_learning_common}-devel
+BuildRequires: %{capi_machine_learning_inference}-devel
 
 %if 0%{?unit_test}
 BuildRequires: ssat >= 1.1.0
@@ -98,10 +99,8 @@ BuildRequires:       pkgconfig(dlog)
 
 %if 0%{?support_nnstreamer_backbone}
 BuildRequires: nnstreamer-tensorflow2-lite
-BuildRequires: %{capi_machine_learning_inference}-devel
 
 Requires:      nnstreamer-tensorflow2-lite
-Requires:      %{capi_machine_learning_inference}
 %endif # support_nnstreamer_backbone
 
 %if 0%{?support_tflite_backbone}