If the build target is not Tizen, the ML-API (C) is not mandatory.
Allow building without any dependency on ML-API by default.
Fixes #1853
Signed-off-by: MyungJoo Ham <myungjoo.ham@samsung.com>
-Dreduce-tolerance=$(ENABLE_REDUCE_TOLERANCE) \
-Denable-debug=$(ENABLE_DEBUG) \
-Dml-api-support=enabled -Denable-nnstreamer-tensor-filter=true \
+ -Denable-nnstreamer-backbone=true \
+ -Dcapi-ml-common-actual=capi-ml-common \
+ -Dcapi-ml-inference-actual=capi-ml-inference \
+ -Denable-capi=enabled \
build
override_dh_auto_build:
'-Wno-error=varargs'
]
+
foreach extra_arg : warning_flags
if cc.has_argument (extra_arg)
add_project_arguments([extra_arg], language: 'c')
nntrainer_conf.set('LIB_INSTALL_DIR', nntrainer_libdir)
nntrainer_conf.set('PLUGIN_INSTALL_PREFIX', nntrainer_libdir / 'nntrainer')
nntrainer_conf.set('INCLUDE_INSTALL_DIR', nntrainer_includedir / '..')
-nntrainer_conf.set('CAPI_ML_COMMON_DEP', get_option('capi-ml-common-actual'))
dummy_dep = dependency('', required: false)
found_dummy_dep = declare_dependency() # dummy dep to use if found
+# if ml-api-support is disabled, enable dummy common api interfaces and disable related dependencies.
+ml_api_common_dep = dependency(get_option('capi-ml-common-actual'), required : get_option('ml-api-support').enabled())
+nnstreamer_capi_dep = dummy_dep
+if (ml_api_common_dep.found())
+ nntrainer_conf.set('CAPI_ML_COMMON_DEP', get_option('capi-ml-common-actual'))
+ extra_defines += '-DML_API_COMMON=1'
+
+ nnstreamer_capi_dep = dependency(get_option('capi-ml-inference-actual'), required : true)
+ extra_defines += '-DNNSTREAMER_AVAILABLE=1'
+ # accessing this variable when dep_.not_found() remains hard error on purpose
+ supported_nnstreamer_capi = nnstreamer_capi_dep.version().version_compare('>=1.7.0')
+ if not supported_nnstreamer_capi
+ extra_defines += '-DUNSUPPORTED_NNSTREAMER=1'
+ warning('capi-nnstreamer version is too old, we do not know if it works with older nnstreamer version')
+ endif
+else
+ nntrainer_conf.set('CAPI_ML_COMMON_DEP', '')
+ extra_defines += '-DML_API_COMMON=0'
+endif
+
+
blas_dep = dummy_dep
# Dependencies
if get_option('enable-cublas')
endif
endif
-nnstreamer_capi_dep = dependency(get_option('capi-ml-inference-actual'), required:false)
-if nnstreamer_capi_dep.found()
- extra_defines += '-DNNSTREAMER_AVAILABLE=1'
- # accessing this variable when dep_.not_found() remains hard error on purpose
- supported_nnstreamer_capi = nnstreamer_capi_dep.version().version_compare('>=1.7.0')
- if not supported_nnstreamer_capi
- extra_defines += '-DUNSUPPORTED_NNSTREAMER=1'
- warning('capi-nnstreamer version is too old, we do not know if it works with older nnstreamer version')
- endif
-endif
-
-ml_api_common_dep = dummy_dep
-
-if get_option('platform') != 'android'
- ml_api_common_dep = dependency(get_option('capi-ml-common-actual'), required: true)
-else
+if get_option('platform') == 'android'
message('preparing ml api')
run_command(meson.source_root() / 'jni' / 'prepare_ml-api.sh', meson.build_root() / 'ml-api-inference', check: true)
ml_api_common_root = meson.build_root() / 'ml-api-inference'
# Build nntrainer
subdir('nntrainer')
+enable_capi = false
+enable_ccapi = false
# Build api
subdir('api')
option('enable-app', type: 'boolean', value: true)
option('install-app', type: 'boolean', value: true)
option('use_gym', type: 'boolean', value: false)
-option('enable-capi', type: 'boolean', value: true)
+option('enable-capi', type: 'feature', value: 'auto')
option('enable-ccapi', type: 'boolean', value: true)
option('enable-test', type: 'boolean', value: true)
option('enable-logging', type: 'boolean', value: true)
option('enable-tizen-feature-check', type: 'boolean', value: true)
-option('enable-nnstreamer-backbone', type: 'boolean', value: true)
+option('enable-nnstreamer-backbone', type: 'boolean', value: false)
option('enable-tflite-backbone', type: 'boolean', value: true)
option('enable-profile', type: 'boolean', value: false)
option('enable-debug', type: 'boolean', value: false)
# dependency conflict resolution
option('capi-ml-inference-actual', type: 'string', value: 'capi-ml-inference',
- description: 'backward compatible dependency name of capi-ml-inference')
+ description: 'backward compatible dependency name of capi-ml-inference. ignored if ml-api-support is disabled.')
option('capi-ml-common-actual', type: 'string', value: 'capi-ml-common',
- description: 'backward compatible dependency name of capi-ml-common')
+ description: 'backward compatible dependency name of capi-ml-common. ignored if ml-api-support is disabled.')
option('tizen-version-major', type: 'integer', min : 4, max : 9999, value: 9999) # 9999 means "not Tizen"
option('tizen-version-minor', type: 'integer', min : 0, max : 9999, value: 0)
option('ml-api-support', type: 'feature', value: 'auto')
# @todo : make them use 'feature' and depend on ml-api-support
option('enable-nnstreamer-tensor-filter', type: 'boolean', value: false)
-option('nnstreamer-subplugin-install-path', type: 'string', value: '/usr/lib/nnstreamer') # where nnstreamer subplugin should be installed
\ No newline at end of file
+option('nnstreamer-subplugin-install-path', type: 'string', value: '/usr/lib/nnstreamer') # where nnstreamer subplugin should be installed
#include <dynamic_training_optimization.h>
#include <execution_mode.h>
#include <layer_node.h>
-#include <ml-api-common.h>
#include <model_common_properties.h>
#include <network_graph.h>
#include <optimizer_devel.h>
#include <model.h>
#include <nntrainer-api-common.h>
+#include <nntrainer_error.h>
namespace ml::train {
class DataSet;
#ifndef __NNTRAINER_ERROR_H__
#define __NNTRAINER_ERROR_H__
-#include <ml-api-common.h>
#if defined(__TIZEN__)
#include <tizen_error.h>
#define ML_ERROR_BAD_ADDRESS TIZEN_ERROR_BAD_ADDRESS
if ((pred)) \
nntrainer::exception::ErrorNotification<err> { cleanup_func }
+#if ML_API_COMMON
+#include <ml-api-common.h>
+#else
+/**
+ @ref:
+ https://gitlab.freedesktop.org/dude/gst-plugins-base/commit/89095e7f91cfbfe625ec2522da49053f1f98baf8
+ */
+#if !defined(ESTRPIPE)
+#define ESTRPIPE EPIPE
+#endif /* !defined(ESTRPIPE) */
+
+#define _ERROR_UNKNOWN (-1073741824LL)
+/* Fallback TIZEN_ERROR_* values for builds without ml-api-common / tizen_error.h.
+ * Derived from _ERROR_UNKNOWN (defined above) so they match the ml_error_e
+ * enumerators below (ML_ERROR_TIMED_OUT = _ERROR_UNKNOWN + 1, etc.).
+ * NOTE(review): the original patch used TIZEN_ERROR_UNKNOWN here, which is
+ * never defined on this (#else, non-ML-API) path and would fail to compile
+ * wherever these macros are expanded. */
+#define TIZEN_ERROR_TIMED_OUT (_ERROR_UNKNOWN + 1)
+#define TIZEN_ERROR_NOT_SUPPORTED (_ERROR_UNKNOWN + 2)
+#define TIZEN_ERROR_PERMISSION_DENIED (-EACCES)
+#define TIZEN_ERROR_OUT_OF_MEMORY (-ENOMEM)
+typedef enum {
+ ML_ERROR_NONE = 0, /**< Success! */
+ ML_ERROR_INVALID_PARAMETER = -EINVAL, /**< Invalid parameter */
+ ML_ERROR_TRY_AGAIN =
+ -EAGAIN, /**< The pipeline is not ready, yet (not negotiated, yet) */
+ ML_ERROR_UNKNOWN = _ERROR_UNKNOWN, /**< Unknown error */
+ ML_ERROR_TIMED_OUT = (_ERROR_UNKNOWN + 1), /**< Time out */
+ ML_ERROR_NOT_SUPPORTED =
+ (_ERROR_UNKNOWN + 2), /**< The feature is not supported */
+ ML_ERROR_PERMISSION_DENIED = -EACCES, /**< Permission denied */
+ ML_ERROR_OUT_OF_MEMORY = -ENOMEM, /**< Out of memory (Since 6.0) */
+} ml_error_e;
+#endif
+
namespace nntrainer {
/// @note underscore_case is used for ::exception to keep in accordance with
%{enable_tflite_interpreter} %{capi_ml_pkg_dep_resolution} \
%{enable_reduce_tolerance} %{configure_subplugin_install_path} %{enable_debug} \
-Dml-api-support=enabled -Denable-nnstreamer-tensor-filter=true \
+ -Denable-capi=enabled \
build
ninja -C build %{?_smp_mflags}