From bb3092e5de1cb1dec28b152f977650018e578c73 Mon Sep 17 00:00:00 2001 From: Parichay Kapoor Date: Mon, 10 Aug 2020 14:19:44 +0900 Subject: [PATCH] [capi] Add tizen feature checking Added tizen feature checking for C-API Added corresponding bypass for the tizen capi unittests **Self evaluation:** 1. Build test: [x]Passed [ ]Failed [ ]Skipped 2. Run test: [x]Passed [ ]Failed [ ]Skipped Signed-off-by: Parichay Kapoor --- api/capi/include/nntrainer.h | 19 ++++ api/capi/include/nntrainer_internal.h | 41 ++++++++ api/capi/meson.build | 18 +++- .../src/nntrainer-capi-tizen-feature-check.cpp | 114 +++++++++++++++++++++ api/capi/src/nntrainer.cpp | 49 ++++++++- meson.build | 4 + meson_options.txt | 2 +- packaging/nntrainer.spec | 16 ++- test/tizen_capi/unittest_tizen_capi.cpp | 9 +- test/tizen_capi/unittest_tizen_capi_dataset.cpp | 9 +- test/tizen_capi/unittest_tizen_capi_layer.cpp | 10 +- test/tizen_capi/unittest_tizen_capi_optimizer.cpp | 10 +- 12 files changed, 290 insertions(+), 11 deletions(-) create mode 100644 api/capi/src/nntrainer-capi-tizen-feature-check.cpp diff --git a/api/capi/include/nntrainer.h b/api/capi/include/nntrainer.h index ef21162..496b75c 100644 --- a/api/capi/include/nntrainer.h +++ b/api/capi/include/nntrainer.h @@ -95,6 +95,7 @@ typedef enum { * @param[out] model The NNTrainer model handle from the given description. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. * @retval #ML_ERROR_CANNOT_ASSIGN_ADDRESS Cannot assign object. */ @@ -112,6 +113,7 @@ int ml_train_model_construct(ml_train_model_h *model); * @param[out] model The NNTrainer model handle from the given description. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. 
*/ int ml_train_model_construct_with_conf(const char *model_conf, @@ -130,6 +132,7 @@ int ml_train_model_construct_with_conf(const char *model_conf, * @param[in] ... hyperparmeters for compiling the model * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_model_compile(ml_train_model_h model, ...); @@ -145,6 +148,7 @@ int ml_train_model_compile(ml_train_model_h model, ...); * @param[in] ... Hyperparmeters for train model. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_model_run(ml_train_model_h model, ...); @@ -156,6 +160,7 @@ int ml_train_model_run(ml_train_model_h model, ...); * @param[in] model The NNTrainer model handle from the given description. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_model_destroy(ml_train_model_h model); @@ -173,6 +178,7 @@ int ml_train_model_destroy(ml_train_model_h model); * and exploit @a summary if possible. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_model_get_summary(ml_train_model_h model, @@ -191,6 +197,7 @@ int ml_train_model_get_summary(ml_train_model_h model, * @param[in] layer The NNTrainer layer handle. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. 
*/ int ml_train_model_add_layer(ml_train_model_h model, ml_train_layer_h layer); @@ -208,6 +215,7 @@ int ml_train_model_add_layer(ml_train_model_h model, ml_train_layer_h layer); * @param[in] optimizer The NNTrainer optimizer handle. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_model_set_optimizer(ml_train_model_h model, @@ -227,6 +235,7 @@ int ml_train_model_set_optimizer(ml_train_model_h model, * @param[in] dataset The NNTrainer dataset handle. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_model_set_dataset(ml_train_model_h model, @@ -244,6 +253,7 @@ int ml_train_model_set_dataset(ml_train_model_h model, * @param[in] type The NNTrainer layer type * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. * @retval #ML_ERROR_CANNOT_ASSIGN_ADDRESS Cannot assign object. */ @@ -258,6 +268,7 @@ int ml_train_layer_create(ml_train_layer_h *layer, ml_train_layer_type_e type); * @param[in] layer The NNTrainer layer handle. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_layer_destroy(ml_train_layer_h layer); @@ -271,6 +282,7 @@ int ml_train_layer_destroy(ml_train_layer_h layer); * @param[in] ... Property values with NULL for termination. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. 
* @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. * * Here is an example of the usage of this function: @@ -317,6 +329,7 @@ int ml_train_layer_set_property(ml_train_layer_h layer, ...); * @param[in] type The NNTrainer optimizer type. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. * @retval #ML_ERROR_CANNOT_ASSIGN_ADDRESS Cannot assign object. */ @@ -332,6 +345,7 @@ int ml_train_optimizer_create(ml_train_optimizer_h *optimizer, * @param[in] optimizer The NNTrainer optimizer handle. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_optimizer_destroy(ml_train_optimizer_h optimizer); @@ -345,6 +359,7 @@ int ml_train_optimizer_destroy(ml_train_optimizer_h optimizer); * @param[in] ... Property values with NULL for termination. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_optimizer_set_property(ml_train_optimizer_h optimizer, ...); @@ -370,6 +385,7 @@ int ml_train_optimizer_set_property(ml_train_optimizer_h optimizer, ...); * @param[in] test_cb The dataset generator for testing. Can be null. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. * @retval #ML_ERROR_CANNOT_ASSIGN_ADDRESS Cannot assign object. */ @@ -393,6 +409,7 @@ int ml_train_dataset_create_with_generator(ml_train_dataset_h *dataset, * @param[in] test_file The dataset file for testing. Can be null. * @return @c 0 on success. 
Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. * @retval #ML_ERROR_CANNOT_ASSIGN_ADDRESS Cannot assign object. */ @@ -410,6 +427,7 @@ int ml_train_dataset_create_with_file(ml_train_dataset_h *dataset, * @param[in] dataset The NNTrainer dataset handle. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_dataset_destroy(ml_train_dataset_h dataset); @@ -423,6 +441,7 @@ int ml_train_dataset_destroy(ml_train_dataset_h dataset); * @param[in] dataset The NNTrainer dataset handle. * @param[in] ... Property values with NULL for termination. * @return @c 0 on success. Otherwise a negative error value. * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter. */ int ml_train_dataset_set_property(ml_train_dataset_h dataset, ...); diff --git a/api/capi/include/nntrainer_internal.h b/api/capi/include/nntrainer_internal.h index fae1f09..62dc9b5 100644 --- a/api/capi/include/nntrainer_internal.h +++ b/api/capi/include/nntrainer_internal.h @@ -38,6 +38,34 @@ #define ML_NNTRAINER_MAGIC 0x777F888F +/* Tizen ML feature */ +#if defined(__TIZEN__) + +typedef enum { + NOT_CHECKED_YET = -1, + NOT_SUPPORTED = 0, + SUPPORTED = 1 +} feature_state_t; + +#if defined(__FEATURE_CHECK_SUPPORT__) +#define check_feature_state() \ + do { \ + int feature_ret = ml_tizen_get_feature_enabled(); \ + if (ML_ERROR_NONE != feature_ret) \ + return feature_ret; \ + } while (0) + +#define set_feature_state(...) ml_tizen_set_feature_state(__VA_ARGS__) +#else /* __FEATURE_CHECK_SUPPORT__ */ +#define check_feature_state() +#define set_feature_state(...) +#endif /* __FEATURE_CHECK_SUPPORT__ */ + +#else /* __TIZEN__ */ +#define check_feature_state() +#define set_feature_state(...)
+#endif /* __TIZEN__ */ + #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ @@ -288,6 +316,19 @@ int ml_train_model_insert_layer(ml_train_model_h model, ml_train_layer_h layer, const char *input_layer_names[], const char *output_layer_names[]); +#if defined(__TIZEN__) +/** + * @brief Checks whether machine_learning.training feature is enabled or not. + */ +int ml_tizen_get_feature_enabled(void); + +/** + * @brief Set the feature status of machine_learning.training. + * This is only used for Unit test. + */ +void ml_tizen_set_feature_state(feature_state_t state); +#endif /* __TIZEN__ */ + #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/api/capi/meson.build b/api/capi/meson.build index b96d0b2..679ad9b 100644 --- a/api/capi/meson.build +++ b/api/capi/meson.build @@ -13,6 +13,10 @@ capi_src = [] capi_src += join_paths(meson.current_source_dir(), 'src','nntrainer.cpp') capi_src += join_paths(meson.current_source_dir(), 'src','nntrainer_util.cpp') +if get_option('enable-tizen') and get_option('enable-tizen-feature-check') + capi_src += join_paths(meson.current_source_dir(), 'src','nntrainer-capi-tizen-feature-check.cpp') +endif + capi_headers = [] capi_headers += join_paths(meson.current_source_dir(), 'include', 'nntrainer.h') capi_headers += join_paths(meson.current_source_dir(), '..', 'nntrainer-api-common.h') @@ -21,6 +25,18 @@ capi_deps = [ nntrainer_dep ] +if get_option('enable-tizen') + message('CAPI is in Tizen mode') + + tizen_deps = [ + dependency('capi-base-common'), + dependency('capi-system-info'), + dependency('dlog') + ] + + capi_deps += tizen_deps +endif + shared_library('capi-nntrainer', capi_src, dependencies: capi_deps, @@ -47,6 +63,6 @@ configure_file(input: 'capi-nntrainer.pc.in', output: 'capi-nntrainer.pc', configuration: nntrainer_conf ) -install_headers( capi_headers, +install_headers(capi_headers, subdir: 'nntrainer' ) diff --git a/api/capi/src/nntrainer-capi-tizen-feature-check.cpp 
b/api/capi/src/nntrainer-capi-tizen-feature-check.cpp new file mode 100644 index 0000000..291e816 --- /dev/null +++ b/api/capi/src/nntrainer-capi-tizen-feature-check.cpp @@ -0,0 +1,114 @@ +// SPDX-License-Identifier: Apache-2.0-only +/** + * Copyright (C) 2020 Parichay Kapoor + * + * @file nntrainer-capi-tizen-feature-check.cpp + * @date 7 August 2020 + * @brief NNTrainer/C-API Tizen dependent functions. + * @see https://github.com/nnstreamer/nntrainer + * @author MyungJoo Ham + * @author Parichay Kapoor + * @bug No known bugs except for NYI items + */ + +#if !defined(__TIZEN__) || !defined(__FEATURE_CHECK_SUPPORT__) +#error "This file can be compiled only for Tizen with feature check support enabled." +#endif + +#include +#include + +#include + +/** + * @brief Tizen ML feature. + */ +#define ML_TRAIN_FEATURE_PATH "tizen.org/feature/machine_learning.training" + +/** + * @brief Internal struct to control tizen feature support + * (machine_learning.training). -1: Not checked yet, 0: Not supported, 1: + * Supported + */ +typedef struct _feature_info_s { + pthread_mutex_t mutex; + feature_state_t feature_state; + + _feature_info_s() : feature_state(NOT_CHECKED_YET) { + pthread_mutex_init(&mutex, NULL); + } + + ~_feature_info_s() { pthread_mutex_destroy(&mutex); } +} feature_info_s; + +static feature_info_s feature_info; + +/** + * @brief Set the feature status of machine_learning.training. + */ +void ml_tizen_set_feature_state(feature_state_t state) { + pthread_mutex_lock(&feature_info.mutex); + + /** + * Update feature status + * -1: Not checked yet, 0: Not supported, 1: Supported + */ + feature_info.feature_state = state; + + pthread_mutex_unlock(&feature_info.mutex); +} + +/** + * @brief Checks whether machine_learning.training feature is enabled or not.
+ */ +int ml_tizen_get_feature_enabled(void) { + int ret; + int feature_enabled; + + pthread_mutex_lock(&feature_info.mutex); + feature_enabled = feature_info.feature_state; + pthread_mutex_unlock(&feature_info.mutex); + + if (NOT_SUPPORTED == feature_enabled) { + ml_loge("machine_learning.training NOT supported"); + return ML_ERROR_NOT_SUPPORTED; + } else if (NOT_CHECKED_YET == feature_enabled) { + bool ml_train_supported = false; + ret = + system_info_get_platform_bool(ML_TRAIN_FEATURE_PATH, &ml_train_supported); + if (0 == ret) { + if (false == ml_train_supported) { + ml_loge("machine_learning.training NOT supported"); + ml_tizen_set_feature_state(NOT_SUPPORTED); + return ML_ERROR_NOT_SUPPORTED; + } + + ml_tizen_set_feature_state(SUPPORTED); + } else { + switch (ret) { + case SYSTEM_INFO_ERROR_INVALID_PARAMETER: + ml_loge("failed to get feature value because feature key is not valid"); + ret = ML_ERROR_NOT_SUPPORTED; + break; + + case SYSTEM_INFO_ERROR_IO_ERROR: + ml_loge("failed to get feature value because of input/output error"); + ret = ML_ERROR_NOT_SUPPORTED; + break; + + case SYSTEM_INFO_ERROR_PERMISSION_DENIED: + ml_loge("failed to get feature value because of permission denied"); + ret = ML_ERROR_PERMISSION_DENIED; + break; + + default: + ml_loge("failed to get feature value because of unknown error"); + ret = ML_ERROR_NOT_SUPPORTED; + break; + } + return ret; + } + } + + return ML_ERROR_NONE; +} diff --git a/api/capi/src/nntrainer.cpp b/api/capi/src/nntrainer.cpp index 15654b1..c0395ac 100644 --- a/api/capi/src/nntrainer.cpp +++ b/api/capi/src/nntrainer.cpp @@ -132,6 +132,9 @@ static int nn_object(ml_train_model_h *model) { int ml_train_model_construct(ml_train_model_h *model) { int status = ML_ERROR_NONE; + + check_feature_state(); + returnable f = [&]() { return nn_object(model); }; status = nntrainer_exception_boundary(f); @@ -184,6 +187,8 @@ int ml_train_model_compile(ml_train_model_h model, ...)
{ returnable f; std::shared_ptr NN; + check_feature_state(); + ML_TRAIN_VERIFY_VALID_HANDLE(model); std::vector arg_list; @@ -223,6 +228,8 @@ int ml_train_model_run(ml_train_model_h model, ...) { const char *data; std::shared_ptr NN; + check_feature_state(); + ML_TRAIN_VERIFY_VALID_HANDLE(model); std::vector arg_list; @@ -251,6 +258,8 @@ int ml_train_model_destroy(ml_train_model_h model) { int status = ML_ERROR_NONE; ml_train_model *nnmodel; + check_feature_state(); + { ML_TRAIN_GET_VALID_MODEL_LOCKED_RESET(nnmodel, model); ML_TRAIN_ADOPT_LOCK(nnmodel, model_lock); @@ -312,14 +321,16 @@ static int ml_train_model_get_summary_util(ml_train_model_h model, int ml_train_model_get_summary(ml_train_model_h model, ml_train_summary_type_e verbosity, char **summary) { + int status = ML_ERROR_NONE; + std::stringstream ss; + + check_feature_state(); + if (summary == nullptr) { ml_loge("summary pointer is null"); return ML_ERROR_INVALID_PARAMETER; } - int status = ML_ERROR_NONE; - std::stringstream ss; - status = ml_train_model_get_summary_util(model, verbosity, ss); if (status != ML_ERROR_NONE) { ml_loge("failed make a summary: %d", status); @@ -344,6 +355,8 @@ int ml_train_model_add_layer(ml_train_model_h model, ml_train_layer_h layer) { ml_train_model *nnmodel; ml_train_layer *nnlayer; + check_feature_state(); + ML_TRAIN_GET_VALID_MODEL_LOCKED(nnmodel, model); ML_TRAIN_ADOPT_LOCK(nnmodel, model_lock); ML_TRAIN_GET_VALID_LAYER_LOCKED(nnlayer, layer); @@ -377,6 +390,8 @@ int ml_train_model_set_optimizer(ml_train_model_h model, ml_train_model *nnmodel; ml_train_optimizer *nnopt; + check_feature_state(); + ML_TRAIN_GET_VALID_MODEL_LOCKED(nnmodel, model); ML_TRAIN_ADOPT_LOCK(nnmodel, model_lock); ML_TRAIN_GET_VALID_OPT_LOCKED(nnopt, optimizer); @@ -412,6 +427,8 @@ int ml_train_model_set_dataset(ml_train_model_h model, ml_train_model *nnmodel; ml_train_dataset *nndataset; + check_feature_state(); + ML_TRAIN_GET_VALID_MODEL_LOCKED(nnmodel, model); ML_TRAIN_ADOPT_LOCK(nnmodel, 
model_lock); ML_TRAIN_GET_VALID_DATASET_LOCKED(nndataset, dataset); @@ -446,6 +463,8 @@ int ml_train_model_get_layer(ml_train_model_h model, const char *layer_name, int status = ML_ERROR_NONE; ml_train_model *nnmodel; + check_feature_state(); + ML_TRAIN_GET_VALID_MODEL_LOCKED(nnmodel, model); ML_TRAIN_ADOPT_LOCK(nnmodel, model_lock); @@ -484,7 +503,11 @@ int ml_train_model_get_layer(ml_train_model_h model, const char *layer_name, int ml_train_layer_create(ml_train_layer_h *layer, ml_train_layer_type_e type) { int status = ML_ERROR_NONE; returnable f; - ml_train_layer *nnlayer = new ml_train_layer; + ml_train_layer *nnlayer; + + check_feature_state(); + + nnlayer = new ml_train_layer; nnlayer->magic = ML_NNTRAINER_MAGIC; try { @@ -517,6 +540,8 @@ int ml_train_layer_destroy(ml_train_layer_h layer) { int status = ML_ERROR_NONE; ml_train_layer *nnlayer; + check_feature_state(); + { ML_TRAIN_GET_VALID_LAYER_LOCKED_RESET(nnlayer, layer); ML_TRAIN_ADOPT_LOCK(nnlayer, layer_lock); @@ -539,6 +564,8 @@ int ml_train_layer_set_property(ml_train_layer_h layer, ...) { const char *data; std::shared_ptr NL; + check_feature_state(); + ML_TRAIN_VERIFY_VALID_HANDLE(layer); std::vector arg_list; @@ -568,6 +595,8 @@ int ml_train_optimizer_create(ml_train_optimizer_h *optimizer, ml_train_optimizer_type_e type) { int status = ML_ERROR_NONE; + check_feature_state(); + ml_train_optimizer *nnopt = new ml_train_optimizer; nnopt->magic = ML_NNTRAINER_MAGIC; nnopt->optimizer = std::make_shared(); @@ -591,6 +620,8 @@ int ml_train_optimizer_destroy(ml_train_optimizer_h optimizer) { int status = ML_ERROR_NONE; ml_train_optimizer *nnopt; + check_feature_state(); + { ML_TRAIN_GET_VALID_OPT_LOCKED_RESET(nnopt, optimizer); ML_TRAIN_ADOPT_LOCK(nnopt, optimizer_lock); @@ -612,6 +643,8 @@ int ml_train_optimizer_set_property(ml_train_optimizer_h optimizer, ...) 
{ const char *data; std::shared_ptr opt; + check_feature_state(); + ML_TRAIN_VERIFY_VALID_HANDLE(optimizer); std::vector arg_list; @@ -644,6 +677,8 @@ int ml_train_dataset_create_with_generator(ml_train_dataset_h *dataset, ml_train_datagen_cb test_cb) { int status = ML_ERROR_NONE; + check_feature_state(); + std::shared_ptr data_buffer = std::make_shared(); @@ -677,6 +712,8 @@ int ml_train_dataset_create_with_file(ml_train_dataset_h *dataset, const char *test_file) { int status = ML_ERROR_NONE; + check_feature_state(); + std::shared_ptr data_buffer = std::make_shared(); @@ -719,6 +756,8 @@ int ml_train_dataset_set_property(ml_train_dataset_h dataset, ...) { const char *data; std::shared_ptr data_buffer; + check_feature_state(); + ML_TRAIN_VERIFY_VALID_HANDLE(dataset); std::vector arg_list; @@ -748,6 +787,8 @@ int ml_train_dataset_destroy(ml_train_dataset_h dataset) { int status = ML_ERROR_NONE; ml_train_dataset *nndataset; + check_feature_state(); + { ML_TRAIN_GET_VALID_DATASET_LOCKED_RESET(nndataset, dataset); ML_TRAIN_ADOPT_LOCK(nndataset, dataset_lock); diff --git a/meson.build b/meson.build index 04ee03f..4ba390c 100644 --- a/meson.build +++ b/meson.build @@ -18,6 +18,10 @@ if get_option('enable-tizen') # Pass __TIZEN__ to the compiler build_platform = 'tizen' add_project_arguments('-D__TIZEN__=1', language:['c','cpp']) + + if get_option('enable-tizen-feature-check') + add_project_arguments('-D__FEATURE_CHECK_SUPPORT__', language: ['c', 'cpp']) + endif endif warning_flags = [ diff --git a/meson_options.txt b/meson_options.txt index 81b2162..49e7711 100644 --- a/meson_options.txt +++ b/meson_options.txt @@ -6,4 +6,4 @@ option('use_gym', type: 'boolean', value: false) option('enable-capi', type: 'boolean', value: true) option('enable-test', type: 'boolean', value: true) option('enable-logging', type: 'boolean', value: true) - +option('enable-tizen-feature-check', type: 'boolean', value: true) diff --git a/packaging/nntrainer.spec b/packaging/nntrainer.spec index 
6b14acd..01f768f 100644 --- a/packaging/nntrainer.spec +++ b/packaging/nntrainer.spec @@ -46,6 +46,12 @@ BuildRequires: lcov # BuildRequires: taos-ci-unittest-coverage-assessment %endif +%if %{with tizen} +BuildRequires: pkgconfig(capi-system-info) +BuildRequires: pkgconfig(capi-base-common) +BuildRequires: pkgconfig(dlog) +%endif # tizen + Requires: iniparser Requires: libopenblas_pthreads0 @@ -119,6 +125,14 @@ Static library of capi-nntrainer-devel package. %endif #tizen +## Define build options +%define enable_tizen -Denable-tizen=false +%define enable_tizen_feature_check -Denable-tizen-feature-check=true + +%if %{with tizen} +%define enable_tizen -Denable-tizen=true +%endif + # Using cblas for Matrix calculation %if 0%{?enable_cblas} %define enable_cblas -DUSE_BLAS=ON @@ -148,7 +162,7 @@ CFLAGS="${CFLAGS} -fprofile-arcs -ftest-coverage" mkdir -p build meson --buildtype=plain --prefix=%{_prefix} --sysconfdir=%{_sysconfdir} \ --libdir=%{_libdir} --bindir=%{nntrainerapplicationdir} --includedir=%{_includedir}\ - -Dinstall-app=true -Denable-tizen=true %{use_gym_option} build + -Dinstall-app=true %{enable_tizen} %{enable_tizen_feature_check} %{use_gym_option} build ninja -C build %{?_smp_mflags} diff --git a/test/tizen_capi/unittest_tizen_capi.cpp b/test/tizen_capi/unittest_tizen_capi.cpp index e3b2f62..aa55b6b 100644 --- a/test/tizen_capi/unittest_tizen_capi.cpp +++ b/test/tizen_capi/unittest_tizen_capi.cpp @@ -19,9 +19,10 @@ * @author Jijoong Moon * @bug No known bugs */ -#include "nntrainer_test_util.h" + #include #include +#include /** * @brief Neural Network Model Contruct / Destruct Test (possitive test ) @@ -797,11 +798,17 @@ int main(int argc, char **argv) { ml_loge("Failed to init gtest\n"); } + /** ignore tizen feature check while running the testcases */ + set_feature_state(SUPPORTED); + try { result = RUN_ALL_TESTS(); } catch (...) 
{ ml_loge("Failed to run test.\n"); } + /** reset tizen feature check state */ + set_feature_state(NOT_CHECKED_YET); + return result; } diff --git a/test/tizen_capi/unittest_tizen_capi_dataset.cpp b/test/tizen_capi/unittest_tizen_capi_dataset.cpp index 0570610..3531f6c 100644 --- a/test/tizen_capi/unittest_tizen_capi_dataset.cpp +++ b/test/tizen_capi/unittest_tizen_capi_dataset.cpp @@ -10,8 +10,9 @@ * @bug No known bugs */ -#include "nntrainer_test_util.h" #include +#include +#include /** * @brief Neural Network Dataset Create / Destroy Test (negative test) @@ -243,11 +244,17 @@ int main(int argc, char **argv) { return 0; } + /** ignore tizen feature check while running the testcases */ + set_feature_state(SUPPORTED); + try { result = RUN_ALL_TESTS(); } catch (...) { std::cerr << "Error duing RUN_ALL_TSETS()" << std::endl; } + /** reset tizen feature check state */ + set_feature_state(NOT_CHECKED_YET); + return result; } diff --git a/test/tizen_capi/unittest_tizen_capi_layer.cpp b/test/tizen_capi/unittest_tizen_capi_layer.cpp index 3d1ea20..bf73ce5 100644 --- a/test/tizen_capi/unittest_tizen_capi_layer.cpp +++ b/test/tizen_capi/unittest_tizen_capi_layer.cpp @@ -19,8 +19,10 @@ * @author Jijoong Moon * @bug No known bugs */ -#include "nntrainer_test_util.h" + #include +#include +#include /** * @brief Neural Network Layer Create / Delete Test (possitive test) @@ -210,11 +212,17 @@ int main(int argc, char **argv) { return 0; } + /** ignore tizen feature check while running the testcases */ + set_feature_state(SUPPORTED); + try { result = RUN_ALL_TESTS(); } catch (...) 
{ std::cerr << "Error duing RUN_ALL_TSETS()" << std::endl; } + /** reset tizen feature check state */ + set_feature_state(NOT_CHECKED_YET); + return result; } diff --git a/test/tizen_capi/unittest_tizen_capi_optimizer.cpp b/test/tizen_capi/unittest_tizen_capi_optimizer.cpp index 4c4bd30..7587524 100644 --- a/test/tizen_capi/unittest_tizen_capi_optimizer.cpp +++ b/test/tizen_capi/unittest_tizen_capi_optimizer.cpp @@ -19,8 +19,10 @@ * @author Jijoong Moon * @bug No known bugs */ -#include "nntrainer_test_util.h" + #include +#include +#include /** * @brief Neural Network Optimizer Create / Delete Test (positive test) @@ -125,11 +127,17 @@ int main(int argc, char **argv) { return 0; } + /** ignore tizen feature check while running the testcases */ + set_feature_state(SUPPORTED); + try { result = RUN_ALL_TESTS(); } catch (...) { std::cerr << "Error duing RUN_ALL_TSETS()" << std::endl; } + /** reset tizen feature check state */ + set_feature_state(NOT_CHECKED_YET); + return result; } -- 2.7.4