From: Jiho Chu Date: Fri, 21 Oct 2022 04:56:40 +0000 (+0900) Subject: [Context] Propagate property to model X-Git-Tag: accepted/tizen/unified/20230425.130129~131 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=92d2b8c3cf02bf1cf52875af599f6332dcf4d04d;p=platform%2Fcore%2Fml%2Fnntrainer.git [Context] Propagate property to model This patch is for app properties propagation. App properties are described in the global configuration file (ini), and the properties are under a section, which only has meaning for human readability. It means a section does not have any real role for behavior. Propagated properties can be used anywhere in the network. This patch includes 'memory_swap' and 'memory_swap_path', which are used for Model flex properties. Signed-off-by: Jiho Chu --- diff --git a/meson.build b/meson.build index e60217f..711165d 100644 --- a/meson.build +++ b/meson.build @@ -78,6 +78,7 @@ if get_option('platform') != 'android' nntrainer_includedir = nntrainer_prefix / get_option('includedir') / 'nntrainer' nntrainer_confdir = get_option('sysconfdir') application_install_dir = nntrainer_bindir / 'applications' + nntrainer_swapdir = '/tmp' else nntrainer_prefix = meson.build_root() / 'android_build_result' # @todo arch has to be option @@ -86,6 +87,18 @@ else nntrainer_bindir = nntrainer_prefix / 'bin' nntrainer_confdir = nntrainer_prefix / 'conf' application_install_dir = nntrainer_prefix / 'examples' + nntrainer_swapdir = '/data/local/tmp' +endif + +# handle swap options +if get_option('enable-memory-swap') + nntrainer_enable_swap = 'true' +else + nntrainer_enable_swap = 'false' +endif + +if get_option('memory-swap-path') != '' + nntrainer_swapdir = get_option('memory-swap-path') endif # handle resources @@ -107,6 +120,8 @@ nntrainer_conf.set('EXEC_PREFIX', nntrainer_bindir) nntrainer_conf.set('LIB_INSTALL_DIR', nntrainer_libdir) nntrainer_conf.set('PLUGIN_INSTALL_PREFIX', nntrainer_libdir / 'nntrainer') nntrainer_conf.set('INCLUDE_INSTALL_DIR', 
nntrainer_includedir / '..') +nntrainer_conf.set('MEMORY_SWAP', nntrainer_enable_swap) +nntrainer_conf.set('MEMORY_SWAP_PATH', nntrainer_swapdir) dummy_dep = dependency('', required: false) found_dummy_dep = declare_dependency() # dummy dep to use if found diff --git a/meson_options.txt b/meson_options.txt index d5d2eb2..88f5d8e 100644 --- a/meson_options.txt +++ b/meson_options.txt @@ -12,6 +12,8 @@ option('enable-tflite-backbone', type: 'boolean', value: true) option('enable-profile', type: 'boolean', value: false) option('enable-debug', type: 'boolean', value: false) option('enable-tflite-interpreter', type: 'boolean', value: true) +option('enable-memory-swap', type: 'boolean', value: false) +option('memory-swap-path', type: 'string', value: '') # dependency conflict resolution option('capi-ml-inference-actual', type: 'string', value: 'capi-ml-inference', diff --git a/nntrainer/app_context.cpp b/nntrainer/app_context.cpp index cc59e61..43eaaa0 100644 --- a/nntrainer/app_context.cpp +++ b/nntrainer/app_context.cpp @@ -106,7 +106,7 @@ namespace { * * @return std::string plugin path */ -std::string getPluginPathConf(const std::string &suffix) { +std::string getConfig(const std::string &key) { std::string conf_path{getConfPath()}; ml_logd("%s conf path: %s", func_tag.c_str(), conf_path.c_str()); @@ -121,19 +121,25 @@ std::string getPluginPathConf(const std::string &suffix) { NNTR_THROW_IF(ini == nullptr, std::runtime_error) << func_tag << "loading ini failed"; - auto freedict = [ini] { iniparser_freedict(ini); }; + std::string value; + int nsec = iniparser_getnsec(ini); + for (int i = 0; i < nsec; i++) { + std::string query(iniparser_getsecname(ini, i)); + query += ":"; + query += key; - std::string s{"plugins:"}; + value = std::string(iniparser_getstring(ini, query.c_str(), "")); + if (!value.empty()) + break; + } - s += suffix; + if (value.empty()) + ml_logd("key %s is not found in config(%s)", key.c_str(), + conf_path.c_str()); - const char *path = 
iniparser_getstring(ini, s.c_str(), NULL); - NNTR_THROW_IF_CLEANUP(path == nullptr, std::invalid_argument, freedict) - << func_tag << "plugins layer failed"; + iniparser_freedict(ini); - std::string ret{path}; - freedict(); - return ret; + return value; } /** @@ -163,10 +169,10 @@ std::vector getPluginPaths() { } } - std::string conf_path = getPluginPathConf("layer"); - if (conf_path != "") { - ret.emplace_back(conf_path); - ml_logd("DEFAULT CONF PATH, path: %s", conf_path.c_str()); + std::string plugin_path = getConfig("layer"); + if (!plugin_path.empty()) { + ret.emplace_back(plugin_path); + ml_logd("DEFAULT CONF PATH, path: %s", plugin_path.c_str()); } return ret; @@ -404,6 +410,53 @@ const std::string AppContext::getWorkingPath(const std::string &path) { return getFullPath(path, working_path_base); } +/** + * @brief base case of iterate_prop, iterate_prop iterates the given tuple + * + * @tparam I size of tuple(automated) + * @tparam V container type of properties + * @tparam Ts types from tuple + * @param prop property container to be added to + * @param tup tuple to be iterated + * @return void + */ +template +typename std::enable_if::type inline parse_properties( + V &props, std::tuple &tup) { + // end of recursion. 
+} + +/** + * @brief recursive case of parse_properties, reads the property name at tuple index I, appends "name=value" if the key is set in the configuration, then recurses to the next index + * + * @tparam I size of tuple(automated) + * @tparam V container type of properties + * @tparam Ts types from tuple + * @param props property container to be added to + * @param tup tuple to be iterated + * @return void + */ +template + typename std::enable_if < + I::type inline parse_properties(V &props, + std::tuple &tup) { + std::string name = std::get(tup); + std::string prop = getConfig(name); + if (!prop.empty()) + props.push_back(name + "=" + prop); + + parse_properties(props, tup); +} + +const std::vector AppContext::getProperties(void) { + std::vector properties; + + auto props = std::tuple("memory_swap", "memory_swap_path"); + parse_properties(properties, props); + + return properties; +} + int AppContext::registerLayer(const std::string &library_path, const std::string &base_path) { const std::string full_path = getFullPath(library_path, base_path); diff --git a/nntrainer/app_context.h b/nntrainer/app_context.h index 0f99898..e0ed2ea 100644 --- a/nntrainer/app_context.h +++ b/nntrainer/app_context.h @@ -155,6 +155,14 @@ public: const std::string getWorkingPath(const std::string &path = ""); /** + * @brief Get model-related properties from the configuration file + * @return list of properties in "key=value" form. 
+ A property which is not present in the configuration file is + omitted from the list */ + const std::vector getProperties(void); + + /** * @brief Factory register function, use this function to register custom * object * diff --git a/nntrainer/models/model_loader.cpp b/nntrainer/models/model_loader.cpp index 1932653..3000c9f 100644 --- a/nntrainer/models/model_loader.cpp +++ b/nntrainer/models/model_loader.cpp @@ -446,6 +446,16 @@ int ModelLoader::loadFromIni(std::string ini_file, NeuralNetwork &model, } /** + * @brief load all properties from context + */ +int ModelLoader::loadFromContext(NeuralNetwork &model) { + auto props = app_context.getProperties(); + model.setTrainConfig(props); + + return ML_ERROR_NONE; +} + +/** * @brief load all of model and dataset from given config file */ int ModelLoader::loadFromConfig(std::string config, NeuralNetwork &model) { diff --git a/nntrainer/models/model_loader.h b/nntrainer/models/model_loader.h index 7be4c88..35011f7 100644 --- a/nntrainer/models/model_loader.h +++ b/nntrainer/models/model_loader.h @@ -44,6 +44,12 @@ public: ~ModelLoader() {} /** + * @brief load all properties from context + * @param[in/out] model model to be loaded + */ + int loadFromContext(NeuralNetwork &model); + + /** * @brief load all of model and dataset from given config file * @param[in] config config file path * @param[in/out] model model to be loaded diff --git a/nntrainer/models/neuralnet.cpp b/nntrainer/models/neuralnet.cpp index 0ee192d..3d4f12c 100644 --- a/nntrainer/models/neuralnet.cpp +++ b/nntrainer/models/neuralnet.cpp @@ -105,7 +105,13 @@ int NeuralNetwork::loadFromConfig(const std::string &config) { ModelLoader loader(app_context); NeuralNetwork tempNet(*this); - int status = loader.loadFromConfig(config, tempNet); + + int status = loader.loadFromContext(tempNet); + if (status != ML_ERROR_NONE) { + return status; + } + + status = loader.loadFromConfig(config, tempNet); if (status != ML_ERROR_NONE) { return status; }