1 // SPDX-License-Identifier: Apache-2.0
3 * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
5 * @file app_context.cpp
6 * @date 10 November 2020
7 * @brief This file contains app context related functions and classes that
8 * manages the global configuration of the current environment
9 * @see https://github.com/nnstreamer/nntrainer
10 * @author Jihoon Lee <jhoon.it.lee@samsung.com>
11 * @bug No known bugs except for NYI items
21 #include <iniparser.h>
23 #include <app_context.h>
25 #include <nntrainer_error.h>
26 #include <nntrainer_log.h>
27 #include <optimizer.h>
28 #include <util_func.h>
33 #include <activation_layer.h>
34 #include <addition_layer.h>
35 #include <attention_layer.h>
37 #include <centroid_knn.h>
38 #include <concat_layer.h>
39 #include <constant_derivative_loss_layer.h>
40 #include <conv1d_layer.h>
41 #include <conv2d_layer.h>
42 #include <cross_entropy_sigmoid_loss_layer.h>
43 #include <cross_entropy_softmax_loss_layer.h>
45 #include <embedding.h>
47 #include <flatten_layer.h>
50 #include <identity_layer.h>
51 #include <input_layer.h>
52 #include <layer_normalization_layer.h>
53 #include <lr_scheduler_constant.h>
54 #include <lr_scheduler_exponential.h>
55 #include <lr_scheduler_step.h>
58 #include <mol_attention_layer.h>
59 #include <mse_loss_layer.h>
60 #include <multi_head_attention_layer.h>
61 #include <multiout_layer.h>
62 #include <nntrainer_error.h>
63 #include <permute_layer.h>
64 #include <plugged_layer.h>
65 #include <plugged_optimizer.h>
66 #include <pooling2d_layer.h>
67 #include <positional_encoding_layer.h>
68 #include <preprocess_flip_layer.h>
69 #include <preprocess_l2norm_layer.h>
70 #include <preprocess_translate_layer.h>
71 #include <reduce_mean_layer.h>
74 #include <split_layer.h>
75 #include <time_dist.h>
76 #include <zoneout_lstmcell.h>
78 #ifdef ENABLE_TFLITE_BACKBONE
79 #include <tflite_layer.h>
82 #ifdef ENABLE_NNSTREAMER_BACKBONE
83 #include <nnstreamer_layer.h>
86 /// add #ifdef across platform
// File-name suffixes used for plugin discovery: a shared object ending in
// "layer.so" is registered as a layer plugin and one ending in
// "optimizer.so" as an optimizer plugin (see registerPluggableFromDirectory).
87 static std::string solib_suffix = ".so";
88 static std::string layerlib_suffix = "layer.so";
89 static std::string optimizerlib_suffix = "optimizer.so";
// Tag prepended to log and exception messages originating from this file.
90 static const std::string func_tag = "[AppContext] ";
92 #ifdef NNTRAINER_CONF_PATH
// Build-time override for the configuration (*.ini) file location.
93 constexpr const char *DEFAULT_CONF_PATH = NNTRAINER_CONF_PATH;
// NOTE(review): this fallback presumably sits under an #else / #endif pair
// that is not visible in this view of the file — confirm in the full source.
95 constexpr const char *DEFAULT_CONF_PATH = "/etc/nntrainer.ini";
// Accessor for the resolved configuration file path.
98 constexpr const char *getConfPath() { return DEFAULT_CONF_PATH; }
100 namespace nntrainer {
105 * @brief Get the value for @a key from the conf ini file
107 * @return std::string value for the key; empty when unavailable
// Returns an empty string when the conf file does not exist; throws
// std::runtime_error when the ini file exists but fails to load.
109 std::string getConfig(const std::string &key) {
110 std::string conf_path{getConfPath()};
112 ml_logd("%s conf path: %s", func_tag.c_str(), conf_path.c_str());
// A missing conf file is not an error: skip quietly and return empty.
113 if (!isFileExist(conf_path)) {
115 "%s conf path does not exist, skip getting plugin path from the conf",
117 return std::string();
120 dictionary *ini = iniparser_load(conf_path.c_str());
121 NNTR_THROW_IF(ini == nullptr, std::runtime_error)
122 << func_tag << "loading ini failed";
// Scan every section of the ini for the requested key.
125 int nsec = iniparser_getnsec(ini);
126 for (int i = 0; i < nsec; i++) {
127 std::string query(iniparser_getsecname(ini, i));
131 value = std::string(iniparser_getstring(ini, query.c_str(), ""));
137 ml_logd("key %s is not found in config(%s)", key.c_str(),
// NOTE(review): several lines (query composition, loop exit, the return of
// `value`) are elided in this view — verify against the full source.
140 iniparser_freedict(ini);
146 * @brief Get the plugin paths
148 * @return std::vector<std::string> list of paths to search for
150 std::vector<std::string> getPluginPaths() {
151 std::vector<std::string> ret;
153 /*** @note NNTRAINER_PATH is an environment variable stating a @a directory
154 * where you would like to look for the layers, while NNTRAINER_CONF_PATH is a
155 * (buildtime hardcoded @a file path) to locate configuration file *.ini file
157 /*** @note for now, NNTRAINER_PATH is a SINGLE PATH rather than series of path
158 * like PATH environment variable. this could be improved but for now, it is
// 1) an existing NNTRAINER_PATH environment variable is considered first.
161 const char *env_path = std::getenv("NNTRAINER_PATH");
162 if (env_path != nullptr) {
163 if (isFileExist(env_path)) {
164 ml_logd("NNTRAINER_PATH is defined and valid. path: %s", env_path);
165 ret.emplace_back(env_path);
// An invalid NNTRAINER_PATH only produces a warning, never a failure.
167 ml_logw("NNTRAINER_PATH is given but it is not valid. path: %s",
// 2) then the "layer" entry from the conf ini file, when present.
172 std::string plugin_path = getConfig("layer");
173 if (!plugin_path.empty()) {
174 ret.emplace_back(plugin_path);
175 ml_logd("DEFAULT CONF PATH, path: %s", plugin_path.c_str());
182 * @brief Get the Full Path from given string
183 * @details path is resolved in the following order
184 * 1) if @a path is absolute, return path
185 * ----------------------------------------
186 * 2) if @a base == "" && @a path == "", return "."
187 * 3) if @a base == "" && @a path != "", return @a path
188 * 4) if @a base != "" && @a path == "", return @a base
189 * 5) if @a base != "" && @a path != "", return @a base + "/" + path
191 * @param path path to calculate from base
192 * @param base base path
193 * @return const std::string
195 const std::string getFullPath(const std::string &path,
196 const std::string &base) {
197 /// if path is absolute, return path
// note: operator[](0) on an empty std::string returns '\0' (valid since
// C++11), so the absolute-path test is safe for empty input.
198 if (path[0] == '/') {
// NOTE(review): the `return path;` of the absolute branch is elided here.
202 if (base == std::string()) {
203 return path == std::string() ? "." : path;
206 return path == std::string() ? base : base + "/" + path;
// Serializes mutation of the shared factory maps in registerFactory().
211 std::mutex factory_mutex;
214 * @brief finalize global context
/**
 * @brief finalize global context; marked with the GCC/Clang destructor
 *        attribute so it runs automatically at library unload.
 */
static void fini_global_context_nntrainer(void) __attribute__((destructor));

static void fini_global_context_nntrainer(void) {
  // nothing to tear down at the moment; hook kept for future cleanup
}
221 std::once_flag global_app_context_init_flag;
// Registers every built-in optimizer, learning-rate scheduler, and layer
// factory with the given AppContext. Invoked once from registerer().
223 static void add_default_object(AppContext &ac) {
224 /// @note all layers should be added to the app_context to guarantee that
225 /// createLayer/createOptimizer class is created
// --- optimizers ---
226 using OptType = ml::train::OptimizerType;
227 ac.registerFactory(nntrainer::createOptimizer<SGD>, SGD::type, OptType::SGD);
228 ac.registerFactory(nntrainer::createOptimizer<Adam>, Adam::type,
230 ac.registerFactory(AppContext::unknownFactory<nntrainer::Optimizer>,
231 "unknown", OptType::UNKNOWN);
// --- learning rate schedulers ---
// NOTE(review): the `ac.registerFactory(` call heads of the three scheduler
// registrations below are elided in this view — confirm in the full source.
233 using LRType = LearningRateType;
235 ml::train::createLearningRateScheduler<ConstantLearningRateScheduler>,
236 ConstantLearningRateScheduler::type, LRType::CONSTANT);
238 ml::train::createLearningRateScheduler<ExponentialLearningRateScheduler>,
239 ExponentialLearningRateScheduler::type, LRType::EXPONENTIAL);
241 ml::train::createLearningRateScheduler<StepLearningRateScheduler>,
242 StepLearningRateScheduler::type, LRType::STEP);
// --- core layers ---
244 using LayerType = ml::train::LayerType;
245 ac.registerFactory(nntrainer::createLayer<InputLayer>, InputLayer::type,
246 LayerType::LAYER_IN);
247 ac.registerFactory(nntrainer::createLayer<FullyConnectedLayer>,
248 FullyConnectedLayer::type, LayerType::LAYER_FC);
249 ac.registerFactory(nntrainer::createLayer<BatchNormalizationLayer>,
250 BatchNormalizationLayer::type, LayerType::LAYER_BN);
251 ac.registerFactory(nntrainer::createLayer<LayerNormalizationLayer>,
252 LayerNormalizationLayer::type,
253 LayerType::LAYER_LAYER_NORMALIZATION);
254 ac.registerFactory(nntrainer::createLayer<Conv2DLayer>, Conv2DLayer::type,
255 LayerType::LAYER_CONV2D);
256 ac.registerFactory(nntrainer::createLayer<Conv1DLayer>, Conv1DLayer::type,
257 LayerType::LAYER_CONV1D);
258 ac.registerFactory(nntrainer::createLayer<Pooling2DLayer>,
259 Pooling2DLayer::type, LayerType::LAYER_POOLING2D);
260 ac.registerFactory(nntrainer::createLayer<FlattenLayer>, FlattenLayer::type,
261 LayerType::LAYER_FLATTEN);
262 ac.registerFactory(nntrainer::createLayer<ReshapeLayer>, ReshapeLayer::type,
263 LayerType::LAYER_RESHAPE);
264 ac.registerFactory(nntrainer::createLayer<ActivationLayer>,
265 ActivationLayer::type, LayerType::LAYER_ACTIVATION);
266 ac.registerFactory(nntrainer::createLayer<AdditionLayer>, AdditionLayer::type,
267 LayerType::LAYER_ADDITION);
268 ac.registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
269 LayerType::LAYER_CONCAT);
270 ac.registerFactory(nntrainer::createLayer<MultiOutLayer>, MultiOutLayer::type,
271 LayerType::LAYER_MULTIOUT);
272 ac.registerFactory(nntrainer::createLayer<EmbeddingLayer>,
273 EmbeddingLayer::type, LayerType::LAYER_EMBEDDING);
// --- recurrent layers ---
274 ac.registerFactory(nntrainer::createLayer<RNNLayer>, RNNLayer::type,
275 LayerType::LAYER_RNN);
276 ac.registerFactory(nntrainer::createLayer<RNNCellLayer>, RNNCellLayer::type,
277 LayerType::LAYER_RNNCELL);
278 ac.registerFactory(nntrainer::createLayer<LSTMLayer>, LSTMLayer::type,
279 LayerType::LAYER_LSTM);
280 ac.registerFactory(nntrainer::createLayer<LSTMCellLayer>, LSTMCellLayer::type,
281 LayerType::LAYER_LSTMCELL);
282 ac.registerFactory(nntrainer::createLayer<ZoneoutLSTMCellLayer>,
283 ZoneoutLSTMCellLayer::type,
284 LayerType::LAYER_ZONEOUT_LSTMCELL);
285 ac.registerFactory(nntrainer::createLayer<SplitLayer>, SplitLayer::type,
286 LayerType::LAYER_SPLIT);
287 ac.registerFactory(nntrainer::createLayer<GRULayer>, GRULayer::type,
288 LayerType::LAYER_GRU);
289 ac.registerFactory(nntrainer::createLayer<GRUCellLayer>, GRUCellLayer::type,
290 LayerType::LAYER_GRUCELL);
291 ac.registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
292 LayerType::LAYER_PERMUTE);
293 ac.registerFactory(nntrainer::createLayer<DropOutLayer>, DropOutLayer::type,
294 LayerType::LAYER_DROPOUT);
// --- attention layers ---
295 ac.registerFactory(nntrainer::createLayer<AttentionLayer>,
296 AttentionLayer::type, LayerType::LAYER_ATTENTION);
297 ac.registerFactory(nntrainer::createLayer<MoLAttentionLayer>,
298 MoLAttentionLayer::type, LayerType::LAYER_MOL_ATTENTION);
299 ac.registerFactory(nntrainer::createLayer<MultiHeadAttentionLayer>,
300 MultiHeadAttentionLayer::type,
301 LayerType::LAYER_MULTI_HEAD_ATTENTION);
302 ac.registerFactory(nntrainer::createLayer<ReduceMeanLayer>,
303 ReduceMeanLayer::type, LayerType::LAYER_REDUCE_MEAN);
304 ac.registerFactory(nntrainer::createLayer<PositionalEncodingLayer>,
305 PositionalEncodingLayer::type,
306 LayerType::LAYER_POSITIONAL_ENCODING);
307 ac.registerFactory(nntrainer::createLayer<IdentityLayer>, IdentityLayer::type,
308 LayerType::LAYER_IDENTITY);
// --- optional backbone layers (compile-time gated) ---
// NOTE(review): the matching #endif lines for both guards are not visible in
// this view — confirm against the full source.
310 #ifdef ENABLE_NNSTREAMER_BACKBONE
311 ac.registerFactory(nntrainer::createLayer<NNStreamerLayer>,
312 NNStreamerLayer::type,
313 LayerType::LAYER_BACKBONE_NNSTREAMER);
315 #ifdef ENABLE_TFLITE_BACKBONE
316 ac.registerFactory(nntrainer::createLayer<TfLiteLayer>, TfLiteLayer::type,
317 LayerType::LAYER_BACKBONE_TFLITE);
319 ac.registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
320 LayerType::LAYER_CENTROID_KNN);
322 /** preprocess layers */
323 ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
324 PreprocessFlipLayer::type,
325 LayerType::LAYER_PREPROCESS_FLIP);
326 ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
327 PreprocessTranslateLayer::type,
328 LayerType::LAYER_PREPROCESS_TRANSLATE);
329 ac.registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
330 PreprocessL2NormLayer::type,
331 LayerType::LAYER_PREPROCESS_L2NORM);
333 /** register losses */
334 ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
335 LayerType::LAYER_LOSS_MSE);
336 ac.registerFactory(nntrainer::createLayer<CrossEntropySigmoidLossLayer>,
337 CrossEntropySigmoidLossLayer::type,
338 LayerType::LAYER_LOSS_CROSS_ENTROPY_SIGMOID);
339 ac.registerFactory(nntrainer::createLayer<CrossEntropySoftmaxLossLayer>,
340 CrossEntropySoftmaxLossLayer::type,
341 LayerType::LAYER_LOSS_CROSS_ENTROPY_SOFTMAX);
342 ac.registerFactory(nntrainer::createLayer<ConstantDerivativeLossLayer>,
343 ConstantDerivativeLossLayer::type,
344 LayerType::LAYER_LOSS_CONSTANT_DERIVATIVE);
346 ac.registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
347 LayerType::LAYER_TIME_DIST);
// Fallback factory for unrecognized layer type names.
349 ac.registerFactory(AppContext::unknownFactory<nntrainer::Layer>, "unknown",
350 LayerType::LAYER_UNKNOWN);
// Scans every plugin path and registers pluggable layers/optimizers found
// there; a failing path is logged as a warning and does not abort startup.
353 static void add_extension_object(AppContext &ac) {
354 auto dir_list = getPluginPaths();
356 for (auto &path : dir_list) {
// NOTE(review): the `try {` opener of this try/catch is elided in this view.
358 ac.registerPluggableFromDirectory(path);
359 } catch (std::exception &e) {
360 ml_logw("tried to register extension from %s but failed, reason: %s",
361 path.c_str(), e.what());
// One-shot initializer invoked via std::call_once from AppContext::Global().
// noexcept on purpose: see the g++ bug note in Global(); failures are logged
// rather than propagated.
366 static void registerer(AppContext &ac) noexcept {
// NOTE(review): the `try {` opener and the catch (...) line are elided here.
368 add_default_object(ac);
369 add_extension_object(ac);
370 } catch (std::exception &e) {
371 ml_loge("registering layers failed!!, reason: %s", e.what());
373 ml_loge("registering layer failed due to unknown reason");
// Singleton accessor; default + extension objects are registered exactly
// once on first use (guarded by global_app_context_init_flag).
377 AppContext &AppContext::Global() {
378 static AppContext instance;
379 /// in g++ there is a bug that hangs up if caller throws,
380 /// so registerer is noexcept although it'd better not
381 /// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70298
382 std::call_once(global_app_context_init_flag, registerer, std::ref(instance));
// Validates that @a base is an openable directory, canonicalizes it, and
// stores it as the base used by getWorkingPath().
// @throws std::invalid_argument when the path is not a directory / lacks
// permission, or when canonicalization fails.
386 void AppContext::setWorkingDirectory(const std::string &base) {
387 DIR *dir = opendir(base.c_str());
// NOTE(review): the null-check on `dir` and a closedir() call are not
// visible in this view — confirm the handle is closed in the full source.
390 std::stringstream ss;
391 ss << func_tag << "path is not directory or has no permission: " << base;
392 throw std::invalid_argument(ss.str().c_str());
396 char *ret = getRealpath(base.c_str(), nullptr);
398 if (ret == nullptr) {
399 std::stringstream ss;
400 ss << func_tag << "failed to get canonical path for the path: ";
401 throw std::invalid_argument(ss.str().c_str());
// NOTE(review): getRealpath presumably returns malloc'd memory; a matching
// free(ret) is not visible in this view — verify against the full source.
404 working_path_base = std::string(ret);
405 ml_logd("working path base has set: %s", working_path_base.c_str());
// Resolves @a path against the configured working_path_base (see the
// resolution order documented on getFullPath).
409 const std::string AppContext::getWorkingPath(const std::string &path) {
410 return getFullPath(path, working_path_base);
414 * @brief base case of iterate_prop, iterate_prop iterates the given tuple
416 * @tparam I size of tuple(automated)
417 * @tparam V container type of properties
418 * @tparam Ts types from tuple
419 * @param prop property container to be added to
420 * @param tup tuple to be iterated
// Recursion terminator: enabled only when I == sizeof...(Ts); does nothing.
423 template <size_t I = 0, typename V, typename... Ts>
424 typename std::enable_if<I == sizeof...(Ts), void>::type inline parse_properties(
425 V &props, std::tuple<Ts...> &tup) {
430 * @brief recursive case of iterate_prop, iterate_prop iterates the given tuple
432 * @tparam I current tuple index (automated)
433 * @tparam V container type of properties
434 * @tparam Ts types from tuple
435 * @param prop property container to be added to
436 * @param tup tuple to be iterated
439 template <size_t I = 0, typename V, typename... Ts>
440 typename std::enable_if <
441 I<sizeof...(Ts), void>::type inline parse_properties(V &props,
442 std::tuple<Ts...> &tup) {
// Reads the I-th property name's value from the conf ini, appends a
// "name=value" entry, then recurses on the next tuple element.
443 std::string name = std::get<I>(tup);
444 std::string prop = getConfig(name);
// NOTE(review): a guard skipping empty `prop` values appears elided here.
446 props.push_back(name + "=" + prop);
448 parse_properties<I + 1>(props, tup);
// Collects "key=value" properties from the conf ini for the keys listed in
// the tuple below (memory-swap related settings).
451 const std::vector<std::string> AppContext::getProperties(void) {
452 std::vector<std::string> properties;
454 auto props = std::tuple("memory_swap", "memory_swap_path");
455 parse_properties(properties, props);
// Dynamically loads a layer plugin shared object and registers its factory.
// @param library_path plugin file name, resolved against @a base_path
// @param base_path directory to resolve a relative library_path from
// @return int key assigned by registerFactory
// @throws std::invalid_argument when dlopen/dlsym fails or the plugin is
// malformed (null created layer or empty type name).
460 int AppContext::registerLayer(const std::string &library_path,
461 const std::string &base_path) {
462 const std::string full_path = getFullPath(library_path, base_path);
464 void *handle = dlopen(full_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
465 const char *error_msg = dlerror();
467 NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
468 << func_tag << "open plugin failed, reason: " << error_msg;
470 nntrainer::LayerPluggable *pluggable =
471 reinterpret_cast<nntrainer::LayerPluggable *>(
472 dlsym(handle, "ml_train_layer_pluggable"));
474 error_msg = dlerror();
// close_dl releases the dlopen handle on each error path below.
475 auto close_dl = [handle] { dlclose(handle); };
476 NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
477 std::invalid_argument, close_dl)
478 << func_tag << "loading symbol failed, reason: " << error_msg;
// Create one probe object to validate the plugin before registering it.
480 auto layer = pluggable->createfunc();
481 NNTR_THROW_IF_CLEANUP(layer == nullptr, std::invalid_argument, close_dl)
482 << func_tag << "created pluggable layer is null";
483 auto type = layer->getType();
484 NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
485 << func_tag << "custom layer must specify type name, but it is empty";
486 pluggable->destroyfunc(layer);
// Registered factory wraps the pluggable in a PluggedLayer adapter.
488 FactoryType<nntrainer::Layer> factory_func =
489 [pluggable](const PropsType &prop) {
490 std::unique_ptr<nntrainer::Layer> layer =
491 std::make_unique<internal::PluggedLayer>(pluggable);
// NOTE(review): the lambda's return statement and closing lines are elided.
496 return registerFactory<nntrainer::Layer>(factory_func, type);
// Dynamically loads an optimizer plugin shared object and registers its
// factory; mirrors registerLayer but for the optimizer pluggable symbol.
// @param library_path plugin file name, resolved against @a base_path
// @param base_path directory to resolve a relative library_path from
// @return int key assigned by registerFactory
// @throws std::invalid_argument when dlopen/dlsym fails or the plugin is
// malformed (null created optimizer or empty type name).
499 int AppContext::registerOptimizer(const std::string &library_path,
500 const std::string &base_path) {
501 const std::string full_path = getFullPath(library_path, base_path);
503 void *handle = dlopen(full_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
504 const char *error_msg = dlerror();
506 NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
507 << func_tag << "open plugin failed, reason: " << error_msg;
509 nntrainer::OptimizerPluggable *pluggable =
510 reinterpret_cast<nntrainer::OptimizerPluggable *>(
511 dlsym(handle, "ml_train_optimizer_pluggable"));
513 error_msg = dlerror();
// close_dl releases the dlopen handle on each error path below.
514 auto close_dl = [handle] { dlclose(handle); };
515 NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
516 std::invalid_argument, close_dl)
517 << func_tag << "loading symbol failed, reason: " << error_msg;
// Create one probe object to validate the plugin before registering it.
519 auto optimizer = pluggable->createfunc();
520 NNTR_THROW_IF_CLEANUP(optimizer == nullptr, std::invalid_argument, close_dl)
521 << func_tag << "created pluggable optimizer is null";
522 auto type = optimizer->getType();
523 NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
524 << func_tag << "custom optimizer must specify type name, but it is empty";
525 pluggable->destroyfunc(optimizer);
// Registered factory wraps the pluggable in a PluggedOptimizer adapter.
527 FactoryType<nntrainer::Optimizer> factory_func =
528 [pluggable](const PropsType &prop) {
529 std::unique_ptr<nntrainer::Optimizer> optimizer =
530 std::make_unique<internal::PluggedOptimizer>(pluggable);
// NOTE(review): the lambda's return statement and closing lines are elided.
535 return registerFactory<nntrainer::Optimizer>(factory_func, type);
// Registers every *layer.so / *optimizer.so found directly in @a base_path.
// @throws std::invalid_argument when the directory cannot be opened.
539 AppContext::registerPluggableFromDirectory(const std::string &base_path) {
540 DIR *dir = opendir(base_path.c_str());
542 NNTR_THROW_IF(dir == nullptr, std::invalid_argument)
543 << func_tag << "failed to open the directory: " << base_path;
545 struct dirent *entry;
// Collects the int keys returned by each successful registration.
547 std::vector<int> keys;
548 while ((entry = readdir(dir)) != NULL) {
549 if (endswith(entry->d_name, solib_suffix)) {
550 if (endswith(entry->d_name, layerlib_suffix)) {
// NOTE(review): the `try {` openers and the catch bodies are elided here.
552 int key = registerLayer(entry->d_name, base_path);
553 keys.emplace_back(key);
554 } catch (std::exception &e) {
558 } else if (endswith(entry->d_name, optimizerlib_suffix)) {
560 int key = registerOptimizer(entry->d_name, base_path);
561 keys.emplace_back(key);
562 } catch (std::exception &e) {
// NOTE(review): closedir(dir) and the return of `keys` are not visible in
// this view — confirm the directory handle is closed in the full source.
// Registers @a factory under @a key (and an integer key) in the per-type
// factory maps. Throws std::invalid_argument on duplicate string/int keys.
575 template <typename T>
576 const int AppContext::registerFactory(const FactoryType<T> factory,
577 const std::string &key,
// NOTE(review): the `const int int_key` parameter line is elided in this
// view; the body below clearly uses it — confirm in the full source.
579 static_assert(isSupported<T>::value,
580 "given type is not supported for current app context");
582 auto &index = std::get<IndexType<T>>(factory_map);
583 auto &str_map = std::get<StrIndexType<T>>(index);
584 auto &int_map = std::get<IntIndexType>(index);
// Empty key: derive it from a probe object created by the factory itself.
586 std::string assigned_key = key == "" ? factory({})->getType() : key;
// Keys are case-insensitive: normalize to lower case before lookup/insert.
588 std::transform(assigned_key.begin(), assigned_key.end(), assigned_key.begin(),
589 [](unsigned char c) { return std::tolower(c); });
// Map mutations below are serialized across threads by factory_mutex.
591 const std::lock_guard<std::mutex> lock(factory_mutex);
592 if (str_map.find(assigned_key) != str_map.end()) {
593 std::stringstream ss;
594 ss << "cannot register factory with already taken key: " << key;
595 throw std::invalid_argument(ss.str().c_str());
598 if (int_key != -1 && int_map.find(int_key) != int_map.end()) {
599 std::stringstream ss;
600 ss << "cannot register factory with already taken int key: " << int_key;
601 throw std::invalid_argument(ss.str().c_str());
// int_key == -1 requests auto-assignment of the next available int key.
604 int assigned_int_key = int_key == -1 ? str_map.size() + 1 : int_key;
606 str_map[assigned_key] = factory;
607 int_map[assigned_int_key] = assigned_key;
609 ml_logd("factory has registered with key: %s, int_key: %d",
610 assigned_key.c_str(), assigned_int_key);
612 return assigned_int_key;
616 * @copydoc const int AppContext::registerFactory
// Explicit instantiation for the optimizer factory map.
618 template const int AppContext::registerFactory<nntrainer::Optimizer>(
619 const FactoryType<nntrainer::Optimizer> factory, const std::string &key,
623 * @copydoc const int AppContext::registerFactory
// Explicit instantiation for the layer factory map.
625 template const int AppContext::registerFactory<nntrainer::Layer>(
626 const FactoryType<nntrainer::Layer> factory, const std::string &key,
630 * @copydoc const int AppContext::registerFactory
// Explicit instantiation for the learning-rate-scheduler factory map.
633 AppContext::registerFactory<ml::train::LearningRateScheduler>(
634 const FactoryType<ml::train::LearningRateScheduler> factory,
635 const std::string &key, const int int_key);
637 } // namespace nntrainer