43eaaa0b9387d2fda53a0afdd185a9383b3ca35f
[platform/core/ml/nntrainer.git] / nntrainer / app_context.cpp
1 // SPDX-License-Identifier: Apache-2.0
2 /**
3  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
4  *
5  * @file   app_context.cpp
6  * @date   10 November 2020
7  * @brief  This file contains app context related functions and classes that
8  * manages the global configuration of the current environment
9  * @see    https://github.com/nnstreamer/nntrainer
10  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
11  * @bug    No known bugs except for NYI items
12  *
13  */
14 #include <dirent.h>
15 #include <dlfcn.h>
16 #include <iostream>
17 #include <sstream>
18 #include <string>
19 #include <vector>
20
21 #include <iniparser.h>
22
23 #include <app_context.h>
24 #include <layer.h>
25 #include <nntrainer_error.h>
26 #include <nntrainer_log.h>
27 #include <optimizer.h>
28 #include <util_func.h>
29
30 #include <adam.h>
31 #include <sgd.h>
32
33 #include <activation_layer.h>
34 #include <addition_layer.h>
35 #include <attention_layer.h>
36 #include <bn_layer.h>
37 #include <centroid_knn.h>
38 #include <concat_layer.h>
39 #include <constant_derivative_loss_layer.h>
40 #include <conv1d_layer.h>
41 #include <conv2d_layer.h>
42 #include <cross_entropy_sigmoid_loss_layer.h>
43 #include <cross_entropy_softmax_loss_layer.h>
44 #include <dropout.h>
45 #include <embedding.h>
46 #include <fc_layer.h>
47 #include <flatten_layer.h>
48 #include <gru.h>
49 #include <grucell.h>
50 #include <identity_layer.h>
51 #include <input_layer.h>
52 #include <layer_normalization_layer.h>
53 #include <lr_scheduler_constant.h>
54 #include <lr_scheduler_exponential.h>
55 #include <lr_scheduler_step.h>
56 #include <lstm.h>
57 #include <lstmcell.h>
58 #include <mol_attention_layer.h>
59 #include <mse_loss_layer.h>
60 #include <multi_head_attention_layer.h>
61 #include <multiout_layer.h>
62 #include <nntrainer_error.h>
63 #include <permute_layer.h>
64 #include <plugged_layer.h>
65 #include <plugged_optimizer.h>
66 #include <pooling2d_layer.h>
67 #include <positional_encoding_layer.h>
68 #include <preprocess_flip_layer.h>
69 #include <preprocess_l2norm_layer.h>
70 #include <preprocess_translate_layer.h>
71 #include <reduce_mean_layer.h>
72 #include <rnn.h>
73 #include <rnncell.h>
74 #include <split_layer.h>
75 #include <time_dist.h>
76 #include <zoneout_lstmcell.h>
77
78 #ifdef ENABLE_TFLITE_BACKBONE
79 #include <tflite_layer.h>
80 #endif
81
82 #ifdef ENABLE_NNSTREAMER_BACKBONE
83 #include <nnstreamer_layer.h>
84 #endif
85
/// add #ifdef across platform
/// suffixes used to classify pluggable shared objects found in a plugin
/// directory: "*layer.so" is registered as a layer and "*optimizer.so" as an
/// optimizer (see AppContext::registerPluggableFromDirectory)
static std::string solib_suffix = ".so";
static std::string layerlib_suffix = "layer.so";
static std::string optimizerlib_suffix = "optimizer.so";
/// tag prepended to every log / exception message emitted from this file
static const std::string func_tag = "[AppContext] ";

/// conf file location: build-time override via NNTRAINER_CONF_PATH macro,
/// otherwise a fixed system path
#ifdef NNTRAINER_CONF_PATH
constexpr const char *DEFAULT_CONF_PATH = NNTRAINER_CONF_PATH;
#else
constexpr const char *DEFAULT_CONF_PATH = "/etc/nntrainer.ini";
#endif

/// @brief accessor for the configuration (*.ini) file path
constexpr const char *getConfPath() { return DEFAULT_CONF_PATH; }
99
100 namespace nntrainer {
101
102 namespace {
103
104 /**
105  * @brief Get the plugin path from conf ini
106  *
107  * @return std::string plugin path
108  */
109 std::string getConfig(const std::string &key) {
110   std::string conf_path{getConfPath()};
111
112   ml_logd("%s conf path: %s", func_tag.c_str(), conf_path.c_str());
113   if (!isFileExist(conf_path)) {
114     ml_logw(
115       "%s conf path does not exist, skip getting plugin path from the conf",
116       func_tag.c_str());
117     return std::string();
118   }
119
120   dictionary *ini = iniparser_load(conf_path.c_str());
121   NNTR_THROW_IF(ini == nullptr, std::runtime_error)
122     << func_tag << "loading ini failed";
123
124   std::string value;
125   int nsec = iniparser_getnsec(ini);
126   for (int i = 0; i < nsec; i++) {
127     std::string query(iniparser_getsecname(ini, i));
128     query += ":";
129     query += key;
130
131     value = std::string(iniparser_getstring(ini, query.c_str(), ""));
132     if (!value.empty())
133       break;
134   }
135
136   if (value.empty())
137     ml_logd("key %s is not found in config(%s)", key.c_str(),
138             conf_path.c_str());
139
140   iniparser_freedict(ini);
141
142   return value;
143 }
144
145 /**
146  * @brief Get the plugin paths
147  *
148  * @return std::vector<std::string> list of paths to search for
149  */
150 std::vector<std::string> getPluginPaths() {
151   std::vector<std::string> ret;
152
153   /*** @note NNTRAINER_PATH is an environment variable stating a @a directory
154    * where you would like to look for the layers, while NNTRAINER_CONF_PATH is a
155    * (buildtime hardcoded @a file path) to locate configuration file *.ini file
156    */
157   /*** @note for now, NNTRAINER_PATH is a SINGLE PATH rather than serise of path
158    * like PATH environment variable. this could be improved but for now, it is
159    * enough
160    */
161   const char *env_path = std::getenv("NNTRAINER_PATH");
162   if (env_path != nullptr) {
163     if (isFileExist(env_path)) {
164       ml_logd("NNTRAINER_PATH is defined and valid. path: %s", env_path);
165       ret.emplace_back(env_path);
166     } else {
167       ml_logw("NNTRAINER_PATH is given but it is not valid. path: %s",
168               env_path);
169     }
170   }
171
172   std::string plugin_path = getConfig("layer");
173   if (!plugin_path.empty()) {
174     ret.emplace_back(plugin_path);
175     ml_logd("DEFAULT CONF PATH, path: %s", plugin_path.c_str());
176   }
177
178   return ret;
179 }
180
/**
 * @brief Get the Full Path from given string
 * @details path is resolved in the following order
 * 1) if @a path is absolute, return path
 * ----------------------------------------
 * 2) if @a base == "" && @a path == "", return "."
 * 3) if @a base == "" && @a path != "", return @a path
 * 4) if @a base != "" && @a path == "", return @a base
 * 5) if @a base != "" && @a path != "", return @a base + "/" + path
 *
 * @param path path to calculate from base
 * @param base base path
 * @return const std::string resolved path
 */
const std::string getFullPath(const std::string &path,
                              const std::string &base) {
  /// absolute paths are returned untouched
  if (!path.empty() && path.front() == '/') {
    return path;
  }

  if (base.empty()) {
    return path.empty() ? std::string(".") : path;
  }

  if (path.empty()) {
    return base;
  }

  return base + "/" + path;
}
208
209 } // namespace
210
/// guards the factory maps mutated in AppContext::registerFactory
std::mutex factory_mutex;

/**
 * @brief finalize global context
 * @note currently a no-op; marked as a destructor so it runs at library
 * unload
 */
static void fini_global_context_nntrainer(void) __attribute__((destructor));

static void fini_global_context_nntrainer(void) {}

/// makes sure registerer() runs exactly once for the global AppContext
std::once_flag global_app_context_init_flag;
222
/**
 * @brief register every built-in optimizer, learning rate scheduler and
 * layer factory into the given context so each can be created by its string
 * type name or its enum key
 *
 * @param ac context to register the default factories into
 */
static void add_default_object(AppContext &ac) {
  /// @note all layers should be added to the app_context to guarantee that
  /// createLayer/createOptimizer class is created
  /** optimizers */
  using OptType = ml::train::OptimizerType;
  ac.registerFactory(nntrainer::createOptimizer<SGD>, SGD::type, OptType::SGD);
  ac.registerFactory(nntrainer::createOptimizer<Adam>, Adam::type,
                     OptType::ADAM);
  /// catch-all entry for the "unknown" key (see AppContext::unknownFactory)
  ac.registerFactory(AppContext::unknownFactory<nntrainer::Optimizer>,
                     "unknown", OptType::UNKNOWN);

  /** learning rate schedulers */
  using LRType = LearningRateType;
  ac.registerFactory(
    ml::train::createLearningRateScheduler<ConstantLearningRateScheduler>,
    ConstantLearningRateScheduler::type, LRType::CONSTANT);
  ac.registerFactory(
    ml::train::createLearningRateScheduler<ExponentialLearningRateScheduler>,
    ExponentialLearningRateScheduler::type, LRType::EXPONENTIAL);
  ac.registerFactory(
    ml::train::createLearningRateScheduler<StepLearningRateScheduler>,
    StepLearningRateScheduler::type, LRType::STEP);

  /** layers */
  using LayerType = ml::train::LayerType;
  ac.registerFactory(nntrainer::createLayer<InputLayer>, InputLayer::type,
                     LayerType::LAYER_IN);
  ac.registerFactory(nntrainer::createLayer<FullyConnectedLayer>,
                     FullyConnectedLayer::type, LayerType::LAYER_FC);
  ac.registerFactory(nntrainer::createLayer<BatchNormalizationLayer>,
                     BatchNormalizationLayer::type, LayerType::LAYER_BN);
  ac.registerFactory(nntrainer::createLayer<LayerNormalizationLayer>,
                     LayerNormalizationLayer::type,
                     LayerType::LAYER_LAYER_NORMALIZATION);
  ac.registerFactory(nntrainer::createLayer<Conv2DLayer>, Conv2DLayer::type,
                     LayerType::LAYER_CONV2D);
  ac.registerFactory(nntrainer::createLayer<Conv1DLayer>, Conv1DLayer::type,
                     LayerType::LAYER_CONV1D);
  ac.registerFactory(nntrainer::createLayer<Pooling2DLayer>,
                     Pooling2DLayer::type, LayerType::LAYER_POOLING2D);
  ac.registerFactory(nntrainer::createLayer<FlattenLayer>, FlattenLayer::type,
                     LayerType::LAYER_FLATTEN);
  ac.registerFactory(nntrainer::createLayer<ReshapeLayer>, ReshapeLayer::type,
                     LayerType::LAYER_RESHAPE);
  ac.registerFactory(nntrainer::createLayer<ActivationLayer>,
                     ActivationLayer::type, LayerType::LAYER_ACTIVATION);
  ac.registerFactory(nntrainer::createLayer<AdditionLayer>, AdditionLayer::type,
                     LayerType::LAYER_ADDITION);
  ac.registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
                     LayerType::LAYER_CONCAT);
  ac.registerFactory(nntrainer::createLayer<MultiOutLayer>, MultiOutLayer::type,
                     LayerType::LAYER_MULTIOUT);
  ac.registerFactory(nntrainer::createLayer<EmbeddingLayer>,
                     EmbeddingLayer::type, LayerType::LAYER_EMBEDDING);
  ac.registerFactory(nntrainer::createLayer<RNNLayer>, RNNLayer::type,
                     LayerType::LAYER_RNN);
  ac.registerFactory(nntrainer::createLayer<RNNCellLayer>, RNNCellLayer::type,
                     LayerType::LAYER_RNNCELL);
  ac.registerFactory(nntrainer::createLayer<LSTMLayer>, LSTMLayer::type,
                     LayerType::LAYER_LSTM);
  ac.registerFactory(nntrainer::createLayer<LSTMCellLayer>, LSTMCellLayer::type,
                     LayerType::LAYER_LSTMCELL);
  ac.registerFactory(nntrainer::createLayer<ZoneoutLSTMCellLayer>,
                     ZoneoutLSTMCellLayer::type,
                     LayerType::LAYER_ZONEOUT_LSTMCELL);
  ac.registerFactory(nntrainer::createLayer<SplitLayer>, SplitLayer::type,
                     LayerType::LAYER_SPLIT);
  ac.registerFactory(nntrainer::createLayer<GRULayer>, GRULayer::type,
                     LayerType::LAYER_GRU);
  ac.registerFactory(nntrainer::createLayer<GRUCellLayer>, GRUCellLayer::type,
                     LayerType::LAYER_GRUCELL);
  ac.registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
                     LayerType::LAYER_PERMUTE);
  ac.registerFactory(nntrainer::createLayer<DropOutLayer>, DropOutLayer::type,
                     LayerType::LAYER_DROPOUT);
  ac.registerFactory(nntrainer::createLayer<AttentionLayer>,
                     AttentionLayer::type, LayerType::LAYER_ATTENTION);
  ac.registerFactory(nntrainer::createLayer<MoLAttentionLayer>,
                     MoLAttentionLayer::type, LayerType::LAYER_MOL_ATTENTION);
  ac.registerFactory(nntrainer::createLayer<MultiHeadAttentionLayer>,
                     MultiHeadAttentionLayer::type,
                     LayerType::LAYER_MULTI_HEAD_ATTENTION);
  ac.registerFactory(nntrainer::createLayer<ReduceMeanLayer>,
                     ReduceMeanLayer::type, LayerType::LAYER_REDUCE_MEAN);
  ac.registerFactory(nntrainer::createLayer<PositionalEncodingLayer>,
                     PositionalEncodingLayer::type,
                     LayerType::LAYER_POSITIONAL_ENCODING);
  ac.registerFactory(nntrainer::createLayer<IdentityLayer>, IdentityLayer::type,
                     LayerType::LAYER_IDENTITY);

  /** backbone layers, compiled in only when the backend is enabled */
#ifdef ENABLE_NNSTREAMER_BACKBONE
  ac.registerFactory(nntrainer::createLayer<NNStreamerLayer>,
                     NNStreamerLayer::type,
                     LayerType::LAYER_BACKBONE_NNSTREAMER);
#endif
#ifdef ENABLE_TFLITE_BACKBONE
  ac.registerFactory(nntrainer::createLayer<TfLiteLayer>, TfLiteLayer::type,
                     LayerType::LAYER_BACKBONE_TFLITE);
#endif
  ac.registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
                     LayerType::LAYER_CENTROID_KNN);

  /** preprocess layers */
  ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
                     PreprocessFlipLayer::type,
                     LayerType::LAYER_PREPROCESS_FLIP);
  ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
                     PreprocessTranslateLayer::type,
                     LayerType::LAYER_PREPROCESS_TRANSLATE);
  ac.registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
                     PreprocessL2NormLayer::type,
                     LayerType::LAYER_PREPROCESS_L2NORM);

  /** register losses */
  ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
                     LayerType::LAYER_LOSS_MSE);
  ac.registerFactory(nntrainer::createLayer<CrossEntropySigmoidLossLayer>,
                     CrossEntropySigmoidLossLayer::type,
                     LayerType::LAYER_LOSS_CROSS_ENTROPY_SIGMOID);
  ac.registerFactory(nntrainer::createLayer<CrossEntropySoftmaxLossLayer>,
                     CrossEntropySoftmaxLossLayer::type,
                     LayerType::LAYER_LOSS_CROSS_ENTROPY_SOFTMAX);
  ac.registerFactory(nntrainer::createLayer<ConstantDerivativeLossLayer>,
                     ConstantDerivativeLossLayer::type,
                     LayerType::LAYER_LOSS_CONSTANT_DERIVATIVE);

  ac.registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
                     LayerType::LAYER_TIME_DIST);

  /// catch-all entry for unrecognized layer keys
  ac.registerFactory(AppContext::unknownFactory<nntrainer::Layer>, "unknown",
                     LayerType::LAYER_UNKNOWN);
}
352
353 static void add_extension_object(AppContext &ac) {
354   auto dir_list = getPluginPaths();
355
356   for (auto &path : dir_list) {
357     try {
358       ac.registerPluggableFromDirectory(path);
359     } catch (std::exception &e) {
360       ml_logw("tried to register extension from %s but failed, reason: %s",
361               path.c_str(), e.what());
362     }
363   }
364 }
365
366 static void registerer(AppContext &ac) noexcept {
367   try {
368     add_default_object(ac);
369     add_extension_object(ac);
370   } catch (std::exception &e) {
371     ml_loge("registering layers failed!!, reason: %s", e.what());
372   } catch (...) {
373     ml_loge("registering layer failed due to unknown reason");
374   }
375 };
376
/**
 * @brief singleton accessor; the first call registers the default and plugin
 * factories exactly once via global_app_context_init_flag
 *
 * @return AppContext& the process-wide context instance
 */
AppContext &AppContext::Global() {
  static AppContext instance;
  /// in g++ there is a bug that hangs up if caller throws,
  /// so registerer is noexcept although it'd better not
  /// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70298
  std::call_once(global_app_context_init_flag, registerer, std::ref(instance));
  return instance;
}
385
386 void AppContext::setWorkingDirectory(const std::string &base) {
387   DIR *dir = opendir(base.c_str());
388
389   if (!dir) {
390     std::stringstream ss;
391     ss << func_tag << "path is not directory or has no permission: " << base;
392     throw std::invalid_argument(ss.str().c_str());
393   }
394   closedir(dir);
395
396   char *ret = realpath(base.c_str(), nullptr);
397
398   if (ret == nullptr) {
399     std::stringstream ss;
400     ss << func_tag << "failed to get canonical path for the path: ";
401     throw std::invalid_argument(ss.str().c_str());
402   }
403
404   working_path_base = std::string(ret);
405   ml_logd("working path base has set: %s", working_path_base.c_str());
406   free(ret);
407 }
408
409 const std::string AppContext::getWorkingPath(const std::string &path) {
410   return getFullPath(path, working_path_base);
411 }
412
/**
 * @brief terminating overload of parse_properties, selected once the tuple
 * has been fully consumed (I == sizeof...(Ts))
 *
 * @tparam I current tuple index (automated)
 * @tparam V container type of properties
 * @tparam Ts types from tuple
 * @param props property container to be added to
 * @param tup tuple to be iterated
 * @return void
 */
template <size_t I = 0, typename V, typename... Ts>
inline typename std::enable_if<I == sizeof...(Ts), void>::type
parse_properties(V &props, std::tuple<Ts...> &tup) {
  /// end of recursion: nothing left to parse
}
428
429 /**
430  * @brief base case of iterate_prop, iterate_prop iterates the given tuple
431  *
432  * @tparam I size of tuple(automated)
433  * @tparam V container type of properties
434  * @tparam Ts types from tuple
435  * @param prop property container to be added to
436  * @param tup tuple to be iterated
437  * @return void
438  */
439 template <size_t I = 0, typename V, typename... Ts>
440   typename std::enable_if <
441   I<sizeof...(Ts), void>::type inline parse_properties(V &props,
442                                                        std::tuple<Ts...> &tup) {
443   std::string name = std::get<I>(tup);
444   std::string prop = getConfig(name);
445   if (!prop.empty())
446     props.push_back(name + "=" + prop);
447
448   parse_properties<I + 1>(props, tup);
449 }
450
451 const std::vector<std::string> AppContext::getProperties(void) {
452   std::vector<std::string> properties;
453
454   auto props = std::tuple("memory_swap", "memory_swap_path");
455   parse_properties(properties, props);
456
457   return properties;
458 }
459
/**
 * @brief load a layer plugin shared object and register its factory
 *
 * @param library_path path of the shared object, resolved against @a
 * base_path via getFullPath
 * @param base_path base directory for relative library paths
 * @return int integer key assigned by registerFactory
 * @throws std::invalid_argument when the library cannot be opened, the
 * pluggable symbol is missing, or the plugin is malformed
 */
int AppContext::registerLayer(const std::string &library_path,
                              const std::string &base_path) {
  const std::string full_path = getFullPath(library_path, base_path);

  void *handle = dlopen(full_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
  const char *error_msg = dlerror();

  NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
    << func_tag << "open plugin failed, reason: " << error_msg;

  nntrainer::LayerPluggable *pluggable =
    reinterpret_cast<nntrainer::LayerPluggable *>(
      dlsym(handle, "ml_train_layer_pluggable"));

  /// dlerror() is checked after dlsym because dlsym's return value alone
  /// cannot distinguish "symbol not found" from a legitimately NULL symbol
  error_msg = dlerror();
  auto close_dl = [handle] { dlclose(handle); };
  NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
                        std::invalid_argument, close_dl)
    << func_tag << "loading symbol failed, reason: " << error_msg;

  /// sanity check: create one instance to validate the plugin and read its
  /// type name, then destroy it again
  auto layer = pluggable->createfunc();
  NNTR_THROW_IF_CLEANUP(layer == nullptr, std::invalid_argument, close_dl)
    << func_tag << "created pluggable layer is null";
  auto type = layer->getType();
  NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
    << func_tag << "custom layer must specify type name, but it is empty";
  pluggable->destroyfunc(layer);

  /// note: the handle is intentionally not closed on success; the factory
  /// below keeps using symbols from the loaded library
  FactoryType<nntrainer::Layer> factory_func =
    [pluggable](const PropsType &prop) {
      std::unique_ptr<nntrainer::Layer> layer =
        std::make_unique<internal::PluggedLayer>(pluggable);

      return layer;
    };

  return registerFactory<nntrainer::Layer>(factory_func, type);
}
498
/**
 * @brief load an optimizer plugin shared object and register its factory
 *
 * @param library_path path of the shared object, resolved against @a
 * base_path via getFullPath
 * @param base_path base directory for relative library paths
 * @return int integer key assigned by registerFactory
 * @throws std::invalid_argument when the library cannot be opened, the
 * pluggable symbol is missing, or the plugin is malformed
 */
int AppContext::registerOptimizer(const std::string &library_path,
                                  const std::string &base_path) {
  const std::string full_path = getFullPath(library_path, base_path);

  void *handle = dlopen(full_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
  const char *error_msg = dlerror();

  NNTR_THROW_IF(handle == nullptr, std::invalid_argument)
    << func_tag << "open plugin failed, reason: " << error_msg;

  nntrainer::OptimizerPluggable *pluggable =
    reinterpret_cast<nntrainer::OptimizerPluggable *>(
      dlsym(handle, "ml_train_optimizer_pluggable"));

  /// dlerror() is checked after dlsym because dlsym's return value alone
  /// cannot distinguish "symbol not found" from a legitimately NULL symbol
  error_msg = dlerror();
  auto close_dl = [handle] { dlclose(handle); };
  NNTR_THROW_IF_CLEANUP(error_msg != nullptr || pluggable == nullptr,
                        std::invalid_argument, close_dl)
    << func_tag << "loading symbol failed, reason: " << error_msg;

  /// sanity check: create one instance to validate the plugin and read its
  /// type name, then destroy it again
  auto optimizer = pluggable->createfunc();
  NNTR_THROW_IF_CLEANUP(optimizer == nullptr, std::invalid_argument, close_dl)
    << func_tag << "created pluggable optimizer is null";
  auto type = optimizer->getType();
  NNTR_THROW_IF_CLEANUP(type == "", std::invalid_argument, close_dl)
    << func_tag << "custom optimizer must specify type name, but it is empty";
  pluggable->destroyfunc(optimizer);

  /// note: the handle is intentionally not closed on success; the factory
  /// below keeps using symbols from the loaded library
  FactoryType<nntrainer::Optimizer> factory_func =
    [pluggable](const PropsType &prop) {
      std::unique_ptr<nntrainer::Optimizer> optimizer =
        std::make_unique<internal::PluggedOptimizer>(pluggable);

      return optimizer;
    };

  return registerFactory<nntrainer::Optimizer>(factory_func, type);
}
537
538 std::vector<int>
539 AppContext::registerPluggableFromDirectory(const std::string &base_path) {
540   DIR *dir = opendir(base_path.c_str());
541
542   NNTR_THROW_IF(dir == nullptr, std::invalid_argument)
543     << func_tag << "failed to open the directory: " << base_path;
544
545   struct dirent *entry;
546
547   std::vector<int> keys;
548   while ((entry = readdir(dir)) != NULL) {
549     if (endswith(entry->d_name, solib_suffix)) {
550       if (endswith(entry->d_name, layerlib_suffix)) {
551         try {
552           int key = registerLayer(entry->d_name, base_path);
553           keys.emplace_back(key);
554         } catch (std::exception &e) {
555           closedir(dir);
556           throw;
557         }
558       } else if (endswith(entry->d_name, optimizerlib_suffix)) {
559         try {
560           int key = registerOptimizer(entry->d_name, base_path);
561           keys.emplace_back(key);
562         } catch (std::exception &e) {
563           closedir(dir);
564           throw;
565         }
566       }
567     }
568   }
569
570   closedir(dir);
571
572   return keys;
573 }
574
/**
 * @brief register a factory function under a string key and an integer key
 *
 * @tparam T factory product type; must satisfy isSupported<T>
 * @param factory factory function to register
 * @param key string key; when empty, a probe object is created and asked for
 * its type name
 * @param int_key integer key; -1 requests automatic assignment
 * @return const int the integer key the factory was registered under
 * @throws std::invalid_argument when either key is already taken
 */
template <typename T>
const int AppContext::registerFactory(const FactoryType<T> factory,
                                      const std::string &key,
                                      const int int_key) {
  static_assert(isSupported<T>::value,
                "given type is not supported for current app context");

  auto &index = std::get<IndexType<T>>(factory_map);
  auto &str_map = std::get<StrIndexType<T>>(index);
  auto &int_map = std::get<IntIndexType>(index);

  /// empty key: instantiate one object via the factory to ask its type name
  std::string assigned_key = key == "" ? factory({})->getType() : key;

  /// keys are case-insensitive: normalized to lower case before lookup
  std::transform(assigned_key.begin(), assigned_key.end(), assigned_key.begin(),
                 [](unsigned char c) { return std::tolower(c); });

  /// lock guards both duplicate checks and the map insertions below
  const std::lock_guard<std::mutex> lock(factory_mutex);
  if (str_map.find(assigned_key) != str_map.end()) {
    std::stringstream ss;
    ss << "cannot register factory with already taken key: " << key;
    throw std::invalid_argument(ss.str().c_str());
  }

  if (int_key != -1 && int_map.find(int_key) != int_map.end()) {
    std::stringstream ss;
    ss << "cannot register factory with already taken int key: " << int_key;
    throw std::invalid_argument(ss.str().c_str());
  }

  /// -1 means auto-assign; derived from the current string-map size
  int assigned_int_key = int_key == -1 ? str_map.size() + 1 : int_key;

  str_map[assigned_key] = factory;
  int_map[assigned_int_key] = assigned_key;

  ml_logd("factory has registered with key: %s, int_key: %d",
          assigned_key.c_str(), assigned_int_key);

  return assigned_int_key;
}
614
/// explicit instantiations of registerFactory for every supported product
/// type, so the template definition can stay in this translation unit

/**
 * @copydoc const int AppContext::registerFactory
 */
template const int AppContext::registerFactory<nntrainer::Optimizer>(
  const FactoryType<nntrainer::Optimizer> factory, const std::string &key,
  const int int_key);

/**
 * @copydoc const int AppContext::registerFactory
 */
template const int AppContext::registerFactory<nntrainer::Layer>(
  const FactoryType<nntrainer::Layer> factory, const std::string &key,
  const int int_key);

/**
 * @copydoc const int AppContext::registerFactory
 */
template const int
AppContext::registerFactory<ml::train::LearningRateScheduler>(
  const FactoryType<ml::train::LearningRateScheduler> factory,
  const std::string &key, const int int_key);
636
637 } // namespace nntrainer