Add initial code
author: Inki Dae <inki.dae@samsung.com>
Wed, 11 Aug 2021 08:00:27 +0000 (17:00 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Wed, 11 Aug 2021 08:00:27 +0000 (17:00 +0900)
Signed-off-by: Inki Dae <inki.dae@samsung.com>
CMakeLists.txt [new file with mode: 0644]
LICENSE.APLv2 [new file with mode: 0644]
README.md [new file with mode: 0644]
packaging/training-engine-nntrainer.spec [new file with mode: 0644]
src/training_engine_nntrainer.cpp [new file with mode: 0644]
src/training_engine_nntrainer_private.h [new file with mode: 0644]
training-engine-nntrainer.manifest [new file with mode: 0644]
training-engine-nntrainer.pc.in [new file with mode: 0644]

diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644 (file)
index 0000000..12a27b0
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,33 @@
+# cmake_minimum_required() must be the first command in the top-level
+# CMakeLists.txt, before project() (per CMake documentation).
+CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
+
+PROJECT("training-engine-nntrainer")
+
+# pkg-config modules this shared library depends on and links against.
+SET(dependents "dlog training-engine-interface-common nntrainer capi-ml-common capi-ml-training")
+
+# Find all source files and add the file list to a given variable, SOURCES
+AUX_SOURCE_DIRECTORY(src SOURCES)
+
+ADD_LIBRARY(${PROJECT_NAME} SHARED ${SOURCES})
+
+FIND_PACKAGE(PkgConfig REQUIRED)
+PKG_CHECK_MODULES(${PROJECT_NAME}_DEP REQUIRED ${dependents})
+
+TARGET_INCLUDE_DIRECTORIES(
+    ${PROJECT_NAME}
+    PUBLIC
+    ${PROJECT_SOURCE_DIR}/include
+    ${${PROJECT_NAME}_DEP_INCLUDE_DIRS}
+)
+
+TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${${PROJECT_NAME}_DEP_LIBRARIES})
+
+SET_TARGET_PROPERTIES(${PROJECT_NAME} PROPERTIES CLEAN_DIRECT_OUTPUT 1)
+
+# Install header and library files to given places.
+INSTALL(TARGETS ${PROJECT_NAME} DESTINATION ${LIB_INSTALL_DIR})
+# NOTE(review): headers in this commit live under src/, not include/ —
+# confirm an include/ directory is intended before relying on this rule.
+INSTALL(DIRECTORY ${PROJECT_SOURCE_DIR}/include/
+    DESTINATION include/media
+    FILES_MATCHING
+    PATTERN "*_private.h" EXCLUDE
+    PATTERN "*.h"
+)
diff --git a/LICENSE.APLv2 b/LICENSE.APLv2
new file mode 100644 (file)
index 0000000..bbe9d02
--- /dev/null
+++ b/LICENSE.APLv2
@@ -0,0 +1,206 @@
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.\r
+\r
+                                 Apache License\r
+                           Version 2.0, January 2004\r
+                        http://www.apache.org/licenses/\r
+\r
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\r
+\r
+   1. Definitions.\r
+\r
+      "License" shall mean the terms and conditions for use, reproduction,\r
+      and distribution as defined by Sections 1 through 9 of this document.\r
+\r
+      "Licensor" shall mean the copyright owner or entity authorized by\r
+      the copyright owner that is granting the License.\r
+\r
+      "Legal Entity" shall mean the union of the acting entity and all\r
+      other entities that control, are controlled by, or are under common\r
+      control with that entity. For the purposes of this definition,\r
+      "control" means (i) the power, direct or indirect, to cause the\r
+      direction or management of such entity, whether by contract or\r
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the\r
+      outstanding shares, or (iii) beneficial ownership of such entity.\r
+\r
+      "You" (or "Your") shall mean an individual or Legal Entity\r
+      exercising permissions granted by this License.\r
+\r
+      "Source" form shall mean the preferred form for making modifications,\r
+      including but not limited to software source code, documentation\r
+      source, and configuration files.\r
+\r
+      "Object" form shall mean any form resulting from mechanical\r
+      transformation or translation of a Source form, including but\r
+      not limited to compiled object code, generated documentation,\r
+      and conversions to other media types.\r
+\r
+      "Work" shall mean the work of authorship, whether in Source or\r
+      Object form, made available under the License, as indicated by a\r
+      copyright notice that is included in or attached to the work\r
+      (an example is provided in the Appendix below).\r
+\r
+      "Derivative Works" shall mean any work, whether in Source or Object\r
+      form, that is based on (or derived from) the Work and for which the\r
+      editorial revisions, annotations, elaborations, or other modifications\r
+      represent, as a whole, an original work of authorship. For the purposes\r
+      of this License, Derivative Works shall not include works that remain\r
+      separable from, or merely link (or bind by name) to the interfaces of,\r
+      the Work and Derivative Works thereof.\r
+\r
+      "Contribution" shall mean any work of authorship, including\r
+      the original version of the Work and any modifications or additions\r
+      to that Work or Derivative Works thereof, that is intentionally\r
+      submitted to Licensor for inclusion in the Work by the copyright owner\r
+      or by an individual or Legal Entity authorized to submit on behalf of\r
+      the copyright owner. For the purposes of this definition, "submitted"\r
+      means any form of electronic, verbal, or written communication sent\r
+      to the Licensor or its representatives, including but not limited to\r
+      communication on electronic mailing lists, source code control systems,\r
+      and issue tracking systems that are managed by, or on behalf of, the\r
+      Licensor for the purpose of discussing and improving the Work, but\r
+      excluding communication that is conspicuously marked or otherwise\r
+      designated in writing by the copyright owner as "Not a Contribution."\r
+\r
+      "Contributor" shall mean Licensor and any individual or Legal Entity\r
+      on behalf of whom a Contribution has been received by Licensor and\r
+      subsequently incorporated within the Work.\r
+\r
+   2. Grant of Copyright License. Subject to the terms and conditions of\r
+      this License, each Contributor hereby grants to You a perpetual,\r
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r
+      copyright license to reproduce, prepare Derivative Works of,\r
+      publicly display, publicly perform, sublicense, and distribute the\r
+      Work and such Derivative Works in Source or Object form.\r
+\r
+   3. Grant of Patent License. Subject to the terms and conditions of\r
+      this License, each Contributor hereby grants to You a perpetual,\r
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r
+      (except as stated in this section) patent license to make, have made,\r
+      use, offer to sell, sell, import, and otherwise transfer the Work,\r
+      where such license applies only to those patent claims licensable\r
+      by such Contributor that are necessarily infringed by their\r
+      Contribution(s) alone or by combination of their Contribution(s)\r
+      with the Work to which such Contribution(s) was submitted. If You\r
+      institute patent litigation against any entity (including a\r
+      cross-claim or counterclaim in a lawsuit) alleging that the Work\r
+      or a Contribution incorporated within the Work constitutes direct\r
+      or contributory patent infringement, then any patent licenses\r
+      granted to You under this License for that Work shall terminate\r
+      as of the date such litigation is filed.\r
+\r
+   4. Redistribution. You may reproduce and distribute copies of the\r
+      Work or Derivative Works thereof in any medium, with or without\r
+      modifications, and in Source or Object form, provided that You\r
+      meet the following conditions:\r
+\r
+      (a) You must give any other recipients of the Work or\r
+          Derivative Works a copy of this License; and\r
+\r
+      (b) You must cause any modified files to carry prominent notices\r
+          stating that You changed the files; and\r
+\r
+      (c) You must retain, in the Source form of any Derivative Works\r
+          that You distribute, all copyright, patent, trademark, and\r
+          attribution notices from the Source form of the Work,\r
+          excluding those notices that do not pertain to any part of\r
+          the Derivative Works; and\r
+\r
+      (d) If the Work includes a "NOTICE" text file as part of its\r
+          distribution, then any Derivative Works that You distribute must\r
+          include a readable copy of the attribution notices contained\r
+          within such NOTICE file, excluding those notices that do not\r
+          pertain to any part of the Derivative Works, in at least one\r
+          of the following places: within a NOTICE text file distributed\r
+          as part of the Derivative Works; within the Source form or\r
+          documentation, if provided along with the Derivative Works; or,\r
+          within a display generated by the Derivative Works, if and\r
+          wherever such third-party notices normally appear. The contents\r
+          of the NOTICE file are for informational purposes only and\r
+          do not modify the License. You may add Your own attribution\r
+          notices within Derivative Works that You distribute, alongside\r
+          or as an addendum to the NOTICE text from the Work, provided\r
+          that such additional attribution notices cannot be construed\r
+          as modifying the License.\r
+\r
+      You may add Your own copyright statement to Your modifications and\r
+      may provide additional or different license terms and conditions\r
+      for use, reproduction, or distribution of Your modifications, or\r
+      for any such Derivative Works as a whole, provided Your use,\r
+      reproduction, and distribution of the Work otherwise complies with\r
+      the conditions stated in this License.\r
+\r
+   5. Submission of Contributions. Unless You explicitly state otherwise,\r
+      any Contribution intentionally submitted for inclusion in the Work\r
+      by You to the Licensor shall be under the terms and conditions of\r
+      this License, without any additional terms or conditions.\r
+      Notwithstanding the above, nothing herein shall supersede or modify\r
+      the terms of any separate license agreement you may have executed\r
+      with Licensor regarding such Contributions.\r
+\r
+   6. Trademarks. This License does not grant permission to use the trade\r
+      names, trademarks, service marks, or product names of the Licensor,\r
+      except as required for reasonable and customary use in describing the\r
+      origin of the Work and reproducing the content of the NOTICE file.\r
+\r
+   7. Disclaimer of Warranty. Unless required by applicable law or\r
+      agreed to in writing, Licensor provides the Work (and each\r
+      Contributor provides its Contributions) on an "AS IS" BASIS,\r
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+      implied, including, without limitation, any warranties or conditions\r
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\r
+      PARTICULAR PURPOSE. You are solely responsible for determining the\r
+      appropriateness of using or redistributing the Work and assume any\r
+      risks associated with Your exercise of permissions under this License.\r
+\r
+   8. Limitation of Liability. In no event and under no legal theory,\r
+      whether in tort (including negligence), contract, or otherwise,\r
+      unless required by applicable law (such as deliberate and grossly\r
+      negligent acts) or agreed to in writing, shall any Contributor be\r
+      liable to You for damages, including any direct, indirect, special,\r
+      incidental, or consequential damages of any character arising as a\r
+      result of this License or out of the use or inability to use the\r
+      Work (including but not limited to damages for loss of goodwill,\r
+      work stoppage, computer failure or malfunction, or any and all\r
+      other commercial damages or losses), even if such Contributor\r
+      has been advised of the possibility of such damages.\r
+\r
+   9. Accepting Warranty or Additional Liability. While redistributing\r
+      the Work or Derivative Works thereof, You may choose to offer,\r
+      and charge a fee for, acceptance of support, warranty, indemnity,\r
+      or other liability obligations and/or rights consistent with this\r
+      License. However, in accepting such obligations, You may act only\r
+      on Your own behalf and on Your sole responsibility, not on behalf\r
+      of any other Contributor, and only if You agree to indemnify,\r
+      defend, and hold each Contributor harmless for any liability\r
+      incurred by, or claims asserted against, such Contributor by reason\r
+      of your accepting any such warranty or additional liability.\r
+\r
+   END OF TERMS AND CONDITIONS\r
+\r
+   APPENDIX: How to apply the Apache License to your work.\r
+\r
+      To apply the Apache License to your work, attach the following\r
+      boilerplate notice, with the fields enclosed by brackets "[]"\r
+      replaced with your own identifying information. (Don't include\r
+      the brackets!)  The text should be enclosed in the appropriate\r
+      comment syntax for the file format. We also recommend that a\r
+      file or class name and description of purpose be included on the\r
+      same "printed page" as the copyright notice for easier\r
+      identification within third-party archives.\r
+\r
+   Copyright [yyyy] [name of copyright owner]\r
+\r
+   Licensed under the Apache License, Version 2.0 (the "License");\r
+   you may not use this file except in compliance with the License.\r
+   You may obtain a copy of the License at\r
+\r
+       http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+   Unless required by applicable law or agreed to in writing, software\r
+   distributed under the License is distributed on an "AS IS" BASIS,\r
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+   See the License for the specific language governing permissions and\r
+   limitations under the License.\r
+\r
+\r
+\r
diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..dffce3f
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# training-engine-nntrainer
\ No newline at end of file
diff --git a/packaging/training-engine-nntrainer.spec b/packaging/training-engine-nntrainer.spec
new file mode 100644 (file)
index 0000000..e27bd95
--- /dev/null
+++ b/packaging/training-engine-nntrainer.spec
@@ -0,0 +1,47 @@
+# Packaging for the NNTrainer backend of the Tizen training-engine
+# interface. Builds one shared library installed under %{_libdir}.
+Name:       training-engine-nntrainer
+Summary:    Training engine NNTrainer backend
+Version:    0.0.1
+Release:    1
+Group:      Multimedia/Libraries
+License:    Apache-2.0
+# NOTE(review): restricted to ARM/aarch64 — presumably because the
+# nntrainer dependency is only built there; confirm.
+ExclusiveArch: %{arm} aarch64
+Source0:    %{name}-%{version}.tar.gz
+Requires(post): /sbin/ldconfig
+Requires(postun): /sbin/ldconfig
+BuildRequires: cmake
+BuildRequires: pkgconfig(dlog)
+BuildRequires: pkgconfig(training-engine-interface-common)
+BuildRequires: pkgconfig(nntrainer)
+BuildRequires: pkgconfig(capi-ml-training)
+BuildRequires: pkgconfig(capi-ml-common)
+
+%description
+Training Engine NNTrainer backend of training-engine-interface
+
+%prep
+%setup -q
+
+%build
+%if 0%{?sec_build_binary_debug_enable}
+export CFLAGS="$CFLAGS -DTIZEN_DEBUG_ENABLE"
+export CXXFLAGS="$CXXFLAGS -DTIZEN_DEBUG_ENABLE"
+export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
+%endif
+
+%cmake %{build_options} .
+
+make %{?jobs:-j%jobs}
+
+%install
+rm -rf %{buildroot}
+
+%make_install
+
+# Refresh the shared-library cache on install/uninstall.
+%post -p /sbin/ldconfig
+%postun -p /sbin/ldconfig
+
+%files
+%manifest %{name}.manifest
+%license LICENSE.APLv2
+%defattr(-,root,root,-)
+%{_libdir}/*.so
diff --git a/src/training_engine_nntrainer.cpp b/src/training_engine_nntrainer.cpp
new file mode 100644 (file)
index 0000000..186cbe4
--- /dev/null
+++ b/src/training_engine_nntrainer.cpp
@@ -0,0 +1,558 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <training_engine_error.h>
+#include "training_engine_nntrainer_private.h"
+
+#include <iostream>
+
+using namespace std;
+using namespace TrainingEngineInterface::Common;
+
+namespace TrainingEngineImpl
+{
+namespace NntrainerImpl
+{
+
+       // Data generator callback passed to ml_train_dataset_create_with_generator().
+       // NOTE(review): implementation pending (TODO) — it currently reports success
+       // without filling 'data'/'label' or setting '*last'; confirm the intended
+       // generator contract against the ML training C API before relying on it.
+       int UpdateTrainData(float **data, float **label, bool *last, void *user_data)
+       {
+               // TODO.
+
+               return ML_ERROR_NONE;
+       }
+
+       // Default-construct with empty train/test/verify staging buffers.
+       TrainingNntrainer::TrainingNntrainer(void) :
+               train_data_set(), test_data_set(), verify_data_set()
+       {
+       }
+
+       // No backend handles are released here; model/layer/optimizer/dataset
+       // objects are destroyed explicitly through the Destroy*() methods.
+       TrainingNntrainer::~TrainingNntrainer()
+       {
+       }
+
+       // Report backend capabilities to the common training-engine interface.
+       // Only the tensor ordering is filled in: NNTrainer consumes NCHW tensors.
+       // Always returns TRAINING_ENGINE_ERROR_NONE.
+       int TrainingNntrainer::GetBackendCapacity(training_engine_capacity &capacity)
+       {
+               LOGI("ENTER");
+
+               // NNTrainer supports only NCHW tensor order.
+               capacity.supported_tensor_shape_type = TRAINING_TENSOR_SHAPE_NCHW;
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+
+       // Map an interface-level layer type onto the corresponding
+       // ml_train_layer_type_e value. Unrecognized values map to
+       // ML_TRAIN_LAYER_TYPE_UNKNOWN so callers can reject them.
+       ml_train_layer_type_e TrainingNntrainer::ConvertLayerType(training_engine_layer_type_e type)
+       {
+               switch (type) {
+                       case TRAINING_LAYER_TYPE_INPUT:
+                               return ML_TRAIN_LAYER_TYPE_INPUT;
+                       case TRAINING_LAYER_TYPE_FC:
+                               return ML_TRAIN_LAYER_TYPE_FC;
+                       case TRAINING_LAYER_TYPE_BN:
+                               return ML_TRAIN_LAYER_TYPE_BN;
+                       case TRAINING_LAYER_TYPE_CONV2D:
+                               return ML_TRAIN_LAYER_TYPE_CONV2D;
+                       case TRAINING_LAYER_TYPE_POOLING2D:
+                               return ML_TRAIN_LAYER_TYPE_POOLING2D;
+                       case TRAINING_LAYER_TYPE_FLATTEN:
+                               return ML_TRAIN_LAYER_TYPE_FLATTEN;
+                       case TRAINING_LAYER_TYPE_ACTIVATION:
+                               return ML_TRAIN_LAYER_TYPE_ACTIVATION;
+                       case TRAINING_LAYER_TYPE_ADDITION:
+                               return ML_TRAIN_LAYER_TYPE_ADDITION;
+                       case TRAINING_LAYER_TYPE_CONCAT:
+                               return ML_TRAIN_LAYER_TYPE_CONCAT;
+                       case TRAINING_LAYER_TYPE_MULTIOUT:
+                               return ML_TRAIN_LAYER_TYPE_MULTIOUT;
+                       default:
+                               return ML_TRAIN_LAYER_TYPE_UNKNOWN;
+               }
+       }
+
+       // Map an interface-level optimizer type onto ml_train_optimizer_type_e.
+       // Unrecognized values map to ML_TRAIN_OPTIMIZER_TYPE_UNKNOWN so callers
+       // can reject them.
+       ml_train_optimizer_type_e TrainingNntrainer::ConvertOptimizerType(training_engine_optimizer_type_e type)
+       {
+               switch (type) {
+                       case TRAINING_OPTIMIZER_TYPE_ADAM:
+                               return ML_TRAIN_OPTIMIZER_TYPE_ADAM;
+                       case TRAINING_OPTIMIZER_TYPE_SGD:
+                               return ML_TRAIN_OPTIMIZER_TYPE_SGD;
+                       default:
+                               return ML_TRAIN_OPTIMIZER_TYPE_UNKNOWN;
+               }
+       }
+
+       // Create an empty NNTrainer model object and wrap its raw handle in a
+       // training_engine_model. Returns null on backend failure.
+       // NOTE(review): 'backbone_model_path' and 'model_config_path' are
+       // currently ignored (see TODO) — confirm whether config-file loading is
+       // planned for this entry point.
+       unique_ptr<training_engine_model> TrainingNntrainer::CreateModel(std::string backbone_model_path,
+                                                                                                                                        std::string model_config_path)
+       {
+               LOGI("ENTER");
+
+               // TODO. Consider backend model and using model config file.
+
+               auto model = make_unique<training_engine_model>();
+               ml_train_model_h model_handle;
+
+               int ret = ml_train_model_construct(&model_handle);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to create a model.");
+                       return NULL;
+               }
+
+               // Store the backend handle type-erased; Destroy/Add* cast it back.
+               model->model_handle = static_cast<void *>(model_handle);
+
+               LOGI("LEAVE");
+
+               return model;
+       }
+
+       void TrainingNntrainer::DestroyModel(const training_engine_model *model)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ_NO_RETURN_VAL(model);
+
+               int ret = ml_train_model_destroy(model->model_handle);
+               if (ret != ML_ERROR_NONE)
+                       LOGE("Failed to destroy a model object.");
+
+               LOGI("LEAVE");
+       }
+
+       // Create a backend layer of the given interface-level type and wrap its
+       // handle. Returns null when the type is unknown or creation fails.
+       unique_ptr<training_engine_layer> TrainingNntrainer::CreateLayer(training_engine_layer_type_e type)
+       {
+               LOGI("ENTER");
+
+               auto layer = make_unique<training_engine_layer>();
+               ml_train_layer_h layer_handle;
+               ml_train_layer_type_e layer_type = ConvertLayerType(type);
+               if (layer_type == ML_TRAIN_LAYER_TYPE_UNKNOWN) {
+                       LOGE("Failed to convert a layer type.");
+                       return NULL;
+               }
+
+               int ret = ml_train_layer_create(&layer_handle, layer_type);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to create a layer.");
+                       return NULL;
+               }
+
+               // Store the backend handle type-erased; later calls cast it back.
+               layer->layer_handle = static_cast<void *>(layer_handle);
+
+               LOGI("LEAVE");
+
+               return layer;
+       }
+
+       // Destroy the backend layer wrapped by 'layer'; a backend failure is
+       // logged but not propagated.
+       void TrainingNntrainer::DestroyLayer(const training_engine_layer *layer)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ_NO_RETURN_VAL(layer);
+
+               int ret = ml_train_layer_destroy(static_cast<ml_train_layer_h>(layer->layer_handle));
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to destroy a layer handle.");
+                       return;
+               }
+
+               LOGI("LEAVE");
+       }
+
+       // Apply a property option string to a layer.
+       // NOTE(review): the option format is presumably "key=value" pairs as
+       // accepted by ml_train_layer_set_property() — confirm against the API docs.
+       // Returns TRAINING_ENGINE_ERROR_INVALID_PARAMETER for a null layer or
+       // empty option, TRAINING_ENGINE_ERROR_INVALID_OPERATION on backend failure.
+       int TrainingNntrainer::SetLayerProperty(const training_engine_layer *layer,
+                                                                                       training_engine_layer_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(layer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               if (property.option.empty()) {
+                       LOGE("invalid property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               LOGI("layer option : %s", property.option.c_str());
+
+               // The backend API is variadic; NULL terminates the property list.
+               int ret = ml_train_layer_set_property(static_cast<ml_train_layer_h>(layer->layer_handle),
+                                                                                         property.option.c_str(), NULL);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to set a layer property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       // Attach a previously created layer to a model. Both wrappers must be
+       // non-null; a backend failure maps to TRAINING_ENGINE_ERROR_INVALID_OPERATION.
+       int TrainingNntrainer::AddLayer(const training_engine_model *model, const training_engine_layer *layer)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(layer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = ml_train_model_add_layer(static_cast<ml_train_model_h>(model->model_handle),
+                                                                                  static_cast<ml_train_layer_h>(layer->layer_handle));
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to add a given layer to a given model.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       // Create a backend optimizer of the given interface-level type and wrap
+       // its handle. Returns null when the type is unknown or creation fails.
+       unique_ptr<training_engine_optimizer> TrainingNntrainer::CreateOptimizer(training_engine_optimizer_type_e type)
+       {
+               LOGI("ENTER");
+
+               auto optimizer = make_unique<training_engine_optimizer>();
+               ml_train_optimizer_type_e optimizer_type = ConvertOptimizerType(type);
+               if (optimizer_type == ML_TRAIN_OPTIMIZER_TYPE_UNKNOWN) {
+                       LOGE("Failed to convert a optimizer type.");
+                       return NULL;
+               }
+
+               ml_train_optimizer_h optimizer_handle;
+
+               int ret = ml_train_optimizer_create(&optimizer_handle, optimizer_type);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to create a optimizer.");
+                       return NULL;
+               }
+
+               // Store the backend handle type-erased; later calls cast it back.
+               optimizer->optimizer_handle = static_cast<void *>(optimizer_handle);
+
+               LOGI("LEAVE");
+
+               return optimizer;
+       }
+
+       // Apply a property option string to an optimizer (same contract as
+       // SetLayerProperty: null/empty input -> INVALID_PARAMETER, backend
+       // failure -> INVALID_OPERATION).
+       int TrainingNntrainer::SetOptimizerProperty(const training_engine_optimizer *optimizer,
+                                                                                               training_engine_optimizer_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(optimizer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               if (property.option.empty()) {
+                       LOGE("invalid property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               LOGI("optimizer option : %s", property.option.c_str());
+
+               // The backend API is variadic; NULL terminates the property list.
+               int ret = ml_train_optimizer_set_property(static_cast<ml_train_optimizer_h>(optimizer->optimizer_handle),
+                                                                                         property.option.c_str(), NULL);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to set a optimizer property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       // Register an optimizer with a model via ml_train_model_set_optimizer().
+       // Both wrappers must be non-null; a backend failure maps to
+       // TRAINING_ENGINE_ERROR_INVALID_OPERATION.
+       int TrainingNntrainer::AddOptimizer(const training_engine_model *model,
+                                                                               const training_engine_optimizer *optimizer)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(optimizer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = ml_train_model_set_optimizer(static_cast<ml_train_model_h>(model->model_handle),
+                                                                                          static_cast<ml_train_optimizer_h>(optimizer->optimizer_handle));
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to add a given optimizer to a given model.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       // Create a generator-backed dataset. The interface-level type selects
+       // which generator slot receives UpdateTrainData: 1st argument for TRAIN,
+       // 3rd for TEST, 2nd for VERIFY.
+       // NOTE(review): slot order assumed to be (train, valid, test) per the
+       // ml_train_dataset_create_with_generator() signature — confirm that
+       // VERIFY is meant to map to the validation slot.
+       // Returns null for an unknown type or a backend failure.
+       unique_ptr<training_engine_dataset> TrainingNntrainer::CreateDataset(training_engine_dataset_type_e type)
+       {
+               LOGI("ENTER");
+
+               ml_train_dataset_h handle;
+               int ret = ML_ERROR_NONE;
+
+               switch (type) {
+                       case TRAINING_DATASET_TYPE_TRAIN:
+                               ret = ml_train_dataset_create_with_generator(&handle, UpdateTrainData, NULL, NULL);
+                               break;
+                       case TRAINING_DATASET_TYPE_TEST:
+                               ret = ml_train_dataset_create_with_generator(&handle, NULL, NULL, UpdateTrainData);
+                               break;
+                       case TRAINING_DATASET_TYPE_VERIFY:
+                               ret = ml_train_dataset_create_with_generator(&handle, NULL, UpdateTrainData, NULL);
+                               break;
+                       default:
+                               LOGE("Invalid dataset type.");
+                               return NULL;
+               }
+
+               LOGI("LEAVE");
+
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to create a dataset.");
+                       return NULL;
+               }
+
+               auto dataset = make_unique<training_engine_dataset>();
+               dataset->type = type;
+               dataset->dataset_handle = static_cast<void *>(handle);
+
+               return dataset;
+       }
+
+       // Destroy the backend dataset wrapped by 'dataset'; a backend failure is
+       // logged but not propagated.
+       void TrainingNntrainer::DestroyDataset(const training_engine_dataset *dataset)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ_NO_RETURN_VAL(dataset);
+
+               int ret = ml_train_dataset_destroy(static_cast<ml_train_dataset_h>(dataset->dataset_handle));
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to destroy a dataset handle.");
+                       return;
+               }
+
+               LOGI("LEAVE");
+       }
+
+
+       int TrainingNntrainer::SetDatasetProperty(const training_engine_dataset *dataset,
+                                                                                         training_engine_dataset_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(dataset, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               if (property.option.empty()) {
+                       LOGE("invalid property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               LOGI("property option : %s", property.option.c_str());
+
+               int ret = ml_train_dataset_set_property(static_cast<ml_train_dataset_h>(dataset->dataset_handle),
+                                                                                         property.option.c_str(), NULL);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to set a dataset property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               LOGI("LEAVE");
+
+               return 0;
+       }
+
+       // Append raw float samples and labels to the member staging buffer that
+       // matches 'type' (train/test/verify) and grow its running length counters.
+       // NOTE(review): the 'dataset' parameter is unused here — the data is
+       // staged in member buffers, presumably consumed later by the
+       // UpdateTrainData generator callback; confirm.
+       int TrainingNntrainer::AddDataToDataset(const training_engine_dataset *dataset,
+                                                                                       float *data, size_t data_len,
+                                                                                       float *label, size_t label_len,
+                                                                                       training_engine_dataset_type_e type)
+       {
+               LOGI("ENTER");
+
+               switch (type) {
+                       case TRAINING_DATASET_TYPE_TRAIN:
+                               for (size_t i = 0; i < data_len; ++i)
+                                       train_data_set.data_set.push_back(data[i]);
+
+                               for (size_t i = 0; i < label_len; ++i)
+                                       train_data_set.label_set.push_back(label[i]);
+
+                               train_data_set.total_data_len += data_len;
+                               train_data_set.total_label_len += label_len;
+
+                               break;
+                       case TRAINING_DATASET_TYPE_TEST:
+                               for (size_t i = 0; i < data_len; ++i)
+                                       test_data_set.data_set.push_back(data[i]);
+
+                               for (size_t i = 0; i < label_len; ++i)
+                                       test_data_set.label_set.push_back(label[i]);
+
+                               test_data_set.total_data_len += data_len;
+                               test_data_set.total_label_len += label_len;
+
+                               break;
+                       case TRAINING_DATASET_TYPE_VERIFY:
+                               for (size_t i = 0; i < data_len; ++i)
+                                       verify_data_set.data_set.push_back(data[i]);
+
+                               for (size_t i = 0; i < label_len; ++i)
+                                       verify_data_set.label_set.push_back(label[i]);
+
+                               verify_data_set.total_data_len += data_len;
+                               verify_data_set.total_label_len += label_len;
+                               break;
+                       default:
+                               LOGE("Invalid dataset type");
+                               return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       int TrainingNntrainer::AddDataToDataset(const training_engine_dataset *dataset,
+                                                                                       uint32_t *data, size_t data_len,
+                                                                                       uint32_t *label, size_t label_len,
+                                                                                       training_engine_dataset_type_e type)
+       {
+               LOGI("ENTER");
+
+               // TODO.
+
+               LOGI("LEAVE");
+
+               return 0;
+       }
+
+       int TrainingNntrainer::SetDataset(const training_engine_model *model,
+                                                                         const training_engine_dataset *dataset)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(dataset, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = ml_train_model_set_dataset(static_cast<ml_train_model_h>(model->model_handle),
+                                                                                          static_cast<ml_train_dataset_h>(dataset->dataset_handle));
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to add a given dataset to a given model.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               LOGI("LEAVE");
+
+               return 0;
+       }
+
+       int TrainingNntrainer::CompileModel(const training_engine_model *model,
+                                                                               training_engine_compile_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               std::vector<std::string> strings;
+               // The maximum count of model and compile properties is 5.
+               const unsigned int max_property_size = 5;
+               size_t original_size = property.options.size();
+
+               if (max_property_size < original_size) {
+                       LOGE("Invalid a number of property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               size_t actual_size = 0;
+
+               for (size_t index = 0; index < original_size; ++index) {
+                       strings.push_back(property.options[index]);
+                       actual_size++;
+               }
+
+               for (size_t index = 0; index < max_property_size - actual_size; ++index)
+                       strings.push_back("");
+
+               int ret = ml_train_model_compile(model->model_handle, strings[0], strings[1],
+                                                                                strings[2], strings[3], strings[4], NULL);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to compile a given model.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       int TrainingNntrainer::TrainModel(const training_engine_model *model,
+                                                                         training_engine_model_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               std::vector<std::string> strings;
+               // The maximum count of model and compile properties is 5.
+               const unsigned int max_property_size = 5;
+               size_t original_size = property.options.size();
+
+               if (max_property_size < original_size) {
+                       LOGE("Invalid a number of property.");
+                       return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               size_t actual_size = 0;
+
+               for (size_t index = 0; index < original_size; ++index) {
+                       strings.push_back(property.options[index]);
+                       actual_size++;
+               }
+
+               for (size_t index = 0; index < max_property_size - actual_size; ++index)
+                       strings.push_back("");
+
+               int ret = ml_train_model_run(model->model_handle, strings[0], strings[1],
+                                                                        strings[2], strings[3], strings[4], NULL);
+               if (ret != ML_ERROR_NONE) {
+                       LOGE("Failed to train a given model.");
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+
+               LOGI("LEAVE");
+
+               return 0;
+       }
+       extern "C"
+       {
+               class ITrainingEngineCommon *EngineCommonInit(void)
+               {
+                       LOGI("ENTER");
+
+                       auto engine = new TrainingNntrainer();
+
+                       LOGI("LEAVE");
+
+                       return engine;
+               }
+
+               void EngineCommonDestroy(class ITrainingEngineCommon *engine)
+               {
+                       LOGI("ENTER");
+
+                       if (engine)
+                               delete engine;
+
+                       LOGI("LEAVE");
+               }
+       }
+} /* NntrainerImpl */
+} /* TrainingEngineImpl */
diff --git a/src/training_engine_nntrainer_private.h b/src/training_engine_nntrainer_private.h
new file mode 100644 (file)
index 0000000..872edcc
--- /dev/null
@@ -0,0 +1,82 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TRAINING_ENGINE_IMPL_NNTRAINER_H__
+#define __TRAINING_ENGINE_IMPL_NNTRAINER_H__
+
+#include <cstddef>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include <training_engine_common.h>
+#include <nntrainer.h>
+
+#include <dlog/dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "TRAINING_ENGINE_NNTRAINER"
+
+namespace TrainingEngineImpl
+{
+namespace NntrainerImpl
+{
+
+       typedef struct dataset_type {
+               size_t total_data_len;
+               size_t total_label_len;
+               std::vector<float> data_set;
+               std::vector<float> label_set;
+       } dataset_type;
+
+	// nntrainer-backed implementation of the common training engine
+	// interface. Wraps the capi-ml-training (ml_train_*) API: models,
+	// layers, optimizers and datasets created here are nntrainer handles
+	// stored inside the training_engine_* wrapper structs.
+	class TrainingNntrainer : public TrainingEngineInterface::Common::ITrainingEngineCommon
+	{
+	private:
+		// Map interface enum values to the corresponding ml_train enums.
+		ml_train_layer_type_e ConvertLayerType(training_engine_layer_type_e type);
+		ml_train_optimizer_type_e ConvertOptimizerType(training_engine_optimizer_type_e type);
+		// Staging buffers filled by AddDataToDataset(), one per dataset type.
+		dataset_type train_data_set;
+		dataset_type test_data_set;
+		dataset_type verify_data_set;
+
+	public:
+		TrainingNntrainer();
+		~TrainingNntrainer();
+
+		// Report what this backend supports to the common interface.
+		int GetBackendCapacity(training_engine_capacity &capacity) final;
+		// Model lifecycle. Create* returns an owning wrapper; Destroy*
+		// releases the underlying nntrainer handle.
+		std::unique_ptr<training_engine_model> CreateModel(std::string backbone_model_path, std::string model_config_path) final;
+		void DestroyModel(const training_engine_model *model) final;
+		std::unique_ptr<training_engine_layer> CreateLayer(training_engine_layer_type_e type) final;
+		void DestroyLayer(const training_engine_layer *layer) final;
+		int SetLayerProperty(const training_engine_layer *layer, training_engine_layer_property &property) final;
+		int AddLayer(const training_engine_model *model, const training_engine_layer *layer) final;
+		std::unique_ptr<training_engine_optimizer> CreateOptimizer(training_engine_optimizer_type_e type) final;
+		int SetOptimizerProperty(const training_engine_optimizer *optimizer, training_engine_optimizer_property &property) final;
+		int AddOptimizer(const training_engine_model *model, const training_engine_optimizer *optimizer) final;
+		std::unique_ptr<training_engine_dataset> CreateDataset(training_engine_dataset_type_e type) final;
+		void DestroyDataset(const training_engine_dataset *dataset) final;
+		int SetDatasetProperty(const training_engine_dataset *dataset, training_engine_dataset_property &property) final;
+		// Append raw sample/label values to the staging buffer selected
+		// by 'type'. The unsigned-int overload is not implemented yet.
+		int AddDataToDataset(const training_engine_dataset *dataset, float *data, size_t data_len,
+							 float *label, size_t label_len, training_engine_dataset_type_e type) final;
+		int AddDataToDataset(const training_engine_dataset *dataset, unsigned int *data, size_t data_len,
+							 unsigned int *label, size_t label_len, training_engine_dataset_type_e type) final;
+		// Bind a dataset to a model, then compile and run training with
+		// up to 5 property strings each.
+		int SetDataset(const training_engine_model *model, const training_engine_dataset *dataset) final;
+		int CompileModel(const training_engine_model *model, training_engine_compile_property &property) final;
+		int TrainModel(const training_engine_model *model, training_engine_model_property &property) final;
+	};
+
+} /* NntrainerImpl */
+} /* TrainingEngineImpl */
+
+#endif /* __TRAINING_ENGINE_IMPL_NNTRAINER_H__ */
diff --git a/training-engine-nntrainer.manifest b/training-engine-nntrainer.manifest
new file mode 100644 (file)
index 0000000..86dbb26
--- /dev/null
@@ -0,0 +1,5 @@
+<manifest>
+    <request>
+        <domain name="_" />
+    </request>
+</manifest>
diff --git a/training-engine-nntrainer.pc.in b/training-engine-nntrainer.pc.in
new file mode 100644 (file)
index 0000000..e7cd18f
--- /dev/null
@@ -0,0 +1,14 @@
+
+# Package Information for pkg-config
+
+prefix=@PREFIX@
+exec_prefix=/usr
+libdir=@LIB_INSTALL_DIR@
+includedir=/usr/include/media
+
+Name: @PC_NAME@
+Description: @PACKAGE_DESCRIPTION@
+Version: @VERSION@
+Requires: @PC_REQUIRED@
+Libs: -L${libdir} @PC_LDFLAGS@
+Cflags: -I${includedir} -I/usr/include