Add initial code

author     Inki Dae <inki.dae@samsung.com>
           Fri, 6 Aug 2021 07:12:59 +0000 (16:12 +0900)
committer  Inki Dae <inki.dae@samsung.com>
           Fri, 6 Aug 2021 07:12:59 +0000 (16:12 +0900)
Signed-off-by: Inki Dae <inki.dae@samsung.com>
15 files changed:
CMakeLists.txt [new file with mode: 0644]
LICENSE.APLv2 [new file with mode: 0644]
README.md [new file with mode: 0644]
include/training_engine_common.h [new file with mode: 0644]
include/training_engine_common_impl.h [new file with mode: 0644]
include/training_engine_error.h [new file with mode: 0644]
include/training_engine_type.h [new file with mode: 0644]
packaging/training-engine-interface.spec [new file with mode: 0644]
src/training_engine_common_impl.cpp [new file with mode: 0644]
test/CMakeLists.txt [new file with mode: 0644]
test/src/CMakeLists.txt [new file with mode: 0644]
test/src/test_gtest.cpp [new file with mode: 0644]
test/src/training_engine_tc.cpp [new file with mode: 0644]
training-engine-interface-common.manifest [new file with mode: 0644]
training-engine-interface-common.pc.in [new file with mode: 0644]

diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644 (file)
index 0000000..b3388cf
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,45 @@
+PROJECT("training-engine-interface-common")
+CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
+
+# Find all source files and add the file list to a given variable, SOURCES
+AUX_SOURCE_DIRECTORY(src SOURCES)
+
+ADD_LIBRARY(${PROJECT_NAME} SHARED ${SOURCES})
+
+FIND_PACKAGE(PkgConfig REQUIRED)
+PKG_CHECK_MODULES(${PROJECT_NAME}_DEP REQUIRED dlog)
+
+TARGET_INCLUDE_DIRECTORIES(${PROJECT_NAME}
+    PUBLIC
+    ${PROJECT_SOURCE_DIR}/include
+    ${${PROJECT_NAME}_DEP_INCLUDE_DIRS}
+)
+
+TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${${PROJECT_NAME}_DEP_LIBRARIES})
+
+SET_TARGET_PROPERTIES(${PROJECT_NAME}
+    PROPERTIES
+    VERSION ${FULLVER}
+    SOVERSION ${MAJORVER}
+    CLEAN_DIRECT_OUTPUT 1
+)
+
+#Install header and library files to given places
+INSTALL(TARGETS ${PROJECT_NAME} DESTINATION ${LIB_INSTALL_DIR})
+INSTALL(DIRECTORY ${PROJECT_SOURCE_DIR}/include/ DESTINATION include/media
+    FILES_MATCHING
+    PATTERN "*_private.h" EXCLUDE
+    PATTERN "*.h"
+)
+
+CONFIGURE_FILE(${PROJECT_NAME}.pc.in
+    ${CMAKE_CURRENT_SOURCE_DIR}/${PROJECT_NAME}.pc
+    @ONLY
+)
+
+INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${PROJECT_NAME}.pc
+    DESTINATION
+    ${LIB_INSTALL_DIR}/pkgconfig
+)
+
+ADD_SUBDIRECTORY(${PROJECT_SOURCE_DIR}/test)
diff --git a/LICENSE.APLv2 b/LICENSE.APLv2
new file mode 100644 (file)
index 0000000..bbe9d02
--- /dev/null
+++ b/LICENSE.APLv2
@@ -0,0 +1,206 @@
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.\r
+\r
+                                 Apache License\r
+                           Version 2.0, January 2004\r
+                        http://www.apache.org/licenses/\r
+\r
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\r
+\r
+   1. Definitions.\r
+\r
+      "License" shall mean the terms and conditions for use, reproduction,\r
+      and distribution as defined by Sections 1 through 9 of this document.\r
+\r
+      "Licensor" shall mean the copyright owner or entity authorized by\r
+      the copyright owner that is granting the License.\r
+\r
+      "Legal Entity" shall mean the union of the acting entity and all\r
+      other entities that control, are controlled by, or are under common\r
+      control with that entity. For the purposes of this definition,\r
+      "control" means (i) the power, direct or indirect, to cause the\r
+      direction or management of such entity, whether by contract or\r
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the\r
+      outstanding shares, or (iii) beneficial ownership of such entity.\r
+\r
+      "You" (or "Your") shall mean an individual or Legal Entity\r
+      exercising permissions granted by this License.\r
+\r
+      "Source" form shall mean the preferred form for making modifications,\r
+      including but not limited to software source code, documentation\r
+      source, and configuration files.\r
+\r
+      "Object" form shall mean any form resulting from mechanical\r
+      transformation or translation of a Source form, including but\r
+      not limited to compiled object code, generated documentation,\r
+      and conversions to other media types.\r
+\r
+      "Work" shall mean the work of authorship, whether in Source or\r
+      Object form, made available under the License, as indicated by a\r
+      copyright notice that is included in or attached to the work\r
+      (an example is provided in the Appendix below).\r
+\r
+      "Derivative Works" shall mean any work, whether in Source or Object\r
+      form, that is based on (or derived from) the Work and for which the\r
+      editorial revisions, annotations, elaborations, or other modifications\r
+      represent, as a whole, an original work of authorship. For the purposes\r
+      of this License, Derivative Works shall not include works that remain\r
+      separable from, or merely link (or bind by name) to the interfaces of,\r
+      the Work and Derivative Works thereof.\r
+\r
+      "Contribution" shall mean any work of authorship, including\r
+      the original version of the Work and any modifications or additions\r
+      to that Work or Derivative Works thereof, that is intentionally\r
+      submitted to Licensor for inclusion in the Work by the copyright owner\r
+      or by an individual or Legal Entity authorized to submit on behalf of\r
+      the copyright owner. For the purposes of this definition, "submitted"\r
+      means any form of electronic, verbal, or written communication sent\r
+      to the Licensor or its representatives, including but not limited to\r
+      communication on electronic mailing lists, source code control systems,\r
+      and issue tracking systems that are managed by, or on behalf of, the\r
+      Licensor for the purpose of discussing and improving the Work, but\r
+      excluding communication that is conspicuously marked or otherwise\r
+      designated in writing by the copyright owner as "Not a Contribution."\r
+\r
+      "Contributor" shall mean Licensor and any individual or Legal Entity\r
+      on behalf of whom a Contribution has been received by Licensor and\r
+      subsequently incorporated within the Work.\r
+\r
+   2. Grant of Copyright License. Subject to the terms and conditions of\r
+      this License, each Contributor hereby grants to You a perpetual,\r
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r
+      copyright license to reproduce, prepare Derivative Works of,\r
+      publicly display, publicly perform, sublicense, and distribute the\r
+      Work and such Derivative Works in Source or Object form.\r
+\r
+   3. Grant of Patent License. Subject to the terms and conditions of\r
+      this License, each Contributor hereby grants to You a perpetual,\r
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r
+      (except as stated in this section) patent license to make, have made,\r
+      use, offer to sell, sell, import, and otherwise transfer the Work,\r
+      where such license applies only to those patent claims licensable\r
+      by such Contributor that are necessarily infringed by their\r
+      Contribution(s) alone or by combination of their Contribution(s)\r
+      with the Work to which such Contribution(s) was submitted. If You\r
+      institute patent litigation against any entity (including a\r
+      cross-claim or counterclaim in a lawsuit) alleging that the Work\r
+      or a Contribution incorporated within the Work constitutes direct\r
+      or contributory patent infringement, then any patent licenses\r
+      granted to You under this License for that Work shall terminate\r
+      as of the date such litigation is filed.\r
+\r
+   4. Redistribution. You may reproduce and distribute copies of the\r
+      Work or Derivative Works thereof in any medium, with or without\r
+      modifications, and in Source or Object form, provided that You\r
+      meet the following conditions:\r
+\r
+      (a) You must give any other recipients of the Work or\r
+          Derivative Works a copy of this License; and\r
+\r
+      (b) You must cause any modified files to carry prominent notices\r
+          stating that You changed the files; and\r
+\r
+      (c) You must retain, in the Source form of any Derivative Works\r
+          that You distribute, all copyright, patent, trademark, and\r
+          attribution notices from the Source form of the Work,\r
+          excluding those notices that do not pertain to any part of\r
+          the Derivative Works; and\r
+\r
+      (d) If the Work includes a "NOTICE" text file as part of its\r
+          distribution, then any Derivative Works that You distribute must\r
+          include a readable copy of the attribution notices contained\r
+          within such NOTICE file, excluding those notices that do not\r
+          pertain to any part of the Derivative Works, in at least one\r
+          of the following places: within a NOTICE text file distributed\r
+          as part of the Derivative Works; within the Source form or\r
+          documentation, if provided along with the Derivative Works; or,\r
+          within a display generated by the Derivative Works, if and\r
+          wherever such third-party notices normally appear. The contents\r
+          of the NOTICE file are for informational purposes only and\r
+          do not modify the License. You may add Your own attribution\r
+          notices within Derivative Works that You distribute, alongside\r
+          or as an addendum to the NOTICE text from the Work, provided\r
+          that such additional attribution notices cannot be construed\r
+          as modifying the License.\r
+\r
+      You may add Your own copyright statement to Your modifications and\r
+      may provide additional or different license terms and conditions\r
+      for use, reproduction, or distribution of Your modifications, or\r
+      for any such Derivative Works as a whole, provided Your use,\r
+      reproduction, and distribution of the Work otherwise complies with\r
+      the conditions stated in this License.\r
+\r
+   5. Submission of Contributions. Unless You explicitly state otherwise,\r
+      any Contribution intentionally submitted for inclusion in the Work\r
+      by You to the Licensor shall be under the terms and conditions of\r
+      this License, without any additional terms or conditions.\r
+      Notwithstanding the above, nothing herein shall supersede or modify\r
+      the terms of any separate license agreement you may have executed\r
+      with Licensor regarding such Contributions.\r
+\r
+   6. Trademarks. This License does not grant permission to use the trade\r
+      names, trademarks, service marks, or product names of the Licensor,\r
+      except as required for reasonable and customary use in describing the\r
+      origin of the Work and reproducing the content of the NOTICE file.\r
+\r
+   7. Disclaimer of Warranty. Unless required by applicable law or\r
+      agreed to in writing, Licensor provides the Work (and each\r
+      Contributor provides its Contributions) on an "AS IS" BASIS,\r
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+      implied, including, without limitation, any warranties or conditions\r
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\r
+      PARTICULAR PURPOSE. You are solely responsible for determining the\r
+      appropriateness of using or redistributing the Work and assume any\r
+      risks associated with Your exercise of permissions under this License.\r
+\r
+   8. Limitation of Liability. In no event and under no legal theory,\r
+      whether in tort (including negligence), contract, or otherwise,\r
+      unless required by applicable law (such as deliberate and grossly\r
+      negligent acts) or agreed to in writing, shall any Contributor be\r
+      liable to You for damages, including any direct, indirect, special,\r
+      incidental, or consequential damages of any character arising as a\r
+      result of this License or out of the use or inability to use the\r
+      Work (including but not limited to damages for loss of goodwill,\r
+      work stoppage, computer failure or malfunction, or any and all\r
+      other commercial damages or losses), even if such Contributor\r
+      has been advised of the possibility of such damages.\r
+\r
+   9. Accepting Warranty or Additional Liability. While redistributing\r
+      the Work or Derivative Works thereof, You may choose to offer,\r
+      and charge a fee for, acceptance of support, warranty, indemnity,\r
+      or other liability obligations and/or rights consistent with this\r
+      License. However, in accepting such obligations, You may act only\r
+      on Your own behalf and on Your sole responsibility, not on behalf\r
+      of any other Contributor, and only if You agree to indemnify,\r
+      defend, and hold each Contributor harmless for any liability\r
+      incurred by, or claims asserted against, such Contributor by reason\r
+      of your accepting any such warranty or additional liability.\r
+\r
+   END OF TERMS AND CONDITIONS\r
+\r
+   APPENDIX: How to apply the Apache License to your work.\r
+\r
+      To apply the Apache License to your work, attach the following\r
+      boilerplate notice, with the fields enclosed by brackets "[]"\r
+      replaced with your own identifying information. (Don't include\r
+      the brackets!)  The text should be enclosed in the appropriate\r
+      comment syntax for the file format. We also recommend that a\r
+      file or class name and description of purpose be included on the\r
+      same "printed page" as the copyright notice for easier\r
+      identification within third-party archives.\r
+\r
+   Copyright [yyyy] [name of copyright owner]\r
+\r
+   Licensed under the Apache License, Version 2.0 (the "License");\r
+   you may not use this file except in compliance with the License.\r
+   You may obtain a copy of the License at\r
+\r
+       http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+   Unless required by applicable law or agreed to in writing, software\r
+   distributed under the License is distributed on an "AS IS" BASIS,\r
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+   See the License for the specific language governing permissions and\r
+   limitations under the License.\r
+\r
+\r
+\r
diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..9af8e7a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,2 @@
+# training_engine_interface
+An adaptation framework for deploying various training engines - NNTrainer, TFLite, Caffe2, and so on - to MediaVision.
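
Usage sketch (illustrative only, not part of this commit): the snippet below shows how a client is expected to drive the common interface declared in the headers added by this commit. The backend name "nntrainer" and the "unit=10" option string are assumptions for illustration, and error handling is abbreviated.

#include "training_engine_common_impl.h"
#include "training_engine_error.h"

using namespace TrainingEngineInterface::Common;

int main()
{
	TrainingEngineCommon engine;

	// Load a backend library; "nntrainer" is an assumed backend name,
	// resolved to libtraining-engine-nntrainer.so by BindBackend().
	training_engine_config config = { "nntrainer" };
	if (engine.BindBackend(config) != TRAINING_ENGINE_ERROR_NONE)
		return -1;

	training_engine_capacity capacity = {};
	engine.GetBackendCapacity(capacity);

	// Build a model with one fully-connected layer; the option string format
	// is backend-defined, so "unit=10" is only a hypothetical example.
	training_engine_model *model = engine.CreateModel();
	training_engine_layer *layer = engine.CreateLayer(TRAINING_LAYER_TYPE_FC);
	training_engine_layer_property layer_prop = { "unit=10" };
	engine.SetLayerProperty(layer, layer_prop);
	engine.AddLayer(model, layer);

	training_engine_optimizer *optimizer = engine.CreateOptimizer(TRAINING_OPTIMIZER_TYPE_SGD);
	engine.AddOptimizer(model, optimizer);

	// Feed one dummy sample into a training dataset. The template overload is
	// assumed to be instantiated for float by the library implementation.
	training_engine_dataset *dataset = engine.CreateDataset(TRAINING_DATASET_TYPE_TRAIN);
	float data[4] = { 0.f }, label[1] = { 1.f };
	engine.AddDataToDataset(dataset, data, 4, label, 1, TRAINING_DATASET_TYPE_TRAIN);
	engine.SetDataset(model, dataset);

	training_engine_compile_property compile_prop;
	training_engine_model_property model_prop;
	engine.CompileModel(model, compile_prop);
	engine.TrainModel(model, model_prop);

	engine.DestroyDataset(dataset);
	engine.DestroyLayer(layer);
	engine.DestroyModel(model);
	engine.UnbindBackend();

	return 0;
}
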
diff --git a/include/training_engine_common.h b/include/training_engine_common.h
new file mode 100644 (file)
index 0000000..f0527ec
--- /dev/null
+++ b/include/training_engine_common.h
@@ -0,0 +1,232 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TRAINING_ENGINE_COMMON_H__
+#define __TRAINING_ENGINE_COMMON_H__
+
+#include <map>
+#include <vector>
+#include <string>
+#include "training_engine_type.h"
+
+#define TE_CHECK_OBJ_NO_RETURN_VAL(obj)        if (obj == NULL) {                                         \
+                                                                                       LOGE("Invalid object (%s)", #obj); \
+                                                                                       return;                                                    \
+                                                                               }
+
+#define TE_CHECK_OBJ(obj, err) if (obj == NULL) {                                              \
+                                                                       LOGE("Invalid object (%s)", #obj);      \
+                                                                       return err;                                                     \
+                                                               }
+
+namespace TrainingEngineInterface
+{
+namespace Common
+{
+       class ITrainingEngineCommon
+       {
+       public:
+               virtual ~ITrainingEngineCommon() {};
+
+               /**
+                * @brief Get capacity from a given backend engine.
+                * @details This callback requests what supported features and constraints the backend engine has.
+                *          Upper layer should call this callback just after the backend engine library is loaded.
+                *
+                * @since_tizen 6.5
+                * @param[out] capacity A structure the backend engine fills with the features and constraints it supports.
+                */
+               virtual int GetBackendCapacity(training_engine_capacity &capacity) = 0;
+
+               /**
+                * @brief Create a model object.
+                * @details This API creates a model object corresponding to a specific training engine.
+                *
+                * @since_tizen 6.5
+                * @param[in] backbone_model_path A full path to the backbone model, which is a pre-trained model.
+                * @param[in] model_config_path A full path to the model configuration file.
+                */
+               virtual training_engine_model *CreateModel(std::string backbone_model_path = "", std::string model_config_path = "") = 0;
+
+               /**
+                * @brief Destroy a given model object.
+                * @details This API releases all resources used for training model.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object to release.
+                */
+               virtual void DestroyModel(training_engine_model *model) = 0;
+
+               /**
+                * @brief Create a layer object.
+                * @details This API creates a layer object corresponding to a specific training engine model.
+                *
+                * @since_tizen 6.5
+                * @param[in] type An enumeration value which indicates the layer type to be created.
+                */
+               virtual training_engine_layer *CreateLayer(training_engine_layer_type_e type) = 0;
+
+               /**
+                * @brief Destroy a given layer object.
+                * @details This API releases a created layer object.
+                *
+                * @since_tizen 6.5
+                * @param[in] layer A layer object to release.
+                */
+               virtual void DestroyLayer(training_engine_layer *layer) = 0;
+
+               /**
+                * @brief Set additional information for a given layer.
+                * @details This API sets several options for a given layer.
+                *
+                * @since_tizen 6.5
+                * @param[in] layer An object pointing to a created layer.
+                * @param[in] property A property structure which contains several options for a given layer.
+                */
+               virtual int SetLayerProperty(training_engine_layer *layer, training_engine_layer_property &property) = 0;
+
+               /**
+                * @brief Add a created layer to a given model.
+                * @details This API adds a created layer to a given model.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object for a given layer to be added to.
+                * @param[in] layer An object pointing to a created layer.
+                */
+               virtual int AddLayer(training_engine_model *model, training_engine_layer *layer) = 0;
+
+               /**
+                * @brief Create an optimizer object.
+                * @details This API creates a model optimizer corresponding to a specific training engine model.
+                *
+                * @since_tizen 6.5
+                * @param[in] type An enumeration value which indicates the optimizer type to be created.
+                */
+               virtual training_engine_optimizer *CreateOptimizer(training_engine_optimizer_type_e type) = 0;
+
+               /**
+                * @brief Set additional information for a given optimizer.
+                * @details This API sets several options for a given optimizer.
+                *
+                * @since_tizen 6.5
+                * @param[in] optimizer An object pointing to a created optimizer.
+                * @param[in] property A property structure which contains several options for a given optimizer.
+                */
+               virtual int SetOptimizerProperty(training_engine_optimizer *optimizer, training_engine_optimizer_property &property) = 0;
+
+               /**
+                * @brief Add a created optimizer to a given model.
+                * @details This API adds a created optimizer to a given model.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object for a given optimizer to be added to.
+                * @param[in] optimizer An object pointing to a created optimizer.
+                */
+               virtual int AddOptimizer(training_engine_model *model, training_engine_optimizer *optimizer) = 0;
+
+               /**
+                * @brief Create a dataset object.
+                * @details This API creates a dataset to train a given model.
+                *
+                * @since_tizen 6.5
+                * @param[in] type An enumeration value which indicates the dataset type to be created.
+                */
+               virtual training_engine_dataset *CreateDataset(training_engine_dataset_type_e type) = 0;
+
+               /**
+                * @brief Destroy a given dataset object.
+                * @details This API releases a created dataset object.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset A dataset object to release.
+                */
+               virtual void DestroyDataset(training_engine_dataset *dataset) = 0;
+
+               /**
+                * @brief Set additional information for a given dataset.
+                * @details This API sets several options for a given dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset An object pointing to a created dataset.
+                * @param[in] property A dataset structure which contains several options for a given dataset.
+                */
+               virtual int SetDatasetProperty(training_engine_dataset *dataset, training_engine_dataset_property &property) = 0;
+
+               /**
+                * @brief Add a new input data to a given dataset.
+                * @details This API adds a new input data to a given dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset A dataset object the new input data will be added to.
+                * @param[in] data An array which contains input tensor values.
+                * @param[in] data_len The length of data.
+                * @param[in] label An array which contains label values for the input data.
+                * @param[in] label_len The length of label.
+                * @param[in] type A type which indicates which dataset - train, test or verify - the given data and label should be added to.
+                */
+               virtual int AddDataToDataset(training_engine_dataset *dataset, float *data, size_t data_len, float *label, size_t label_len, training_engine_dataset_type_e type) = 0;
+
+               /**
+                * @brief Add a new input data to a given dataset.
+                * @details This API adds a new input data to a given dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset A dataset object the new input data will be added to.
+                * @param[in] data An array which contains input tensor values.
+                * @param[in] data_len The length of data.
+                * @param[in] label An array which contains label values for the input data.
+                * @param[in] label_len The length of label.
+                * @param[in] type A type which indicates which dataset - train, test or verify - the given data and label should be added to.
+                */
+               virtual int AddDataToDataset(training_engine_dataset *dataset, unsigned int *data, size_t data_len, unsigned int *label, size_t label_len, training_engine_dataset_type_e type) = 0;
+
+               /**
+                * @brief Set a given dataset to a given model.
+                * @details This API binds a created dataset to a given model so that the dataset is used for training.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object the dataset will be set to.
+                * @param[in] dataset An object pointing to a created dataset.
+                */
+               virtual int SetDataset(training_engine_model *model, training_engine_dataset *dataset) = 0;
+
+               /**
+                * @brief Compile a given model with a given property.
+                * @details This API compiles a given model referring to a given property information.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object which points to the model to be compiled.
+                * @param[in] property A structure which contains several compiler options.
+                */
+               virtual int CompileModel(training_engine_model *model, training_engine_compile_property &property) = 0;
+
+               /**
+                * @brief Train a given model with a given property.
+                * @details This API trains a given model with an already-prepared dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object which points to the model to be trained.
+                * @param[in] property A structure which contains several training options.
+                */
+               virtual int TrainModel(training_engine_model *model, training_engine_model_property &property) = 0;
+       };
+
+       typedef void destroy_t(ITrainingEngineCommon *);
+       typedef ITrainingEngineCommon *init_t(void);
+} /* Common */
+} /* TrainingEngineInterface */
+
+#endif /* __TRAINING_ENGINE_COMMON_H__ */
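
For orientation, the sketch below shows roughly what a backend library built against this header would look like: it implements ITrainingEngineCommon and exports the EngineCommonInit/EngineCommonDestroy entry points that the common layer resolves with dlsym (see training_engine_common_impl.cpp below). Everything here is a stub written for illustration, not an actual backend.

// Sketch of a hypothetical backend, built e.g. as libtraining-engine-stub.so.
#include <cstddef>
#include "training_engine_common.h"
#include "training_engine_error.h"

using namespace TrainingEngineInterface::Common;

class StubTrainingEngine : public ITrainingEngineCommon
{
public:
	int GetBackendCapacity(training_engine_capacity &capacity) override
	{
		// Report the only tensor layout this stub pretends to support.
		capacity.supported_tensor_shape_type = TRAINING_TENSOR_SHAPE_NCHW;
		return TRAINING_ENGINE_ERROR_NONE;
	}

	training_engine_model *CreateModel(std::string backbone_model_path, std::string model_config_path) override
	{ return new training_engine_model { nullptr }; }
	void DestroyModel(training_engine_model *model) override { delete model; }

	training_engine_layer *CreateLayer(training_engine_layer_type_e type) override
	{ return new training_engine_layer { nullptr }; }
	void DestroyLayer(training_engine_layer *layer) override { delete layer; }
	int SetLayerProperty(training_engine_layer *layer, training_engine_layer_property &property) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
	int AddLayer(training_engine_model *model, training_engine_layer *layer) override
	{ return TRAINING_ENGINE_ERROR_NONE; }

	training_engine_optimizer *CreateOptimizer(training_engine_optimizer_type_e type) override
	{ return new training_engine_optimizer { nullptr }; }
	int SetOptimizerProperty(training_engine_optimizer *optimizer, training_engine_optimizer_property &property) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
	int AddOptimizer(training_engine_model *model, training_engine_optimizer *optimizer) override
	{ return TRAINING_ENGINE_ERROR_NONE; }

	training_engine_dataset *CreateDataset(training_engine_dataset_type_e type) override
	{ return new training_engine_dataset { nullptr, type }; }
	void DestroyDataset(training_engine_dataset *dataset) override { delete dataset; }
	int SetDatasetProperty(training_engine_dataset *dataset, training_engine_dataset_property &property) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
	int AddDataToDataset(training_engine_dataset *dataset, float *data, size_t data_len,
						 float *label, size_t label_len, training_engine_dataset_type_e type) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
	int AddDataToDataset(training_engine_dataset *dataset, unsigned int *data, size_t data_len,
						 unsigned int *label, size_t label_len, training_engine_dataset_type_e type) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
	int SetDataset(training_engine_model *model, training_engine_dataset *dataset) override
	{ return TRAINING_ENGINE_ERROR_NONE; }

	int CompileModel(training_engine_model *model, training_engine_compile_property &property) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
	int TrainModel(training_engine_model *model, training_engine_model_property &property) override
	{ return TRAINING_ENGINE_ERROR_NONE; }
};

extern "C"
{
	// Entry points resolved by the common layer via dlsym().
	ITrainingEngineCommon *EngineCommonInit(void)
	{
		return new StubTrainingEngine();
	}

	void EngineCommonDestroy(ITrainingEngineCommon *engine)
	{
		delete engine;
	}
}
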
diff --git a/include/training_engine_common_impl.h b/include/training_engine_common_impl.h
new file mode 100644 (file)
index 0000000..aa68fdc
--- /dev/null
+++ b/include/training_engine_common_impl.h
@@ -0,0 +1,235 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TRAINING_ENGINE_COMMON_IMPL_H__
+#define __TRAINING_ENGINE_COMMON_IMPL_H__
+
+#include <map>
+#include <vector>
+#include <string>
+
+#include "training_engine_common.h"
+
+namespace TrainingEngineInterface
+{
+namespace Common
+{
+       class TrainingEngineCommon
+       {
+       private:
+               void *mBackendModule;
+               ITrainingEngineCommon *mTraining;
+
+       public:
+               TrainingEngineCommon();
+
+               ~TrainingEngineCommon();
+
+               /**
+                * @brief Load a backend engine library with a given backend name.
+                * @details This callback loads a backend engine library with a given backend name.
+                *          In order to find the backend engine library corresponding to the given backend name,
+                *          this function builds the full name of the library file from the given backend name.
+                *          After that, it opens the library file by calling dlopen and looks up the entry point
+                *          function - EngineInit - of the actual backend library.
+                *
+                * @since_tizen 6.5
+                * @param[in] config Configuration data needed to load a backend library.
+                */
+               int BindBackend(training_engine_config &config);
+
+               /**
+                * @brief Unload a backend engine library.
+                * @details This callback unloads a backend engine library.
+                *
+                * @since_tizen 6.5
+                */
+               void UnbindBackend(void);
+
+               /**
+                * @brief Get capacity from a given backend engine.
+                * @details This callback requests what supported features and constraints the backend engine has.
+                *          Upper layer should call this callback just after the backend engine library is loaded.
+                *
+                * @since_tizen 6.5
+                * @param[out] capacity A structure the backend engine fills with the features and constraints it supports.
+                */
+               int GetBackendCapacity(training_engine_capacity &capacity);
+
+               /**
+                * @brief Create a model object.
+                * @details This API creates a model object corresponding to a specific training engine.
+                *
+                * @since_tizen 6.5
+                * @param[in] backbone_model_path A full path to the backbone model, which is a pre-trained model.
+                * @param[in] model_config_path A full path to the model configuration file.
+                */
+               training_engine_model *CreateModel(std::string backbone_model_path = "", std::string model_config_path = "");
+
+               /**
+                * @brief Destroy a given model object.
+                * @details This API releases all resources used for training model.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object to release.
+                */
+               void DestroyModel(training_engine_model *model);
+
+               /**
+                * @brief Create a layer object.
+                * @details This API creates a layer object corresponding to a specific training engine model.
+                *
+                * @since_tizen 6.5
+                * @param[in] type An enumeration value which indicates the layer type to be created.
+                */
+               training_engine_layer *CreateLayer(training_engine_layer_type_e type);
+
+               /**
+                * @brief Destroy a given layer object.
+                * @details This API releases a created layer object.
+                *
+                * @since_tizen 6.5
+                * @param[in] layer A layer object to release.
+                */
+               void DestroyLayer(training_engine_layer *layer);
+
+               /**
+                * @brief Set additional information for a given layer.
+                * @details This API sets several options for a given layer.
+                *
+                * @since_tizen 6.5
+                * @param[in] layer An object pointing to a created layer.
+                * @param[in] property A property structure which contains several options for a given layer.
+                */
+               int SetLayerProperty(training_engine_layer *layer, training_engine_layer_property &property);
+
+               /**
+                * @brief Add a created layer to a given model.
+                * @details This API adds a created layer to a given model.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object for a given layer to be added to.
+                * @param[in] layer An object pointing to a created layer.
+                */
+               int AddLayer(training_engine_model *model, training_engine_layer *layer);
+
+               /**
+                * @brief Create an optimizer object.
+                * @details This API creates a model optimizer corresponding to a specific training engine model.
+                *
+                * @since_tizen 6.5
+                * @param[in] type An enumeration value which indicates the optimizer type to be created.
+                */
+               training_engine_optimizer *CreateOptimizer(training_engine_optimizer_type_e type);
+
+               /**
+                * @brief Set additional information for a given optimizer.
+                * @details This API sets several options for a given optimizer.
+                *
+                * @since_tizen 6.5
+                * @param[in] optimizer An object pointing to a created optimizer.
+                * @param[in] property A property structure which contains several options for a given optimizer.
+                */
+               int SetOptimizerProperty(training_engine_optimizer *optimizer, training_engine_optimizer_property &property);
+
+               /**
+                * @brief Add a created optimizer to a given model.
+                * @details This API adds a created optimizer to a given model.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object for a given optimizer to be added to.
+                * @param[in] optimizer An object pointing to a created optimizer.
+                */
+               int AddOptimizer(training_engine_model *model, training_engine_optimizer *optimizer);
+
+               /**
+                * @brief Create a dataset object.
+                * @details This API creates a dataset to train a given model.
+                *
+                * @since_tizen 6.5
+                * @param[in] type An enumeration value which indicates the dataset type to be created.
+                */
+               training_engine_dataset *CreateDataset(training_engine_dataset_type_e type);
+
+               /**
+                * @brief Destroy a given dataset object.
+                * @details This API releases a created dataset object.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset A dataset object to release.
+                */
+               void DestroyDataset(training_engine_dataset *dataset);
+
+               /**
+                * @brief Set additional information for a given dataset.
+                * @details This API sets several options for a given dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset An object pointing to a created dataset.
+                * @param[in] property A dataset structure which contains several options for a given dataset.
+                */
+               int SetDatasetProperty(training_engine_dataset *dataset, training_engine_dataset_property &property);
+
+               /**
+                * @brief Add a new input data to a given dataset.
+                * @details This API adds a new input data to a given dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] dataset A dataset object the new input data will be added to.
+                * @param[in] data An array which contains input tensor values.
+                * @param[in] data_len The length of data.
+                * @param[in] label An array which contains label values for the input data.
+                * @param[in] label_len The length of label.
+                * @param[in] type A type which indicates which dataset - train, test or verify - the given data and label should be added to.
+                */
+               template <typename D, typename L>
+               int AddDataToDataset(training_engine_dataset *dataset, D *data, size_t data_len, L *label, size_t label_len, training_engine_dataset_type_e type);
+
+               /**
+                * @brief Set a given dataset to a given model.
+                * @details This API binds a created dataset to a given model so that the dataset is used for training.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object the dataset will be set to.
+                * @param[in] dataset An object pointing to a created dataset.
+                */
+               int SetDataset(training_engine_model *model, training_engine_dataset *dataset);
+
+               /**
+                * @brief Compile a given model with a given property.
+                * @details This API compiles a given model referring to a given property information.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object which points to the model to be compiled.
+                * @param[in] property A structure which contains several compiler options.
+                */
+               int CompileModel(training_engine_model *model, training_engine_compile_property &property);
+
+               /**
+                * @brief Train a given model with a given property.
+                * @details This API trains a given model with an already-prepared dataset.
+                *
+                * @since_tizen 6.5
+                * @param[in] model A model object which points to the model to be trained.
+                * @param[in] property A structure which contains several training options.
+                */
+               int TrainModel(training_engine_model *model, training_engine_model_property &property);
+       };
+
+} /* Common */
+} /* TrainingEngineInterface */
+
+#endif /* __TRAINING_ENGINE_COMMON_IMPL_H__ */
diff --git a/include/training_engine_error.h b/include/training_engine_error.h
new file mode 100644 (file)
index 0000000..76363b9
--- /dev/null
+++ b/include/training_engine_error.h
@@ -0,0 +1,57 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TRAINING_ENGINE_ERROR_H__
+#define __TRAINING_ENGINE_ERROR_H__
+
+#include <tizen.h>
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* __cplusplus */
+
+       /**
+        * @file  training_engine_error.h
+        * @brief This file contains error type required by
+        *        training engine
+        */
+       typedef enum {
+               TRAINING_ENGINE_ERROR_NONE = TIZEN_ERROR_NONE, /**< Successful */
+               TRAINING_ENGINE_ERROR_NOT_SUPPORTED =
+                               TIZEN_ERROR_NOT_SUPPORTED, /**< Not supported */
+               TRAINING_ENGINE_ERROR_NO_DATA = TIZEN_ERROR_NO_DATA, /**< No data */
+               TRAINING_ENGINE_ERROR_KEY_NOT_AVAILABLE =
+                               TIZEN_ERROR_KEY_NOT_AVAILABLE, /**< Key not available */
+               TRAINING_ENGINE_ERROR_OUT_OF_MEMORY =
+                               TIZEN_ERROR_OUT_OF_MEMORY, /**< Out of memory */
+               TRAINING_ENGINE_ERROR_INVALID_PARAMETER =
+                               TIZEN_ERROR_INVALID_PARAMETER, /**< Invalid parameter */
+               TRAINING_ENGINE_ERROR_INVALID_OPERATION =
+                               TIZEN_ERROR_INVALID_OPERATION, /**< Invalid operation */
+               TRAINING_ENGINE_ERROR_PERMISSION_DENIED =
+                               TIZEN_ERROR_NOT_PERMITTED, /**< Not permitted */
+               TRAINING_ENGINE_ERROR_BAD_ADDRESS =
+                               TIZEN_ERROR_BAD_ADDRESS, /**< Bad address */
+               TRAINING_ENGINE_ERROR_INVALID_ADDRESS =
+                               TIZEN_ERROR_MEDIA_VISION | 0x10 /**< Invalid address */
+       } training_engine_error_e;
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __TRAINING_ENGINE_ERROR_H__ */
diff --git a/include/training_engine_type.h b/include/training_engine_type.h
new file mode 100644 (file)
index 0000000..439f4d1
--- /dev/null
+++ b/include/training_engine_type.h
@@ -0,0 +1,213 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TRAINING_ENGINE_TYPE_H__
+#define __TRAINING_ENGINE_TYPE_H__
+
+#include <map>
+#include <vector>
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* __cplusplus */
+
+       typedef enum {
+               TRAINING_LAYER_TYPE_MIN = 0,
+               TRAINING_LAYER_TYPE_INPUT,              /**< Input Layer */
+               TRAINING_LAYER_TYPE_FC,             /**< Fully Connected Layer */
+               TRAINING_LAYER_TYPE_BN,             /**< Batch Normalization Layer */
+               TRAINING_LAYER_TYPE_CONV2D,         /**< Convolution 2D Layer */
+               TRAINING_LAYER_TYPE_POOLING2D,      /**< Pooling 2D Layer */
+               TRAINING_LAYER_TYPE_FLATTEN,        /**< Flatten Layer */
+               TRAINING_LAYER_TYPE_ACTIVATION,     /**< Activation Layer */
+               TRAINING_LAYER_TYPE_ADDITION,       /**< Addition Layer */
+               TRAINING_LAYER_TYPE_CONCAT,         /**< Concat Layer */
+               TRAINING_LAYER_TYPE_MULTIOUT,       /**< MultiOut Layer */
+               TRAINING_LAYER_TYPE_L2NORM,         /**< L2 Norm Layer */
+               TRAINING_LAYER_TYPE_CENTROID_KNN,   /**< Centroid k-Nearest Neighbor Layer */
+               TRAINING_LAYER_TYPE_MAX
+       } training_engine_layer_type_e;
+
+       typedef enum {
+               TRAINING_TENSOR_SHAPE_MIN = 0,
+               TRAINING_TENSOR_SHAPE_NHWC,
+               TRAINING_TENSOR_SHAPE_NCHW,
+               TRAINING_TENSOR_SHAPE_MAX
+       } training_engine_tensor_shape_type_e;
+
+       typedef enum {
+               TRAINING_OPTIMIZER_TYPE_MIN = 0,
+               TRAINING_OPTIMIZER_TYPE_ADAM,       /**< Adam optimizer */
+               TRAINING_OPTIMIZER_TYPE_SGD,        /**< Stochastic Gradient Descent optimizer */
+               TRAINING_OPTIMIZER_TYPE_MAX
+       } training_engine_optimizer_type_e;
+
+       typedef enum {
+               TRAINING_DATASET_TYPE_MIN = 0,
+               TRAINING_DATASET_TYPE_TRAIN,        /**< dataset for training a model */
+               TRAINING_DATASET_TYPE_TEST,         /**< dataset for testing a model */
+               TRAINING_DATASET_TYPE_VERIFY,       /**< dataset for verifying a model */
+               TRAINING_DATASET_TYPE_MAX
+       } training_engine_dataset_type_e;
+
+       /**
+        * @brief A structure for a training model.
+        *
+        * @details This structure points to a training-engine-specific model object.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_model {
+               void *model_handle;
+               // TODO. add a queue for keeping input data.
+       } training_engine_model;
+
+       /**
+        * @brief A structure for a model layer.
+        *
+        * @details This structure points to a training-engine-specific layer object.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_layer {
+               void *layer_handle;
+               // TODO.
+       } training_engine_layer;
+
+       /**
+        * @brief A structure for a model optimizer.
+        *
+        * @details This structure points to a training-engine-specific model optimizer object.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_optimizer {
+               void *optimizer_handle;
+               // TODO.
+       } training_engine_optimizer;
+
+       /**
+        * @brief A structure for a model dataset.
+        *
+        * @details This structure points to a training-engine-specific model dataset object.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_dataset {
+               void *dataset_handle;
+               training_engine_dataset_type_e type;
+               // TODO.
+       } training_engine_dataset;
+
+       /**
+        * @brief A layer property structure
+        *
+        * @details This structure is used to provide layer options to the backend engine.
+        *          - A string which contains option strings corresponding to a given layer.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_layer_property {
+               std::string option;
+               // TODO.
+       } training_engine_layer_property;
+
+       /**
+        * @brief An optimizer property structure
+        *
+        * @details This structure is used to provide optimizer options to the backend engine.
+        *          - A string which contains option strings corresponding to a given optimizer.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_optimizer_property {
+               std::string option;
+               // TODO.
+       } training_engine_optimizer_property;
+
+       /**
+        * @brief A dataset property structure
+        *
+        * @details This structure is used to provide dataset options to the backend engine.
+        *          - A string which contains option strings corresponding to a given dataset.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_dataset_property {
+               std::string option;
+               // TODO.
+       } training_engine_dataset_property;
+
+       /**
+        * @brief A compile property structure
+        *
+        * @details This structure is used to provide compile options to the backend engine.
+        *          - A list of option strings for compiling a given model.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_compile_property {
+               std::vector<std::string> options;
+               // TODO.
+       } training_engine_compile_property;
+
+       /**
+        * @brief A model property structure
+        *
+        * @details This structure is used to provide model options to the backend engine.
+        *          - A list of option strings corresponding to a given model.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_model_property {
+               std::vector<std::string> options;
+               // TODO.
+       } training_engine_model_property;
+
+       /**
+        * @brief A capacity structure for a backend engine.
+        *
+        * @details This structure is used to get information such as which features and
+        *                      constraints a given backend engine has. It contains the information below:
+        *                      - the tensor shape type a given backend engine supports, such as NCHW or NHWC.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_capacity {
+               training_engine_tensor_shape_type_e supported_tensor_shape_type;
+               // TODO.
+       } training_engine_capacity;
+
+       /**
+        * @brief A training engine common config structure.
+        *
+        * @details This structure contains training engine common configuration.
+        *          - A backend name which indicates the training engine backend library
+        *            to be linked.
+        *
+        * @since_tizen 6.5
+        */
+       typedef struct _training_engine_config {
+               std::string backend_name;
+               // TODO.
+       } training_engine_config;
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __TRAINING_ENGINE_TYPE_H__ */
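
The property structures above only carry backend-defined option strings; the common layer does not interpret them. The following small illustration shows how a client might fill them - every key and value here is a hypothetical assumption, since the accepted format is defined by each backend rather than by this interface.

#include "training_engine_type.h"

// All option strings below are assumptions for illustration; the real
// keys/values are defined by the backend engine, not by this interface.
static void fill_example_properties(training_engine_layer_property &layer_prop,
				    training_engine_compile_property &compile_prop,
				    training_engine_model_property &model_prop)
{
	layer_prop.option = "unit=10";                              // hypothetical layer option
	compile_prop.options = { "loss=cross", "batch_size=32" };   // hypothetical compile options
	model_prop.options = { "epochs=10" };                       // hypothetical training options
}
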
diff --git a/packaging/training-engine-interface.spec b/packaging/training-engine-interface.spec
new file mode 100644 (file)
index 0000000..6ae0c53
--- /dev/null
+++ b/packaging/training-engine-interface.spec
@@ -0,0 +1,80 @@
+Name:        training-engine-interface
+Summary:     Common interface for training engines
+Version:     0.0.1
+Release:     1
+Group:       Multimedia/Framework
+License:     Apache-2.0
+Source0:     %{name}-%{version}.tar.gz
+BuildRequires: cmake
+BuildRequires: pkgconfig(dlog)
+BuildRequires: pkgconfig(iniparser)
+BuildRequires: pkgconfig(glib-2.0)
+BuildRequires: gtest-devel
+
+%description
+Common interface for training engines
+
+%package common
+Summary:    Provide common interface library files for training engine execution
+Group:      Multimedia/Framework
+
+%description common
+Common interface library files for training engine execution
+
+%package common-devel
+Summary:    Provide header and library files for backend development of training engine
+Group:      Multimedia/Framework
+Requires:   training-engine-interface-common
+
+%description common-devel
+Header and library files for backend development of training engine
+
+%package tools
+Summary:       Provide test binaries for testing training engine backends
+Group:         Multimedia/Framework
+Requires:      training-engine-interface-common
+
+%description tools
+Test binaries for testing training engine backends
+
+%prep
+%setup -q
+
+%build
+%if 0%{?sec_build_binary_debug_enable}
+export CFLAGS="$CFLAGS -DTIZEN_DEBUG_ENABLE"
+export CXXFLAGS="$CXXFLAGS -DTIZEN_DEBUG_ENABLE"
+export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
+%endif
+
+export CFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\" -DSYSCONFDIR=\\\"%{_hal_sysconfdir}\\\""
+export CXXFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\" -DSYSCONFDIR=\\\"%{_hal_sysconfdir}\\\""
+
+MAJORVER=`echo %{version} | awk 'BEGIN {FS="."}{print $1}'`
+%cmake . -DFULLVER=%{version} -DMAJORVER=${MAJORVER} -DTZ_SYS_BIN=%TZ_SYS_BIN
+
+make %{?jobs:-j%jobs}
+
+%install
+rm -rf %{buildroot}
+
+mkdir -p %{buildroot}%{_bindir}
+%make_install
+
+install -m 755 test/bin/training_engine_tc %{buildroot}%{_bindir}
+
+%post -p /sbin/ldconfig
+%postun -p /sbin/ldconfig
+
+%files common
+%manifest training-engine-interface-common.manifest
+%license LICENSE.APLv2
+%{_libdir}/libtraining-engine-interface-common.so.*
+
+%files common-devel
+%{_includedir}/media/*.h
+%{_libdir}/pkgconfig/*common.pc
+%{_libdir}/lib*-common.so
+
+%files tools
+%{_bindir}/training_engine_tc
diff --git a/src/training_engine_common_impl.cpp b/src/training_engine_common_impl.cpp
new file mode 100644 (file)
index 0000000..66fb649
--- /dev/null
+++ b/src/training_engine_common_impl.cpp
@@ -0,0 +1,407 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <unistd.h>
+#include <dlfcn.h>
+
+#include "training_engine_error.h"
+#include "training_engine_common_impl.h"
+
+extern "C"
+{
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "TRAINING_ENGINE_COMMON"
+}
+
+namespace TrainingEngineInterface
+{
+namespace Common
+{
+       TrainingEngineCommon::TrainingEngineCommon() :
+                       mBackendModule(),
+                       mTraining()
+       {
+               LOGI("ENTER");
+
+               LOGI("LEAVE");
+       }
+
+       TrainingEngineCommon::~TrainingEngineCommon()
+       {
+               LOGI("ENTER");
+
+               LOGI("LEAVE");
+       }
+
+       int TrainingEngineCommon::BindBackend(training_engine_config &config)
+       {
+               LOGI("ENTER");
+
+               const std::string lib_path("libtraining-engine-" + config.backend_name + ".so");
+
+               // Load training engine backend library with config->backend_name.
+               LOGI("lib: %s", lib_path.c_str());
+               mBackendModule = dlopen(lib_path.c_str(), RTLD_NOW);
+               LOGI("Handle : [%p]", mBackendModule);
+
+               if (!mBackendModule) {
+                       LOGE("Failed to dlopen %s", lib_path.c_str());
+                       LOGE("%s", dlerror());
+                       return TRAINING_ENGINE_ERROR_INVALID_ADDRESS;
+               }
+
+               init_t *EngineInit = (init_t *)dlsym(mBackendModule, "EngineCommonInit");
+               if (EngineInit == NULL) {
+                       LOGE("Failed to dlsym(%s).", dlerror());
+                       // Close the handle before dropping it so the library is not leaked.
+                       dlclose(mBackendModule);
+                       mBackendModule = NULL;
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               mTraining = EngineInit();
+               if (mTraining == NULL) {
+                       LOGE("Failed to call EngineInit.");
+                       dlclose(mBackendModule);
+                       mBackendModule = NULL;
+                       return TRAINING_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               LOGI("LEAVE");
+
+               return TRAINING_ENGINE_ERROR_NONE;
+       }
+
+       void TrainingEngineCommon::UnbindBackend(void)
+       {
+               LOGI("ENTER");
+
+               if (mBackendModule) {
+                       destroy_t *engineDestroy = (destroy_t *) dlsym(mBackendModule, "EngineCommonDestroy");
+                       // Guard against a missing entry point or an engine that was never created.
+                       if (engineDestroy && mTraining)
+                               engineDestroy(mTraining);
+                       dlclose(mBackendModule);
+                       mTraining = nullptr;
+                       mBackendModule = nullptr;
+               }
+
+               LOGI("LEAVE");
+       }
+
+       int TrainingEngineCommon::GetBackendCapacity(training_engine_capacity &capacity)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+
+               int ret = mTraining->GetBackendCapacity(capacity);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+                       LOGE("Failed to get backend capacity.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       training_engine_model *TrainingEngineCommon::CreateModel(std::string backbone_model_path,
+                                                                                                                        std::string model_config_path)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, NULL);
+
+               training_engine_model *model = mTraining->CreateModel(backbone_model_path,
+                                                                                                                         model_config_path);
+               if (model == NULL)
+                       LOGE("Failed to create a model.");
+
+               LOGI("LEAVE");
+
+               return model;
+       }
+
+       void TrainingEngineCommon::DestroyModel(training_engine_model *model)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ_NO_RETURN_VAL(mTraining);
+               TE_CHECK_OBJ_NO_RETURN_VAL(model);
+
+               mTraining->DestroyModel(model);
+
+               LOGI("LEAVE");
+       }
+
+       training_engine_layer *TrainingEngineCommon::CreateLayer(training_engine_layer_type_e type)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, NULL);
+
+               if (TRAINING_LAYER_TYPE_MIN >= type || TRAINING_LAYER_TYPE_MAX <= type) {
+                       LOGE("Invalid layer parameter.");
+                       return NULL;
+               }
+
+               training_engine_layer *layer = mTraining->CreateLayer(type);
+               if (layer == NULL)
+                       LOGE("Failed to create a layer.");
+
+               LOGI("LEAVE");
+
+               return layer;
+       }
+
+       void TrainingEngineCommon::DestroyLayer(training_engine_layer *layer)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ_NO_RETURN_VAL(mTraining);
+               TE_CHECK_OBJ_NO_RETURN_VAL(layer);
+
+               mTraining->DestroyLayer(layer);
+
+               LOGI("LEAVE");
+       }
+
+       int TrainingEngineCommon::SetLayerProperty(training_engine_layer *layer,
+                                                                                          training_engine_layer_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(layer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->SetLayerProperty(layer, property);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+                       LOGE("Failed to set layer property.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       int TrainingEngineCommon::AddLayer(training_engine_model *model, training_engine_layer *layer)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(layer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->AddLayer(model, layer);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+			LOGE("Failed to add the layer to the given model.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       training_engine_optimizer *TrainingEngineCommon::CreateOptimizer(training_engine_optimizer_type_e type)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, NULL);
+
+               if (TRAINING_OPTIMIZER_TYPE_MIN >= type || TRAINING_OPTIMIZER_TYPE_MAX <= type) {
+                       LOGE("Invalid optimizer parameter.");
+                       return NULL;
+               }
+
+               training_engine_optimizer *optimizer = mTraining->CreateOptimizer(type);
+               if (optimizer == NULL)
+			LOGE("Failed to create an optimizer.");
+
+               LOGI("LEAVE");
+
+               return optimizer;
+       }
+
+       int TrainingEngineCommon::SetOptimizerProperty(training_engine_optimizer *optimizer,
+                                                                                                  training_engine_optimizer_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(optimizer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->SetOptimizerProperty(optimizer, property);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+                       LOGE("Failed to set optimizer property.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       int TrainingEngineCommon::AddOptimizer(training_engine_model *model,
+                                                                                  training_engine_optimizer *optimizer)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(optimizer, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->AddOptimizer(model, optimizer);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+			LOGE("Failed to add the optimizer to the given model.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       training_engine_dataset *TrainingEngineCommon::CreateDataset(training_engine_dataset_type_e type)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, NULL);
+
+               if (TRAINING_DATASET_TYPE_MIN >= type || TRAINING_DATASET_TYPE_MAX <= type) {
+                       LOGE("Invalid dataset parameter.");
+                       return NULL;
+               }
+
+		// In the case of the nntrainer backend, the backend has to register callbacks
+		// for feeding user-given data by calling the ml_train_dataset_create_with_generator() API.
+               training_engine_dataset *dataset = mTraining->CreateDataset(type);
+               if (dataset == NULL)
+                       LOGE("Failed to create a dataset.");
+
+               LOGI("LEAVE");
+
+               return dataset;
+       }
+
+       void TrainingEngineCommon::DestroyDataset(training_engine_dataset *dataset)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ_NO_RETURN_VAL(mTraining);
+               TE_CHECK_OBJ_NO_RETURN_VAL(dataset);
+
+               mTraining->DestroyDataset(dataset);
+
+               LOGI("LEAVE");
+       }
+
+       int TrainingEngineCommon::SetDatasetProperty(training_engine_dataset *dataset,
+                                                                                                training_engine_dataset_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(dataset, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->SetDatasetProperty(dataset, property);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+                       LOGE("Failed to set dataset property.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       template <typename D, typename L>
+       int TrainingEngineCommon::AddDataToDataset(training_engine_dataset *dataset,
+                                                                                          D *data, size_t data_len, L *label, size_t label_len,
+                                                                                          training_engine_dataset_type_e type)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(dataset, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(data, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(label, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               if (type <= TRAINING_DATASET_TYPE_MIN || type >= TRAINING_DATASET_TYPE_MAX) {
+                       LOGE("Invalid dataset type.");
+                       return TRAINING_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+		// In the case of the nntrainer backend, the data and label buffers are
+		// appended to an internal queue of the backend. All queued entries are then
+		// passed to the nntrainer core by the callback registered in CreateDataset();
+		// that callback is invoked from the ml_train_model_run() API.
+               int ret = mTraining->AddDataToDataset(dataset, data, data_len, label, label_len, type);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+                       LOGE("Failed to add data to dataset.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
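+
+	// Caller-side sketch of the template above (illustrative only): the float
+	// element types, the buffer sizes and the dataset type value below are
+	// assumptions for the example, not values defined by this interface:
+	//
+	//   std::vector<float> data(28 * 28), label(10);
+	//   int err = engine->AddDataToDataset<float, float>(dataset,
+	//                                                    data.data(), data.size(),
+	//                                                    label.data(), label.size(),
+	//                                                    type /* a valid TRAINING_DATASET_TYPE_* value */);
+	//   if (err != TRAINING_ENGINE_ERROR_NONE)
+	//       /* handle the error */;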
+
+       int TrainingEngineCommon::SetDataset(training_engine_model *model,
+                                                                                training_engine_dataset *dataset)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+               TE_CHECK_OBJ(dataset, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->SetDataset(model, dataset);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+                       LOGE("Failed to set dataset.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       int TrainingEngineCommon::CompileModel(training_engine_model *model,
+                                                                                  training_engine_compile_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->CompileModel(model, property);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+			LOGE("Failed to compile the given model.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
+
+       int TrainingEngineCommon::TrainModel(training_engine_model *model,
+                                                                                training_engine_model_property &property)
+       {
+               LOGI("ENTER");
+
+               TE_CHECK_OBJ(mTraining, TRAINING_ENGINE_ERROR_INVALID_OPERATION);
+               TE_CHECK_OBJ(model, TRAINING_ENGINE_ERROR_INVALID_PARAMETER);
+
+               int ret = mTraining->TrainModel(model, property);
+               if (ret != TRAINING_ENGINE_ERROR_NONE)
+			LOGE("Failed to train the given model.");
+
+               LOGI("LEAVE");
+
+               return ret;
+       }
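+
+	// End-to-end usage sketch from an application's point of view (error checks
+	// omitted; the backbone/config paths and the property contents are hypothetical
+	// and backend-specific, the rest maps to the methods defined above):
+	//
+	//   TrainingEngineCommon engine;
+	//   training_engine_config config = { "nntrainer" };
+	//   engine.BindBackend(config);
+	//   training_engine_model *model = engine.CreateModel(backbone_path, model_config_path);
+	//   training_engine_layer *layer = engine.CreateLayer(TRAINING_LAYER_TYPE_FC);
+	//   engine.SetLayerProperty(layer, fc_property);            // e.g. "unit=10"
+	//   engine.AddLayer(model, layer);
+	//   engine.SetDataset(model, dataset);                      // filled via AddDataToDataset()
+	//   engine.CompileModel(model, compile_property);
+	//   engine.TrainModel(model, model_property);
+	//   engine.DestroyLayer(layer);
+	//   engine.DestroyModel(model);
+	//   engine.UnbindBackend();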
+
+} /* Common */
+} /* TrainingEngineInterface */
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
new file mode 100644 (file)
index 0000000..b139acc
--- /dev/null
@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 2.6)
+
+enable_testing()
+
+SET(TRAINING_ENGINE_INTERFACE_LIB_NAME "training-engine-interface-common")
+
+add_subdirectory(src)
diff --git a/test/src/CMakeLists.txt b/test/src/CMakeLists.txt
new file mode 100644 (file)
index 0000000..44d372f
--- /dev/null
@@ -0,0 +1,18 @@
+project(training_engine_test)
+
+set(TRAINING_ENGINE_TEST_CASE training_engine_tc)
+
+find_package(GTest REQUIRED)
+set(GTEST_LIBRARY gtest)
+
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/test/bin)
+
+add_executable(${TRAINING_ENGINE_TEST_CASE}
+               ${PROJECT_SOURCE_DIR}/training_engine_tc.cpp
+               )
+
+target_link_libraries(${TRAINING_ENGINE_TEST_CASE} ${GTEST_LIBRARY}
+                                      ${GTEST_MAIN_LIBRARY}
+                                      ${TRAINING_ENGINE_INTERFACE_LIB_NAME}
+                                      dl
+                                      )
diff --git a/test/src/test_gtest.cpp b/test/src/test_gtest.cpp
new file mode 100644 (file)
index 0000000..985ba1a
--- /dev/null
@@ -0,0 +1,26 @@
+/*
+ * test_gtest.cpp
+ *
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "gtest/gtest.h"
+
+int main(int argc, char **argv)
+{
+       ::testing::InitGoogleTest(&argc, argv);
+       int ret = RUN_ALL_TESTS();
+       return ret;
+}
diff --git a/test/src/training_engine_tc.cpp b/test/src/training_engine_tc.cpp
new file mode 100644 (file)
index 0000000..f15f39d
--- /dev/null
@@ -0,0 +1,129 @@
+/**
+ * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <tuple>
+#include <map>
+
+#include "gtest/gtest.h"
+
+#include "training_engine_error.h"
+#include "training_engine_common_impl.h"
+
+using namespace TrainingEngineInterface::Common;
+
+static const char *LayerNames[TRAINING_LAYER_TYPE_MAX + 1] = {
+       "",
+       "Input",
+       "Fully Connected",
+       "Batch Normalization",
+       "Convolution 2D",
+       "Pooling 2D",
+       "Flatten",
+       "Activation",
+       "Addition",
+       "Concat",
+       "MultiOut",
+       "L2NORM",
+       "Centroid K-Nearest Neighbor",
+       ""
+};
+
+typedef std::tuple<std::string> ParamType_One;
+typedef std::tuple<std::string, training_engine_layer_type_e, std::vector<std::string>> ParamType_Three;
+
+class TrainingEngineTestCase_G1 : public testing::TestWithParam<ParamType_One> {};
+class TrainingEngineTestCase_G3 : public testing::TestWithParam<ParamType_Three> {};
+
+TEST_P(TrainingEngineTestCase_G1, InitBackendP)
+{
+       std::string backend_name;
+
+       std::tie(backend_name) = GetParam();
+
+       std::cout << "backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<TrainingEngineCommon>();
+       ASSERT_TRUE(engine);
+
+	training_engine_config config = { backend_name };
+       int ret = engine->BindBackend(config);
+       ASSERT_EQ(ret, TRAINING_ENGINE_ERROR_NONE);
+
+       training_engine_capacity capacity = { TRAINING_TENSOR_SHAPE_MIN };
+       ret = engine->GetBackendCapacity(capacity);
+       ASSERT_EQ(ret, TRAINING_ENGINE_ERROR_NONE);
+
+       engine->UnbindBackend();
+}
+
+TEST_P(TrainingEngineTestCase_G3, LayerTestP)
+{
+       std::string backend_name;
+       training_engine_layer_type_e type;
+       std::vector<std::string> options;
+
+       std::tie(backend_name, type, options) = GetParam();
+
+	std::cout << "backend = " << backend_name << ", layer = " << LayerNames[static_cast<int>(type)] << std::endl;
+
+       auto engine = std::make_unique<TrainingEngineCommon>();
+       ASSERT_TRUE(engine);
+
+	training_engine_config config = { backend_name };
+       int ret = engine->BindBackend(config);
+       ASSERT_EQ(ret, TRAINING_ENGINE_ERROR_NONE);
+
+       training_engine_capacity capacity = { TRAINING_TENSOR_SHAPE_MIN };
+       ret = engine->GetBackendCapacity(capacity);
+       ASSERT_EQ(ret, TRAINING_ENGINE_ERROR_NONE);
+
+       training_engine_model *model = engine->CreateModel();
+       ASSERT_TRUE(model);
+
+       training_engine_layer *layer = engine->CreateLayer(type);
+       ASSERT_TRUE(layer);
+
+	for (const auto &option : options) {
+		training_engine_layer_property property = { .option = option };
+		ret = engine->SetLayerProperty(layer, property);
+		ASSERT_EQ(ret, TRAINING_ENGINE_ERROR_NONE);
+	}
+
+       ret = engine->AddLayer(model, layer);
+       ASSERT_EQ(ret, TRAINING_ENGINE_ERROR_NONE);
+
+       engine->DestroyLayer(layer);
+       engine->DestroyModel(model);
+
+       engine->UnbindBackend();
+}
+
+INSTANTIATE_TEST_CASE_P(Prefix, TrainingEngineTestCase_G1,
+                                               testing::Values(
+                                                               // parameter order : backend name
+                                                               // NNTRAINER.
+                                                               ParamType_One("nntrainer")
+                                                               /* TODO */
+                                                               ));
+
+INSTANTIATE_TEST_CASE_P(Prefix, TrainingEngineTestCase_G3,
+                                               testing::Values(
+                                                               // parameter order : backend name, layer type, property string
+                                                               // NNTRAINER.
+                                                               ParamType_Three("nntrainer", TRAINING_LAYER_TYPE_FC, { "unit=10", "activation=softmax", "bias_initializer=zeros" })
+                                                               /* TODO */
+                                                               ));
diff --git a/training-engine-interface-common.manifest b/training-engine-interface-common.manifest
new file mode 100644 (file)
index 0000000..86dbb26
--- /dev/null
@@ -0,0 +1,5 @@
+<manifest>
+    <request>
+        <domain name="_" />
+    </request>
+</manifest>
diff --git a/training-engine-interface-common.pc.in b/training-engine-interface-common.pc.in
new file mode 100644 (file)
index 0000000..e7cd18f
--- /dev/null
@@ -0,0 +1,14 @@
+
+# Package Information for pkg-config
+
+prefix=@PREFIX@
+exec_prefix=/usr
+libdir=@LIB_INSTALL_DIR@
+includedir=/usr/include/media
+
+Name: @PC_NAME@
+Description: @PACKAGE_DESCRIPTION@
+Version: @VERSION@
+Requires: @PC_REQUIRED@
+Libs: -L${libdir} @PC_LDFLAGS@
+Cflags: -I${includedir} -I/usr/include