/**
* @brief layer plugin common test cases
- * @todo expand this to typed_test_p for other types of example (eg) optimizer
*
*/
class LayerPluginCommonTest
--- /dev/null
+# build command for libmomentum_optimizer.so
+# -DPLUGGABLE compiles in the extern "C" pluggable entry point (see momentum.cpp)
+momentum_optimizer = shared_library('momentum_optimizer',
+ optimizer_example_momentum_src,
+ dependencies: [nntrainer_dep, nntrainer_ccapi_dep],
+ include_directories: layer_example_inc,
+ install: true,
+ install_dir: application_install_dir,
+ cpp_args: '-DPLUGGABLE'
+)
+
+# dependency object so that the test executable below can link against the
+# plugin and see its headers
+momentum_optimizer_dep = declare_dependency(
+ link_with: momentum_optimizer,
+ include_directories: optimizer_example_inc
+ )
+
+if get_option('enable-test')
+  test_target = [
+    'optimizer_plugin_common_test.cpp',
+    'optimizer_plugin_momentum_test.cpp'
+  ]
+
+  exe = executable(
+    'app_optimizer_plugin_test', test_target,
+    dependencies: [
+      gtest_main_dep,
+      nntrainer_dep,
+      nntrainer_ccapi_dep,
+      nntrainer_testutil_dep,
+      momentum_optimizer_dep,
+    ],
+    install: get_option('enable-test'),
+    install_dir: application_install_dir,
+    include_directories: optimizer_example_inc
+  )
+  # NNTRAINER_PATH tells the test where the freshly built plugin .so lives
+  testenv = environment()
+  testenv.set('NNTRAINER_PATH', meson.current_build_dir())
+  test('app_plugin_test', exe, env: testenv)
+endif
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file optimizer_plugin_common_test.cpp
+ * @date 30 March 2023
+ * @brief This file contains the parameterized common test of optimizer plugin
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+#include <optimizer_plugin_common_test.h>
+
+#include <fstream>
+#include <iostream>
+#include <optimizer.h>
+
+/**
+ * @brief registering the plugin library by explicit path, then creating the
+ * optimizer via the app context, must yield an object of the registered type
+ */
+TEST_P(OptimizerPluginCommonTest, DlRegisterOpen_p) {
+ ac.registerOptimizer(plugin_lib_name, NNTRAINER_PATH);
+ auto optimizer = ac.createObject<nntrainer::Optimizer>(optimizer_type_name);
+
+ EXPECT_EQ(optimizer->getType(), optimizer_type_name);
+}
+
+/**
+ * @brief registering a library that does not exist must throw
+ */
+TEST_P(OptimizerPluginCommonTest, DlRegisterWrongPath_n) {
+ EXPECT_THROW(ac.registerOptimizer("wrong_name.so"), std::invalid_argument);
+}
+
+/**
+ * @brief registering every pluggable found in a directory must make the
+ * optimizer creatable by its type name
+ */
+TEST_P(OptimizerPluginCommonTest, DlRegisterDirectory_p) {
+ ac.registerPluggableFromDirectory(NNTRAINER_PATH);
+ auto optimizer = ac.createObject<nntrainer::Optimizer>(optimizer_type_name);
+ EXPECT_EQ(optimizer->getType(), optimizer_type_name);
+}
+
+/**
+ * @brief registering from a non-existing directory must throw
+ */
+TEST_P(OptimizerPluginCommonTest, DlRegisterDirectory_n) {
+ EXPECT_THROW(ac.registerPluggableFromDirectory("wrong path"),
+ std::invalid_argument);
+}
+
+TEST_P(OptimizerPluginCommonTest, DefaultEnvironmentPath_p) {
+ /// as NNTRAINER_PATH is fed to the test, this should success without an
+ /// error
+ std::shared_ptr<ml::train::Optimizer> opt =
+ ml::train::createOptimizer(optimizer_type_name);
+ EXPECT_EQ(opt->getType(), optimizer_type_name);
+}
+
+/**
+ * @brief an unknown key must not silently fall back to any default optimizer
+ */
+TEST_P(OptimizerPluginCommonTest, DefaultEnvironmentPathOptimizerNotExist_n) {
+ EXPECT_THROW(ml::train::createOptimizer("key_does_not_exist"),
+ std::invalid_argument);
+}
+
+TEST_P(OptimizerSemantics, DISABLED_setProperties_p) {
+ /// @todo check if setProperties does not collide with layerNode designated
+ /// properties
+ EXPECT_EQ(1, 1); /**< no assert tc from TCM, this is disabled test */
+}
+
+TEST_P(OptimizerSemantics, setProperties_n) {
+ /** must not crash */
+ EXPECT_THROW(opt->setProperty({"unknown_props=2"}), std::invalid_argument);
+}
+
+TEST_P(OptimizerSemantics, DISABLED_setPropertiesValidWithInvalid_n) {
+ EXPECT_EQ(1, 1); /**< no assert tc from TCM, this is disabled test */
+}
+
+/**
+ * @brief getType() must always return a non-empty type string
+ */
+TEST_P(OptimizerSemantics, gettersValidate_p) {
+ std::string type;
+
+ EXPECT_NO_THROW(type = opt->getType());
+ EXPECT_GT(type.size(), size_t(0));
+}
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file optimizer_plugin_common_test.h
+ * @date 31 March 2023
+ * @brief This file contains the parameterized common test of optimizer plugin
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+#ifndef __OPTIMIZER_PLUGIN_COMMON_TEST_H__
+#define __OPTIMIZER_PLUGIN_COMMON_TEST_H__
+#include <algorithm>
+#include <cstdlib>
+#include <functional>
+#include <memory>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include <gtest/gtest.h>
+
+#include <app_context.h>
+#include <optimizer.h>
+
+/** plugin search path handed over by the test runner, may be null when the
+ * environment variable is not set (checked in SetUp) */
+static const char *NNTRAINER_PATH = std::getenv("NNTRAINER_PATH");
+
+/** factory signature used to create the optimizer under test */
+using OptimizerFactoryType =
+ std::function<std::unique_ptr<nntrainer::Optimizer>(
+ const std::vector<std::string> &)>;
+
+using OptimizerSemanticsParamType =
+ std::tuple<OptimizerFactoryType /** optimizer factory */,
+ std::string /** Type of Optimizer */,
+ std::vector<std::string> /** Necessary Properties */,
+ unsigned int /** Options */, bool /** fail or succeed */,
+ unsigned int /** number of inputs */
+ >;
+
+/**
+ * @brief Optimizer Semantics class
+ *
+ */
+class OptimizerSemantics
+ : public ::testing::TestWithParam<OptimizerSemanticsParamType> {
+public:
+ /**
+ * @brief Destroy the Optimizer Semantics object
+ *
+ */
+ virtual ~OptimizerSemantics() {}
+
+ /**
+ * @brief SetUp test cases here
+ *
+ */
+ void SetUp() override {
+ auto f = std::get<0>(GetParam());
+ /** f({}) is a prvalue; assigning directly enables move/elision without a
+ * redundant std::move */
+ opt = f({});
+ std::tie(std::ignore, expected_type, valid_properties, options, must_fail,
+ num_inputs) = GetParam();
+
+ /** tests always exercise at least one input */
+ num_inputs = std::max(1u, num_inputs);
+ }
+
+ /**
+ * @brief do here if any memory needs to be released
+ *
+ */
+ void TearDown() override {}
+
+protected:
+ std::unique_ptr<nntrainer::Optimizer> opt; /**< optimizer under test */
+ std::string expected_type;                 /**< expected type string */
+ std::vector<std::string> valid_properties; /**< properties known to work */
+ unsigned int options;                      /**< test options bitfield */
+ bool must_fail;                            /**< whether the tc must fail */
+ unsigned int num_inputs;                   /**< number of inputs, >= 1 */
+};
+
+/**
+ * @brief optimizer plugin common test cases
+ *
+ */
+class OptimizerPluginCommonTest
+ : public ::testing::TestWithParam<std::tuple<std::string, std::string>> {
+
+public:
+ /**
+ * @brief SetUp test cases here
+ *
+ */
+ void SetUp() override {
+ ASSERT_NE(NNTRAINER_PATH, nullptr)
+ << "NNTRAINER_PATH environment value must be set";
+
+ const auto &params = GetParam();
+ plugin_lib_name = std::get<0>(params);
+ optimizer_type_name = std::get<1>(params);
+ /** fresh context per test so registrations do not leak between cases */
+ ac = nntrainer::AppContext();
+ }
+
+ /**
+ * @brief do here if any memory needs to be released
+ *
+ */
+ void TearDown() override {}
+
+protected:
+ nntrainer::AppContext ac;    /**< AppContext */
+ std::string plugin_lib_name; /**< plugin library name */
+ std::string optimizer_type_name; /**< optimizer type name */
+};
+#endif // __OPTIMIZER_PLUGIN_COMMON_TEST_H__
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file optimizer_plugin_momentum_test.cpp
+ * @date 30 March 2023
+ * @brief This file contains the execution part of momentum optimizer in
+ * LayerPlugin example
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+#include <tuple>
+
+#include <gtest/gtest.h>
+
+#include <momentum.h>
+#include <optimizer_plugin_common_test.h>
+
+// older gtest releases only provide INSTANTIATE_TEST_CASE_P
+#ifdef GTEST_BACKPORT
+#define GTEST_PARAMETER_TEST INSTANTIATE_TEST_CASE_P
+#else
+#define GTEST_PARAMETER_TEST INSTANTIATE_TEST_SUITE_P
+#endif
+
+// dlopen/registration path: library file name + registered type key
+GTEST_PARAMETER_TEST(
+ Momentum, OptimizerPluginCommonTest,
+ ::testing::Values(std::make_tuple("libmomentum_optimizer.so", "momentum")));
+// semantics tests: direct factory construction, no properties
+auto semantic_momentum =
+ OptimizerSemanticsParamType(nntrainer::createOptimizer<custom::Momentum>,
+ custom::Momentum::type, {}, 0, false, 1);
+
+// semantics tests: direct factory construction with a momentum property set
+auto semantic_momentum_m = OptimizerSemanticsParamType(
+ nntrainer::createOptimizer<custom::Momentum>, custom::Momentum::type,
+ {"momentum=0.03"}, 0, false, 1);
+
+GTEST_PARAMETER_TEST(Momentum, OptimizerSemantics,
+ ::testing::Values(semantic_momentum, semantic_momentum_m));
layer_example_pow_src = files('pow.cpp')
layer_example_mae_src = files('mae_loss.cpp')
layer_example_rnnt_src = files('rnnt_loss.cpp')
+# momentum optimizer plugin sources, built in OptimizerPlugin/
+optimizer_example_momentum_src = files('momentum.cpp')
layer_example_inc = include_directories('./')
+optimizer_example_inc = include_directories('./')
subdir('LayerClient/jni')
subdir('LayerPlugin')
+subdir('OptimizerPlugin')
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file momentum.cpp
+ * @date 31 March 2023
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is the Momentum optimizer.
+ */
+
+#include <cmath>
+#include <fstream>
+
+#include <momentum.h>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <node_exporter.h>
+
+namespace custom {
+
+Momentum::Momentum() : momentum_props(PropsM()) {
+ /** default momentum coefficient used when the user sets nothing */
+ auto &[m] = momentum_props;
+ m.set(0.9f);
+}
+
+/** index of the per-weight optimizer variable: the accumulated velocity */
+enum MomentumParams { wm };
+
+std::vector<ml::train::TensorDim>
+Momentum::getOptimizerVariableDim(const ml::train::TensorDim &dim) {
+ /** momentum needs one extra tensor (velocity) shaped like the weight */
+ return {dim};
+}
+
+void Momentum::exportTo(nntrainer::Exporter &exporter,
+ const ml::train::ExportMethods &method) const {
+ exporter.saveResult(momentum_props, method, this);
+ Optimizer::exportTo(exporter, method);
+}
+
+void Momentum::applyGradient(nntrainer::RunOptimizerContext &context) {
+
+ auto &m = std::get<PropsM>(momentum_props).get();
+
+ nntrainer::Tensor &x_grad = context.getGradient();
+ nntrainer::Tensor &accumulated =
+ context.getOptimizerVariable(MomentumParams::wm);
+
+ /** velocity = m * velocity + gradient */
+ accumulated.multiply_i(m);
+ accumulated.add_i(x_grad);
+
+ /** apply the accumulated velocity in place of the raw gradient */
+ x_grad.fill(accumulated);
+ context.applyGradient(context.getLearningRate());
+}
+
+void Momentum::setProperty(const std::vector<std::string> &values) {
+ /** consume momentum-specific keys; delegate leftovers to the base class */
+ auto left = loadProperties(values, momentum_props);
+ Optimizer::setProperty(left);
+}
+
+#ifdef PLUGGABLE
+
+/**
+ * @brief factory for the pluggable entry point
+ * @return heap-allocated momentum optimizer, ownership passed to the caller
+ */
+nntrainer::Optimizer *create_momentum_optimizer() {
+ auto optimizer = new Momentum();
+ return optimizer;
+}
+
+/**
+ * @brief destroyer counterpart of create_momentum_optimizer()
+ */
+void destroy_momentum_optimizer(nntrainer::Optimizer *optimizer) {
+ delete optimizer;
+}
+
+/**
+ * @brief symbol nntrainer looks up when loading this plugin library
+ */
+extern "C" {
+nntrainer::OptimizerPluggable ml_train_optimizer_pluggable{
+ create_momentum_optimizer, destroy_momentum_optimizer};
+}
+
+#endif
+
+} // namespace custom
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file momentum.h
+ * @date 31 March 2023
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is the Momentum optimizer.
+ */
+#ifndef __MOMENTUM_H__
+#define __MOMENTUM_H__
+#ifdef __cplusplus
+#include <tuple>
+
+#include <base_properties.h>
+#include <node_exporter.h>
+#include <optimizer_context.h>
+#include <optimizer_devel.h>
+
+namespace custom {
+
+/**
+ * @brief momentum property, coefficient applied to the accumulated velocity
+ */
+class PropsM : public nntrainer::Property<double> {
+public:
+ static constexpr const char *key = "momentum"; /**< unique key to access */
+ using prop_tag = nntrainer::double_prop_tag; /**< property type */
+};
+
+/**
+ * @class Momentum optimizer class
+ * @brief Momentum optimizer
+ */
+class Momentum final : public nntrainer::Optimizer {
+public:
+ /**
+ * @brief Constructor of Optimizer Class
+ */
+ Momentum();
+
+ /**
+ * @brief Destructor of Optimizer Class
+ */
+ ~Momentum() = default;
+
+ /**
+ * @copydoc Optimizer::getDefaultLearningRate()
+ */
+ double getDefaultLearningRate() const override { return 0.001; }
+
+ /**
+ * @copydoc applyGradient(Weight &weight, int tensor_idx, double updated_lr,
+ * int iteration)
+ */
+ void applyGradient(nntrainer::RunOptimizerContext &context) override;
+
+ /**
+ * @copydoc Optimizer::getType()
+ */
+ const std::string getType() const override { return Momentum::type; }
+
+ /**
+ * @copydoc Optimizer::getOptimizerVariableDim(const TensorDim &dim)
+ */
+ std::vector<ml::train::TensorDim>
+ getOptimizerVariableDim(const ml::train::TensorDim &dim) override;
+
+ /**
+ * @copydoc Optimizer::exportTo(Exporter &exporter, const
+ * ml::train::ExportMethods& method)
+ */
+ void exportTo(nntrainer::Exporter &exporter,
+ const ml::train::ExportMethods &method) const override;
+
+ /**
+ * @copydoc Optimizer::setProperty(const std::vector<std::string> &values)
+ */
+ void setProperty(const std::vector<std::string> &values) override;
+
+ inline static const std::string type = "momentum"; /**< registration key */
+
+private:
+ std::tuple<PropsM> momentum_props; /** momentum for grad */
+};
+} // namespace custom
+
+#endif /* __cplusplus */
+#endif /* __MOMENTUM_H__ */
/usr/include/nntrainer/layer_impl.h
# custom layer kits
/usr/include/nntrainer/app_context.h
+# logger
+/usr/include/nntrainer/nntrainer_log.h
+/usr/include/nntrainer/nntrainer_logger.h
# optimizer headers
/usr/include/nntrainer/optimizer_context.h
/usr/include/nntrainer/optimizer_devel.h
nntrainer_sources = []
nntrainer_headers = [
+ meson.current_source_dir() / 'nntrainer_log.h',
+ meson.current_source_dir() / 'nntrainer_logger.h',
meson.current_source_dir() / 'nntrainer_error.h',
meson.current_source_dir() / 'app_context.h',
]
%{_includedir}/nntrainer/optimizer_context.h
%{_includedir}/nntrainer/optimizer_devel.h
%{_includedir}/nntrainer/lr_scheduler.h
+# for logging
+%{_includedir}/nntrainer/nntrainer_log.h
+%{_includedir}/nntrainer/nntrainer_logger.h
%{_libdir}/pkgconfig/nntrainer.pc
# update this to enable external applications
# @todo filter out headers that should be hidden, and classifiy in the appropriate place if not