From: jijoong.moon
Date: Fri, 31 Mar 2023 00:23:02 +0000 (+0900)
Subject: [ Custom ] Add custom optimizer example in Application
X-Git-Tag: accepted/tizen/8.0/unified/20231005.093407~172
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=6e2dd16079f8de6ed67e15acf36c284b471b254b;p=platform%2Fcore%2Fml%2Fnntrainer.git

[ Custom ] Add custom optimizer example in Application

This PR includes the custom optimizer 'momentum' example in
Applications/Custom. It adds the test cases and a demo implementation.

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon
---

diff --git a/Applications/Custom/LayerPlugin/layer_plugin_common_test.h b/Applications/Custom/LayerPlugin/layer_plugin_common_test.h
index 8c600af..a9f0db1 100644
--- a/Applications/Custom/LayerPlugin/layer_plugin_common_test.h
+++ b/Applications/Custom/LayerPlugin/layer_plugin_common_test.h
@@ -25,7 +25,6 @@ static const char *NNTRAINER_PATH = std::getenv("NNTRAINER_PATH");
 
 /**
  * @brief layer plugin common test cases
- * @todo expand this to typed_test_p for other types of example (eg) optimizer
  *
  */
 class LayerPluginCommonTest
diff --git a/Applications/Custom/OptimizerPlugin/meson.build b/Applications/Custom/OptimizerPlugin/meson.build
new file mode 100644
index 0000000..7efb69e
--- /dev/null
+++ b/Applications/Custom/OptimizerPlugin/meson.build
@@ -0,0 +1,38 @@
+# build command for libmomentum_optimizer.so
+momentum_optimizer = shared_library('momentum_optimizer',
+  optimizer_example_momentum_src,
+  dependencies: [nntrainer_dep, nntrainer_ccapi_dep],
+  include_directories: layer_example_inc,
+  install: true,
+  install_dir: application_install_dir,
+  cpp_args: '-DPLUGGABLE'
+)
+
+momentum_optimizer_dep = declare_dependency(
+  link_with: momentum_optimizer,
+  include_directories: optimizer_example_inc
+  )
+
+if get_option('enable-test')
+  test_target = [
+    'optimizer_plugin_common_test.cpp',
+    'optimizer_plugin_momentum_test.cpp'
+  ]
+
+  exe = executable(
+    'app_optimizer_plugin_test', test_target,
+    dependencies: [
+      gtest_main_dep,
+      nntrainer_dep,
+      nntrainer_ccapi_dep,
+      nntrainer_testutil_dep,
+      momentum_optimizer_dep,
+    ],
+    install: get_option('enable-test'),
+    install_dir: application_install_dir,
+    include_directories: optimizer_example_inc
+  )
+  testenv = environment()
+  testenv.set('NNTRAINER_PATH', meson.current_build_dir())
+  test('app_plugin_test', exe, env: testenv)
+endif
diff --git a/Applications/Custom/OptimizerPlugin/optimizer_plugin_common_test.cpp b/Applications/Custom/OptimizerPlugin/optimizer_plugin_common_test.cpp
new file mode 100644
index 0000000..592b4c2
--- /dev/null
+++ b/Applications/Custom/OptimizerPlugin/optimizer_plugin_common_test.cpp
@@ -0,0 +1,74 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon
+ *
+ * @file optimizer_plugin_common_test.cpp
+ * @date 30 March 2023
+ * @brief This file contains the parameterized common test of optimizer plugin
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon
+ * @bug No known bugs except for NYI items
+ *
+ */
+#include <gtest/gtest.h>
+
+#include <app_context.h>
+#include <optimizer.h>
+#include <optimizer_plugin_common_test.h>
+
+TEST_P(OptimizerPluginCommonTest, DlRegisterOpen_p) {
+  ac.registerOptimizer(plugin_lib_name, NNTRAINER_PATH);
+  auto optimizer = ac.createObject<nntrainer::Optimizer>(optimizer_type_name);
+
+  EXPECT_EQ(optimizer->getType(), optimizer_type_name);
+}
+
+TEST_P(OptimizerPluginCommonTest, DlRegisterWrongPath_n) {
+  EXPECT_THROW(ac.registerOptimizer("wrong_name.so"), std::invalid_argument);
+}
+
+TEST_P(OptimizerPluginCommonTest, DlRegisterDirectory_p) {
+  ac.registerPluggableFromDirectory(NNTRAINER_PATH);
+  auto optimizer = ac.createObject<nntrainer::Optimizer>(optimizer_type_name);
+  EXPECT_EQ(optimizer->getType(), optimizer_type_name);
+}
+
+TEST_P(OptimizerPluginCommonTest, DlRegisterDirectory_n) {
+  EXPECT_THROW(ac.registerPluggableFromDirectory("wrong path"),
+               std::invalid_argument);
+}
+
+TEST_P(OptimizerPluginCommonTest, DefaultEnvironmentPath_p) {
+  /// as NNTRAINER_PATH is fed to the test, this should succeed without an
+  /// error
+  std::shared_ptr<ml::train::Optimizer> opt =
+    ml::train::createOptimizer(optimizer_type_name);
+  EXPECT_EQ(opt->getType(), optimizer_type_name);
+}
+
+TEST_P(OptimizerPluginCommonTest, DefaultEnvironmentPathOptimizerNotExist_n) {
+  EXPECT_THROW(ml::train::createOptimizer("key_does_not_exist"),
+               std::invalid_argument);
+}
+
+TEST_P(OptimizerSemantics, DISABLED_setProperties_p) {
+  /// @todo check if setProperties does not collide with layerNode designated
+  /// properties
+  EXPECT_EQ(1, 1); /**< no assert tc from TCM, this is disabled test */
+}
+
+TEST_P(OptimizerSemantics, setProperties_n) {
+  /** must not crash */
+  EXPECT_THROW(opt->setProperty({"unknown_props=2"}), std::invalid_argument);
+}
+
+TEST_P(OptimizerSemantics, DISABLED_setPropertiesValidWithInvalid_n) {
+  EXPECT_EQ(1, 1); /**< no assert tc from TCM, this is disabled test */
+}
+
+TEST_P(OptimizerSemantics, gettersValidate_p) {
+  std::string type;
+
+  EXPECT_NO_THROW(type = opt->getType());
+  EXPECT_GT(type.size(), size_t(0));
+}
diff --git a/Applications/Custom/OptimizerPlugin/optimizer_plugin_common_test.h b/Applications/Custom/OptimizerPlugin/optimizer_plugin_common_test.h
new file mode 100644
index 0000000..0f988c9
--- /dev/null
+++ b/Applications/Custom/OptimizerPlugin/optimizer_plugin_common_test.h
@@ -0,0 +1,112 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon
+ *
+ * @file optimizer_plugin_common_test.h
+ * @date 31 March 2023
+ * @brief This file contains the parameterized common test of optimizer plugin
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee
+ * @author Jijoong Moon
+ * @bug No known bugs except for NYI items
+ *
+ */
+#ifndef __OPTIMIZER_PLUGIN_COMMON_TEST_H__
+#define __OPTIMIZER_PLUGIN_COMMON_TEST_H__
+#include <string>
+#include <tuple>
+
+#include <gtest/gtest.h>
+
+#include <app_context.h>
+#include <optimizer_devel.h>
+
+static const char *NNTRAINER_PATH = std::getenv("NNTRAINER_PATH");
+
+using OptimizerFactoryType =
+  std::function<std::unique_ptr<nntrainer::Optimizer>(
+    const std::vector<std::string> &)>;
+
+using OptimizerSemanticsParamType =
+  std::tuple<OptimizerFactoryType, std::string,
+             std::vector<std::string> /** Necessary Properties */,
+             unsigned int /** Options */, bool /** fail or succeed */,
+             unsigned int /** number of inputs */
+             >;
+
+/**
+ * @brief Optimizer Semantics class
+ *
+ */
+class OptimizerSemantics
+  : public ::testing::TestWithParam<OptimizerSemanticsParamType> {
+public:
+  /**
+   * @brief Destroy the Optimizer Semantics object
+   *
+   */
+  virtual ~OptimizerSemantics() {}
+
+  /**
+   * @brief SetUp test cases here
+   *
+   */
+  virtual void SetUp() {
+    auto f = std::get<0>(GetParam());
+    opt = std::move(f({}));
+    std::tie(std::ignore, expected_type, valid_properties, options, must_fail,
+             num_inputs) = GetParam();
+
+    num_inputs = std::max(1u, num_inputs);
+  }
+
+  /**
+   * @brief do here if any memory needs to be released
+   *
+   */
+  virtual void TearDown() {}
+
+protected:
+  std::unique_ptr<nntrainer::Optimizer> opt;
+  std::string expected_type;
+  std::vector<std::string> valid_properties;
+  unsigned int options;
+  bool must_fail;
+  unsigned int num_inputs;
+};
+
+/**
+ * @brief optimizer plugin common test cases
+ *
+ */
+class OptimizerPluginCommonTest
+  : public ::testing::TestWithParam<std::tuple<std::string, std::string>> {
+
+public:
+  /**
+   * @brief SetUp test cases here
+   *
+   */
+  virtual void SetUp() {
+    ASSERT_NE(NNTRAINER_PATH, nullptr)
+      << "NNTRAINER_PATH environment value must be set";
+
+    const auto &params = GetParam();
+    plugin_lib_name = std::get<0>(params);
+    optimizer_type_name = std::get<1>(params);
+    ac = nntrainer::AppContext();
+  };
+
+  /**
+   * @brief do here if any memory needs to be released
+   *
+   */
+  virtual void TearDown(){};
+
+protected:
+  nntrainer::AppContext ac;        /**< AppContext */
+  std::string plugin_lib_name;     /**< plugin library name */
+  std::string optimizer_type_name; /**< optimizer type name */
+};
+#endif // __OPTIMIZER_PLUGIN_COMMON_TEST_H__
diff --git a/Applications/Custom/OptimizerPlugin/optimizer_plugin_momentum_test.cpp b/Applications/Custom/OptimizerPlugin/optimizer_plugin_momentum_test.cpp
new file mode 100644
index 0000000..3ae1b59
--- /dev/null
+++ b/Applications/Custom/OptimizerPlugin/optimizer_plugin_momentum_test.cpp
@@ -0,0 +1,39 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon
+ *
+ * @file optimizer_plugin_momentum_test.cpp
+ * @date 30 March 2023
+ * @brief This file contains the execution part of momentum optimizer in
+ *        OptimizerPlugin example
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon
+ * @bug No known bugs except for NYI items
+ *
+ */
+#include <tuple>
+
+#include <gtest/gtest.h>
+
+#include <momentum.h>
+#include <optimizer_plugin_common_test.h>
+
+#ifdef GTEST_BACKPORT
+#define GTEST_PARAMETER_TEST INSTANTIATE_TEST_CASE_P
+#else
+#define GTEST_PARAMETER_TEST INSTANTIATE_TEST_SUITE_P
+#endif
+
+GTEST_PARAMETER_TEST(
+  Momentum, OptimizerPluginCommonTest,
+  ::testing::Values(std::make_tuple("libmomentum_optimizer.so", "momentum")));
+auto semantic_momentum =
+  OptimizerSemanticsParamType(nntrainer::createOptimizer<custom::Momentum>,
+                              custom::Momentum::type, {}, 0, false, 1);
+
+auto semantic_momentum_m = OptimizerSemanticsParamType(
+  nntrainer::createOptimizer<custom::Momentum>, custom::Momentum::type,
+  {"momentum=0.03"}, 0, false, 1);
+
+GTEST_PARAMETER_TEST(Momentum, OptimizerSemantics,
+                     ::testing::Values(semantic_momentum, semantic_momentum_m));
diff --git a/Applications/Custom/meson.build b/Applications/Custom/meson.build
index 4aa6f8c..f3e1760 100644
--- a/Applications/Custom/meson.build
+++ b/Applications/Custom/meson.build
@@ -2,8 +2,11 @@ layer_example_pow_src = files('pow.cpp')
 layer_example_mae_src = files('mae_loss.cpp')
 layer_example_rnnt_src = files('rnnt_loss.cpp')
+optimizer_example_momentum_src = files('momentum.cpp')
 
 layer_example_inc = include_directories('./')
+optimizer_example_inc = include_directories('./')
 
 subdir('LayerClient/jni')
 subdir('LayerPlugin')
+subdir('OptimizerPlugin')
 
diff --git a/Applications/Custom/momentum.cpp b/Applications/Custom/momentum.cpp
new file mode 100644
index 0000000..807f2fc
--- /dev/null
+++ b/Applications/Custom/momentum.cpp
@@ -0,0 +1,81 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon
+ *
+ * @file momentum.cpp
+ * @date 31 March 2023
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor
+ * @author Jijoong Moon
+ * @bug No known bugs except for NYI items
+ * @brief This is the Momentum optimizer.
+ */
+
+#include <cmath>
+#include <fstream>
+
+#include <momentum.h>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <node_exporter.h>
+
+namespace custom {
+
+Momentum::Momentum() : momentum_props(PropsM()) {
+  /** default properties */
+  auto &[m] = momentum_props;
+  m.set(0.9f);
+}
+
+enum MomentumParams { wm };
+
+std::vector<ml::train::TensorDim>
+Momentum::getOptimizerVariableDim(const ml::train::TensorDim &dim) {
+  return {dim};
+}
+
+void Momentum::exportTo(nntrainer::Exporter &exporter,
+                        const ml::train::ExportMethods &method) const {
+  exporter.saveResult(momentum_props, method, this);
+  Optimizer::exportTo(exporter, method);
+}
+
+void Momentum::applyGradient(nntrainer::RunOptimizerContext &context) {
+
+  auto &m = std::get<PropsM>(momentum_props).get();
+
+  nntrainer::Tensor &x_grad = context.getGradient();
+  nntrainer::Tensor &accumulated =
+    context.getOptimizerVariable(MomentumParams::wm);
+
+  accumulated.multiply_i(m);
+  accumulated.add_i(x_grad);
+
+  x_grad.fill(accumulated);
+  context.applyGradient(context.getLearningRate());
+}
+
+void Momentum::setProperty(const std::vector<std::string> &values) {
+  auto left = loadProperties(values, momentum_props);
+  Optimizer::setProperty(left);
+}
+
+#ifdef PLUGGABLE
+
+nntrainer::Optimizer *create_momentum_optimizer() {
+  auto optimizer = new Momentum();
+  return optimizer;
+}
+
+void destroy_momentum_optimizer(nntrainer::Optimizer *optimizer) {
+  delete optimizer;
+}
+
+extern "C" {
+nntrainer::OptimizerPluggable ml_train_optimizer_pluggable{
+  create_momentum_optimizer, destroy_momentum_optimizer};
+}
+
+#endif
+
+} // namespace custom
diff --git a/Applications/Custom/momentum.h b/Applications/Custom/momentum.h
new file mode 100644
index 0000000..ed14e65
--- /dev/null
+++ b/Applications/Custom/momentum.h
@@ -0,0 +1,89 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2023 Jijoong Moon
+ *
+ * @file momentum.h
+ * @date 31 March 2023
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor
+ * @author Jijoong Moon
+ * @bug No known bugs except for NYI items
+ * @brief This is the Momentum optimizer.
+ */
+#ifndef __MOMENTUM_H__
+#define __MOMENTUM_H__
+#ifdef __cplusplus
+#include <tuple>
+
+#include <base_properties.h>
+#include <node_exporter.h>
+#include <optimizer_context.h>
+#include <optimizer_devel.h>
+
+namespace custom {
+
+/**
+ * @brief momentum property
+ */
+class PropsM : public nntrainer::Property<double> {
+public:
+  static constexpr const char *key = "momentum"; /**< unique key to access */
+  using prop_tag = nntrainer::double_prop_tag;   /**< property type */
+};
+
+/**
+ * @class Momentum optimizer class
+ * @brief Momentum optimizer
+ */
+class Momentum final : public nntrainer::Optimizer {
+public:
+  /**
+   * @brief Constructor of Optimizer Class
+   */
+  Momentum();
+
+  /**
+   * @brief Destructor of Optimizer Class
+   */
+  ~Momentum() = default;
+
+  double getDefaultLearningRate() const override { return 0.001; }
+
+  /**
+   * @copydoc applyGradient(Weight &weight, int tensor_idx, double updated_lr,
+   * int iteration)
+   */
+  void applyGradient(nntrainer::RunOptimizerContext &context);
+
+  /**
+   * @copydoc Optimizer::getType()
+   */
+  const std::string getType() const { return Momentum::type; }
+
+  /**
+   * @copydoc Optimizer::getOptimizerVariableDim(const TensorDim &dim)
+   */
+  std::vector<ml::train::TensorDim>
+  getOptimizerVariableDim(const ml::train::TensorDim &dim) override;
+
+  /**
+   * @copydoc Optimizer::exportTo(Exporter &exporter, const
+   * ml::train::ExportMethods& method)
+   */
+  void exportTo(nntrainer::Exporter &exporter,
+                const ml::train::ExportMethods &method) const override;
+
+  /**
+   * @copydoc Optimizer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  inline static const std::string type = "momentum";
+
+private:
+  std::tuple<PropsM> momentum_props; /** momentum for grad */
+};
+} // namespace custom
+
+#endif /* __cplusplus */
+#endif /* __MOMENTUM_H__ */
diff --git a/debian/nntrainer-dev.install b/debian/nntrainer-dev.install
index 1d8dfb8..c2222d1 100644
--- a/debian/nntrainer-dev.install
+++ b/debian/nntrainer-dev.install
@@ -19,6 +19,9 @@
 /usr/include/nntrainer/layer_impl.h
 # custom layer kits
 /usr/include/nntrainer/app_context.h
+# logger
+/usr/include/nntrainer/nntrainer_log.h
+/usr/include/nntrainer/nntrainer_logger.h
 # optimizer headers
 /usr/include/nntrainer/optimizer_context.h
 /usr/include/nntrainer/optimizer_devel.h
diff --git a/nntrainer/meson.build b/nntrainer/meson.build
index 1ba20d3..068322c 100644
--- a/nntrainer/meson.build
+++ b/nntrainer/meson.build
@@ -13,6 +13,8 @@ nntrainer_inc_abs = [
 nntrainer_sources = []
 
 nntrainer_headers = [
+  meson.current_source_dir() / 'nntrainer_log.h',
+  meson.current_source_dir() / 'nntrainer_logger.h',
   meson.current_source_dir() / 'nntrainer_error.h',
   meson.current_source_dir() / 'app_context.h',
 ]
diff --git a/packaging/nntrainer.spec b/packaging/nntrainer.spec
index 9ea1216..0d9c008 100644
--- a/packaging/nntrainer.spec
+++ b/packaging/nntrainer.spec
@@ -532,6 +532,9 @@ cp -r result %{buildroot}%{_datadir}/nntrainer/unittest/
 %{_includedir}/nntrainer/optimizer_context.h
 %{_includedir}/nntrainer/optimizer_devel.h
 %{_includedir}/nntrainer/lr_scheduler.h
+# for logging
+%{_includedir}/nntrainer/nntrainer_log.h
+%{_includedir}/nntrainer/nntrainer_logger.h
 %{_libdir}/pkgconfig/nntrainer.pc
 # update this to enable external applications
 # @todo filter out headers that should be hidden, and classify in the appropriate place if not
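
For reference, a minimal usage sketch of the plugin added above (illustrative only, not part of the patch): it mirrors what optimizer_plugin_common_test.cpp does, registering the built libmomentum_optimizer.so through nntrainer::AppContext and then creating the optimizer by its "momentum" key. The file name usage_sketch.cpp and the reliance on the NNTRAINER_PATH environment variable are assumptions carried over from the tests.

// usage_sketch.cpp -- illustrative only; assumes libmomentum_optimizer.so is
// located in the directory pointed to by NNTRAINER_PATH, as in the tests.
#include <cstdlib>
#include <iostream>

#include <app_context.h>
#include <optimizer_devel.h>

int main() {
  const char *plugin_dir = std::getenv("NNTRAINER_PATH");
  if (plugin_dir == nullptr) {
    std::cerr << "NNTRAINER_PATH must point to the plugin directory\n";
    return 1;
  }

  nntrainer::AppContext ac;
  /// register the pluggable optimizer; same call the common test uses
  ac.registerOptimizer("libmomentum_optimizer.so", plugin_dir);

  /// the custom type is now available under its "momentum" key
  auto opt = ac.createObject<nntrainer::Optimizer>("momentum");
  opt->setProperty({"momentum=0.9"});

  std::cout << opt->getType() << std::endl; // prints "momentum"
  return 0;
}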