From: Marcin Kaminski Date: Mon, 21 Feb 2022 19:54:43 +0000 (+0100) Subject: [ML][Training] Layer name handling X-Git-Tag: submit/tizen/20220302.124449~3^2 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=35dffc31872cc5caec0f87620cae4f382ed6127e;p=platform%2Fcore%2Fapi%2Fwebapi-plugins.git [ML][Training] Layer name handling Changes: - proper handling of Layer.name attribute at JS level with name storing at C++ level Change-Id: I68782fd386bba3b709c481b9a5ac69f04c7287e0 --- diff --git a/src/ml/js/ml_trainer.js b/src/ml/js/ml_trainer.js index 1a69a128..81e8d837 100755 --- a/src/ml/js/ml_trainer.js +++ b/src/ml/js/ml_trainer.js @@ -66,12 +66,27 @@ var DatasetMode = { MODE_TEST: 'MODE_TEST' }; +var ValidGetNameExceptions = [ + 'InvalidValuesError', + 'AbortError' +]; + var Layer = function(id, type) { Object.defineProperties(this, { name: { enumerable: true, get: function() { - // TODO + var result = native_.callSync('MLTrainerLayerGetName', { id: this._id }); + + if (native_.isFailure(result)) { + throw native_.getErrorObjectAndValidate( + result, + ValidGetNameExceptions, + AbortError + ); + } + + return result.name } }, type: { @@ -632,7 +647,11 @@ MachineLearningTrainer.prototype.createLayer = function() { ); } - return new Layer(result.id, args.type); + var nLay = new Layer(result.id, args.type); + + nLay.setProperty("name", args.type.toString() + result.id.toString()); + + return nLay; }; function ValidateAndReturnDatasetPaths(train, valid, test) { diff --git a/src/ml/ml_instance.cc b/src/ml/ml_instance.cc index a59ffc84..74fcc6fc 100644 --- a/src/ml/ml_instance.cc +++ b/src/ml/ml_instance.cc @@ -184,6 +184,7 @@ MlInstance::MlInstance() REGISTER_METHOD(MLTrainerLayerSetProperty); REGISTER_METHOD(MLTrainerLayerCreate); + REGISTER_METHOD(MLTrainerLayerGetName); REGISTER_METHOD(MLTrainerLayerDispose); REGISTER_METHOD(MLTrainerOptimizerSetProperty); REGISTER_METHOD(MLTrainerOptimizerCreate); @@ -1759,6 +1760,24 @@ void 
MlInstance::MLTrainerLayerCreate(const picojson::value& args, picojson::obj
   ReportSuccess(out);
 }
 
+void MlInstance::MLTrainerLayerGetName(const picojson::value& args,
+                                       picojson::object& out) {
+  ScopeLogger("args: %s", args.serialize().c_str());
+  CHECK_ARGS(args, kId, double, out);
+
+  auto id = static_cast<int>(args.get(kId).get<double>());
+
+  std::string name;
+  PlatformResult result = trainer_manager_.LayerGetName(id, name);
+  if (!result) {
+    ReportError(result, &out);
+    return;
+  }
+
+  out[kName] = picojson::value(static_cast<std::string>(name));
+  ReportSuccess(out);
+}
+
 void MlInstance::MLTrainerLayerDispose(const picojson::value& args,
                                        picojson::object& out) {
   ScopeLogger("args: %s", args.serialize().c_str());
diff --git a/src/ml/ml_instance.h b/src/ml/ml_instance.h
index 5e3095da..fbf50789 100644
--- a/src/ml/ml_instance.h
+++ b/src/ml/ml_instance.h
@@ -153,6 +153,8 @@ class MlInstance : public common::ParsedInstance {
   void MLTrainerLayerSetProperty(const picojson::value& args, picojson::object& out);
   void MLTrainerLayerCreate(const picojson::value& args, picojson::object& out);
+  void MLTrainerLayerGetName(const picojson::value& args,
+                             picojson::object& out);
   void MLTrainerLayerDispose(const picojson::value& args, picojson::object& out);
diff --git a/src/ml/ml_trainer_manager.cc b/src/ml/ml_trainer_manager.cc
index 30703585..86d852b2 100644
--- a/src/ml/ml_trainer_manager.cc
+++ b/src/ml/ml_trainer_manager.cc
@@ -541,8 +541,7 @@ PlatformResult TrainerManager::CreateLayer(int& id,
     return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
   }
 
-  layers_[next_layer_id_] =
-      std::make_shared<NativeWrapper<ml_train_layer_h>>(n_layer);
+  layers_[next_layer_id_] = std::make_shared<LayerWrapper>(n_layer);
 
   id = next_layer_id_++;
   return PlatformResult();
@@ -569,6 +568,25 @@ PlatformResult TrainerManager::LayerSetProperty(int id,
                                     ml_strerror(ret_val));
     return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
   }
+
+  if (name == "name") {
+    LoggerI("Layer name set detected - changing to: %s", value.c_str());
+
layer->setName(value);
+  }
+  return PlatformResult();
+}
+
+PlatformResult TrainerManager::LayerGetName(int id, std::string& name) {
+  ScopeLogger();
+
+  if (layers_.find(id) == layers_.end()) {
+    LoggerE("Could not find layer with id: %d", id);
+    return PlatformResult(ErrorCode::NOT_FOUND_ERR, "Could not find layer");
+  }
+
+  auto layer = layers_[id];
+
+  name = layer->getName();
   return PlatformResult();
 }
 
diff --git a/src/ml/ml_trainer_manager.h b/src/ml/ml_trainer_manager.h
index fd5feb38..009715ba 100644
--- a/src/ml/ml_trainer_manager.h
+++ b/src/ml/ml_trainer_manager.h
@@ -57,6 +57,7 @@ class TrainerManager {
   PlatformResult CreateLayer(int& id, ml_train_layer_type_e type);
   PlatformResult LayerSetProperty(int id, const std::string& name,
                                   const std::string& value);
+  PlatformResult LayerGetName(int id, std::string& name);
   PlatformResult LayerDispose(int id);
 
   PlatformResult CreateOptimizer(int& id, ml_train_optimizer_type_e type);
@@ -80,7 +81,7 @@ class TrainerManager {
   std::map<int, std::shared_ptr<Model>> models_;
   std::map<int, std::shared_ptr<NativeWrapper<ml_train_optimizer_h>>> optimizers_;
-  std::map<int, std::shared_ptr<NativeWrapper<ml_train_layer_h>>> layers_;
+  std::map<int, std::shared_ptr<LayerWrapper>> layers_;
   std::map<int, std::shared_ptr<NativeWrapper<ml_train_dataset_h>>> datasets_;
 
   // mutex for thread synchronization is needed only for model as only
diff --git a/src/ml/ml_trainer_objects.h b/src/ml/ml_trainer_objects.h
index 98be4401..b8c32d0f 100644
--- a/src/ml/ml_trainer_objects.h
+++ b/src/ml/ml_trainer_objects.h
@@ -24,6 +24,7 @@ then the C library explodes.
**/
 
 #include
+#include <string>
 #include
 #include
 
@@ -73,6 +74,8 @@ class NativeWrapper {
  public:
   NativeWrapper(T nativeHandle) : NativeWrapper() { native = nativeHandle; }
 
+  virtual ~NativeWrapper() = default;
+
   T& operator=(const T&) = delete;
 
   T getNative() { return native; }
@@ -82,6 +85,18 @@ class NativeWrapper {
   bool isAttached() { return attached; }
 };
 
+class LayerWrapper : public NativeWrapper<ml_train_layer_h> {
+  std::string name;
+
+ public:
+  LayerWrapper(ml_train_layer_h nativeHandle) : NativeWrapper(nativeHandle) {}
+  virtual ~LayerWrapper() = default;
+
+  void setName(const std::string& newName) { name = newName; }
+
+  std::string getName() { return name; }
+};
+
 }  // namespace ml
 }  // namespace extension