[ML][Training] Layer name handling (Gerrit change 71/271471, patchset 4)
author: Marcin Kaminski <marcin.ka@partner.samsung.com>
Mon, 21 Feb 2022 19:54:43 +0000 (20:54 +0100)
committer: Marcin Kaminski <marcin.ka@partner.samsung.com>
Wed, 23 Feb 2022 18:19:37 +0000 (19:19 +0100)
Changes:
- proper handling of the Layer.name attribute at the JS level,
  with the name stored at the C++ level

Change-Id: I68782fd386bba3b709c481b9a5ac69f04c7287e0

src/ml/js/ml_trainer.js
src/ml/ml_instance.cc
src/ml/ml_instance.h
src/ml/ml_trainer_manager.cc
src/ml/ml_trainer_manager.h
src/ml/ml_trainer_objects.h

index 1a69a1285b802d1691a11fd7b38c35bb080059ce..81e8d8372056c04dce062ce800cbde5f1614a467 100755 (executable)
@@ -66,12 +66,27 @@ var DatasetMode = {
     MODE_TEST: 'MODE_TEST'
 };
 
+var ValidGetNameExceptions = [
+    'InvalidValuesError',
+    'AbortError'
+];
+
 var Layer = function(id, type) {
     Object.defineProperties(this, {
         name: {
             enumerable: true,
             get: function() {
-                // TODO
+                var result = native_.callSync('MLTrainerLayerGetName', { id: this._id });
+
+                if (native_.isFailure(result)) {
+                    throw native_.getErrorObjectAndValidate(
+                        result,
+                        ValidGetNameExceptions,
+                        AbortError
+                    );
+                }
+
+                return result.name
             }
         },
         type: {
@@ -632,7 +647,11 @@ MachineLearningTrainer.prototype.createLayer = function() {
         );
     }
 
-    return new Layer(result.id, args.type);
+    var nLay = new Layer(result.id, args.type);
+
+    nLay.setProperty("name", args.type.toString() + result.id.toString());
+
+    return nLay;
 };
 
 function ValidateAndReturnDatasetPaths(train, valid, test) {
index a59ffc84902eb0520cb282e3b89ca24ec0891552..74fcc6fc917627bbfa47a5bd8cf0cebaf51f9f9d 100644 (file)
@@ -184,6 +184,7 @@ MlInstance::MlInstance()
 
   REGISTER_METHOD(MLTrainerLayerSetProperty);
   REGISTER_METHOD(MLTrainerLayerCreate);
+  REGISTER_METHOD(MLTrainerLayerGetName);
   REGISTER_METHOD(MLTrainerLayerDispose);
   REGISTER_METHOD(MLTrainerOptimizerSetProperty);
   REGISTER_METHOD(MLTrainerOptimizerCreate);
@@ -1759,6 +1760,24 @@ void MlInstance::MLTrainerLayerCreate(const picojson::value& args, picojson::obj
   ReportSuccess(out);
 }
 
+void MlInstance::MLTrainerLayerGetName(const picojson::value& args,
+                                       picojson::object& out) {
+  ScopeLogger("args: %s", args.serialize().c_str());
+  CHECK_ARGS(args, kId, double, out);
+
+  auto id = static_cast<int>(args.get(kId).get<double>());
+
+  std::string name;
+  PlatformResult result = trainer_manager_.LayerGetName(id, name);
+  if (!result) {
+    ReportError(result, &out);
+    return;
+  }
+
+  out[kName] = picojson::value(static_cast<std::string>(name));
+  ReportSuccess(out);
+}
+
 void MlInstance::MLTrainerLayerDispose(const picojson::value& args,
                                        picojson::object& out) {
   ScopeLogger("args: %s", args.serialize().c_str());
index 5e3095da69073eb170ef4c1ec32a1e6db8c263f7..fbf50789de9353dc55705c7e2f1a4154ee4319cd 100644 (file)
@@ -153,6 +153,8 @@ class MlInstance : public common::ParsedInstance {
 
   void MLTrainerLayerSetProperty(const picojson::value& args, picojson::object& out);
   void MLTrainerLayerCreate(const picojson::value& args, picojson::object& out);
+  void MLTrainerLayerGetName(const picojson::value& args,
+                             picojson::object& out);
   void MLTrainerLayerDispose(const picojson::value& args,
                              picojson::object& out);
 
index 307035850f9debcd310174e40da85454b25230b9..86d852b2e3a4a21b1115eacc4fbbcd1230a96205 100644 (file)
@@ -541,8 +541,7 @@ PlatformResult TrainerManager::CreateLayer(int& id,
     return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
   }
 
-  layers_[next_layer_id_] =
-      std::make_shared<NativeWrapper<ml_train_layer_h>>(n_layer);
+  layers_[next_layer_id_] = std::make_shared<LayerWrapper>(n_layer);
   id = next_layer_id_++;
 
   return PlatformResult();
@@ -569,6 +568,25 @@ PlatformResult TrainerManager::LayerSetProperty(int id,
             ml_strerror(ret_val));
     return PlatformResult(ErrorCode::ABORT_ERR, ml_strerror(ret_val));
   }
+
+  if (name == "name") {
+    LoggerI("Layer name set detected - changing to: %s", value.c_str());
+    layer->setName(value);
+  }
+  return PlatformResult();
+}
+
+PlatformResult TrainerManager::LayerGetName(int id, std::string& name) {
+  ScopeLogger();
+
+  if (layers_.find(id) == layers_.end()) {
+    LoggerE("Could not find layer with id: %d", id);
+    return PlatformResult(ErrorCode::NOT_FOUND_ERR, "Could not find layer");
+  }
+
+  auto layer = layers_[id];
+
+  name = layer->getName();
   return PlatformResult();
 }
 
index fd5feb38f98fc21df0ebb7505e1da2ffb40053e2..009715babe1461b7c65163b940a9c0d0bea50740 100644 (file)
@@ -57,6 +57,7 @@ class TrainerManager {
   PlatformResult CreateLayer(int& id, ml_train_layer_type_e type);
   PlatformResult LayerSetProperty(int id, const std::string& name,
                                   const std::string& value);
+  PlatformResult LayerGetName(int id, std::string& name);
   PlatformResult LayerDispose(int id);
 
   PlatformResult CreateOptimizer(int& id, ml_train_optimizer_type_e type);
@@ -80,7 +81,7 @@ class TrainerManager {
 
   std::map<int, std::shared_ptr<Model>> models_;
   std::map<int, std::shared_ptr<NativeWrapper<ml_train_optimizer_h>>> optimizers_;
-  std::map<int, std::shared_ptr<NativeWrapper<ml_train_layer_h>>> layers_;
+  std::map<int, std::shared_ptr<LayerWrapper>> layers_;
   std::map<int, std::shared_ptr<NativeWrapper<ml_train_dataset_h>>> datasets_;
 
   // mutex for thread synchronization is needed only for model as only
index 98be4401cb0bfe956b29e9e443a756ee8224d526..b8c32d0f1e0aec6bd15c142b91d1fb06292ed6da 100644 (file)
@@ -24,6 +24,7 @@ then the C library explodes.
 **/
 
 #include <mutex>
+#include <string>
 #include <vector>
 
 #include <nntrainer/nntrainer.h>
@@ -73,6 +74,8 @@ class NativeWrapper {
 
  public:
   NativeWrapper(T nativeHandle) : NativeWrapper() { native = nativeHandle; }
+  virtual ~NativeWrapper() = default;
+
   T& operator=(const T&) = delete;
 
   T getNative() { return native; }
@@ -82,6 +85,18 @@ class NativeWrapper {
   bool isAttached() { return attached; }
 };
 
+class LayerWrapper : public NativeWrapper<ml_train_layer_h> {
+  std::string name;
+
+ public:
+  LayerWrapper(ml_train_layer_h nativeHandle) : NativeWrapper(nativeHandle) {}
+  virtual ~LayerWrapper() = default;
+
+  void setName(const std::string& newName) { name = newName; }
+
+  std::string getName() { return name; }
+};
+
 }  // namespace ml
 }  // namespace extension