[ML][Train] Integrated createModel methods and fixes few issues 96/272796/1 accepted/tizen/unified/20220325.133310 submit/tizen/20220325.070959
authorPiotr Kosko/Tizen API (PLT) /SRPOL/Engineer/Samsung Electronics <p.kosko@samsung.com>
Thu, 24 Mar 2022 12:11:15 +0000 (13:11 +0100)
committerPiotr Kosko/Tizen API (PLT) /SRPOL/Engineer/Samsung Electronics <p.kosko@samsung.com>
Thu, 24 Mar 2022 12:11:15 +0000 (13:11 +0100)
[ACR] https://code.sec.samsung.net/jira/browse/TWDAPI-285

[Verification] Code compiles.
Checked in chrome console. New enum values work and the createModel method
accepts configPath as a parameter.

Change-Id: I41aceb2aca5cf345628f5e6c083e0c57e99f0a03

src/ml/js/ml_trainer.js
src/ml/ml_utils.cc

index e31a241..8087681 100755 (executable)
@@ -29,7 +29,7 @@ var DatasetType = {
 };
 
 var LayerType = {
-    LAYER_IN: 'LAYER_IN',
+    LAYER_IN: 'LAYER_INPUT',
     LAYER_FC: 'LAYER_FC',
     LAYER_BN: 'LAYER_BN',
     LAYER_CONV2D: 'LAYER_CONV2D',
@@ -39,12 +39,21 @@ var LayerType = {
     LAYER_ADDITION: 'LAYER_ADDITION',
     LAYER_CONCAT: 'LAYER_CONCAT',
     LAYER_MULTIOUT: 'LAYER_MULTIOUT',
+    LAYER_EMBEDDING: 'LAYER_EMBEDDING',
+    LAYER_RNN: 'LAYER_RNN',
+    LAYER_LSTM: 'LAYER_LSTM',
+    LAYER_SPLIT: 'LAYER_SPLIT',
+    LAYER_GRU: 'LAYER_GRU',
+    LAYER_PERMUTE: 'LAYER_PERMUTE',
+    LAYER_DROPOUT: 'LAYER_DROPOUT',
+    LAYER_BACKBONE_NNSTREAMER: 'LAYER_BACKBONE_NNSTREAMER',
+    LAYER_CENTROID_KNN: 'LAYER_CENTROID_KNN',
+    LAYER_PREPROCESS_FLIP: 'LAYER_PREPROCESS_FLIP',
+    LAYER_PREPROCESS_TRANSLATE: 'LAYER_PREPROCESS_TRANSLATE',
+    LAYER_PREPROCESS_L2NORM: 'LAYER_PREPROCESS_L2NORM',
     LAYER_LOSS_MSE: 'LAYER_LOSS_MSE',
     LAYER_LOSS_CROSS_ENTROPY_SIGMOID: 'LAYER_LOSS_CROSS_ENTROPY_SIGMOID',
     LAYER_LOSS_CROSS_ENTROPY_SOFTMAX: 'LAYER_LOSS_CROSS_ENTROPY_SOFTMAX',
-    LAYER_BACKBONE_NNSTREAMER: 'LAYER_BACKBONE_NNSTREAMER',
-    LAYER_EMBEDDING: 'LAYER_EMBEDDING',
-    LAYER_RNN: 'LAYER_RNN',
     LAYER_UNKNOWN: 'LAYER_UNKNOWN'
 };
 
@@ -799,13 +808,12 @@ MachineLearningTrainer.prototype.createOptimizer = function() {
 };
 
 var ValidCreateModelWithConfigurationExceptions = [
-    'InvalidValuesError',
     'NotFoundError',
     'SecurityError',
     'AbortError'
 ];
 
-MachineLearningTrainer.prototype.createModelWithConfiguration = function() {
+MachineLearningTrainer.prototype.createModel = function () {
     var args = validator_.validateArgs(arguments, [
         {
             name: 'configPath',
@@ -813,16 +821,17 @@ MachineLearningTrainer.prototype.createModelWithConfiguration = function() {
             optional: true
         }
     ]);
+    var nativeArgs = {
+        // empty by default
+    };
     if (args.has.configPath) {
         try {
-            args.configPath = tizen.filesystem.toURI(args.configPath);
+            // if path seems valid pass it to native layer
+            nativeArgs.configPath = tizen.filesystem.toURI(args.configPath);
         } catch (e) {
             throw new WebAPIException(WebAPIException.NOT_FOUND_ERR, 'Path is invalid');
         }
     }
-    var nativeArgs = {
-        configPath: args.configPath
-    };
 
     var result = native_.callSync('MLTrainerModelCreate', nativeArgs);
     if (native_.isFailure(result)) {
@@ -835,12 +844,3 @@ MachineLearningTrainer.prototype.createModelWithConfiguration = function() {
 
     return new Model(result.id);
 };
-
-MachineLearningTrainer.prototype.createModel = function() {
-    var result = native_.callSync('MLTrainerModelCreate', {});
-    if (native_.isFailure(result)) {
-        throw new WebAPIException(WebAPIException.AbortError, 'Could not create model');
-    }
-
-    return new Model(result.id);
-};
index 7d7a611..ab437c2 100644 (file)
@@ -71,7 +71,7 @@ const PlatformEnum<ml_train_optimizer_type_e> OptimizerTypeEnum{
     {"OPTIMIZER_UNKNOWN", ML_TRAIN_OPTIMIZER_TYPE_UNKNOWN}};
 
 const PlatformEnum<ml_train_layer_type_e> LayerTypeEnum{
-    {"LAYER_IN", ML_TRAIN_LAYER_TYPE_INPUT},
+    {"LAYER_INPUT", ML_TRAIN_LAYER_TYPE_INPUT},
     {"LAYER_FC", ML_TRAIN_LAYER_TYPE_FC},
     {"LAYER_BN", ML_TRAIN_LAYER_TYPE_BN},
     {"LAYER_CONV2D", ML_TRAIN_LAYER_TYPE_CONV2D},
@@ -83,12 +83,21 @@ const PlatformEnum<ml_train_layer_type_e> LayerTypeEnum{
     {"LAYER_MULTIOUT", ML_TRAIN_LAYER_TYPE_MULTIOUT},
     {"LAYER_EMBEDDING", ML_TRAIN_LAYER_TYPE_EMBEDDING},
     {"LAYER_RNN", ML_TRAIN_LAYER_TYPE_RNN},
+    {"LAYER_LSTM", ML_TRAIN_LAYER_TYPE_LSTM},
+    {"LAYER_SPLIT", ML_TRAIN_LAYER_TYPE_SPLIT},
+    {"LAYER_GRU", ML_TRAIN_LAYER_TYPE_GRU},
+    {"LAYER_PERMUTE", ML_TRAIN_LAYER_TYPE_PERMUTE},
+    {"LAYER_DROPOUT", ML_TRAIN_LAYER_TYPE_DROPOUT},
+    {"LAYER_BACKBONE_NNSTREAMER", ML_TRAIN_LAYER_TYPE_BACKBONE_NNSTREAMER},
+    {"LAYER_CENTROID_KNN", ML_TRAIN_LAYER_TYPE_CENTROID_KNN},
+    {"LAYER_PREPROCESS_FLIP", ML_TRAIN_LAYER_TYPE_PREPROCESS_FLIP},
+    {"LAYER_PREPROCESS_TRANSLATE", ML_TRAIN_LAYER_TYPE_PREPROCESS_TRANSLATE},
+    {"LAYER_PREPROCESS_L2NORM", ML_TRAIN_LAYER_TYPE_PREPROCESS_L2NORM},
     {"LAYER_LOSS_MSE", ML_TRAIN_LAYER_TYPE_LOSS_MSE},
     {"LAYER_LOSS_CROSS_ENTROPY_SIGMOID",
      ML_TRAIN_LAYER_TYPE_LOSS_CROSS_ENTROPY_SIGMOID},
     {"LAYER_LOSS_CROSS_ENTROPY_SOFTMAX",
      ML_TRAIN_LAYER_TYPE_LOSS_CROSS_ENTROPY_SOFTMAX},
-    {"LAYER_BACKBONE_NNSTREAMER", ML_TRAIN_LAYER_TYPE_BACKBONE_NNSTREAMER},
     {"LAYER_UNKNOWN", ML_TRAIN_LAYER_TYPE_UNKNOWN}};
 
 const PlatformEnum<ml_train_summary_type_e> SummaryTypeEnum{