From: Pawel Wasowski <p.wasowski2@samsung.com>
Date: Tue, 31 Aug 2021 12:24:44 +0000 (+0200)
Subject: [ml] Add customRequirement to tizen.ml.checkNNFWAvailability
X-Git-Tag: submit/tizen/20210914.065046~3^2
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=e150cd4c1523e019f50673dc9a0e152f968f766c;p=platform%2Fcore%2Fapi%2Fwebapi-plugins.git
[ml] Add customRequirement to tizen.ml.checkNNFWAvailability
ACR: TWDAPI-282
[Verification] Tested on TM1 in Chrome DevTools with the snippets below
tizen.ml.checkNNFWAvailability("TENSORFLOW_LITE", "CPU") // true
tizen.ml.checkNNFWAvailability("TENSORFLOW_LITE", "CPU",
'test custom requirement'); // true
The native implementation is not ready and returns the same value
whether the customRequirement is passed or not. That's why
tizen.ml.checkNNFWAvailability also returns "true" twice above
Change-Id: I971a5f49d4ea389ad953c28fc08da21bafb59ee2
Signed-off-by: Pawel Wasowski <p.wasowski2@samsung.com>
---
diff --git a/src/ml/js/ml_manager.js b/src/ml/js/ml_manager.js
index 05dcef06..b094dfb1 100755
--- a/src/ml/js/ml_manager.js
+++ b/src/ml/js/ml_manager.js
@@ -75,11 +75,17 @@ MachineLearningManager.prototype.checkNNFWAvailability = function() {
type: types_.ENUM,
values: Object.values(HWType),
optional: false
+ },
+ {
+ name: 'customRequirement',
+ type: types_.STRING,
+ optional: true
}
]);
var callArgs = {
nnfw: args.nnfw,
- hw: args.hw
+ hw: args.hw,
+ customRequirement: args.customRequirement || null
};
var result = native_.callSync('MLCheckNNFWAvailability', callArgs);
diff --git a/src/ml/ml_instance.cc b/src/ml/ml_instance.cc
index 63ac32d5..ff312709 100644
--- a/src/ml/ml_instance.cc
+++ b/src/ml/ml_instance.cc
@@ -53,6 +53,7 @@ const std::string kLocation = "location";
const std::string kModelPath = "modelPath";
const std::string kName = "name";
const std::string kNnfw = "nnfw";
+const std::string kCustomRequirement = "customRequirement";
const std::string kNodeName = "nodeName";
const std::string kOpen = "open";
const std::string kOtherId = "otherId";
@@ -184,10 +185,15 @@ void MlInstance::MLCheckNNFWAvailability(const picojson::value& args, picojson::
ScopeLogger("args: %s", args.serialize().c_str());
CHECK_EXIST(args, kNnfw, out)
CHECK_EXIST(args, kHw, out)
+ CHECK_EXIST(args, kCustomRequirement, out)
std::string nnfw = args.get(kNnfw).get<std::string>();
std::string hw = args.get(kHw).get<std::string>();
- bool availability_val = util::CheckNNFWAvailability(nnfw, hw);
+  optional<std::string> customRequirement;
+  if (args.get(kCustomRequirement).is<std::string>()) {
+    customRequirement = args.get(kCustomRequirement).get<std::string>();
+  }
+ bool availability_val = util::CheckNNFWAvailability(nnfw, hw, customRequirement);
picojson::value available = picojson::value{availability_val};
ReportSuccess(available, out);
diff --git a/src/ml/ml_utils.cc b/src/ml/ml_utils.cc
index fe0f5a88..2cd17b7b 100644
--- a/src/ml/ml_utils.cc
+++ b/src/ml/ml_utils.cc
@@ -93,7 +93,8 @@ PlatformResult ToPlatformResult(int ml_error_code, const std::string& error_mess
}
}
-bool CheckNNFWAvailability(const std::string& nnfw, const std::string& hw) {
+bool CheckNNFWAvailability(const std::string& nnfw, const std::string& hw,
+                           optional<std::string> customRequirement) {
ScopeLogger();
ml_nnfw_type_e nnfw_e = ML_NNFW_TYPE_ANY;
ml_nnfw_hw_e hw_e = ML_NNFW_HW_ANY;
@@ -108,15 +109,16 @@ bool CheckNNFWAvailability(const std::string& nnfw, const std::string& hw) {
LoggerE("HWTypeEnum.getValue() failed, error: %s", result.message().c_str());
return false;
}
+ const char* customRequirementPtr = customRequirement ? customRequirement->c_str() : nullptr;
bool available = false;
- int ret = ml_check_nnfw_availability(nnfw_e, hw_e, &available);
+ int ret = ml_check_nnfw_availability_full(nnfw_e, hw_e, customRequirementPtr, &available);
if (ML_ERROR_NONE != ret) {
- LoggerE("ml_check_nnfw_availability failed: %d (%s)", ret, get_error_message(ret));
+ LoggerE("ml_check_nnfw_availability_full failed: %d (%s)", ret, get_error_message(ret));
return false;
}
- LoggerD("ml_check_nnfw_availability: %s", available ? "true" : "false");
+ LoggerD("ml_check_nnfw_availability_full: %s", available ? "true" : "false");
return available;
}
diff --git a/src/ml/ml_utils.h b/src/ml/ml_utils.h
index 34dbb635..e91b5aa2 100644
--- a/src/ml/ml_utils.h
+++ b/src/ml/ml_utils.h
@@ -19,6 +19,14 @@
#include <nnstreamer.h>
+#if __cplusplus > 201402L
+#include <optional>
+using std::optional;
+#else
+#include "common/optional.h"
+using common::optional;
+#endif
+
#include "common/picojson.h"
#include "common/platform_enum.h"
#include "common/platform_result.h"
@@ -42,7 +50,8 @@ namespace util {
PlatformResult ToPlatformResult(int ml_error_code, const std::string& error_message);
-bool CheckNNFWAvailability(const std::string& nnfw, const std::string& hw);
+bool CheckNNFWAvailability(const std::string& nnfw, const std::string& hw,
+                           const optional<std::string> customRequirement);
PlatformResult GetDimensionsFromJsonArray(const picojson::array& dim,
unsigned int dimensions[ML_TENSOR_RANK_LIMIT]);