Add GetFileCustomProp
author    Kwanghoon Son <k.son@samsung.com>
Wed, 17 Aug 2022 04:31:56 +0000 (00:31 -0400)
committer Kwanghoon Son <k.son@samsung.com>
Wed, 17 Aug 2022 07:04:05 +0000 (03:04 -0400)
[Version] : 0.4.6
[Issue type] : new feature

Get the custom property string from the file {model_name}.custom, i.e. the
model file path with its extension replaced by ".custom".
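
For illustration only (not part of the patch), a minimal standalone sketch of
the lookup added here, using a hypothetical model path; the actual
implementation is InferenceMLAPI::GetFileCustomProp() in the diff below:

    // Sketch: derive the sibling ".custom" file from a model path and read it.
    // The model path below is a hypothetical example.
    #include <fstream>
    #include <iostream>
    #include <sstream>
    #include <string>

    int main()
    {
        std::string model = "/usr/share/models/movenet.tflite";
        // Strip the extension and append ".custom":
        // -> "/usr/share/models/movenet.custom"
        std::string custom_file =
                model.substr(0, model.find_last_of(".")) + ".custom";

        std::ifstream fp(custom_file);
        if (fp) {
            std::stringstream buffer;
            buffer << fp.rdbuf();
            // In the patch, this string is appended to the custom property
            // passed to ml_single_open_full().
            std::cout << buffer.str() << std::endl;
        }
        return 0;
    }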

Change-Id: I7305018ae35a03d3680c6dd8b98e128d3559c497
Signed-off-by: Kwanghoon Son <k.son@samsung.com>
packaging/inference-engine-mlapi.spec
src/inference_engine_mlapi.cpp
src/inference_engine_mlapi_private.h

diff --git a/packaging/inference-engine-mlapi.spec b/packaging/inference-engine-mlapi.spec
index d37f2c9..a1cd8c1 100644
@@ -1,6 +1,6 @@
 Name:       inference-engine-mlapi
 Summary:    ML Single API backend of NNStreamer for MediaVision
-Version:    0.4.5
+Version:    0.4.6
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp
index 46fa41b..f378bd1 100644
@@ -19,6 +19,7 @@
 #include "inference_engine_mlapi_private.h"
 
 #include <fstream>
+#include <sstream>
 #include <iostream>
 #include <unistd.h>
 #include <time.h>
@@ -292,7 +293,19 @@ namespace MLAPIImpl
                        throw std::runtime_error("shouldn't be reach here");
                }
        }
+       std::string InferenceMLAPI::GetFileCustomProp(std::string &path)
+       {
+               std::string custom;
+               std::string custom_file = path.substr(0, path.find_last_of(".")) + ".custom";
+               if (IsFileReadable(custom_file)) {
+                       std::ifstream fp(custom_file);
+                       std::stringstream buffer;
+                       buffer << fp.rdbuf();
+                       custom = buffer.str();
+               }
 
+               return custom;
+       }
        std::string InferenceMLAPI::GetCustomProp()
        {
                if (mPluginType != INFERENCE_BACKEND_SNPE)
@@ -365,6 +378,7 @@ namespace MLAPIImpl
                        }
                }
                auto customOp = GetCustomProp();
+               customOp += GetFileCustomProp(model_str);
                LOGI("customOp: %s", customOp.c_str());
 
                int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
diff --git a/src/inference_engine_mlapi_private.h b/src/inference_engine_mlapi_private.h
index 07d57ef..0bbfbbf 100644
@@ -91,6 +91,7 @@ namespace MLAPIImpl
                std::tuple<ml_nnfw_type_e, ml_nnfw_hw_e> GetNNFWInfo();
                std::string GetModelPath(const std::vector<std::string>& model_paths);
                std::string GetCustomProp();
+               std::string GetFileCustomProp(std::string& path);
                int GetTensorInfo(std::map<std::string, int>& designated_layers,
                                                  std::map<std::string, inference_engine_tensor_buffer> &buffers,
                                                  ml_tensors_data_h& dataHandle, ml_tensors_info_h& infoHandle);