From d5c72161d823d138200dfbeb3a13ec48a152af5a Mon Sep 17 00:00:00 2001
From: Hyunsoo Park
Date: Fri, 17 Dec 2021 16:49:05 +0900
Subject: [PATCH] tools: Modify random matrix function for supporting UINT8
 DTYPE

[Version] 0.2.1-0
[Issue Type] Update feature

Change-Id: I4d404925d317558eb6b4c8170e1cc6558a76fc7c
Signed-off-by: Hyunsoo Park
---
 packaging/inference-engine-interface.spec |  2 +-
 tools/src/inference_engine_cltuner.cpp    | 22 ++++++++++++++++++----
 2 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/packaging/inference-engine-interface.spec b/packaging/inference-engine-interface.spec
index 2777a33..0fd9281 100644
--- a/packaging/inference-engine-interface.spec
+++ b/packaging/inference-engine-interface.spec
@@ -1,6 +1,6 @@
 Name:        inference-engine-interface
 Summary:     Interface of inference engines
-Version:     0.2.0
+Version:     0.2.1
 Release:     0
 Group:       Multimedia/Framework
 License:     Apache-2.0
diff --git a/tools/src/inference_engine_cltuner.cpp b/tools/src/inference_engine_cltuner.cpp
index 475a676..277dd77 100644
--- a/tools/src/inference_engine_cltuner.cpp
+++ b/tools/src/inference_engine_cltuner.cpp
@@ -72,7 +72,7 @@ int ConfigureInputInfo(InferenceEngineCommon* backend, Metadata& metadata,
 		tensorConfig.mTensorInfo.height = layerInfo.dims[1];
 	} else {
 		LOGE("Invalid shape type[%d]", layerInfo.shapeType);
-		return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+		return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
 	}
 
 	if (!inputMeta.option.empty()) {
@@ -294,8 +294,14 @@ int CheckTuneFile(std::vector<std::string>& model_paths)
 	return INFERENCE_ENGINE_ERROR_NONE;
 }
 
-void CopyRandomMatrixToMemory(inference_engine_tensor_buffer& buffer, InferenceConfig tensorConfig)
+int CopyRandomMatrixToMemory(inference_engine_tensor_buffer& buffer, InferenceConfig tensorConfig)
 {
+	if (tensorConfig.mDataType <= INFERENCE_TENSOR_DATA_TYPE_NONE ||
+		tensorConfig.mDataType >= INFERENCE_TENSOR_DATA_TYPE_MAX) {
+		LOGE("tensorConfig.mDataType [%d] is not supported", tensorConfig.mDataType);
+		return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+	}
+
 	std::random_device rd;
 	std::mt19937 generator(rd());
 	std::uniform_real_distribution<> distribution(1.0, 255.0);
@@ -307,8 +313,12 @@ void CopyRandomMatrixToMemory(inference_engine_tensor_buffer& buffer, InferenceC
 		for (int w_offset = 0; w_offset < width; w_offset++)
 			for (int ch_offset = 0; ch_offset < ch; ch_offset++) {
 				int offset = h_offset * width * ch + w_offset * ch + ch_offset;
-				static_cast<float*>(buffer.buffer)[offset] = distribution(generator);
+				if (tensorConfig.mDataType == INFERENCE_TENSOR_DATA_TYPE_FLOAT32)
+					static_cast<float*>(buffer.buffer)[offset] = distribution(generator);
+				else
+					static_cast<unsigned char*>(buffer.buffer)[offset] = distribution(generator);
 			}
+	return INFERENCE_ENGINE_ERROR_NONE;
 }
 
 static gboolean process(std::vector<std::string>& model_paths,
@@ -407,7 +417,11 @@ static gboolean process(std::vector<std::string>& model_paths,
 
 	for (auto& input : inputs) {
 		LOGI("input.second.size :[%zu]", input.second.size);
-		CopyRandomMatrixToMemory(input.second, tensorConfig);
+		ret = CopyRandomMatrixToMemory(input.second, tensorConfig);
+		if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+			LOGE("CopyRandomMatrixToMemory failed");
+			return FALSE;
+		}
 	}
 
 	std::chrono::system_clock::time_point StartTime =
-- 
2.34.1
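
For readers who want to exercise the new behavior outside the tool, below is a minimal standalone sketch of the dtype-dispatched random fill that CopyRandomMatrixToMemory performs after this patch. It is not part of the patch: FillRandom(), the DataType enum, and the element count are hypothetical stand-ins for inference_engine_tensor_buffer, inference_engine_tensor_data_type_e, and the tensor shape handling in inference_engine_cltuner.cpp.

// Standalone sketch (hypothetical stand-ins for the tool's types) of the
// dtype-dispatched random fill introduced by this patch.
#include <cstddef>
#include <iostream>
#include <random>
#include <vector>

// Hypothetical stand-in for inference_engine_tensor_data_type_e.
enum DataType { DATA_TYPE_NONE, DATA_TYPE_FLOAT32, DATA_TYPE_UINT8, DATA_TYPE_MAX };

// Fill 'buffer' (raw tensor memory) with random values, casting per data type.
// Returns false for an unsupported data type, mirroring the new validity check.
bool FillRandom(void* buffer, size_t count, DataType type)
{
	if (type <= DATA_TYPE_NONE || type >= DATA_TYPE_MAX)
		return false;

	std::random_device rd;
	std::mt19937 generator(rd());
	std::uniform_real_distribution<> distribution(1.0, 255.0);

	for (size_t i = 0; i < count; ++i) {
		if (type == DATA_TYPE_FLOAT32)
			static_cast<float*>(buffer)[i] = distribution(generator);
		else
			// UINT8 path: the double from the distribution narrows to [1, 254].
			static_cast<unsigned char*>(buffer)[i] = distribution(generator);
	}

	return true;
}

int main()
{
	std::vector<unsigned char> u8(224 * 224 * 3);
	std::vector<float> f32(224 * 224 * 3);

	FillRandom(u8.data(), u8.size(), DATA_TYPE_UINT8);
	FillRandom(f32.data(), f32.size(), DATA_TYPE_FLOAT32);

	std::cout << static_cast<int>(u8[0]) << " " << f32[0] << std::endl;
	return 0;
}

As in the patch, a single uniform_real_distribution over 1.0-255.0 feeds both branches; the UINT8 branch simply relies on the implicit narrowing conversion, which is sufficient for generating non-zero dummy input data for CL tuning.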