set user-given number of threads 82/316182/3
authorInki Dae <inki.dae@samsung.com>
Wed, 11 Dec 2024 09:10:24 +0000 (18:10 +0900)
committerInki Dae <inki.dae@samsung.com>
Mon, 23 Dec 2024 00:04:55 +0000 (09:04 +0900)
[Version] : 0.0.9
[Issue type] : new feature

Set the user-given number of threads via the SetPrivateData() function.

Change-Id: If55014021823fababf561285836fb7f0934b3abd
Signed-off-by: Inki Dae <inki.dae@samsung.com>
packaging/inference-engine-tflite.spec
src/inference_engine_tflite.cpp
src/inference_engine_tflite_private.h

index bc13623f0c8ed1d3ed32d09761d54aa88051b83a..63ba25941ba10c6f4e56b68394cec22d4a41b9e0 100644 (file)
@@ -1,6 +1,6 @@
 Name:       inference-engine-tflite
 Summary:    Tensorflow-Lite based implementation of inference-engine-interface
-Version:    0.0.8
+Version:    0.0.9
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
index 81cdbdbb2ff04d82f01a0d8dc9ee5176dc148d29..74dc0c375a77185f6a91e4fc6c34f018211fa640 100644 (file)
@@ -44,7 +44,18 @@ namespace TFLiteImpl
 
        int InferenceTFLite::SetPrivateData(void *data)
        {
-               // Nothing to do yet.
+               if (!data) {
+                       LOGE("private data is nullptr. It should not be nullptr.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               auto private_data = static_cast<inference_engine_private_data *>(data);
+
+               // Just skip if it's not number of threads.
+               if (private_data->data_type != INFERENCE_ENGINE_PRIVATE_TYPE_NUM_OF_THREADS)
+                       return INFERENCE_ENGINE_ERROR_NONE;
+
+               _number_of_threads = private_data->int_value;
 
                return INFERENCE_ENGINE_ERROR_NONE;
        }
@@ -131,8 +142,14 @@ namespace TFLiteImpl
                        }
                }
 
-               const char *envNumThreads = std::getenv("TFLITE_NUM_THREADS");
-               int numThreads = envNumThreads ? std::atoi(envNumThreads) : MV_INFERENCE_TFLITE_MAX_THREAD_NUM;
+               int numThreads = MV_INFERENCE_TFLITE_MAX_THREAD_NUM;
+
+               if (_number_of_threads > 0) {
+                       numThreads = _number_of_threads;
+               } else {
+                       const char *envNumThreads = std::getenv("TFLITE_NUM_THREADS");
+                       numThreads = envNumThreads ? std::atoi(envNumThreads) : MV_INFERENCE_TFLITE_MAX_THREAD_NUM;
+               }
 
                LOGD("numThreads : [%d]", numThreads);
 
index 12e2bf3a57131dbffce722b50c96a38b3b5d5c7f..547e7b59aed3de12165166923797a8f1cb3d876d 100644 (file)
@@ -106,6 +106,7 @@ namespace TFLiteImpl
 
                TfLiteDelegate *mDelegate {};
                bool mIsDynamicTensorMode {};
+               int32_t _number_of_threads {};
        };
 
 } /* InferenceEngineImpl */