mv_machine_learning: code refactoring to AsyncManager
author Inki Dae <inki.dae@samsung.com>
Tue, 12 Sep 2023 09:18:58 +0000 (18:18 +0900)
committer Kwanghoon Son <k.son@samsung.com>
Wed, 25 Oct 2023 01:54:03 +0000 (10:54 +0900)
[Issue type] : code refactoring

Do code refactoring to AsyncManager by introducing new public member functions -
push() and pop() - and by moving all existing member functions to private ones,
except isWorking(), popFromInput() and pushToOutput(), which are used
in the inferenceCallback function of the object detection task group.

By doing this, the async API can be supported with AsyncManager easily because
all each task group has to do is call push() in performAsync()
and call pop() in getOutput(). In addition, this patch moves the logic
of inferenceCallback() into the lambda block of performAsync().

For async API support, what each task group has to do is:
    void TaskGroup::performAsync()
    {
        ...
        // create AsyncManager class with a lambda function.
        ...
        _async_manager->push(inputVectors);
    }

    void TaskGroup::getOutput()
    {
        if (_async_manager) {
            ...
            _current_result = _async_manager->pop();
        }
        ...
    }

Change-Id: I022c71b921b5351d739ba685c188625806162000
Signed-off-by: Inki Dae <inki.dae@samsung.com>
mv_machine_learning/common/include/async_manager.h
mv_machine_learning/common/src/async_manager.cpp
mv_machine_learning/object_detection/include/object_detection.h
mv_machine_learning/object_detection/src/object_detection.cpp

index ed33d1b..605190a 100644 (file)
@@ -49,24 +49,28 @@ private:
        std::atomic<bool> _exit_thread { false };
        std::condition_variable _outgoing_queue_event;
        std::condition_variable _incoming_queue_event;
+       unsigned long _input_frame_number {};
        CallbackType _callback;
 
-public:
-       AsyncManager(const CallbackType &cb);
-       virtual ~AsyncManager() = default;
-
-       bool isWorking();
        template<typename T> void pushToInput(AsyncInputQueue<T> &inputQueue);
-       template<typename T> AsyncInputQueue<T> popFromInput();
        template<typename T> bool isInputQueueEmpty();
        void waitforInputQueue();
        R popFromOutput();
        bool isOutputQueueEmpty();
        void waitforOutputQueue();
-       void pushToOutput(R &output);
        template<typename T> void inferenceThreadLoop();
        template<typename T> void invoke();
+
+public:
+       AsyncManager(const CallbackType &cb);
+       virtual ~AsyncManager() = default;
+
+       bool isWorking();
        void stop();
+       template<typename T> AsyncInputQueue<T> popFromInput();
+       void pushToOutput(R &output);
+       template<typename T> void push(std::vector<std::vector<T> > &inputs);
+       R pop();
 };
 
 } // machine_learning
index 95865c9..784992d 100644 (file)
@@ -27,7 +27,7 @@
 #define INVOKE(result_type)                                           \
        template void AsyncManager<result_type>::invoke<unsigned char>(); \
        template void AsyncManager<result_type>::invoke<float>()
-#define POP_FROM_OUTPUT(result_type) template ObjectDetectionResult AsyncManager<result_type>::popFromOutput()
+#define POP_FROM_OUTPUT(result_type) template result_type AsyncManager<result_type>::popFromOutput()
 #define PUSH_TO_INPUT(result_type)                                                                                    \
        template void AsyncManager<result_type>::pushToInput<unsigned char>(AsyncInputQueue<unsigned char> & inputQueue); \
        template void AsyncManager<result_type>::pushToInput<float>(AsyncInputQueue<float> & inputQueue)
 #define IS_WORKING(result_type) template bool AsyncManager<result_type>::isWorking()
 #define POP_FROM_INPUT(result_type)                                                    \
        template AsyncInputQueue<unsigned char> AsyncManager<result_type>::popFromInput(); \
-       template AsyncInputQueue<float> AsyncManager<ObjectDetectionResult>::popFromInput()
-#define WAIT_FOR_OUTPUT_QUEUE(result_type) template void AsyncManager<result_type>::waitforOutputQueue()
-#define WAIT_FOR_INPUT_QUEUE(result_type) template void AsyncManager<result_type>::waitforInputQueue()
+       template AsyncInputQueue<float> AsyncManager<result_type>::popFromInput()
 #define PUSH_TO_OUTPUT(result_type) template void AsyncManager<result_type>::pushToOutput(result_type &output)
 #define INFERENCE_THREAD_LOOP(result_type)                                         \
        template void AsyncManager<result_type>::inferenceThreadLoop<unsigned char>(); \
        template void AsyncManager<result_type>::inferenceThreadLoop<float>()
+#define PUSH(result_type)                                                                            \
+       template void AsyncManager<result_type>::push(std::vector<std::vector<unsigned char> > &inputs); \
+       template void AsyncManager<result_type>::push(std::vector<std::vector<float> > &inputs)
+#define POP(result_type) template result_type AsyncManager<result_type>::pop()
 
 using namespace std;
 using namespace mediavision::machine_learning::exception;
@@ -209,6 +211,31 @@ template<typename R> void AsyncManager<R>::stop()
        swap(_outgoing_queue, empty);
 }
 
+template<typename R> template<typename T> void AsyncManager<R>::push(std::vector<std::vector<T> > &inputs)
+{
+       _input_frame_number++;
+
+       if (!isInputQueueEmpty<T>()) {
+               LOGD("input frame number(%ld) has been skipped.", _input_frame_number);
+               return;
+       }
+
+       AsyncInputQueue<T> in_queue = { _input_frame_number, inputs };
+
+       pushToInput<T>(in_queue);
+
+       LOGD("Pushed : input frame number = %lu", in_queue.frame_number);
+
+       invoke<T>();
+}
+
+template<typename R> R AsyncManager<R>::pop()
+{
+       waitforOutputQueue();
+
+       return popFromOutput();
+}
+
 IS_INPUT_QUEUE_EMPTY(ObjectDetectionResult);
 INVOKE(ObjectDetectionResult);
 POP_FROM_OUTPUT(ObjectDetectionResult);
@@ -217,10 +244,10 @@ STOP(ObjectDetectionResult);
 ASYNC_MANAGER(ObjectDetectionResult);
 IS_WORKING(ObjectDetectionResult);
 POP_FROM_INPUT(ObjectDetectionResult);
-WAIT_FOR_OUTPUT_QUEUE(ObjectDetectionResult);
-WAIT_FOR_INPUT_QUEUE(ObjectDetectionResult);
 PUSH_TO_OUTPUT(ObjectDetectionResult);
 INFERENCE_THREAD_LOOP(ObjectDetectionResult);
+PUSH(ObjectDetectionResult);
+POP(ObjectDetectionResult);
 
 }
 }
\ No newline at end of file
index 0ea6625..6846b85 100644 (file)
@@ -46,7 +46,6 @@ private:
        ObjectDetectionTaskType _task_type;
        std::unique_ptr<AsyncManager<ObjectDetectionResult> > _async_manager;
        ObjectDetectionResult _current_result {};
-       unsigned long _input_frame_number {};
 
        void loadLabel();
        void getEngineList();
@@ -95,7 +94,6 @@ public:
        void performAsync(ObjectDetectionInput &input) override;
        ObjectDetectionResult &getOutput() override;
        ObjectDetectionResult &getOutputCache() override;
-       template<typename T, typename R> void inferenceCallback();
 };
 
 } // machine_learning
index c956e57..6b7e074 100644 (file)
@@ -351,31 +351,22 @@ void ObjectDetection::perform(mv_source_h &mv_src)
                throw InvalidOperation("Invalid model data type.");
 }
 
-template<typename T, typename R> void ObjectDetection::inferenceCallback()
-{
-       AsyncInputQueue<T> inputQueue = _async_manager->popFromInput<T>();
-
-       inference<T>(inputQueue.inputs);
-
-       R &resultQueue = result();
-
-       resultQueue.frame_number = inputQueue.frame_number;
-
-       _async_manager->pushToOutput(resultQueue);
-}
-
 template<typename T> void ObjectDetection::performAsync(ObjectDetectionInput &input, shared_ptr<MetaInfo> metaInfo)
 {
-       _input_frame_number++;
-
        if (!_async_manager) {
                _async_manager = make_unique<AsyncManager<ObjectDetectionResult> >(
-                               [this]() { inferenceCallback<T, ObjectDetectionResult>(); });
-       }
+                               [this]()
+                               {
+                                       AsyncInputQueue<T> inputQueue = _async_manager->popFromInput<T>();
 
-       if (!_async_manager->isInputQueueEmpty<T>()) {
-               LOGD("input frame number(%ld) has been skipped.", _input_frame_number);
-               return;
+                                       inference<T>(inputQueue.inputs);
+
+                                       ObjectDetectionResult &resultQueue = result();
+
+                                       resultQueue.frame_number = inputQueue.frame_number;
+                                       _async_manager->pushToOutput(resultQueue);
+                               }
+               );
        }
 
        vector<T> inputVector;
@@ -383,13 +374,8 @@ template<typename T> void ObjectDetection::performAsync(ObjectDetectionInput &in
        preprocess<T>(input.inference_src, metaInfo, inputVector);
 
        vector<vector<T> > inputVectors = { inputVector };
-       AsyncInputQueue<T> in_queue = { _input_frame_number, inputVectors };
-
-       _async_manager->pushToInput<T>(in_queue);
-
-       LOGD("Pushed : input frame number = %lu", in_queue.frame_number);
 
-       _async_manager->invoke<T>();
+       _async_manager->push(inputVectors);
 }
 
 void ObjectDetection::performAsync(ObjectDetectionInput &input)
@@ -412,8 +398,7 @@ ObjectDetectionResult &ObjectDetection::getOutput()
                if (!_async_manager->isWorking())
                        throw InvalidOperation("Object detection has been already destroyed so invalid operation.");
 
-               _async_manager->waitforOutputQueue();
-               _current_result = _async_manager->popFromOutput();
+               _current_result = _async_manager->pop();
        } else {
                // TODO. Check if inference request is completed or not here.
                //       If not then throw an exception.