std::atomic<bool> _exit_thread { false };
std::condition_variable _outgoing_queue_event;
std::condition_variable _incoming_queue_event;
+ unsigned long _input_frame_number {};
CallbackType _callback;
-public:
- AsyncManager(const CallbackType &cb);
- virtual ~AsyncManager() = default;
-
- bool isWorking();
template<typename T> void pushToInput(AsyncInputQueue<T> &inputQueue);
- template<typename T> AsyncInputQueue<T> popFromInput();
template<typename T> bool isInputQueueEmpty();
void waitforInputQueue();
R popFromOutput();
bool isOutputQueueEmpty();
void waitforOutputQueue();
- void pushToOutput(R &output);
template<typename T> void inferenceThreadLoop();
template<typename T> void invoke();
+
+public:
+ AsyncManager(const CallbackType &cb);
+ virtual ~AsyncManager() = default;
+
+ bool isWorking();
void stop();
+ template<typename T> AsyncInputQueue<T> popFromInput();
+ void pushToOutput(R &output);
+ template<typename T> void push(std::vector<std::vector<T> > &inputs);
+ R pop();
};
} // machine_learning
#define INVOKE(result_type) \
	template void AsyncManager<result_type>::invoke<unsigned char>(); \
	template void AsyncManager<result_type>::invoke<float>()
-#define POP_FROM_OUTPUT(result_type) template ObjectDetectionResult AsyncManager<result_type>::popFromOutput()
+#define POP_FROM_OUTPUT(result_type) template result_type AsyncManager<result_type>::popFromOutput()
#define PUSH_TO_INPUT(result_type) \
	template void AsyncManager<result_type>::pushToInput<unsigned char>(AsyncInputQueue<unsigned char> & inputQueue); \
	template void AsyncManager<result_type>::pushToInput<float>(AsyncInputQueue<float> & inputQueue)
#define IS_WORKING(result_type) template bool AsyncManager<result_type>::isWorking()
#define POP_FROM_INPUT(result_type) \
	template AsyncInputQueue<unsigned char> AsyncManager<result_type>::popFromInput(); \
-	template AsyncInputQueue<float> AsyncManager<ObjectDetectionResult>::popFromInput()
-#define WAIT_FOR_OUTPUT_QUEUE(result_type) template void AsyncManager<result_type>::waitforOutputQueue()
-#define WAIT_FOR_INPUT_QUEUE(result_type) template void AsyncManager<result_type>::waitforInputQueue()
+	template AsyncInputQueue<float> AsyncManager<result_type>::popFromInput()
#define PUSH_TO_OUTPUT(result_type) template void AsyncManager<result_type>::pushToOutput(result_type &output)
#define INFERENCE_THREAD_LOOP(result_type) \
	template void AsyncManager<result_type>::inferenceThreadLoop<unsigned char>(); \
	template void AsyncManager<result_type>::inferenceThreadLoop<float>()
+// Explicit-instantiation helpers for the new public front-ends push()/pop(),
+// covering both supported input element types (unsigned char and float).
#define PUSH(result_type) \
	template void AsyncManager<result_type>::push(std::vector<std::vector<unsigned char> > &inputs); \
	template void AsyncManager<result_type>::push(std::vector<std::vector<float> > &inputs)
#define POP(result_type) template result_type AsyncManager<result_type>::pop()
using namespace std;
using namespace mediavision::machine_learning::exception;
swap(_outgoing_queue, empty);
}
+// Public producer entry point: assigns the next frame number to `inputs` and
+// hands the frame to the worker thread via the input queue, then kicks off
+// inference with invoke<T>(). If the previous frame is still pending in the
+// input queue, the new frame is dropped (the frame counter still advances) so
+// the producer never outruns the inference thread.
+template<typename R> template<typename T> void AsyncManager<R>::push(std::vector<std::vector<T> > &inputs)
+{
+	_input_frame_number++;
+
+	if (!isInputQueueEmpty<T>()) {
+		// %lu matches the unsigned long type of _input_frame_number
+		// (was %ld, which mismatches the unsigned argument).
+		LOGD("input frame number(%lu) has been skipped.", _input_frame_number);
+		return;
+	}
+
+	AsyncInputQueue<T> in_queue = { _input_frame_number, inputs };
+
+	pushToInput<T>(in_queue);
+
+	LOGD("Pushed : input frame number = %lu", in_queue.frame_number);
+
+	invoke<T>();
+}
+
+// Public consumer entry point: blocks until the worker thread publishes a
+// result to the output queue, then removes and returns it.
+template<typename R> R AsyncManager<R>::pop()
+{
+	waitforOutputQueue();
+
+	return popFromOutput();
+}
+
IS_INPUT_QUEUE_EMPTY(ObjectDetectionResult);
INVOKE(ObjectDetectionResult);
POP_FROM_OUTPUT(ObjectDetectionResult);
ASYNC_MANAGER(ObjectDetectionResult);
IS_WORKING(ObjectDetectionResult);
POP_FROM_INPUT(ObjectDetectionResult);
-WAIT_FOR_OUTPUT_QUEUE(ObjectDetectionResult);
-WAIT_FOR_INPUT_QUEUE(ObjectDetectionResult);
PUSH_TO_OUTPUT(ObjectDetectionResult);
INFERENCE_THREAD_LOOP(ObjectDetectionResult);
+// Explicit waitfor* instantiations are no longer needed: pop() calls
+// waitforOutputQueue() inside this translation unit, so it is instantiated
+// implicitly along with POP below.
+PUSH(ObjectDetectionResult);
+POP(ObjectDetectionResult);
}
}
\ No newline at end of file
ObjectDetectionTaskType _task_type;
std::unique_ptr<AsyncManager<ObjectDetectionResult> > _async_manager;
ObjectDetectionResult _current_result {};
- unsigned long _input_frame_number {};
void loadLabel();
void getEngineList();
void performAsync(ObjectDetectionInput &input) override;
ObjectDetectionResult &getOutput() override;
ObjectDetectionResult &getOutputCache() override;
- template<typename T, typename R> void inferenceCallback();
};
} // machine_learning
throw InvalidOperation("Invalid model data type.");
}
-template<typename T, typename R> void ObjectDetection::inferenceCallback()
-{
- AsyncInputQueue<T> inputQueue = _async_manager->popFromInput<T>();
-
- inference<T>(inputQueue.inputs);
-
- R &resultQueue = result();
-
- resultQueue.frame_number = inputQueue.frame_number;
-
- _async_manager->pushToOutput(resultQueue);
-}
-
template<typename T> void ObjectDetection::performAsync(ObjectDetectionInput &input, shared_ptr<MetaInfo> metaInfo)
{
-	_input_frame_number++;
-
	if (!_async_manager) {
		_async_manager = make_unique<AsyncManager<ObjectDetectionResult> >(
-			[this]() { inferenceCallback<T, ObjectDetectionResult>(); });
-	}
+			// Inference callback, executed on the AsyncManager worker thread:
+			// pop one queued frame, run inference on it, then tag the result
+			// with the frame number it belongs to and publish it to the
+			// output queue.
+			// NOTE(review): captures `this`; assumes the async manager is
+			// stopped before ObjectDetection is destroyed — confirm the
+			// shutdown ordering against _async_manager's stop()/destructor.
+			[this]()
+			{
+				AsyncInputQueue<T> inputQueue = _async_manager->popFromInput<T>();
-	if (!_async_manager->isInputQueueEmpty<T>()) {
-		LOGD("input frame number(%ld) has been skipped.", _input_frame_number);
-		return;
+				inference<T>(inputQueue.inputs);
+
+				ObjectDetectionResult &resultQueue = result();
+
+				resultQueue.frame_number = inputQueue.frame_number;
+				_async_manager->pushToOutput(resultQueue);
+			}
+		);
	}
	vector<T> inputVector;
	preprocess<T>(input.inference_src, metaInfo, inputVector);
	vector<vector<T> > inputVectors = { inputVector };
-	AsyncInputQueue<T> in_queue = { _input_frame_number, inputVectors };
-
-	_async_manager->pushToInput<T>(in_queue);
-
-	LOGD("Pushed : input frame number = %lu", in_queue.frame_number);
-	_async_manager->invoke<T>();
+	// push() now owns frame numbering, queue-full frame skipping and invoke(),
+	// replacing the inline logic removed above.
+	_async_manager->push(inputVectors);
}
void ObjectDetection::performAsync(ObjectDetectionInput &input)
if (!_async_manager->isWorking())
throw InvalidOperation("Object detection has been already destroyed so invalid operation.");
- _async_manager->waitforOutputQueue();
- _current_result = _async_manager->popFromOutput();
+ _current_result = _async_manager->pop();
} else {
// TODO. Check if inference request is completed or not here.
// If not then throw an exception.