Refactor mv_depthstream_test_suite to support camera as well as vision-source 66/293266/7
author     Tae-Young Chung <ty83.chung@samsung.com>    Wed, 17 May 2023 05:55:25 +0000 (14:55 +0900)
committer  Tae-Young Chung <ty83.chung@samsung.com>    Wed, 24 May 2023 08:12:47 +0000 (08:12 +0000)

[Version] 0.23.49-1
[Issue type] update
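
The test suite can now feed stereo frames to mv_3d either from vision-source or from
the camera API, selected by a new <source> argument; with only the three disparity
arguments it falls back to the vision-source feeder and its 640x400 top-and-bottom
defaults. Example invocations, as printed by the updated helper():

  # vision-source feeder with defaults, async run, disparity search range 8..96
  mv_depthstream_test_suite 1 8 96

  # camera feeder, 640x400 top-and-bottom stereo, explicit calibration and depth dump paths
  mv_depthstream_test_suite 1 8 96 640 400 2 1 /usr/share/dfs-qcmv/stereoCalibDA.yaml /tmp/camerasource.png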

Change-Id: I8d1b2f3c0f81a51523f417ea33d052c4dba50d52
Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
packaging/capi-media-vision.spec
test/testsuites/mv3d/CMakeLists.txt
test/testsuites/mv3d/depthstream_test_suite.cpp

index cc2163685904da879a39dd99b155a02cd045b758..231832fc486657d7bae8a252b4fd3e4263f2dbe3 100644 (file)
@@ -1,7 +1,7 @@
 Name:        capi-media-vision
 Summary:     Media Vision library for Tizen Native API
 Version:     0.23.49
-Release:     0
+Release:     1
 Group:       Multimedia/Framework
 License:     Apache-2.0 and BSD-3-Clause
 Source0:     %{name}-%{version}.tar.gz
@@ -53,6 +53,7 @@ Requires:      training-engine-interface-common
 
 %define build_depth_stream_testsuite 1
 BuildRequires: pkgconfig(vision-source)
+BuildRequires: pkgconfig(capi-media-camera)
 %ifarch aarch64
 %define enable_mv3d_pointcloud 1
 BuildRequires: Open3D-devel
index 38f75bbb7fab82f4e1830e50e73e23092f54af89..752b72a541e71178893bbb74dd35ec23dd619327 100644 (file)
@@ -42,11 +42,14 @@ if (${BUILD_DEPTH_STREAM_TESTSUITE})
       include_directories(${GLIB_PKG_INCLUDE_DIRS})
   endif()
 
-  SET(dependents "gstreamer-1.0 gstreamer-app-1.0 gstreamer-video-1.0 vision-source")
+  SET(dependents "vision-source capi-media-camera")
 
   INCLUDE(FindPkgConfig)
   pkg_check_modules(${PROJECT_NAME}_DEP REQUIRED ${dependents})
 
+  include_directories(${${PROJECT_NAME}_DEP_INCLUDE_DIRS})
+
+  SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-format-truncation -fpermissive")
   add_executable(mv_depthstream_test_suite depthstream_test_suite.cpp)
 
   target_link_libraries(mv_depthstream_test_suite ${MV_3D_LIB_NAME}
index 195e93fe53d573e7e29abea8c27a980fb1be91ee..5cf5421263649f5426adb56aba2959f477978eff 100644 (file)
@@ -22,6 +22,8 @@
 #include <stdlib.h>
 #include <unistd.h>
 #include <math.h>
+#include <memory>
+#include <functional>
 #include <inttypes.h>
 #include <opencv2/core.hpp>
 #include <opencv2/imgcodecs.hpp>
 #include "mv_3d.h"
 #include <mv_testsuite_common.h>
 #include <vision_source.h>
+#include <camera_internal.h>
 #ifdef WITH_DA_PROFILE
 #include <mv_common_internal.h>
 #endif
 
 #include <glib-2.0/glib.h>
-#include <gst/gst.h>
-#include <gst/video/video.h>
-#include <gst/app/gstappsrc.h>
 
 #ifdef BUILD_VISUALIZER
 #include "mv_util_visualizer_2d.h"
@@ -57,18 +57,11 @@ using namespace open3d;
 #endif
 
 #define VISION_SOURCE_DEVICE_ID 3
-#define __max(a, b) (((a) > (b)) ? (a) : (b))
-#define __min(a, b) (((a) < (b)) ? (a) : (b))
-
-#define TRUNCATE_MIN(a, b, c) ((a < b) ? c : a)
-#define TRUNCATE_MAX(a, b, c) ((a > b) ? c : a)
-#define TRANSLATE_VAL(val) (TRUNCATE_MAX(TRUNCATE_MIN(val, 0, -0.05), 1, 1.05) / 1.15 + 0.1)
-#define COLORMAP_JET_R(val) __max(0, __min(255, (int) (round(255 * (1.5 - 4 * fabs(val - .75))))))
-#define COLORMAP_JET_G(val) __max(0, __min(255, (int) (round(255 * (1.5 - 4 * fabs(val - .5))))))
-#define COLORMAP_JET_B(val) __max(0, __min(255, (int) (round(255 * (1.5 - 4 * fabs(val - .25))))))
 
 class StopWatch
 {
+private:
+       std::chrono::steady_clock::time_point start;
 public:
        StopWatch() = default;
        ~StopWatch() = default;
@@ -81,345 +74,489 @@ public:
        {
                return std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - start);
        }
-
-private:
-       std::chrono::steady_clock::time_point start;
 };
 
-typedef struct _appdata {
-       StopWatch stopWatch;
-       std::string dataPath;
-       std::string intrinsicName;
-       std::string rgbName;
-       std::string datasetName;
-       std::string dispResultFile;
-
-       float minDisp;
-       float maxDisp;
-       int fmt;
-
-       int imgWidth;
-       int imgHeight;
-       unsigned char *buffer;
-       mv_source_h mv_source;
-       mv_3d_h mv3d_handle;
-
-       struct {
-               std::string url;
-               mv_source_h source;
-               unsigned char *buffer;
-       } remoteData;
-
-} appdata;
-
-static appdata app;
-
-enum { STEREO_FORMAT_NONE = 0, STEREO_FORMAT_SIDE_BY_SIDE, STEREO_FORMAT_TOP_AND_BOTTOM };
-
-// MediaVision
-mv_engine_config_h engine_config;
-// Vision source handle
-vision_source_h vision_source_handle;
-
-static GMainLoop *loop;
-
-static bool isLive;
-static bool isAsync;
-static int camWidth;
-static int camHeight;
-static int stereoFormat;
-static int minDisp;
-static int maxDisp;
-static int display_xpos;
-static int display_ypos;
-static bool isRemote;
-
-static void int_handler(int sig)
+enum { STEREO_FORMAT_NONE, STEREO_FORMAT_SIDE_BY_SIDE, STEREO_FORMAT_TOP_AND_BOTTOM };
+enum { VISION_SOURCE, CAMERA_SOURCE };
+enum { NONE, OKAY };
+
+class Mv3d
 {
-       char c;
+private:
+       bool _isAsync;
+       int _sourceWidth;
+       int _sourceHeight;
+       unsigned char *_rawBuffer;
+
+       mv_3d_h _3dHandle;
+       mv_source_h _source;
+       mv_engine_config_h _engineConfig;
+
+       int _state;
+       std::string _calibFilePath;
+       std::string _depthFilePath;
+       StopWatch _timer;
+
+       static void _depth_stereo_cb(mv_source_h source, unsigned short *depth, unsigned int width, unsigned int height,
+                                                               void *user_data)
+       {
+#ifdef WITH_DA_PROFILE
+               std::cout << "_depth_stereo_cb: " << std::endl;
+               da_timestamp_s timestamp;
+               mv_source_get_timestamp(source, &timestamp);
+               std::cout << "timestamp [" << timestamp.timestamp << "], ";
+               std::cout << "ts_sof [" << timestamp.ts_sof << "], ";
+               std::cout << "ts_aux [" << timestamp.ts_aux << "], ";
+               std::cout << "ts_eof [" << timestamp.ts_eof << "], " << std::endl;
+#endif
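+               // wrap the 16-bit depth map, zero out 65535 values and write it to the dump file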
+               cv::Mat dump(cv::Size(width, height), CV_16U, depth);
+               dump.setTo(0, dump==65535);
 
-       signal(sig, SIG_IGN);
-       while ((getchar()) != '\n')
-               ;
+               Mv3d* mv3d = static_cast<Mv3d *>(user_data);
+               cv::imwrite(mv3d->_depthFilePath, dump);
+       }
+public:
+       Mv3d(int minDisparity, int maxDisparity, bool isAsync, std::string calibFilePath, int depthWidth, int depthHeight, int stereoFormat, std::string depthFilePath)
+               :_isAsync(isAsync)
+               , _sourceWidth(depthWidth)
+               , _sourceHeight(depthHeight)
+               , _rawBuffer(nullptr)
+               , _3dHandle(nullptr)
+               , _source(nullptr)
+               , _engineConfig(nullptr)
+               , _state(NONE)
+               , _calibFilePath("/usr/share/dfs-qcmv/stereoCalibDA.yaml")
+               , _depthFilePath("/tmp/depthDump.png")
+       {
+               // MediaVision mv_engine_config_h
+               mv_create_engine_config(&_engineConfig);
+               mv_engine_config_set_int_attribute(_engineConfig, MV_3D_DEPTH_WIDTH, depthWidth);
+               mv_engine_config_set_int_attribute(_engineConfig, MV_3D_DEPTH_HEIGHT, depthHeight);
+               mv_engine_config_set_int_attribute(_engineConfig, MV_3D_DEPTH_MIN_DISPARITY, minDisparity);
+               mv_engine_config_set_int_attribute(_engineConfig, MV_3D_DEPTH_MAX_DISPARITY, maxDisparity);
+
+               if (!calibFilePath.empty())
+                       _calibFilePath = calibFilePath;
+               mv_engine_config_set_string_attribute(_engineConfig, MV_3D_DEPTH_STEREO_CONFIG_FILE_PATH, _calibFilePath.c_str());
+
+               if (!depthFilePath.empty())
+                       _depthFilePath = depthFilePath;
+
+               // MediaVision mv_3d_h
+               mv_3d_create(&_3dHandle);
+               mv_3d_configure(_3dHandle, _engineConfig);
+               mv_3d_set_depth_cb(_3dHandle, _depth_stereo_cb, this);
+
+               mv_create_source(&_source);
+
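+               // side-by-side input doubles the source width; top-and-bottom doubles the height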
+               switch (stereoFormat) {
+               case STEREO_FORMAT_SIDE_BY_SIDE:
+                       _sourceWidth <<= 1;
+                       break;
+               case STEREO_FORMAT_TOP_AND_BOTTOM:
+                       _sourceHeight <<= 1;
+                       break;
+               default:
+                       break;
+               }
 
-       c = getchar();
-       if (c == 'y' || c == 'Y') {
-               sleep(1);
+               _rawBuffer = new unsigned char [_sourceWidth * _sourceHeight];
+       }
 
-               g_main_loop_quit(loop);
-       } else {
-               printf("no");
-               signal(SIGINT, int_handler);
+       ~Mv3d()
+       {
+               if (_engineConfig || _3dHandle || _source)
+                       stop();
        }
 
-       getchar(); // Get new line character
-}
+       void prepare()
+       {
+               mv_3d_prepare(_3dHandle);
+               _state = OKAY;
+       }
 
-static void _depth_stereo_remote_cb(mv_source_h source, unsigned short *depth, unsigned int width, unsigned int height,
-                                                                       void *user_data)
-{
-       auto mv3d = static_cast<appdata *>(user_data);
-
-       for (int y = 0; y < height; y++) {
-               unsigned short *src = depth + y * width;
-               unsigned char *dst = mv3d->remoteData.buffer + y * (width * 4);
-               for (int x = 0; x < width; x++) {
-                       float value =
-                                       TRANSLATE_VAL(((440.92750f * 21.87095f / static_cast<float>(src[x])) - 8.0f) / (88.f /*96-8*/));
-                       dst[x * 4] = COLORMAP_JET_R(value);
-                       dst[x * 4 + 1] = COLORMAP_JET_G(value);
-                       dst[x * 4 + 2] = COLORMAP_JET_B(value);
-                       dst[x * 4 + 3] = 255;
+       void run(vision_source_buffer_s *buffer)
+       {
+               if (_state != OKAY)
+                       return;
+
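+               // copy only the visible width of each row (source rows are padded to align_width)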
+               for (unsigned int h = 0; h < buffer->resolution.height; h++) {
+                       memcpy(_rawBuffer + buffer->resolution.width * h, buffer->planes[0].data + (buffer->planes[0].align_width * h),
+                               buffer->resolution.width);
                }
-       }
-       mv_source_fill_by_buffer(mv3d->remoteData.source, mv3d->remoteData.buffer, width * height * 4, width, height,
-                                                        MEDIA_VISION_COLORSPACE_RGBA);
-#if BUILD_VISUALIZER
-       mv_util_visualizer_2d(mv3d->remoteData.source, mv3d->remoteData.url.c_str());
-#endif
-       mv_source_clear(mv3d->remoteData.source);
-}
 
-static void _depth_stereo_cb(mv_source_h source, unsigned short *depth, unsigned int width, unsigned int height,
-                                                        void *user_data)
-{
-       auto mv3d = static_cast<appdata *>(user_data);
+               mv_source_fill_by_buffer(_source, _rawBuffer, _sourceWidth * _sourceHeight,
+                                               _sourceWidth, _sourceHeight, MEDIA_VISION_COLORSPACE_Y800);
 
 #ifdef WITH_DA_PROFILE
-       printf("_depth_stereo_cb: ");
-       da_timestamp_s timestamp;
-       mv_source_get_timestamp(source, &timestamp);
-       printf("timestamp [%" PRIu64 "],\
-                       ts_sof[%" PRIu64 "],\
-                       ts_aux[%" PRIu64 "],\
-                       ts_eof[%" PRIu64 "]\n",
-                  timestamp.timestamp, timestamp.ts_sof, timestamp.ts_aux, timestamp.ts_eof);
+               mv_source_set_timestamp(_source, static_cast<da_timestamp_s *>(buffer->priv));
 #endif
-       cv::Mat dump(cv::Size(width, height), CV_16U);
 
-       for (int y = 0; y < height; y++) {
-               unsigned short *ptr = depth + y * width;
-               for (int x = 0; x < width; x++) {
-                       dump.at<unsigned short>(y, x) =
-                                       static_cast<unsigned short>(440.92750f * 21.87095f / static_cast<float>(ptr[x]));
-               }
-       }
-
-       cv::imwrite(mv3d->dispResultFile, dump);
-}
+               if (_isAsync)
+                       mv_3d_run_async(_3dHandle, _source, nullptr, nullptr);
+               else
+                       mv_3d_run(_3dHandle, _source, nullptr, nullptr);
 
-static int _vision_source_cb(vision_source_buffer_s *buffer, void *user_data)
-{
-       auto app = static_cast<appdata *>(user_data);
-       if (app->mv_source == nullptr || app->mv3d_handle == nullptr) {
-               printf("mv_source or mv3d_handle is null\n");
-               return 0;
+               mv_source_clear(_source);
        }
 
-       printf("_vision_source_cb: %lld ms\n", static_cast<long long int>(app->stopWatch.elapsedTime().count()));
+       void run(media_packet_h pkt, unsigned long& timestamp, camera_frame_meta_s& meta)
+       {
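+               // feed the camera preview frame to mediavision directly through its media packet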
+               mv_source_fill_by_media_packet(_source, pkt);
 
-       for (unsigned int h = 0; h < buffer->resolution.height; h++) {
-               memcpy(app->buffer + buffer->resolution.width * h, buffer->planes[0].data + (buffer->planes[0].align_width * h),
-                          buffer->resolution.width);
-       }
-       mv_source_fill_by_buffer(app->mv_source, app->buffer, app->imgWidth * app->imgHeight, app->imgWidth, app->imgHeight,
-                                                        MEDIA_VISION_COLORSPACE_Y800);
 #ifdef WITH_DA_PROFILE
-       mv_source_set_timestamp(app->mv_source, static_cast<da_timestamp_s *>(buffer->priv));
+               da_timestamp_s daTimestamp = {timestamp,
+                                                                meta.ts_soe,
+                                                                meta.ts_eoe,
+                                                                meta.ts_sof,
+                                                                meta.ts_eof,
+                                                                meta.ts_hal,
+                                                                meta.ts_qmf,
+                                                                meta.td_exp,
+                                                                meta.ts_aux,
+                                                                meta.td_aux,
+                                                                meta.seqnum,
+                                                                meta.flags };
+
+               mv_source_set_timestamp(_source, &daTimestamp);
 #endif
+               mv_3d_run_async(_3dHandle, _source, nullptr, nullptr);
 
-       if (isAsync)
-               mv_3d_run_async(app->mv3d_handle, app->mv_source, nullptr, nullptr);
-       else
-               mv_3d_run(app->mv3d_handle, app->mv_source, nullptr, nullptr);
+               mv_source_clear(_source);
+       }
 
-       mv_source_clear(app->mv_source);
+       void stop()
+       {
+               _state = NONE;
+               delete [] _rawBuffer;
 
-       return 0;
-}
+               if (_source) {
+                       mv_destroy_source(_source);
+                       _source = nullptr;
+               }
 
-int main(int argc, char *argv[])
-{
-       signal(SIGINT, int_handler);
-
-       camWidth = 640;
-       camHeight = 400;
-       isLive = true;
-       isAsync = true;
-       isRemote = false;
-       std::string stereoConfigFile;
-       std::string dispResultFile;
-       std::string remoteUrl;
-       // mv_depthstream_test_suite 1 8 96 0 0 640 400 /usr/share/dfs-qcmv/stereoCalibDA.yaml /tmp/test.png 1 192.168.0.4:50051
-       if (argc > 1) {
-               isAsync = (atoi(argv[1]) != 0);
-               minDisp = atoi(argv[2]);
-               maxDisp = atoi(argv[3]);
-               display_xpos = atoi(argv[4]);
-               display_ypos = atoi(argv[5]);
-               camWidth = atoi(argv[6]);
-               camHeight = atoi(argv[7]);
-               stereoFormat = atoi(argv[8]);
-               stereoConfigFile = argv[9];
-               dispResultFile = argv[10];
-               isRemote = (atoi(argv[11]) != 0);
-               if (isRemote)
-                       remoteUrl = argv[12];
-       }
-#ifdef MV_3D_POINTCLOUD_IS_AVAILABLE
-       utility::SetVerbosityLevel(utility::VerbosityLevel::Debug);
-#endif
+               if (_3dHandle) {
+                       mv_3d_destroy(_3dHandle);
+                       _3dHandle = nullptr;
+               }
 
-       int ret = MEDIA_VISION_ERROR_NONE;
-       printf("enter main\n");
-
-       vision_source_format_s format { .pixel_format = VISION_SOURCE_PIXEL_FORMAT_NV12,
-                                                                       .resolution = { camWidth, camHeight },
-                                                                       .fps = 10,
-                                                                       .quality = 0,
-                                                                       .bitrate = 0 };
-
-       switch (stereoFormat) {
-       case STEREO_FORMAT_SIDE_BY_SIDE:
-               format.resolution.width <<= 1;
-               break;
-       case STEREO_FORMAT_TOP_AND_BOTTOM:
-               format.resolution.height <<= 1;
-               break;
-       default:
-               break;
+               if (_engineConfig) {
+                       mv_destroy_engine_config(_engineConfig);
+                       _engineConfig = nullptr;
+               }
        }
 
-       // mediavision
-       // engine_config
-       ret = mv_create_engine_config(&engine_config);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       void resetTime()
+       {
+               _timer.resetTime();
        }
 
-       ret = mv_engine_config_set_int_attribute(engine_config, MV_3D_DEPTH_WIDTH, camWidth);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       long long int checkElapsedTime()
+       {
+               return static_cast<long long int>(_timer.elapsedTime().count());
        }
+};
 
-       ret = mv_engine_config_set_int_attribute(engine_config, MV_3D_DEPTH_HEIGHT, camHeight);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
-       }
+class IFeeder
+{
+public:
+       virtual ~IFeeder(){};
+       virtual void start(std::shared_ptr<Mv3d> mv3d) = 0;
+       virtual void stop() = 0;
+};
 
-       ret = mv_engine_config_set_int_attribute(engine_config, MV_3D_DEPTH_MIN_DISPARITY, minDisp);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
-       }
+class VisionSourceFeeder : public IFeeder
+{
+private:
+       Mv3d * _mv3d;
+       int _state;
+       const int _deviceId;
+
+       vision_source_h _handle;
+       vision_source_format_s _format;
+
+       static int _vision_source_cb(vision_source_buffer_s *buffer, void *user_data)
+       {
+               VisionSourceFeeder* visionSource = static_cast<VisionSourceFeeder*>(user_data);
+               std::cout << "_vision_source_cb: " << visionSource->_mv3d->checkElapsedTime() << " ms" << std::endl;
+               if (visionSource->_state != OKAY)
+                       return 0;
 
-       ret = mv_engine_config_set_int_attribute(engine_config, MV_3D_DEPTH_MAX_DISPARITY, maxDisp);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+               visionSource->_mv3d->run(buffer);
+               return 0;
        }
+public:
+       VisionSourceFeeder(int width, int height, int fps, int stereoFormat)
+                       : _mv3d(nullptr)
+                       , _state(NONE)
+                       , _deviceId(3)
+                       , _handle(nullptr)
+       {
+               vision_source_init(&_handle);
+               vision_source_open_device(_handle, _deviceId);
+
+               _format.pixel_format = VISION_SOURCE_PIXEL_FORMAT_NV12;
+               _format.resolution = { width, height };
+               _format.fps = fps;
+               _format.quality = 0;
+               _format.bitrate = 0;
+
+               switch (stereoFormat) {
+               case STEREO_FORMAT_SIDE_BY_SIDE:
+                       _format.resolution.width <<= 1;
+                       break;
+               case STEREO_FORMAT_TOP_AND_BOTTOM:
+                       _format.resolution.height <<= 1;
+                       break;
+               default:
+                       break;
+               }
+
+               vision_source_set_stream_format(_handle, &_format);
+       };
 
-       ret = mv_engine_config_set_string_attribute(engine_config, MV_3D_DEPTH_STEREO_CONFIG_FILE_PATH,
-                                                                                               stereoConfigFile.c_str());
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       ~VisionSourceFeeder()
+       {
+               if (_handle) {
+                       stop();
+               }
+       };
+
+       void start(std::shared_ptr<Mv3d> mv3d) override
+       {
+               _mv3d = mv3d.get();
+               _mv3d->resetTime();
+               _state = OKAY;
+               vision_source_start_stream(_handle, _vision_source_cb, this);
        }
-       app.dispResultFile = dispResultFile;
-       app.remoteData.url = remoteUrl;
 
-       // mv3d handle
-       ret = mv_3d_create(&app.mv3d_handle);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       void stop() override
+       {
+               _state = NONE;
+               vision_source_stop_stream(_handle);
+               vision_source_close_device(_handle);
+               vision_source_exit(_handle);
+               _handle = nullptr;
        }
+};
+
+class CameraSourceFeeder : public IFeeder
+{
+private:
+       Mv3d* _mv3d;
+       const camera_device_e _deviceId;
 
-       ret = mv_3d_configure(app.mv3d_handle, engine_config);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       camera_h _handle;
+       camera_frame_meta_s _frameMeta;
+
+       static void _camera_preview_cb(media_packet_h pkt, void *user_data)
+       {
+               CameraSourceFeeder* cameraSource = static_cast<CameraSourceFeeder*>(user_data);
+               std::cout << "_camera_preview_cb: " << cameraSource->_mv3d->checkElapsedTime()  << " ms" << std::endl;
+
+               unsigned long timestamp;
+               camera_attr_get_preview_frame_timestamp(cameraSource->_handle, &timestamp);
+               camera_attr_get_preview_frame_meta(cameraSource->_handle, &cameraSource->_frameMeta);
+               cameraSource->_mv3d->run(pkt, timestamp, cameraSource->_frameMeta);
+               media_packet_unref(pkt);
        }
 
-       if (isRemote)
-               ret = mv_3d_set_depth_cb(app.mv3d_handle, _depth_stereo_remote_cb, &app);
-       else
-               ret = mv_3d_set_depth_cb(app.mv3d_handle, _depth_stereo_cb, &app);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       static bool _supported_preview_fps_cb(camera_attr_fps_e fps, void *user_data)
+       {
+               CameraSourceFeeder* cameraSource = static_cast<CameraSourceFeeder*>(user_data);
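+               // select the 10 fps preview mode; returning false stops the fps enumeration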
+               if (fps == 10) {
+                       std::cout << "Set desired camera preview fps: "  << fps << std::endl;
+                       camera_attr_set_preview_fps(cameraSource->_handle, fps);
+                       return false;
+               }
+               return true;
        }
+public:
+       CameraSourceFeeder()
+                       : _mv3d(nullptr)
+                       , _deviceId(CAMERA_DEVICE_CAMERA3)
+                       , _handle(nullptr)
+       {
+               camera_create(_deviceId, &_handle);
+               camera_attr_foreach_supported_fps(_handle, _supported_preview_fps_cb, this);
+               if (!camera_is_supported_media_packet_preview_cb(_handle))
+                       throw std::runtime_error("not supported media packet preview cb");
+       }
+
+       ~CameraSourceFeeder()
+       {
+               stop();
+
+               camera_destroy(_handle);
+               _handle = nullptr;
+       };
+
+       void start(std::shared_ptr<Mv3d> mv3d) override
+       {
+               _mv3d = mv3d.get();
+               camera_set_media_packet_preview_cb(_handle, _camera_preview_cb, this);
 
-       ret = mv_3d_prepare(app.mv3d_handle);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+               mv3d->resetTime();
+               camera_start_preview(_handle);
        }
 
-       // source
-       ret = mv_create_source(&app.mv_source);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+       void stop() override
+       {
+               camera_state_e state;
+               camera_get_state(_handle, &state);
+               if (state == CAMERA_STATE_PREVIEW)
+                       camera_stop_preview(_handle);
        }
+};
 
-       ret = mv_create_source(&app.remoteData.source);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               goto _err;
+class FeederFactory
+{
+private:
+       FeederFactory() = delete;
+       FeederFactory(const FeederFactory&) = delete;
+public:
+       static std::unique_ptr<IFeeder> createVisionSourceFeeder(int width=640, int height=400, int fps=10, int stereoFormat=STEREO_FORMAT_TOP_AND_BOTTOM)
+       {
+               return std::make_unique<VisionSourceFeeder>(width, height, fps, stereoFormat);
        }
 
-       app.imgWidth = format.resolution.width;
-       app.imgHeight = format.resolution.height;
-       app.stopWatch.resetTime();
-       try {
-               app.buffer = new unsigned char[app.imgWidth * app.imgHeight];
-               app.remoteData.buffer = new unsigned char[app.imgWidth * app.imgHeight * 4];
-       } catch (const std::exception &e) {
-               goto _err;
+       static std::unique_ptr<IFeeder> createCameraSourceFeeder()
+       {
+               return std::make_unique<CameraSourceFeeder>();
        }
+};
 
-       // vision-source
-       ret = vision_source_init(&vision_source_handle);
-       if (ret != VISION_SOURCE_ERROR_NONE) {
-               goto _err;
+class DfsApp
+{
+private:
+       GMainLoop *_loop;
+       std::unique_ptr<IFeeder> _feeder;
+       std::shared_ptr<Mv3d> _mv3d;
+public:
+       DfsApp() : _loop(nullptr) {}
+
+       ~DfsApp()
+       {
+               if (_loop)
+                       stop();
        }
 
-       ret = vision_source_open_device(vision_source_handle, VISION_SOURCE_DEVICE_ID);
-       if (ret != VISION_SOURCE_ERROR_NONE) {
-               goto _err;
+       void createDfs(int minDisparity, int maxDisparity, bool isAsync, std::string calibFilePath = "",
+                       int sourceSelection = VISION_SOURCE, int stereoFormat = STEREO_FORMAT_TOP_AND_BOTTOM,
+                       int sourceWidth = 640, int sourceHeight = 400, std::string depthFilePath = "")
+       {
+               // Mv3d - mediavision dfs
+               _mv3d = std::make_shared<Mv3d>(minDisparity, maxDisparity, isAsync, calibFilePath, sourceWidth, sourceHeight, stereoFormat, depthFilePath);
+               _mv3d->prepare();
+
+               // loop
+               _loop = g_main_loop_new(nullptr, false);
+
+               // SourceFeeder
+               switch (sourceSelection) {
+               case VISION_SOURCE:
+                       _feeder = FeederFactory::createVisionSourceFeeder(sourceWidth, sourceHeight, 10, stereoFormat);
+                       break;
+               case CAMERA_SOURCE:
+                       _feeder = FeederFactory::createCameraSourceFeeder();
+                       break;
+               default:
+                       throw std::runtime_error("invalid source");
+               }
        }
 
-       loop = g_main_loop_new(NULL, false);
-       ret = vision_source_set_stream_format(vision_source_handle, &format);
-       if (ret != VISION_SOURCE_ERROR_NONE) {
-               goto _err;
+       void start()
+       {
+               _feeder->start(_mv3d);
+               g_main_loop_run(_loop);
        }
 
-       vision_source_start_stream(vision_source_handle, _vision_source_cb, &app);
-       printf("starting...\n");
-       g_main_loop_run(loop);
-       printf("stop\n");
+       void stop()
+       {
+               g_main_loop_quit(_loop);
 
-       vision_source_stop_stream(vision_source_handle);
+               if (_loop) {
+                       g_main_loop_unref(_loop);
+                       _loop = nullptr;
+               }
 
-_err:
-       if (loop)
-               g_main_loop_unref(loop);
+               _feeder->stop();
 
-       if (vision_source_handle) {
-               vision_source_close_device(vision_source_handle);
-               vision_source_exit(vision_source_handle);
+               _mv3d->stop();
        }
 
-       if (app.buffer)
-               delete[] app.buffer;
+       static void helper()
+       {
+               std::cout << "Usage:" << std::endl;
+               std::cout << "mv_depthstream_test_suite <isAsync> <minimum disparity search range> <maximum disparity search range>" << std::endl;
+               std::cout << "\t\t\t <width> <height> <stereo format> <source> <stereo calibration file path> <depth file path>" << std::endl;
+               std::cout << "\t isAsync: 0: mv_3d_run(), 1: mv_3d_run_async()" << std::endl;
+               std::cout << "\t stereo format: 1: side-by-side, 2: top-and-bottom" << std::endl;
+               std::cout << "\t source: 0: vision-source, 1: camera" << std::endl;
+               std::cout << "\t ex: mv_depthstream_test_suite 1 8 96 640 400 2 1 /usr/share/dfs-qcmv/stereoCalibDA.yaml /tmp/camerasource.png" << std::endl;
+               std::cout << "or simply run:" << std::endl;
+               std::cout << "mv_depthstream_test_suite <isAsync> <minimum disparity search range> <maximum disparity search range>" << std::endl;
+               std::cout << "\tex: mv_depthstream_test_suite 1 8 96" << std::endl;
+       }
 
-       if (app.mv_source)
-               mv_destroy_source(app.mv_source);
+};
 
-       if (app.mv3d_handle)
-               mv_3d_destroy(app.mv3d_handle);
+static DfsApp gApp;
 
-       if (engine_config)
-               mv_destroy_engine_config(engine_config);
+static void gSigHandler(int sig)
+{
+       char c;
 
-       if (app.remoteData.source)
-               mv_destroy_source(app.remoteData.source);
+       signal(sig, SIG_IGN);
+       while ((getchar()) != '\n')
+               ;
 
-       if (app.remoteData.buffer)
-               delete[] app.remoteData.buffer;
+       c = getchar();
+       if (c == 'y' || c == 'Y') {
+               sleep(1);
+
+               gApp.stop();
+       } else {
+               std::cout << "no" << std::endl;
+               signal(SIGINT, gSigHandler);
+       }
+
+       getchar(); // Get new line character
+}
+
+int main(int argc, char *argv[])
+{
+       try {
+               bool isAsync = false;
+               int minDisp = 8;
+               int maxDisp = 96;
+
+               if (argc >= 4) {
+                       isAsync = (atoi(argv[1]) != 0); // 0: mv_3d_run(), 1: mv_3d_run_async()
+                       minDisp = atoi(argv[2]); // 8
+                       maxDisp = atoi(argv[3]); // 96
+               }
+
+               if (argc == 10) {
+                       int camWidth = atoi(argv[4]); // 640
+                       int camHeight = atoi(argv[5]); // 400
+                       int stereoFormat = atoi(argv[6]); // 1: STEREO_FORMAT_SIDE_BY_SIDE, 2: STEREO_FORMAT_TOP_AND_BOTTOM
+                       int sourceSelection = (atoi(argv[7]) != 0); // 0: vision-source, 1: camera
+                       std::string stereoConfigFile = argv[8];
+                       std::string dispResultFile = argv[9];
+
+                       gApp.createDfs(minDisp, maxDisp, isAsync, stereoConfigFile, sourceSelection, stereoFormat, camWidth, camHeight, dispResultFile);
+               } else if (argc == 4) {
+                       gApp.createDfs(minDisp, maxDisp, isAsync);
+               } else {
+                       gApp.helper();
+                       return 0;
+               }
+               signal(SIGINT, gSigHandler);
+               gApp.start();
+       } catch (const std::exception& e) {
+               std::cout << "failed to run mv_depthstream_test_suite: " << e.what() << std::endl;
+       }
 
        return 0;
 }