From 38a4eaf8a32b4bc5a60db739eb6ac55819010f29 Mon Sep 17 00:00:00 2001
From: Igor Murzov
Date: Mon, 23 Nov 2020 14:05:55 +0300
Subject: [PATCH] Orbbec tutorial: Sync frames from two streams and process depth & color simultaneously

---
 .../videoio/orbbec-astra/orbbec_astra.markdown | 45 +++++++-----
 .../videoio/orbbec_astra/orbbec_astra.cpp      | 83 ++++++++++++----
 2 files changed, 74 insertions(+), 54 deletions(-)

diff --git a/doc/tutorials/videoio/orbbec-astra/orbbec_astra.markdown b/doc/tutorials/videoio/orbbec-astra/orbbec_astra.markdown
index 664e4f6..8c5ebcd 100644
--- a/doc/tutorials/videoio/orbbec-astra/orbbec_astra.markdown
+++ b/doc/tutorials/videoio/orbbec-astra/orbbec_astra.markdown
@@ -9,8 +9,8 @@ Using Orbbec Astra 3D cameras {#tutorial_orbbec_astra}

 This tutorial is devoted to the Astra Series of Orbbec 3D cameras (https://orbbec3d.com/product-astra-pro/).
 That cameras have a depth sensor in addition to a common color sensor. The depth sensors can be read using
-the OpenNI interface with @ref cv::VideoCapture class. The video stream is provided through the regular camera
-interface.
+the open source OpenNI API with @ref cv::VideoCapture class. The video stream is provided through the regular
+camera interface.

 ### Installation Instructions

@@ -70,15 +70,20 @@ In order to use a depth sensor with OpenCV you should do the following steps:

 ### Code

-To get both depth and color frames, two @ref cv::VideoCapture objects should be created:
+The Astra Pro camera has two sensors -- a depth sensor and a color sensor. The depth sensors
+can be read using the OpenNI interface with @ref cv::VideoCapture class. The video stream is
+not available through OpenNI API and is only provided through the regular camera interface.
+So, to get both depth and color frames, two @ref cv::VideoCapture objects should be created:

 @snippetlineno samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp Open streams

-The first object will use the regular Video4Linux2 interface to access the color sensor. The second one
+The first object will use the Video4Linux2 interface to access the color sensor. The second one
 is using OpenNI2 API to retrieve depth data.

-Before using the created VideoCapture objects you may want to setup stream parameters by setting
-objects' properties. The most important parameters are frame width, frame height and fps:
+Before using the created VideoCapture objects you may want to set up stream parameters by setting
+objects' properties. The most important parameters are frame width, frame height and fps.
+For this example, we'll configure width and height of both streams to VGA resolution as that's
+the maximum resolution available for both sensors and we'd like both stream parameters to be the same:

 @snippetlineno samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp Setup streams

@@ -113,8 +118,9 @@ After the VideoCapture objects are set up you can start reading frames from
 them to avoid one stream blocking while another stream is being read. VideoCapture is not a
 thread-safe class, so you need to be careful to avoid any possible deadlocks or data races.

-Example implementation that gets frames from each sensor in a new thread and stores them
-in a list along with their timestamps:
+As there are two video sources that should be read simultaneously, it's necessary to create two
+threads to avoid blocking. Example implementation that gets frames from each sensor in a new thread
+and stores them in a list along with their timestamps:

 @snippetlineno samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp Read streams

@@ -130,17 +136,24 @@ VideoCapture can retrieve the following data:

 -# data given from the color sensor is a regular BGR image (CV_8UC3).

-When new data is available a reading thread notifies the main thread. A frame is stored in the
-ordered list -- the first frame is the latest one:
+When new data are available, a reading thread notifies the main thread using a condition variable.
+A frame is stored in the ordered list -- the first frame is the latest one. As depth and color frames
+are read from independent sources, the two video streams may become out of sync even when both streams
+are set up for the same frame rate. A post-synchronization procedure can be applied to the streams
+to combine depth and color frames into pairs. The sample code below demonstrates this procedure:

-@snippetlineno samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp Show color frame
+@snippetlineno samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp Pair frames

-Depth frames can be picked the same way from the `depthFrames` list.
+In the code snippet above, execution is blocked until there are some frames in both frame lists.
+When there are new frames, their timestamps are checked -- if they differ by more than half of
+the frame period, then one of the frames is dropped. If the timestamps are close enough, the two frames
+are paired. Now we have two frames: one containing color information and another one -- depth information.
+In the example above, retrieved frames are simply shown with the cv::imshow function, but you can insert
+any other processing code here.

-After that, you'll have two frames: one containing color information and another one -- depth
-information. In the sample images below you can see the color frame and the depth frame showing
-the same scene. Looking at the color frame it's hard to distinguish plant leaves from leaves painted
-on a wall, but the depth data makes it easy.
+In the sample images below you can see the color frame and the depth frame representing the same scene.
+Looking at the color frame it's hard to distinguish plant leaves from leaves painted on a wall,
+but the depth data makes it easy.

 ![Color frame](images/astra_color.jpg)
 ![Depth frame](images/astra_depth.png)
diff --git a/samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp b/samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp
index a6dc6dd..bd626d5 100644
--- a/samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp
+++ b/samples/cpp/tutorial_code/videoio/orbbec_astra/orbbec_astra.cpp
@@ -69,7 +69,6 @@ int main()
     //! [Read streams]
     // Create two lists to store frames
     std::list<Frame> depthFrames, colorFrames;
-    std::mutex depthFramesMtx, colorFramesMtx;
     const std::size_t maxFrames = 64;

     // Synchronization objects
@@ -90,8 +89,6 @@ int main()
                 Frame f;
                 f.timestamp = cv::getTickCount();
                 depthStream.retrieve(f.frame, CAP_OPENNI_DEPTH_MAP);
-                //depthStream.retrieve(f.frame, CAP_OPENNI_DISPARITY_MAP);
-                //depthStream.retrieve(f.frame, CAP_OPENNI_IR_IMAGE);
                 if (f.frame.empty())
                 {
                     cerr << "ERROR: Failed to decode frame from depth stream" << endl;
@@ -99,7 +96,7 @@ int main()
                 }

                 {
-                    std::lock_guard<std::mutex> lk(depthFramesMtx);
+                    std::lock_guard<std::mutex> lk(mtx);
                     if (depthFrames.size() >= maxFrames)
                         depthFrames.pop_front();
                     depthFrames.push_back(f);
@@ -127,7 +124,7 @@ int main()
                 }

                 {
-                    std::lock_guard<std::mutex> lk(colorFramesMtx);
+                    std::lock_guard<std::mutex> lk(mtx);
                     if (colorFrames.size() >= maxFrames)
                         colorFrames.pop_front();
                     colorFrames.push_back(f);
@@ -138,56 +135,66 @@ int main()
     });
     //! [Read streams]

-    while (true)
+    //! [Pair frames]
+    // Pair depth and color frames
+    while (!isFinish)
     {
         std::unique_lock<std::mutex> lk(mtx);
-        while (depthFrames.empty() && colorFrames.empty())
+        while (!isFinish && (depthFrames.empty() || colorFrames.empty()))
             dataReady.wait(lk);

-        depthFramesMtx.lock();
-        if (depthFrames.empty())
-        {
-            depthFramesMtx.unlock();
-        }
-        else
+        while (!depthFrames.empty() && !colorFrames.empty())
         {
+            if (!lk.owns_lock())
+                lk.lock();
+
             // Get a frame from the list
-            Mat depthMap = depthFrames.front().frame;
+            Frame depthFrame = depthFrames.front();
+            int64 depthT = depthFrame.timestamp;
+
+            // Get a frame from the list
+            Frame colorFrame = colorFrames.front();
+            int64 colorT = colorFrame.timestamp;
+
+            // Half of the frame period (in ticks) is the maximum allowed time diff between frames
+            const int64 maxTdiff = int64(cv::getTickFrequency() / (2 * colorStream.get(CAP_PROP_FPS)));
+            if (depthT + maxTdiff < colorT)
+            {
+                depthFrames.pop_front();
+                continue;
+            }
+            else if (colorT + maxTdiff < depthT)
+            {
+                colorFrames.pop_front();
+                continue;
+            }
             depthFrames.pop_front();
-            depthFramesMtx.unlock();
+            colorFrames.pop_front();
+            lk.unlock();

+            //! [Show frames]
             // Show depth frame
             Mat d8, dColor;
-            depthMap.convertTo(d8, CV_8U, 255.0 / 2500);
+            depthFrame.frame.convertTo(d8, CV_8U, 255.0 / 2500);
             applyColorMap(d8, dColor, COLORMAP_OCEAN);
             imshow("Depth (colored)", dColor);
-        }
-
-        //! [Show color frame]
-        colorFramesMtx.lock();
-        if (colorFrames.empty())
-        {
-            colorFramesMtx.unlock();
-        }
-        else
-        {
-            // Get a frame from the list
-            Mat colorFrame = colorFrames.front().frame;
-            colorFrames.pop_front();
-            colorFramesMtx.unlock();

             // Show color frame
-            imshow("Color", colorFrame);
-        }
-        //! [Show color frame]
+            imshow("Color", colorFrame.frame);
+            //! [Show frames]

-        // Exit on Esc key press
-        int key = waitKey(1);
-        if (key == 27) // ESC
-            break;
+            // Exit on Esc key press
+            int key = waitKey(1);
+            if (key == 27) // ESC
+            {
+                isFinish = true;
+                break;
+            }
+        }
     }
+    //! [Pair frames]

-    isFinish = true;
+    dataReady.notify_one();
     depthReader.join();
     colorReader.join();
--
2.7.4
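
The `Open streams` and `Setup streams` snippets referenced by the tutorial text are not touched by this
patch, so they do not appear in the diff above. For context, a minimal sketch of what that setup stage can
look like is given below; the device index, the CAP_V4L2/CAP_OPENNI2 backend choice and the VGA parameter
values are assumptions based on the tutorial text, not an excerpt from the actual sample.

    #include <iostream>
    #include <opencv2/videoio.hpp>

    using namespace cv;

    int main()
    {
        // Color sensor: regular camera interface (Video4Linux2 backend on Linux).
        // Device index 0 is an assumption; pick the node that matches the Astra.
        VideoCapture colorStream(0, CAP_V4L2);

        // Depth sensor: OpenNI2 backend.
        VideoCapture depthStream(0, CAP_OPENNI2);

        if (!colorStream.isOpened() || !depthStream.isOpened())
        {
            std::cerr << "ERROR: Failed to open color and/or depth stream" << std::endl;
            return 1;
        }

        // Use the same VGA resolution for both streams, as suggested in the tutorial text.
        colorStream.set(CAP_PROP_FRAME_WIDTH, 640);
        colorStream.set(CAP_PROP_FRAME_HEIGHT, 480);
        depthStream.set(CAP_PROP_FRAME_WIDTH, 640);
        depthStream.set(CAP_PROP_FRAME_HEIGHT, 480);
        depthStream.set(CAP_PROP_FPS, 30);

        return 0;
    }

Because the color sensor is only reachable through a plain camera backend while the depth sensor goes
through OpenNI2, two separate VideoCapture objects are needed in the first place, which is what makes the
post-synchronization step introduced by this patch necessary.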