CAP_PROP_HW_ACCELERATION=50, //!< (**open-only**) Hardware acceleration type (see #VideoAccelerationType). Setting supported only via `params` parameter in cv::VideoCapture constructor / .open() method. Default value is backend-specific.
CAP_PROP_HW_DEVICE =51, //!< (**open-only**) Hardware device index (select GPU if multiple available). Device enumeration is acceleration type specific.
CAP_PROP_HW_ACCELERATION_USE_OPENCL=52, //!< (**open-only**) If non-zero, create new OpenCL context and bind it to current thread. The OpenCL context is created with the Video Acceleration context attached to it (if not attached yet), enabling optimized GPU data copy between the HW accelerated decoder and cv::UMat.
- CAP_PROP_OPEN_TIMEOUT_MSEC=53, //!< (**open-only**) timeout in milliseconds for opening a video capture (applicable for FFmpeg back-end only)
- CAP_PROP_READ_TIMEOUT_MSEC=54, //!< (**open-only**) timeout in milliseconds for reading from a video capture (applicable for FFmpeg back-end only)
+ CAP_PROP_OPEN_TIMEOUT_MSEC=53, //!< (**open-only**) timeout in milliseconds for opening a video capture (applicable for FFmpeg and GStreamer back-ends only)
+ CAP_PROP_READ_TIMEOUT_MSEC=54, //!< (**open-only**) timeout in milliseconds for reading from a video capture (applicable for FFmpeg and GStreamer back-ends only)
CAP_PROP_STREAM_OPEN_TIME_USEC =55, //!< (read-only) time in microseconds since Jan 1 1970 when stream was opened. Applicable for FFmpeg backend only. Useful for RTSP and other live streams
CAP_PROP_VIDEO_TOTAL_CHANNELS = 56, //!< (read-only) Number of video channels
CAP_PROP_VIDEO_STREAM = 57, //!< (**open-only**) Specify video stream, 0-based index. Use -1 to disable video stream from file or IP cameras. Default value is 0.
#define COLOR_ELEM_NAME COLOR_ELEM
#define CV_GST_FORMAT(format) (format)
+#define GSTREAMER_INTERRUPT_OPEN_DEFAULT_TIMEOUT_NS (30 * GST_SECOND)
+#define GSTREAMER_INTERRUPT_READ_DEFAULT_TIMEOUT_NS (30 * GST_SECOND)
namespace cv {
gint width;
gint height;
double fps;
+ GstClockTime openTimeout; // measured in nanoseconds
+ GstClockTime readTimeout; // measured in nanoseconds
bool isPosFramesSupported;
bool isPosFramesEmulated;
gint64 emulatedFrameNumber;
videoStream(0),
audioStream(-1),
duration(-1), width(-1), height(-1), fps(-1),
+ openTimeout(GSTREAMER_INTERRUPT_OPEN_DEFAULT_TIMEOUT_NS),
+ readTimeout(GSTREAMER_INTERRUPT_READ_DEFAULT_TIMEOUT_NS),
isPosFramesSupported(false),
isPosFramesEmulated(false),
emulatedFrameNumber(-1),
if (gst_app_sink_is_eos(GST_APP_SINK(sink.get())))
return false;
+#if FULL_GST_VERSION >= VERSION_NUM(1,10,0)
+ sample.attach(gst_app_sink_try_pull_sample(GST_APP_SINK(sink.get()), readTimeout));
+#else
sample.attach(gst_app_sink_pull_sample(GST_APP_SINK(sink.get())));
+#endif
if (!sample)
return false;
// the data. The gst_video_frame_map will parse the meta for us, or default to
// regular strides/offsets if no meta is present.
GstVideoFrame frame = {};
+#if FULL_GST_VERSION >= VERSION_NUM(1,6,0)
GstMapFlags flags = static_cast<GstMapFlags>(GST_MAP_READ | GST_VIDEO_FRAME_MAP_FLAG_NO_REF);
+#else
+ GstMapFlags flags = static_cast<GstMapFlags>(GST_MAP_READ);
+#endif
+
if (!gst_video_frame_map(&frame, &info, buf, flags))
{
CV_LOG_ERROR(NULL, "GStreamer: Failed to map GStreamer buffer to system memory");
if (status == GST_STATE_CHANGE_ASYNC)
{
// wait for status update
- status = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+ status = gst_element_get_state(pipeline, NULL, NULL, openTimeout);
}
if (status == GST_STATE_CHANGE_FAILURE)
{
if (status == GST_STATE_CHANGE_ASYNC)
{
// wait for status update
- status = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+ status = gst_element_get_state(pipeline, NULL, NULL, openTimeout);
}
if (status == GST_STATE_CHANGE_FAILURE)
{
return outputAudioFormat;
case CAP_PROP_AUDIO_BASE_INDEX:
return audioBaseIndex;
+ case CAP_PROP_OPEN_TIMEOUT_MSEC:
+ return GST_TIME_AS_MSECONDS(openTimeout);
+ case CAP_PROP_READ_TIMEOUT_MSEC:
+#if FULL_GST_VERSION >= VERSION_NUM(1,10,0)
+ return GST_TIME_AS_MSECONDS(readTimeout);
+#else
+ return 0;
+#endif
default:
CV_WARN("unhandled property: " << propId);
break;
gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), (guint) value);
return true;
}
+ case CAP_PROP_OPEN_TIMEOUT_MSEC:
+ {
+ if(value > 0)
+ {
+ openTimeout = GstClockTime(value * GST_MSECOND); // convert from ms to ns
+ return true;
+ }
+ else
+ {
+ CV_WARN("GStreamer open timeout should be positive");
+ return false;
+ }
+ }
+ case CAP_PROP_READ_TIMEOUT_MSEC:
+ {
+#if FULL_GST_VERSION >= VERSION_NUM(1,10,0)
+ if(value > 0)
+ {
+ readTimeout = GstClockTime(value * GST_MSECOND); // convert from ms to ns
+ return true;
+ }
+ else
+ {
+ CV_WARN("GStreamer read timeout should be positive");
+ return false;
+ }
+#else
+ CV_WARN("GStreamer before 1.10 does not support read timeout");
+ return false;
+#endif
+ }
default:
CV_WARN("GStreamer: unhandled property");
}
EXPECT_EQ(0, remove(temp_file.c_str()));
}
+// Checks CAP_PROP_OPEN_TIMEOUT_MSEC / CAP_PROP_READ_TIMEOUT_MSEC on the GStreamer
+// backend: default values round-trip as 30000 ms, and user-set values are reported back.
+TEST(videoio_gstreamer, timeout_property)
+{
+    if (!videoio_registry::hasBackend(CAP_GSTREAMER))
+        throw SkipTestException("GStreamer backend was not found");
+
+    VideoCapture cap;
+    cap.open("videotestsrc ! appsink", CAP_GSTREAMER);
+    ASSERT_TRUE(cap.isOpened());
+    const double default_timeout = 30000; // 30 seconds
+    const double open_timeout = 5678; // 5.678 seconds
+    const double read_timeout = 1234; // 1.234 seconds
+    EXPECT_NEAR(default_timeout, cap.get(CAP_PROP_OPEN_TIMEOUT_MSEC), 1e-3);
+    // Read timeout is reported as 0 when the backend does not support it
+    // (GStreamer < 1.10 lacks gst_app_sink_try_pull_sample), so gate those checks.
+    const double current_read_timeout = cap.get(CAP_PROP_READ_TIMEOUT_MSEC);
+    const bool read_timeout_supported = current_read_timeout > 0.0;
+    if (read_timeout_supported)
+    {
+        EXPECT_NEAR(default_timeout, current_read_timeout, 1e-3);
+    }
+    cap.set(CAP_PROP_OPEN_TIMEOUT_MSEC, open_timeout);
+    EXPECT_NEAR(open_timeout, cap.get(CAP_PROP_OPEN_TIMEOUT_MSEC), 1e-3);
+    if (read_timeout_supported)
+    {
+        cap.set(CAP_PROP_READ_TIMEOUT_MSEC, read_timeout);
+        EXPECT_NEAR(read_timeout, cap.get(CAP_PROP_READ_TIMEOUT_MSEC), 1e-3);
+    }
+}
+
}} // namespace