2 * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
3 * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-mfvideosrc
25 * Provides video capture from the Microsoft Media Foundation API.
27 * ## Example pipelines
29 * gst-launch-1.0 -v mfvideosrc ! fakesink
30 * ]| Capture from the default video capture device and render to fakesink.
33 * gst-launch-1.0 -v mfvideosrc device-index=1 ! fakesink
34 * ]| Capture from the second video device (if available) and render to fakesink.
41 #include "gstmfconfig.h"
43 #include "gstmfvideosrc.h"
44 #include "gstmfutils.h"
45 #include "gstmfsourceobject.h"
48 #if GST_MF_WINAPI_DESKTOP
49 #include "gstmfcapturedshow.h"
/* Debug category for this element; initialized in class_init below */
52 GST_DEBUG_CATEGORY (gst_mf_video_src_debug);
53 #define GST_CAT_DEFAULT gst_mf_video_src_debug
/* UWP-only builds (APP without DESKTOP) expose raw video formats only;
 * JPEG is not supported there yet (see FIXME) */
55 #if (GST_MF_WINAPI_APP && !GST_MF_WINAPI_DESKTOP)
56 /* FIXME: need support JPEG for UWP */
57 #define SRC_TEMPLATE_CAPS \
58 GST_VIDEO_CAPS_MAKE (GST_MF_VIDEO_FORMATS)
/* Desktop-capable builds additionally expose image/jpeg pass-through caps.
 * NOTE(review): the #else separating the two definitions is not visible in
 * this excerpt — confirm against the full file */
60 #define SRC_TEMPLATE_CAPS \
61 GST_VIDEO_CAPS_MAKE (GST_MF_VIDEO_FORMATS) "; " \
62 "image/jpeg, width = " GST_VIDEO_SIZE_RANGE ", " \
63 "height = " GST_VIDEO_SIZE_RANGE ", " \
64 "framerate = " GST_VIDEO_FPS_RANGE
/* Single always "src" pad advertising the build-dependent caps above */
67 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
70 GST_STATIC_CAPS (SRC_TEMPLATE_CAPS));
/* Underlying capture object; created in start() and released in stop().
 * NOTE(review): this is a field of the GstMFVideoSrc instance struct whose
 * surrounding declaration is not visible in this excerpt */
76 GstMFSourceObject *source;
/* Property defaults: no path/name filter, and -1 meaning "any device" */
101 #define DEFAULT_DEVICE_PATH nullptr
102 #define DEFAULT_DEVICE_NAME nullptr
103 #define DEFAULT_DEVICE_INDEX -1
/* GObject vfuncs */
105 static void gst_mf_video_src_finalize (GObject * object);
106 static void gst_mf_video_src_get_property (GObject * object, guint prop_id,
107 GValue * value, GParamSpec * pspec);
108 static void gst_mf_video_src_set_property (GObject * object, guint prop_id,
109 const GValue * value, GParamSpec * pspec);
/* GstBaseSrc vfuncs */
111 static gboolean gst_mf_video_src_start (GstBaseSrc * src);
112 static gboolean gst_mf_video_src_stop (GstBaseSrc * src);
113 static gboolean gst_mf_video_src_set_caps (GstBaseSrc * src, GstCaps * caps);
114 static GstCaps *gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter);
115 static GstCaps *gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps);
116 static gboolean gst_mf_video_src_unlock (GstBaseSrc * src);
117 static gboolean gst_mf_video_src_unlock_stop (GstBaseSrc * src);
118 static gboolean gst_mf_video_src_query (GstBaseSrc * src, GstQuery * query);
/* GstPushSrc vfunc: produces one captured buffer per call */
120 static GstFlowReturn gst_mf_video_src_create (GstPushSrc * pushsrc,
121 GstBuffer ** buffer);
/* Register GstMFVideoSrc as a GstPushSrc subclass */
123 #define gst_mf_video_src_parent_class parent_class
124 G_DEFINE_TYPE (GstMFVideoSrc, gst_mf_video_src, GST_TYPE_PUSH_SRC);
/* Class init: installs device-selection properties, element metadata,
 * the src pad template, and all BaseSrc/PushSrc vfunc overrides */
127 gst_mf_video_src_class_init (GstMFVideoSrcClass * klass)
129 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
130 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
131 GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
132 GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS (klass);
/* Device-selection properties are only changeable in READY or below */
133 GParamFlags flags = (GParamFlags) (G_PARAM_READWRITE |
134 GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS);
136 gobject_class->finalize = gst_mf_video_src_finalize;
137 gobject_class->get_property = gst_mf_video_src_get_property;
138 gobject_class->set_property = gst_mf_video_src_set_property;
/* Three alternative ways to pick a capture device: path, name, or index */
140 g_object_class_install_property (gobject_class, PROP_DEVICE_PATH,
141 g_param_spec_string ("device-path", "Device Path",
142 "The device path", DEFAULT_DEVICE_PATH, flags));
143 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
144 g_param_spec_string ("device-name", "Device Name",
145 "The human-readable device name", DEFAULT_DEVICE_NAME, flags));
146 g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
147 g_param_spec_int ("device-index", "Device Index",
148 "The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
/* UWP builds only: write-only dispatcher pointer so device activation
 * (and its permission prompt) can run on the application's UI thread */
150 #if GST_MF_WINAPI_APP
152 * GstMFVideoSrc:dispatcher:
154 * ICoreDispatcher COM object used for activating device from UI thread.
158 g_object_class_install_property (gobject_class, PROP_DISPATCHER,
159 g_param_spec_pointer ("dispatcher", "Dispatcher",
160 "ICoreDispatcher COM object to use. In order for application to ask "
161 "permission of capture device, device activation should be running "
162 "on UI thread via ICoreDispatcher. This element will increase "
163 "the reference count of given ICoreDispatcher and release it after "
164 "use. Therefore, caller does not need to consider additional "
165 "reference count management",
166 (GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE |
167 GST_PARAM_MUTABLE_READY | G_PARAM_WRITABLE |
168 G_PARAM_STATIC_STRINGS)));
171 gst_element_class_set_static_metadata (element_class,
172 "Media Foundation Video Source",
173 "Source/Video/Hardware",
174 "Capture video stream through Windows Media Foundation",
175 "Seungha Yang <seungha.yang@navercorp.com>");
177 gst_element_class_add_static_pad_template (element_class, &src_template);
179 basesrc_class->start = GST_DEBUG_FUNCPTR (gst_mf_video_src_start);
180 basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_stop);
181 basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_mf_video_src_set_caps);
182 basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_mf_video_src_get_caps);
183 basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_mf_video_src_fixate);
184 basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock);
185 basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock_stop);
186 basesrc_class->query = GST_DEBUG_FUNCPTR (gst_mf_video_src_query);
188 pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_mf_video_src_create);
190 GST_DEBUG_CATEGORY_INIT (gst_mf_video_src_debug, "mfvideosrc", 0,
/* Instance init: configure as a live source producing TIME-format buffers
 * and apply the default device selection */
195 gst_mf_video_src_init (GstMFVideoSrc * self)
197 gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_TIME);
198 gst_base_src_set_live (GST_BASE_SRC (self), TRUE);
200 self->device_index = DEFAULT_DEVICE_INDEX;
/* Finalize: release the property-owned device strings and chain up */
204 gst_mf_video_src_finalize (GObject * object)
206 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);
208 g_free (self->device_name);
209 g_free (self->device_path);
211 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Standard GObject property getter for the device-selection properties */
215 gst_mf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
218 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);
221 case PROP_DEVICE_PATH:
222 g_value_set_string (value, self->device_path);
224 case PROP_DEVICE_NAME:
225 g_value_set_string (value, self->device_name);
227 case PROP_DEVICE_INDEX:
228 g_value_set_int (value, self->device_index);
231 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Standard GObject property setter; string properties free the previous
 * value before duplicating the new one */
237 gst_mf_video_src_set_property (GObject * object, guint prop_id,
238 const GValue * value, GParamSpec * pspec)
240 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);
243 case PROP_DEVICE_PATH:
244 g_free (self->device_path);
245 self->device_path = g_value_dup_string (value);
247 case PROP_DEVICE_NAME:
248 g_free (self->device_name);
249 self->device_name = g_value_dup_string (value);
251 case PROP_DEVICE_INDEX:
252 self->device_index = g_value_get_int (value);
254 #if GST_MF_WINAPI_APP
255 case PROP_DISPATCHER:
/* NOTE(review): only the raw pointer is stored here; the property blurb
 * says the element takes a reference on the ICoreDispatcher — presumably
 * done where the source object is created. Confirm in the full file */
256 self->dispatcher = g_value_get_pointer (value);
260 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* BaseSrc::start — create the Media Foundation capture object for the
 * configured device; on desktop builds a DirectShow capture object can be
 * created as well (the condition selecting it is outside this excerpt) */
266 gst_mf_video_src_start (GstBaseSrc * src)
268 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
270 GST_DEBUG_OBJECT (self, "Start");
272 self->source = gst_mf_source_object_new (GST_MF_SOURCE_TYPE_VIDEO,
273 self->device_index, self->device_name, self->device_path, nullptr);
277 self->use_dshow = FALSE;
280 #if GST_MF_WINAPI_DESKTOP
/* Fallback path: capture via DirectShow instead of Media Foundation */
281 self->use_dshow = TRUE;
282 self->source = gst_mf_capture_dshow_new (GST_MF_SOURCE_TYPE_VIDEO,
283 self->device_index, self->device_name, self->device_path);
288 GST_ERROR_OBJECT (self, "Couldn't create capture object");
292 gst_mf_source_object_set_client (self->source, GST_ELEMENT (self));
/* BaseSrc::stop — stop and release the capture object, reset state */
298 gst_mf_video_src_stop (GstBaseSrc * src)
300 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
302 GST_DEBUG_OBJECT (self, "Stop");
305 gst_mf_source_object_stop (self->source);
306 gst_object_unref (self->source);
307 self->source = nullptr;
310 self->started = FALSE;
/* BaseSrc::set_caps — forward the negotiated caps to the capture object
 * and cache the video info; for raw formats the blocksize is set to one
 * frame so BaseSrc allocates correctly-sized buffers */
316 gst_mf_video_src_set_caps (GstBaseSrc * src, GstCaps * caps)
318 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
320 GST_DEBUG_OBJECT (self, "Set caps %" GST_PTR_FORMAT, caps);
/* Requires start() to have created the capture object already */
323 GST_ERROR_OBJECT (self, "No capture engine yet");
327 if (!gst_mf_source_object_set_caps (self->source, caps)) {
328 GST_ERROR_OBJECT (self, "CaptureEngine couldn't accept caps");
332 gst_video_info_from_caps (&self->info, caps);
/* ENCODED (e.g. image/jpeg) has no fixed frame size, so skip blocksize */
333 if (GST_VIDEO_INFO_FORMAT (&self->info) != GST_VIDEO_FORMAT_ENCODED)
334 gst_base_src_set_blocksize (src, GST_VIDEO_INFO_SIZE (&self->info));
/* BaseSrc::get_caps — report device caps when available, otherwise fall
 * back to the pad template caps; optionally intersected with the filter */
340 gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter)
342 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
343 GstCaps *caps = nullptr;
346 caps = gst_mf_source_object_get_caps (self->source);
/* No source object (or no device caps): use the static template caps */
349 caps = gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));
353 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
354 gst_caps_unref (caps);
358 GST_DEBUG_OBJECT (self, "Returning caps %" GST_PTR_FORMAT, caps);
/* BaseSrc::fixate — prefer the largest resolution (fixate width/height
 * toward G_MAXINT) and the nearest framerate, then fully fixate */
364 gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
366 GstStructure *structure;
367 GstCaps *fixated_caps;
370 fixated_caps = gst_caps_make_writable (caps);
372 for (i = 0; i < gst_caps_get_size (fixated_caps); ++i) {
373 structure = gst_caps_get_structure (fixated_caps, i);
374 gst_structure_fixate_field_nearest_int (structure, "width", G_MAXINT);
375 gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
376 gst_structure_fixate_field_nearest_fraction (structure, "framerate",
/* Catch-all: fixate any remaining non-fixed fields */
380 fixated_caps = gst_caps_fixate (fixated_caps);
/* BaseSrc::unlock — set the capture object flushing so a blocked
 * create() returns immediately */
386 gst_mf_video_src_unlock (GstBaseSrc * src)
388 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
391 gst_mf_source_object_set_flushing (self->source, TRUE);
/* BaseSrc::unlock_stop — clear the flushing flag so capture can resume */
397 gst_mf_video_src_unlock_stop (GstBaseSrc * src)
399 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
402 gst_mf_source_object_set_flushing (self->source, FALSE);
/* BaseSrc::query — answer LATENCY with the observed capture latency
 * (live source, min latency 0); everything else chains up */
408 gst_mf_video_src_query (GstBaseSrc * src, GstQuery * query)
410 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
412 switch (GST_QUERY_TYPE (query)) {
413 case GST_QUERY_LATENCY:
/* self->latency is updated in create() as buffers arrive */
415 gst_query_set_latency (query, TRUE, 0, self->latency);
424 return GST_BASE_SRC_CLASS (parent_class)->query (src, query);
/* PushSrc::create — produce one captured frame.
 * Three capture paths: DirectShow (sample-based, may renegotiate caps),
 * raw Media Foundation (fill a BaseSrc-allocated buffer), and encoded
 * Media Foundation (capture object allocates the buffer itself).
 * NOTE(review): the tail of this function (timestamping and handing the
 * buffer to *buffer) continues past this excerpt */
428 gst_mf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
430 GstMFVideoSrc *self = GST_MF_VIDEO_SRC (pushsrc);
431 GstFlowReturn ret = GST_FLOW_OK;
432 GstBuffer *buf = nullptr;
433 GstSample *sample = nullptr;
435 GstClockTime running_time = GST_CLOCK_TIME_NONE;
436 GstClockTimeDiff diff;
/* Lazily start capture on the first create() call */
438 if (!self->started) {
439 if (!gst_mf_source_object_start (self->source)) {
440 GST_ERROR_OBJECT (self, "Failed to start capture object");
442 return GST_FLOW_ERROR;
445 self->started = TRUE;
448 if (self->use_dshow) {
/* DirectShow path: the capture object delivers a GstSample */
449 ret = gst_mf_source_object_get_sample (self->source, &sample);
450 } else if (GST_VIDEO_INFO_FORMAT (&self->info) != GST_VIDEO_FORMAT_ENCODED) {
/* Raw format: allocate one frame-sized buffer via BaseSrc and fill it */
451 ret = GST_BASE_SRC_CLASS (parent_class)->alloc (GST_BASE_SRC (self), 0,
452 GST_VIDEO_INFO_SIZE (&self->info), &buf);
454 if (ret != GST_FLOW_OK)
457 ret = gst_mf_source_object_fill (self->source, buf);
/* Encoded format (e.g. JPEG): capture object allocates the buffer */
459 ret = gst_mf_source_object_create (self->source, &buf);
462 if (ret != GST_FLOW_OK)
465 /* DirectShow capture object will set caps if it's got updated */
467 if (gst_sample_get_caps (sample)) {
468 if (!gst_base_src_negotiate (GST_BASE_SRC (self))) {
469 GST_ERROR_OBJECT (self, "Failed to negotiate with new caps");
470 gst_sample_unref (sample);
471 return GST_FLOW_NOT_NEGOTIATED;
473 GST_DEBUG_OBJECT (self, "Renegotiated");
/* Extract the buffer from the sample, keeping a reference of our own */
477 buf = gst_sample_get_buffer (sample);
478 gst_buffer_ref (buf);
479 gst_sample_unref (sample);
/* Per-frame offsets based on the running frame counter */
482 GST_BUFFER_OFFSET (buf) = self->n_frames;
483 GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
486 GST_LOG_OBJECT (self,
487 "Captured buffer timestamp %" GST_TIME_FORMAT ", duration %"
488 GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
489 GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
/* Measure how far behind the pipeline clock this capture arrived and
 * grow the reported latency (see the LATENCY query handler) if needed */
492 clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
496 now = gst_clock_get_time (clock);
497 running_time = now - GST_ELEMENT_CAST (self)->base_time;
498 gst_object_unref (clock);
501 diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (buf), running_time);
502 if (diff > self->latency) {
503 self->latency = (GstClockTime) diff;
504 GST_DEBUG_OBJECT (self, "Updated latency value %" GST_TIME_FORMAT,
505 GST_TIME_ARGS (self->latency));