3 * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
4 * 2006 Edgard Lima <edgard.lima@indt.org.br>
6 * gstv4l2object.c: base class for V4L2 elements
8 * This library is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU Library General Public License as published
10 * by the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version. This library is distributed in the hope
12 * that it will be useful, but WITHOUT ANY WARRANTY; without even the
13 * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
14 * PURPOSE. See the GNU Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
21 /* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
22 * with newer GLib versions (>= 2.31.0) */
23 #define GLIB_DISABLE_DEPRECATION_WARNINGS
36 #include <gudev/gudev.h>
39 #include "v4l2_calls.h"
40 #include "gstv4l2tuner.h"
41 #include "gstv4l2colorbalance.h"
43 #include "gst/gst-i18n-plugin.h"
45 #include <gst/video/video.h>
47 GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
48 GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
49 #define GST_CAT_DEFAULT v4l2_debug
51 #define DEFAULT_PROP_DEVICE_NAME NULL
52 #define DEFAULT_PROP_DEVICE_FD -1
53 #define DEFAULT_PROP_FLAGS 0
54 #define DEFAULT_PROP_TV_NORM 0
55 #define DEFAULT_PROP_CHANNEL NULL
56 #define DEFAULT_PROP_FREQUENCY 0
57 #define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
59 #define ENCODED_BUFFER_SIZE (1 * 1024 * 1024)
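/* Fallback buffer size used for encoded (compressed) streams, where the
 * size of a frame cannot be derived from the pixel format alone. */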
64 V4L2_STD_OBJECT_PROPS,
68 * common format / caps utilities:
72 GST_V4L2_RAW = 1 << 0,
73 GST_V4L2_CODEC = 1 << 1,
74 GST_V4L2_TRANSPORT = 1 << 2,
75 GST_V4L2_NO_PARSE = 1 << 3,
83 GstV4L2FormatFlags flags;
86 static const GstV4L2FormatDesc gst_v4l2_formats[] = {
87 /* from Linux 2.6.15 videodev2.h */
88 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
102 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
103 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
104 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
106 /* two planes -- one Y, one Cr + Cb interleaved */
107 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
109 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
110 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
111 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
113 /* The following formats are not defined in the V4L2 specification */
114 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
115 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
116 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
117 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
119 /* see http://www.siliconimaging.com/RGB%20Bayer.htm */
120 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC},
122 /* compressed formats */
123 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
124 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
125 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
126 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
127 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
128 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
129 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
130 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
131 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
132 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
133 /* VP8 not parseable */
134 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
136 /* Vendor-specific formats */
137 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
138 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
139 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
140 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
141 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
144 #define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
146 static GSList *gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object);
149 #define GST_TYPE_V4L2_DEVICE_FLAGS (gst_v4l2_device_get_type ())
151 gst_v4l2_device_get_type (void)
153 static GType v4l2_device_type = 0;
155 if (v4l2_device_type == 0) {
156 static const GFlagsValue values[] = {
157 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
158 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
159 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
161 {V4L2_CAP_VBI_CAPTURE, "Device supports the VBI capture", "vbi-capture"},
162 {V4L2_CAP_VBI_OUTPUT, "Device supports the VBI output", "vbi-output"},
164 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
165 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
171 g_flags_register_static ("GstV4l2DeviceTypeFlags", values);
174 return v4l2_device_type;
177 #define GST_TYPE_V4L2_TV_NORM (gst_v4l2_tv_norm_get_type ())
179 gst_v4l2_tv_norm_get_type (void)
181 static GType v4l2_tv_norm = 0;
184 static const GEnumValue tv_norms[] = {
187 {V4L2_STD_NTSC, "NTSC", "NTSC"},
188 {V4L2_STD_NTSC_M, "NTSC-M", "NTSC-M"},
189 {V4L2_STD_NTSC_M_JP, "NTSC-M-JP", "NTSC-M-JP"},
190 {V4L2_STD_NTSC_M_KR, "NTSC-M-KR", "NTSC-M-KR"},
191 {V4L2_STD_NTSC_443, "NTSC-443", "NTSC-443"},
193 {V4L2_STD_PAL, "PAL", "PAL"},
194 {V4L2_STD_PAL_BG, "PAL-BG", "PAL-BG"},
195 {V4L2_STD_PAL_B, "PAL-B", "PAL-B"},
196 {V4L2_STD_PAL_B1, "PAL-B1", "PAL-B1"},
197 {V4L2_STD_PAL_G, "PAL-G", "PAL-G"},
198 {V4L2_STD_PAL_H, "PAL-H", "PAL-H"},
199 {V4L2_STD_PAL_I, "PAL-I", "PAL-I"},
200 {V4L2_STD_PAL_DK, "PAL-DK", "PAL-DK"},
201 {V4L2_STD_PAL_D, "PAL-D", "PAL-D"},
202 {V4L2_STD_PAL_D1, "PAL-D1", "PAL-D1"},
203 {V4L2_STD_PAL_K, "PAL-K", "PAL-K"},
204 {V4L2_STD_PAL_M, "PAL-M", "PAL-M"},
205 {V4L2_STD_PAL_N, "PAL-N", "PAL-N"},
206 {V4L2_STD_PAL_Nc, "PAL-Nc", "PAL-Nc"},
207 {V4L2_STD_PAL_60, "PAL-60", "PAL-60"},
209 {V4L2_STD_SECAM, "SECAM", "SECAM"},
210 {V4L2_STD_SECAM_B, "SECAM-B", "SECAM-B"},
211 {V4L2_STD_SECAM_G, "SECAM-G", "SECAM-G"},
212 {V4L2_STD_SECAM_H, "SECAM-H", "SECAM-H"},
213 {V4L2_STD_SECAM_DK, "SECAM-DK", "SECAM-DK"},
214 {V4L2_STD_SECAM_D, "SECAM-D", "SECAM-D"},
215 {V4L2_STD_SECAM_K, "SECAM-K", "SECAM-K"},
216 {V4L2_STD_SECAM_K1, "SECAM-K1", "SECAM-K1"},
217 {V4L2_STD_SECAM_L, "SECAM-L", "SECAM-L"},
218 {V4L2_STD_SECAM_LC, "SECAM-Lc", "SECAM-Lc"},
223 v4l2_tv_norm = g_enum_register_static ("V4L2_TV_norms", tv_norms);
230 gst_v4l2_io_mode_get_type (void)
232 static GType v4l2_io_mode = 0;
235 static const GEnumValue io_modes[] = {
236 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
237 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
238 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
239 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
240 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
244 v4l2_io_mode = g_enum_register_static ("GstV4l2IOMode", io_modes);
250 gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
251 const char *default_device)
253 g_object_class_install_property (gobject_class, PROP_DEVICE,
254 g_param_spec_string ("device", "Device", "Device location",
255 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
256 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
257 g_param_spec_string ("device-name", "Device name",
258 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
259 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
260 g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
261 g_param_spec_int ("device-fd", "File descriptor",
262 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
263 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
264 g_object_class_install_property (gobject_class, PROP_FLAGS,
265 g_param_spec_flags ("flags", "Flags", "Device type flags",
266 GST_TYPE_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
267 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
270 * GstV4l2Src:brightness:
272 * Picture brightness, or more precisely, the black level
274 g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
275 g_param_spec_int ("brightness", "Brightness",
276 "Picture brightness, or more precisely, the black level", G_MININT,
278 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
280 * GstV4l2Src:contrast:
282 * Picture contrast or luma gain
284 g_object_class_install_property (gobject_class, PROP_CONTRAST,
285 g_param_spec_int ("contrast", "Contrast",
286 "Picture contrast or luma gain", G_MININT,
288 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
290 * GstV4l2Src:saturation:
292 * Picture color saturation or chroma gain
294 g_object_class_install_property (gobject_class, PROP_SATURATION,
295 g_param_spec_int ("saturation", "Saturation",
296 "Picture color saturation or chroma gain", G_MININT,
298 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
302 * Hue or color balance
304 g_object_class_install_property (gobject_class, PROP_HUE,
305 g_param_spec_int ("hue", "Hue",
306 "Hue or color balance", G_MININT,
308 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
315 g_object_class_install_property (gobject_class, PROP_TV_NORM,
316 g_param_spec_enum ("norm", "TV norm",
318 GST_TYPE_V4L2_TV_NORM, DEFAULT_PROP_TV_NORM,
319 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
322 * GstV4l2Src:io-mode:
326 g_object_class_install_property (gobject_class, PROP_IO_MODE,
327 g_param_spec_enum ("io-mode", "IO mode",
329 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
330 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
333 * GstV4l2Src:extra-controls:
335 * Additional v4l2 controls for the device. The controls are identified
336  * by the control name (lowercase with '_' for any non-alphanumeric characters).
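 *
 * A minimal usage sketch, assuming src is a v4l2src instance and that the
 * driver exposes a control named "exposure_absolute" (the available controls
 * depend entirely on the driver):
 * |[
 *   GstStructure *ctrls = gst_structure_new ("controls",
 *       "exposure_absolute", G_TYPE_INT, 250, NULL);
 *   g_object_set (src, "extra-controls", ctrls, NULL);
 *   gst_structure_free (ctrls);
 * ]|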
341 g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
342 g_param_spec_boxed ("extra-controls", "Extra Controls",
343 "Extra v4l2 controls (CIDs) for the device",
344 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
347 * GstV4l2Src:pixel-aspect-ratio:
349  * The pixel aspect ratio of the device. This overrides the pixel aspect
350  * ratio queried from the device.
354 g_object_class_install_property (gobject_class, PROP_PIXEL_ASPECT_RATIO,
355 g_param_spec_string ("pixel-aspect-ratio", "Pixel Aspect Ratio",
356 "Overwrite the pixel aspect ratio of the device", "1/1",
357 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
360 * GstV4l2Src:force-aspect-ratio:
362 * When enabled, the pixel aspect ratio queried from the device or set
363 * with the pixel-aspect-ratio property will be enforced.
367 g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
368 g_param_spec_boolean ("force-aspect-ratio", "Force aspect ratio",
369 "When enabled, the pixel aspect ratio will be enforced", TRUE,
370 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
375 gst_v4l2_object_new (GstElement * element,
376 enum v4l2_buf_type type,
377 const char *default_device,
378 GstV4l2GetInOutFunction get_in_out_func,
379 GstV4l2SetInOutFunction set_in_out_func,
380 GstV4l2UpdateFpsFunction update_fps_func)
382 GstV4l2Object *v4l2object;
385 * some default values
387 v4l2object = g_new0 (GstV4l2Object, 1);
389 v4l2object->type = type;
390 v4l2object->formats = NULL;
392 v4l2object->element = element;
393 v4l2object->get_in_out_func = get_in_out_func;
394 v4l2object->set_in_out_func = set_in_out_func;
395 v4l2object->update_fps_func = update_fps_func;
397 v4l2object->video_fd = -1;
398 v4l2object->poll = gst_poll_new (TRUE);
399 v4l2object->active = FALSE;
400 v4l2object->videodev = g_strdup (default_device);
402 v4l2object->norms = NULL;
403 v4l2object->channels = NULL;
404 v4l2object->colors = NULL;
406 v4l2object->xwindow_id = 0;
408 v4l2object->keep_aspect = TRUE;
410 v4l2object->n_v4l2_planes = 0;
413  * This boolean only applies in v4l2-MPLANE mode.
414  * TRUE: prefer several (non-contiguous) v4l2 planes. For example, if
415  * the device supports both NV12 and NV12M in MPLANE mode, it will
416  * prefer NV12M.
417  * FALSE: prefer a single v4l2 plane (which contains all gst planes,
418  * as if it were working in non-v4l2-MPLANE mode). For example, if
419  * the device supports both NV12 and NV12M in MPLANE mode, it will
420  * prefer NV12.
422  * This boolean is also used to handle the case where the device only
423  * supports MPLANE mode but does not support both NV12 and NV12M. In
424  * that case we first try the preferred config and fall back to the
425  * other one if that fails. For example, in MPLANE mode, if the device
426  * has NV12 but not NV12M, then even if prefered_non_contiguous is set
427  * to TRUE it will end up using NV12.
430 v4l2object->prefered_non_contiguous = TRUE;
432 v4l2object->no_initial_format = FALSE;
437 static gboolean gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object);
441 gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
443 g_return_if_fail (v4l2object != NULL);
445 if (v4l2object->videodev)
446 g_free (v4l2object->videodev);
448 if (v4l2object->poll)
449 gst_poll_free (v4l2object->poll);
451 if (v4l2object->channel)
452 g_free (v4l2object->channel);
454 if (v4l2object->formats) {
455 gst_v4l2_object_clear_format_list (v4l2object);
458 if (v4l2object->probed_caps) {
459 gst_caps_unref (v4l2object->probed_caps);
467 gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
469 g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
470 g_slist_free (v4l2object->formats);
471 v4l2object->formats = NULL;
477 gst_v4l2_object_prop_to_cid (guint prop_id)
482 case PROP_BRIGHTNESS:
483 cid = V4L2_CID_BRIGHTNESS;
486 cid = V4L2_CID_CONTRAST;
488 case PROP_SATURATION:
489 cid = V4L2_CID_SATURATION;
495 GST_WARNING ("unmapped property id: %d", prop_id);
502 gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
503 guint prop_id, const GValue * value, GParamSpec * pspec)
507 g_free (v4l2object->videodev);
508 v4l2object->videodev = g_value_dup_string (value);
510 case PROP_BRIGHTNESS:
512 case PROP_SATURATION:
515 gint cid = gst_v4l2_object_prop_to_cid (prop_id);
518 if (GST_V4L2_IS_OPEN (v4l2object)) {
519 gst_v4l2_set_attribute (v4l2object, cid, g_value_get_int (value));
526 v4l2object->tv_norm = g_value_get_enum (value);
530 if (GST_V4L2_IS_OPEN (v4l2object)) {
531 GstTuner *tuner = GST_TUNER (v4l2object->element);
532 GstTunerChannel *channel = gst_tuner_find_channel_by_name (tuner,
533 (gchar *) g_value_get_string (value));
536 /* like gst_tuner_set_channel (tuner, channel)
537 without g_object_notify */
538 gst_v4l2_tuner_set_channel (v4l2object, channel);
541 g_free (v4l2object->channel);
542 v4l2object->channel = g_value_dup_string (value);
546 if (GST_V4L2_IS_OPEN (v4l2object)) {
547 GstTuner *tuner = GST_TUNER (v4l2object->element);
548 GstTunerChannel *channel = gst_tuner_get_channel (tuner);
551 GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
553 gst_tuner_set_frequency (tuner, channel, g_value_get_ulong (value))
554 without g_object_notify */
555 gst_v4l2_tuner_set_frequency (v4l2object, channel,
556 g_value_get_ulong (value));
559 v4l2object->frequency = g_value_get_ulong (value);
564 v4l2object->req_mode = g_value_get_enum (value);
566 case PROP_EXTRA_CONTROLS:{
567 const GstStructure *s = gst_value_get_structure (value);
569 if (v4l2object->extra_controls)
570 gst_structure_free (v4l2object->extra_controls);
572 v4l2object->extra_controls = s ? gst_structure_copy (s) : NULL;
573 if (GST_V4L2_IS_OPEN (v4l2object))
574 gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
577 case PROP_PIXEL_ASPECT_RATIO:
578 g_free (v4l2object->par);
579 v4l2object->par = g_new0 (GValue, 1);
580 g_value_init (v4l2object->par, GST_TYPE_FRACTION);
581 if (!g_value_transform (value, v4l2object->par)) {
582 g_warning ("Could not transform string to aspect ratio");
583 gst_value_set_fraction (v4l2object->par, 1, 1);
585 GST_DEBUG_OBJECT (v4l2object->element, "set PAR to %d/%d",
586 gst_value_get_fraction_numerator (v4l2object->par),
587 gst_value_get_fraction_denominator (v4l2object->par));
589 case PROP_FORCE_ASPECT_RATIO:
590 v4l2object->keep_aspect = g_value_get_boolean (value);
601 gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object,
602 guint prop_id, GValue * value, GParamSpec * pspec)
606 g_value_set_string (value, v4l2object->videodev);
608 case PROP_DEVICE_NAME:
610 const guchar *new = NULL;
612 if (GST_V4L2_IS_OPEN (v4l2object)) {
613 new = v4l2object->vcap.card;
614 } else if (gst_v4l2_open (v4l2object)) {
615 new = v4l2object->vcap.card;
616 gst_v4l2_close (v4l2object);
618 g_value_set_string (value, (gchar *) new);
623 if (GST_V4L2_IS_OPEN (v4l2object))
624 g_value_set_int (value, v4l2object->video_fd);
626 g_value_set_int (value, DEFAULT_PROP_DEVICE_FD);
633 if (GST_V4L2_IS_OPEN (v4l2object)) {
634 flags |= v4l2object->vcap.capabilities &
635 (V4L2_CAP_VIDEO_CAPTURE |
636 V4L2_CAP_VIDEO_OUTPUT |
637 V4L2_CAP_VIDEO_OVERLAY |
638 V4L2_CAP_VBI_CAPTURE |
639 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
641 if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
642 flags |= V4L2_CAP_VIDEO_CAPTURE;
644 if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
645 flags |= V4L2_CAP_VIDEO_OUTPUT;
647 g_value_set_flags (value, flags);
650 case PROP_BRIGHTNESS:
652 case PROP_SATURATION:
655 gint cid = gst_v4l2_object_prop_to_cid (prop_id);
658 if (GST_V4L2_IS_OPEN (v4l2object)) {
660 if (gst_v4l2_get_attribute (v4l2object, cid, &v)) {
661 g_value_set_int (value, v);
669 g_value_set_enum (value, v4l2object->tv_norm);
672 g_value_set_enum (value, v4l2object->req_mode);
674 case PROP_EXTRA_CONTROLS:
675 gst_value_set_structure (value, v4l2object->extra_controls);
677 case PROP_PIXEL_ASPECT_RATIO:
679 g_value_transform (v4l2object->par, value);
681 case PROP_FORCE_ASPECT_RATIO:
682 g_value_set_boolean (value, v4l2object->keep_aspect);
692 gst_v4l2_set_defaults (GstV4l2Object * v4l2object)
694 GstTunerNorm *norm = NULL;
695 GstTunerChannel *channel = NULL;
698 if (!GST_IS_TUNER (v4l2object->element))
701 tuner = GST_TUNER (v4l2object->element);
703 if (v4l2object->tv_norm)
704 norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
705 GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
706 "norm=%p", (guint64) v4l2object->tv_norm, norm);
708 gst_tuner_set_norm (tuner, norm);
711 GST_TUNER_NORM (gst_tuner_get_norm (GST_TUNER (v4l2object->element)));
713 v4l2object->tv_norm =
714 gst_v4l2_tuner_get_std_id_by_norm (v4l2object, norm);
715 gst_tuner_norm_changed (tuner, norm);
719 if (v4l2object->channel)
720 channel = gst_tuner_find_channel_by_name (tuner, v4l2object->channel);
722 gst_tuner_set_channel (tuner, channel);
725 GST_TUNER_CHANNEL (gst_tuner_get_channel (GST_TUNER
726 (v4l2object->element)));
728 g_free (v4l2object->channel);
729 v4l2object->channel = g_strdup (channel->label);
730 gst_tuner_channel_changed (tuner, channel);
735 && GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
736 if (v4l2object->frequency != 0) {
737 gst_tuner_set_frequency (tuner, channel, v4l2object->frequency);
739 v4l2object->frequency = gst_tuner_get_frequency (tuner, channel);
740 if (v4l2object->frequency == 0) {
742 gst_tuner_set_frequency (tuner, channel, 1000);
750 gst_v4l2_object_open (GstV4l2Object * v4l2object)
752 if (gst_v4l2_open (v4l2object))
753 gst_v4l2_set_defaults (v4l2object);
761 gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other)
765 ret = gst_v4l2_dup (v4l2object, other);
771 gst_v4l2_object_close (GstV4l2Object * v4l2object)
773 if (!gst_v4l2_close (v4l2object))
776 gst_caps_replace (&v4l2object->probed_caps, NULL);
778 if (v4l2object->formats) {
779 gst_v4l2_object_clear_format_list (v4l2object);
785 static struct v4l2_fmtdesc *
786 gst_v4l2_object_get_format_from_fourcc (GstV4l2Object * v4l2object,
789 struct v4l2_fmtdesc *fmt;
795 walk = gst_v4l2_object_get_format_list (v4l2object);
797 fmt = (struct v4l2_fmtdesc *) walk->data;
798 if (fmt->pixelformat == fourcc)
800 /* special case for jpeg */
801 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
802 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
803 fmt->pixelformat == V4L2_PIX_FMT_PJPG) {
804 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
805 fourcc == V4L2_PIX_FMT_PJPG) {
809 walk = g_slist_next (walk);
817 /* completely made-up ranking, the values themselves are meaningless */
818 /* These ranks MUST be X such that X<<15 fits in a signed int - see
819    the comment at the end of gst_v4l2_object_format_get_rank. */
820 #define YUV_BASE_RANK 1000
821 #define JPEG_BASE_RANK 500
822 #define DV_BASE_RANK 200
823 #define RGB_BASE_RANK 100
824 #define YUV_ODD_BASE_RANK 50
825 #define RGB_ODD_BASE_RANK 25
826 #define BAYER_BASE_RANK 15
827 #define S910_BASE_RANK 10
828 #define GREY_BASE_RANK 5
829 #define PWC_BASE_RANK 1
832 gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
834 guint32 fourcc = fmt->pixelformat;
835 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
839 case V4L2_PIX_FMT_MJPEG:
840 case V4L2_PIX_FMT_PJPG:
841 rank = JPEG_BASE_RANK;
843 case V4L2_PIX_FMT_JPEG:
844 rank = JPEG_BASE_RANK + 1;
846 case V4L2_PIX_FMT_MPEG: /* MPEG */
847 rank = JPEG_BASE_RANK + 2;
850 case V4L2_PIX_FMT_RGB332:
851 case V4L2_PIX_FMT_RGB555:
852 case V4L2_PIX_FMT_RGB555X:
853 case V4L2_PIX_FMT_RGB565:
854 case V4L2_PIX_FMT_RGB565X:
855 rank = RGB_ODD_BASE_RANK;
858 case V4L2_PIX_FMT_RGB24:
859 case V4L2_PIX_FMT_BGR24:
860 rank = RGB_BASE_RANK - 1;
863 case V4L2_PIX_FMT_RGB32:
864 case V4L2_PIX_FMT_BGR32:
865 rank = RGB_BASE_RANK;
868 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
869 rank = GREY_BASE_RANK;
872 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
873 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
874 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
875 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
876 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
877 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
878 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
879 rank = YUV_ODD_BASE_RANK;
882 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
883 rank = YUV_BASE_RANK + 3;
885 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
886 rank = YUV_BASE_RANK + 2;
888 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
889 rank = YUV_BASE_RANK + 7;
891 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
892 rank = YUV_BASE_RANK + 10;
894 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
895 rank = YUV_BASE_RANK + 6;
897 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
898 rank = YUV_BASE_RANK + 9;
900 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
901 rank = YUV_BASE_RANK + 5;
903 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
904 rank = YUV_BASE_RANK + 4;
906 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
907 rank = YUV_BASE_RANK + 8;
910 case V4L2_PIX_FMT_DV:
914 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
918 case V4L2_PIX_FMT_SBGGR8:
919 rank = BAYER_BASE_RANK;
922 case V4L2_PIX_FMT_SN9C10X:
923 rank = S910_BASE_RANK;
926 case V4L2_PIX_FMT_PWC1:
927 rank = PWC_BASE_RANK;
929 case V4L2_PIX_FMT_PWC2:
930 rank = PWC_BASE_RANK;
938 /* All ranks are below 1<<15 so a shift by 15
939  * will a) make all non-emulated formats rank higher
940  * than emulated ones and b) not overflow
951 format_cmp_func (gconstpointer a, gconstpointer b)
953 const struct v4l2_fmtdesc *fa = a;
954 const struct v4l2_fmtdesc *fb = b;
956 if (fa->pixelformat == fb->pixelformat)
959 return gst_v4l2_object_format_get_rank (fb) -
960 gst_v4l2_object_format_get_rank (fa);
963 /******************************************************
964 * gst_v4l2_object_fill_format_list():
965 * create list of supported capture formats
966 * return value: TRUE on success, FALSE on error
967 ******************************************************/
969 gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object,
970 enum v4l2_buf_type type)
973 struct v4l2_fmtdesc *format;
975 GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations");
977 /* format enumeration */
979 format = g_new0 (struct v4l2_fmtdesc, 1);
984 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
985 if (errno == EINVAL) {
987 break; /* end of enumeration */
993 GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index);
994 GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type);
995 GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags);
996 GST_LOG_OBJECT (v4l2object->element, "description: '%s'",
997 format->description);
998 GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT,
999 GST_FOURCC_ARGS (format->pixelformat));
1001 /* sort formats according to our preference; we do this, because caps
1002 * are probed in the order the formats are in the list, and the order of
1003 * formats in the final probed caps matters for things like fixation */
1004 v4l2object->formats = g_slist_insert_sorted (v4l2object->formats, format,
1005 (GCompareFunc) format_cmp_func);
1008 #ifndef GST_DISABLE_GST_DEBUG
1012 GST_INFO_OBJECT (v4l2object->element, "got %d format(s):", n);
1013 for (l = v4l2object->formats; l != NULL; l = l->next) {
1016 GST_INFO_OBJECT (v4l2object->element,
1017 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat),
1018 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1028 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
1029 (_("Failed to enumerate possible video formats device '%s' can work with"),
         v4l2object->videodev),
     ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
         n, v4l2object->videodev, errno, g_strerror (errno)));
1036 * Get the list of supported capture formats, a list of
1037 * <code>struct v4l2_fmtdesc</code>.
1040 gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
1042 if (!v4l2object->formats) {
1044 /* check usual way */
1045 gst_v4l2_object_fill_format_list (v4l2object, v4l2object->type);
1047 /* if our driver supports multi-planar
1048 * and if formats are still empty then we can workaround driver bug
1049 * by also looking up formats as if our device was not supporting
1051 if (!v4l2object->formats) {
1052 switch (v4l2object->type) {
1053 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1054 gst_v4l2_object_fill_format_list (v4l2object,
1055 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1058 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1059 gst_v4l2_object_fill_format_list (v4l2object,
1060 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1068 return v4l2object->formats;
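/* Map a V4L2 pixel format fourcc to the corresponding raw GstVideoFormat,
 * or GST_VIDEO_FORMAT_UNKNOWN if there is no direct mapping. */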
1071 static GstVideoFormat
1072 gst_v4l2_object_v4l2fourcc_to_video_format (guint32 fourcc)
1074 GstVideoFormat format;
1077 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1078 format = GST_VIDEO_FORMAT_GRAY8;
1080 case V4L2_PIX_FMT_RGB555:
1081 format = GST_VIDEO_FORMAT_RGB15;
1083 case V4L2_PIX_FMT_RGB565:
1084 format = GST_VIDEO_FORMAT_RGB16;
1086 case V4L2_PIX_FMT_RGB24:
1087 format = GST_VIDEO_FORMAT_RGB;
1089 case V4L2_PIX_FMT_BGR24:
1090 format = GST_VIDEO_FORMAT_BGR;
1092 case V4L2_PIX_FMT_RGB32:
1093 format = GST_VIDEO_FORMAT_xRGB;
1095 case V4L2_PIX_FMT_BGR32:
1096 format = GST_VIDEO_FORMAT_BGRx;
1098 case V4L2_PIX_FMT_NV12:
1099 case V4L2_PIX_FMT_NV12M:
1100 format = GST_VIDEO_FORMAT_NV12;
1102 case V4L2_PIX_FMT_NV12MT:
1103 format = GST_VIDEO_FORMAT_NV12_64Z32;
1105 case V4L2_PIX_FMT_NV21:
1106 case V4L2_PIX_FMT_NV21M:
1107 format = GST_VIDEO_FORMAT_NV21;
1109 case V4L2_PIX_FMT_YVU410:
1110 format = GST_VIDEO_FORMAT_YVU9;
1112 case V4L2_PIX_FMT_YUV410:
1113 format = GST_VIDEO_FORMAT_YUV9;
1115 case V4L2_PIX_FMT_YUV420:
1116 format = GST_VIDEO_FORMAT_I420;
1118 case V4L2_PIX_FMT_YUYV:
1119 format = GST_VIDEO_FORMAT_YUY2;
1121 case V4L2_PIX_FMT_YVU420:
1122 format = GST_VIDEO_FORMAT_YV12;
1124 case V4L2_PIX_FMT_UYVY:
1125 format = GST_VIDEO_FORMAT_UYVY;
1128 case V4L2_PIX_FMT_Y41P:
1129 format = GST_VIDEO_FORMAT_Y41P;
1132 case V4L2_PIX_FMT_YUV411P:
1133 format = GST_VIDEO_FORMAT_Y41B;
1135 case V4L2_PIX_FMT_YUV422P:
1136 format = GST_VIDEO_FORMAT_Y42B;
1138 case V4L2_PIX_FMT_YVYU:
1139 format = GST_VIDEO_FORMAT_YVYU;
1142 format = GST_VIDEO_FORMAT_UNKNOWN;
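/* Build a bare caps structure (media type and codec fields only, without
 * width/height/framerate) for the given V4L2 fourcc, or NULL if the fourcc
 * is not handled. */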
1149 static GstStructure *
1150 gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc)
1152 GstStructure *structure = NULL;
1155 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1156 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1157 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1158 structure = gst_structure_new_empty ("image/jpeg");
1160 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1161 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1162 /* FIXME: get correct fourccs here */
1164 case V4L2_PIX_FMT_MPEG1:
1165 structure = gst_structure_new ("video/mpeg",
1166 "mpegversion", G_TYPE_INT, 1, NULL);
1168 case V4L2_PIX_FMT_MPEG2:
1169 structure = gst_structure_new ("video/mpeg",
1170 "mpegversion", G_TYPE_INT, 2, NULL);
1172 case V4L2_PIX_FMT_MPEG4:
1173 structure = gst_structure_new ("video/mpeg",
1174 "mpegversion", G_TYPE_INT, 4, "systemstream",
1175 G_TYPE_BOOLEAN, FALSE, NULL);
1177 case V4L2_PIX_FMT_H263:
1178 structure = gst_structure_new ("video/x-h263",
1179 "variant", G_TYPE_STRING, "itu", NULL);
1181 case V4L2_PIX_FMT_H264: /* H.264 */
1182 structure = gst_structure_new ("video/x-h264",
1183 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1184 G_TYPE_STRING, "au", NULL);
1186 case V4L2_PIX_FMT_VP8:
1187 structure = gst_structure_new_empty ("video/x-vp8");
1189 case V4L2_PIX_FMT_RGB332:
1190 case V4L2_PIX_FMT_RGB555X:
1191 case V4L2_PIX_FMT_RGB565X:
1192 /* FIXME: get correct fourccs here */
1194 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1195 case V4L2_PIX_FMT_RGB555:
1196 case V4L2_PIX_FMT_RGB565:
1197 case V4L2_PIX_FMT_RGB24:
1198 case V4L2_PIX_FMT_BGR24:
1199 case V4L2_PIX_FMT_RGB32:
1200 case V4L2_PIX_FMT_BGR32:
1201 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1202 case V4L2_PIX_FMT_NV12M:
1203 case V4L2_PIX_FMT_NV12MT:
1204 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1205 case V4L2_PIX_FMT_NV21M:
1206 case V4L2_PIX_FMT_YVU410:
1207 case V4L2_PIX_FMT_YUV410:
1208 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1209 case V4L2_PIX_FMT_YUYV:
1210 case V4L2_PIX_FMT_YVU420:
1211 case V4L2_PIX_FMT_UYVY:
1213 case V4L2_PIX_FMT_Y41P:
1215 case V4L2_PIX_FMT_YUV422P:
1216 case V4L2_PIX_FMT_YVYU:
1217 case V4L2_PIX_FMT_YUV411P:{
1218 GstVideoFormat format;
1219 format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
1220 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1221 structure = gst_structure_new ("video/x-raw",
1222 "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
1225 case V4L2_PIX_FMT_DV:
1227 gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1230 case V4L2_PIX_FMT_MPEG: /* MPEG */
1231 structure = gst_structure_new ("video/mpegts",
1232 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1234 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1236 case V4L2_PIX_FMT_SBGGR8:
1237 structure = gst_structure_new_empty ("video/x-bayer");
1239 case V4L2_PIX_FMT_SN9C10X:
1240 structure = gst_structure_new_empty ("video/x-sonix");
1242 case V4L2_PIX_FMT_PWC1:
1243 structure = gst_structure_new_empty ("video/x-pwc1");
1245 case V4L2_PIX_FMT_PWC2:
1246 structure = gst_structure_new_empty ("video/x-pwc2");
1249 GST_DEBUG ("Unknown fourcc 0x%08x %" GST_FOURCC_FORMAT,
1250 fourcc, GST_FOURCC_ARGS (fourcc));
1258 gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
1260 GstStructure *template;
1263 template = gst_v4l2_object_v4l2fourcc_to_bare_struct (fourcc);
1265 if (template == NULL)
1268 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
1269 if (gst_v4l2_formats[i].format != fourcc)
1272 if (gst_v4l2_formats[i].dimensions) {
1273 gst_structure_set (template,
1274 "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1275 "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1276 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
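/* Build template caps covering every entry of gst_v4l2_formats[] whose
 * flags match the given format flags. */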
1287 gst_v4l2_object_get_caps_helper (GstV4L2FormatFlags flags)
1289 GstStructure *structure;
1293 caps = gst_caps_new_empty ();
1294 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
1296 if ((gst_v4l2_formats[i].flags & flags) == 0)
1300 gst_v4l2_object_v4l2fourcc_to_bare_struct (gst_v4l2_formats[i].format);
1302 if (gst_v4l2_formats[i].dimensions) {
1303 gst_structure_set (structure,
1304 "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1305 "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1306 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
1308 gst_caps_append_structure (caps, structure);
1312 return gst_caps_simplify (caps);
1316 gst_v4l2_object_get_all_caps (void)
1318 static GstCaps *caps = NULL;
1321 caps = gst_v4l2_object_get_caps_helper (GST_V4L2_ALL);
1323 return gst_caps_ref (caps);
1327 gst_v4l2_object_get_raw_caps (void)
1329 static GstCaps *caps = NULL;
1332 caps = gst_v4l2_object_get_caps_helper (GST_V4L2_RAW);
1334 return gst_caps_ref (caps);
1338 gst_v4l2_object_get_codec_caps (void)
1340 static GstCaps *caps = NULL;
1343 caps = gst_v4l2_object_get_caps_helper (GST_V4L2_CODEC);
1345 return gst_caps_ref (caps);
1348 /* gst_v4l2_object_choose_fourcc:
1349 * @obj a #GstV4l2Object
1350 * @fourcc_splane The format type in single plane representation
1351 * @fourcc_mplane The format type in multi-plane representation
1352 * @fourcc Set to the first format to try
1353  * @fourcc_alt The alternative format to use, or zero if mplane is not
1354  * supported. Note that if the alternative is used, the
1355  * prefered_non_contiguous setting needs to be inverted.
1357  * Certain formats can be stored in a multi-planar buffer type with two
1358  * representations. For example NV12, which has two planes, can be stored
1359  * in either one plane of the multi-planar buffer structure, or in two.
1360  * This function chooses the right format to use based on the object settings.
1363 gst_v4l2_object_choose_fourcc (GstV4l2Object * obj, guint32 fourcc_splane,
1364 guint32 fourcc_mplane, guint32 * fourcc, guint32 * fourcc_alt)
1366 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1367 if (obj->prefered_non_contiguous) {
1368 *fourcc = fourcc_mplane;
1369 *fourcc_alt = fourcc_splane;
1371 *fourcc = fourcc_splane;
1372 *fourcc_alt = fourcc_mplane;
1375 *fourcc = fourcc_splane;
1380 /* collect data for the given caps
1381 * @caps: given input caps
1382 * @format: location for the v4l format
1383 * @w/@h: location for width and height
1384 * @fps_n/@fps_d: location for framerate
1385 * @size: location for expected size of the frame or 0 if unknown
1388 gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
1389 struct v4l2_fmtdesc **format, GstVideoInfo * info)
1391 GstStructure *structure;
1392 guint32 fourcc, fourcc_alt = 0;
1393 const gchar *mimetype;
1394 struct v4l2_fmtdesc *fmt;
1396 /* default unknown values */
1399 structure = gst_caps_get_structure (caps, 0);
1401 mimetype = gst_structure_get_name (structure);
1403 if (!gst_video_info_from_caps (info, caps))
1404 goto invalid_format;
1406 if (g_str_equal (mimetype, "video/x-raw")) {
1407 switch (GST_VIDEO_INFO_FORMAT (info)) {
1408 case GST_VIDEO_FORMAT_I420:
1409 fourcc = V4L2_PIX_FMT_YUV420;
1411 case GST_VIDEO_FORMAT_YUY2:
1412 fourcc = V4L2_PIX_FMT_YUYV;
1415 case GST_VIDEO_FORMAT_Y41P:
1416 fourcc = V4L2_PIX_FMT_Y41P;
1419 case GST_VIDEO_FORMAT_UYVY:
1420 fourcc = V4L2_PIX_FMT_UYVY;
1422 case GST_VIDEO_FORMAT_YV12:
1423 fourcc = V4L2_PIX_FMT_YVU420;
1425 case GST_VIDEO_FORMAT_Y41B:
1426 fourcc = V4L2_PIX_FMT_YUV411P;
1428 case GST_VIDEO_FORMAT_Y42B:
1429 fourcc = V4L2_PIX_FMT_YUV422P;
1431 case GST_VIDEO_FORMAT_NV12:
1432 gst_v4l2_object_choose_fourcc (v4l2object, V4L2_PIX_FMT_NV12,
1433 V4L2_PIX_FMT_NV12M, &fourcc, &fourcc_alt);
1435 case GST_VIDEO_FORMAT_NV12_64Z32:
1436 fourcc = V4L2_PIX_FMT_NV12MT;
1438 case GST_VIDEO_FORMAT_NV21:
1439 gst_v4l2_object_choose_fourcc (v4l2object, V4L2_PIX_FMT_NV21,
1440 V4L2_PIX_FMT_NV21M, &fourcc, &fourcc_alt);
1442 case GST_VIDEO_FORMAT_YVYU:
1443 fourcc = V4L2_PIX_FMT_YVYU;
1445 case GST_VIDEO_FORMAT_RGB15:
1446 fourcc = V4L2_PIX_FMT_RGB555;
1448 case GST_VIDEO_FORMAT_RGB16:
1449 fourcc = V4L2_PIX_FMT_RGB565;
1451 case GST_VIDEO_FORMAT_RGB:
1452 fourcc = V4L2_PIX_FMT_RGB24;
1454 case GST_VIDEO_FORMAT_BGR:
1455 fourcc = V4L2_PIX_FMT_BGR24;
1457 case GST_VIDEO_FORMAT_xRGB:
1458 case GST_VIDEO_FORMAT_ARGB:
1459 fourcc = V4L2_PIX_FMT_RGB32;
1461 case GST_VIDEO_FORMAT_BGRx:
1462 case GST_VIDEO_FORMAT_BGRA:
1463 fourcc = V4L2_PIX_FMT_BGR32;
1465 case GST_VIDEO_FORMAT_GRAY8:
1466 fourcc = V4L2_PIX_FMT_GREY;
1471 if (g_str_equal (mimetype, "video/mpegts")) {
1472 fourcc = V4L2_PIX_FMT_MPEG;
1473 } else if (g_str_equal (mimetype, "video/x-dv")) {
1474 fourcc = V4L2_PIX_FMT_DV;
1475 } else if (g_str_equal (mimetype, "image/jpeg")) {
1476 fourcc = V4L2_PIX_FMT_JPEG;
1477 } else if (g_str_equal (mimetype, "video/mpeg")) {
1479 if (gst_structure_get_int (structure, "mpegversion", &version)) {
1482 fourcc = V4L2_PIX_FMT_MPEG1;
1485 fourcc = V4L2_PIX_FMT_MPEG2;
1488 fourcc = V4L2_PIX_FMT_MPEG4;
1494 } else if (g_str_equal (mimetype, "video/x-h263")) {
1495 fourcc = V4L2_PIX_FMT_H263;
1496 } else if (g_str_equal (mimetype, "video/x-h264")) {
1497 fourcc = V4L2_PIX_FMT_H264;
1498 } else if (g_str_equal (mimetype, "video/x-vp8")) {
1499 fourcc = V4L2_PIX_FMT_VP8;
1500 } else if (g_str_equal (mimetype, "video/x-bayer")) {
1501 fourcc = V4L2_PIX_FMT_SBGGR8;
1502 } else if (g_str_equal (mimetype, "video/x-sonix")) {
1503 fourcc = V4L2_PIX_FMT_SN9C10X;
1504 } else if (g_str_equal (mimetype, "video/x-pwc1")) {
1505 fourcc = V4L2_PIX_FMT_PWC1;
1506 } else if (g_str_equal (mimetype, "video/x-pwc2")) {
1507 fourcc = V4L2_PIX_FMT_PWC2;
1512 goto unhandled_format;
1514 fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
1516 if (fmt == NULL && fourcc_alt != 0) {
1517 GST_DEBUG_OBJECT (v4l2object, "No support for %" GST_FOURCC_FORMAT
1518 " trying %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc),
1519 GST_FOURCC_ARGS (fourcc_alt));
1520 v4l2object->prefered_non_contiguous = !v4l2object->prefered_non_contiguous;
1521 fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc_alt);
1525 goto unsupported_format;
1534 GST_DEBUG_OBJECT (v4l2object, "invalid format");
1539 GST_DEBUG_OBJECT (v4l2object, "unhandled format");
1544 GST_DEBUG_OBJECT (v4l2object, "unsupported format");
1550 gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
1551 guint32 pixelformat, gint * width, gint * height, gboolean * interlaced);
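/* Add a pixel-aspect-ratio field to the structure, taken from the
 * pixel-aspect-ratio property when set, otherwise from the pixel aspect
 * reported by VIDIOC_CROPCAP. Does nothing unless force-aspect-ratio is
 * enabled. */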
1554 gst_v4l2_object_add_aspect_ratio (GstV4l2Object * v4l2object, GstStructure * s)
1556 struct v4l2_cropcap cropcap;
1557 int num = 1, den = 1;
1559 if (!v4l2object->keep_aspect)
1562 if (v4l2object->par) {
1563 num = gst_value_get_fraction_numerator (v4l2object->par);
1564 den = gst_value_get_fraction_denominator (v4l2object->par);
1568 memset (&cropcap, 0, sizeof (cropcap));
1570 cropcap.type = v4l2object->type;
1571 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
1572 goto cropcap_failed;
1574 num = cropcap.pixelaspect.numerator;
1575 den = cropcap.pixelaspect.denominator;
1578 gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, num, den,
1583 if (errno != ENOTTY)
1584 GST_WARNING_OBJECT (v4l2object->element,
1585 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
1586 g_strerror (errno));
1591 /* The frame interval enumeration code first appeared in Linux 2.6.19. */
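/* Probe the frame rates supported for the given pixel format and size with
 * VIDIOC_ENUM_FRAMEINTERVALS and return a caps structure derived from the
 * template. */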
1592 static GstStructure *
1593 gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object,
1594 guint32 pixelformat,
1595 guint32 width, guint32 height, const GstStructure * template)
1597 gint fd = v4l2object->video_fd;
1598 struct v4l2_frmivalenum ival;
1601 GValue rates = { 0, };
1602 gboolean interlaced;
1603 gint int_width = width;
1604 gint int_height = height;
1606 if (v4l2object->never_interlaced) {
1609 /* Interlaced detection using VIDIOC_TRY/S_FMT */
1610 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat,
1611 &int_width, &int_height, &interlaced))
1615 memset (&ival, 0, sizeof (struct v4l2_frmivalenum));
1617 ival.pixel_format = pixelformat;
1618 ival.width = width;
1619 ival.height = height;
1621 GST_LOG_OBJECT (v4l2object->element,
1622 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
1623 GST_FOURCC_ARGS (pixelformat));
1625 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
1626 * fraction to get framerate */
1627 if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
1628 goto enum_frameintervals_failed;
1630 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
1631 GValue rate = { 0, };
1633 g_value_init (&rates, GST_TYPE_LIST);
1634 g_value_init (&rate, GST_TYPE_FRACTION);
1637 num = ival.discrete.numerator;
1638 denom = ival.discrete.denominator;
1640 if (num > G_MAXINT || denom > G_MAXINT) {
1641 /* let us hope we don't get here... */
1646 GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d",
1649 /* swap to get the framerate */
1650 gst_value_set_fraction (&rate, denom, num);
1651 gst_value_list_append_value (&rates, &rate);
1654 } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
1655 } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
1656 GValue min = { 0, };
1657 GValue step = { 0, };
1658 GValue max = { 0, };
1659 gboolean added = FALSE;
1660 guint32 minnum, mindenom;
1661 guint32 maxnum, maxdenom;
1663 g_value_init (&rates, GST_TYPE_LIST);
1665 g_value_init (&min, GST_TYPE_FRACTION);
1666 g_value_init (&step, GST_TYPE_FRACTION);
1667 g_value_init (&max, GST_TYPE_FRACTION);
1670 minnum = ival.stepwise.min.numerator;
1671 mindenom = ival.stepwise.min.denominator;
1672 if (minnum > G_MAXINT || mindenom > G_MAXINT) {
1676 GST_LOG_OBJECT (v4l2object->element, "stepwise min frame interval: %d/%d",
1678 gst_value_set_fraction (&min, minnum, mindenom);
1681 maxnum = ival.stepwise.max.numerator;
1682 maxdenom = ival.stepwise.max.denominator;
1683 if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
1688 GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d",
1690 gst_value_set_fraction (&max, maxnum, maxdenom);
1693 num = ival.stepwise.step.numerator;
1694 denom = ival.stepwise.step.denominator;
1695 if (num > G_MAXINT || denom > G_MAXINT) {
1700 if (num == 0 || denom == 0) {
1701 /* in this case we have a wrong fraction or no step, set the step to max
1702 * so that we only add the min value in the loop below */
1707 /* since we only have gst_value_fraction_subtract and not add, negate the
1709 GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d",
1711 gst_value_set_fraction (&step, -num, denom);
1713 while (gst_value_compare (&min, &max) != GST_VALUE_GREATER_THAN) {
1714 GValue rate = { 0, };
1716 num = gst_value_get_fraction_numerator (&min);
1717 denom = gst_value_get_fraction_denominator (&min);
1718 GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d",
1721 /* invert to get the framerate */
1722 g_value_init (&rate, GST_TYPE_FRACTION);
1723 gst_value_set_fraction (&rate, denom, num);
1724 gst_value_list_append_value (&rates, &rate);
1727 /* we're actually adding because step was negated above. This is because
1728 * there is no _add function... */
1729 if (!gst_value_fraction_subtract (&min, &min, &step)) {
1730 GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!");
1735 /* no range was added, leave the default range from the template */
1736 GST_WARNING_OBJECT (v4l2object->element,
1737 "no range added, leaving default");
1738 g_value_unset (&rates);
1740 } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
1741 guint32 maxnum, maxdenom;
1743 g_value_init (&rates, GST_TYPE_FRACTION_RANGE);
1745 num = ival.stepwise.min.numerator;
1746 denom = ival.stepwise.min.denominator;
1747 if (num > G_MAXINT || denom > G_MAXINT) {
1752 maxnum = ival.stepwise.max.numerator;
1753 maxdenom = ival.stepwise.max.denominator;
1754 if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
1759 GST_LOG_OBJECT (v4l2object->element,
1760 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
1763 gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num);
1769 s = gst_structure_copy (template);
1770 gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
1771 "height", G_TYPE_INT, (gint) height, NULL);
1772 gst_v4l2_object_add_aspect_ratio (v4l2object, s);
1773 if (g_str_equal (gst_structure_get_name (s), "video/x-raw"))
1774 gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
1775 (interlaced ? "mixed" : "progressive"), NULL);
1777 if (G_IS_VALUE (&rates)) {
1778 /* only change the framerate on the template when we have a valid probed new
1780 gst_structure_set_value (s, "framerate", &rates);
1781 g_value_unset (&rates);
1782 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
1783 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
1784 gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
1790 enum_frameintervals_failed:
1792 GST_DEBUG_OBJECT (v4l2object->element,
1793 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
1794 GST_FOURCC_ARGS (pixelformat), width, height);
1799 /* I don't see how this is actually an error, we ignore the format then */
1800 GST_WARNING_OBJECT (v4l2object->element,
1801 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
1802 GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
1808 sort_by_frame_size (GstStructure * s1, GstStructure * s2)
1812 gst_structure_get_int (s1, "width", &w1);
1813 gst_structure_get_int (s1, "height", &h1);
1814 gst_structure_get_int (s2, "width", &w2);
1815 gst_structure_get_int (s2, "height", &h2);
1817 /* I think it's safe to assume that this won't overflow for a while */
1818 return ((w2 * h2) - (w1 * h1));
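/* Append the structure to the caps; for encoded formats on OUTPUT devices
 * the structure is first tagged with parsed=TRUE, unless the format is
 * flagged GST_V4L2_NO_PARSE. */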
1822 gst_v4l2_object_update_and_append (GstV4l2Object * v4l2object,
1823 guint32 format, GstCaps * caps, GstStructure * s)
1825 /* Encoded streams on output buffers need to be parsed */
1826 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
1827 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
1830 for (; i < GST_V4L2_FORMAT_COUNT; i++) {
1831 if (format == gst_v4l2_formats[i].format &&
1832 gst_v4l2_formats[i].flags & GST_V4L2_CODEC &&
1833 !(gst_v4l2_formats[i].flags & GST_V4L2_NO_PARSE)) {
1834 gst_structure_set (s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
1840 gst_caps_append_structure (caps, s);
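/* Enumerate the frame sizes supported for the given pixel format with
 * VIDIOC_ENUM_FRAMESIZES (discrete, stepwise or continuous) and build the
 * resulting caps; falls back to probing minimum/maximum sizes with
 * TRY_FMT/S_FMT when the enumeration is unavailable or yields nothing. */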
1844 gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object,
1845 guint32 pixelformat, const GstStructure * template)
1847 GstCaps *ret = gst_caps_new_empty ();
1849 gint fd = v4l2object->video_fd;
1850 struct v4l2_frmsizeenum size;
1851 GList *results = NULL;
1854 if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G')) {
1855 gst_caps_append_structure (ret, gst_structure_copy (template));
1859 memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
1861 size.pixel_format = pixelformat;
1863 GST_DEBUG_OBJECT (v4l2object->element,
1864 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
1865 GST_FOURCC_ARGS (pixelformat));
1867 if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
1868 goto enum_framesizes_failed;
1870 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
1872 GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d",
1873 size.discrete.width, size.discrete.height);
1875 w = MIN (size.discrete.width, G_MAXINT);
1876 h = MIN (size.discrete.height, G_MAXINT);
1880 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
1881 pixelformat, w, h, template);
1884 results = g_list_prepend (results, tmp);
1888 } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
1889 GST_DEBUG_OBJECT (v4l2object->element,
1890 "done iterating discrete frame sizes");
1891 } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
1892 GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:");
1893 GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
1894 size.stepwise.min_width);
1895 GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
1896 size.stepwise.min_height);
1897 GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
1898 size.stepwise.max_width);
1899 GST_DEBUG_OBJECT (v4l2object->element, "max height: %d",
1900 size.stepwise.max_height);
1901 GST_DEBUG_OBJECT (v4l2object->element, "step width: %d",
1902 size.stepwise.step_width);
1903 GST_DEBUG_OBJECT (v4l2object->element, "step height: %d",
1904 size.stepwise.step_height);
1906 for (w = size.stepwise.min_width, h = size.stepwise.min_height;
1907 w <= size.stepwise.max_width && h <= size.stepwise.max_height;
1908 w += size.stepwise.step_width, h += size.stepwise.step_height) {
1909 if (w == 0 || h == 0)
1913 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
1914 pixelformat, w, h, template);
1917 results = g_list_prepend (results, tmp);
1919 GST_DEBUG_OBJECT (v4l2object->element,
1920 "done iterating stepwise frame sizes");
1921 } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
1924 GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:");
1925 GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
1926 size.stepwise.min_width);
1927 GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
1928 size.stepwise.min_height);
1929 GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
1930 size.stepwise.max_width);
1931 GST_DEBUG_OBJECT (v4l2object->element, "max height: %d",
1932 size.stepwise.max_height);
1934 w = MAX (size.stepwise.min_width, 1);
1935 h = MAX (size.stepwise.min_height, 1);
1936 maxw = MIN (size.stepwise.max_width, G_MAXINT);
1937 maxh = MIN (size.stepwise.max_height, G_MAXINT);
1940 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object, pixelformat,
1943 gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
1944 (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
1947 /* no point using the results list here, since there's only one struct */
1948 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
1954 /* we use an intermediary list to store and then sort the results of the
1955 * probing because we can't make any assumptions about the order in which
1956 * the driver will give us the sizes, but we want the final caps to contain
1957 * the results starting with the highest resolution and having the lowest
1958 * resolution last, since order in caps matters for things like fixation. */
1959 results = g_list_sort (results, (GCompareFunc) sort_by_frame_size);
1960 while (results != NULL) {
1961 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret,
1963 results = g_list_delete_link (results, results);
1966 if (gst_caps_is_empty (ret))
1967 goto enum_framesizes_no_results;
1972 enum_framesizes_failed:
1974 /* I don't see how this is actually an error */
1975 GST_DEBUG_OBJECT (v4l2object->element,
1976 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
1977 " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
1978 goto default_frame_sizes;
1980 enum_framesizes_no_results:
1982 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
1983 * question doesn't actually support it yet */
1984 GST_DEBUG_OBJECT (v4l2object->element,
1985 "No results for pixelformat %" GST_FOURCC_FORMAT
1986 " enumerating frame sizes, trying fallback",
1987 GST_FOURCC_ARGS (pixelformat));
1988 goto default_frame_sizes;
1992 GST_WARNING_OBJECT (v4l2object->element,
1993 "Unknown frame size enum type for pixelformat %" GST_FOURCC_FORMAT
1994 ": %u", GST_FOURCC_ARGS (pixelformat), size.type);
1995 goto default_frame_sizes;
1998 default_frame_sizes:
2000 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2001 gboolean interlaced;
2003 /* This code is for Linux < 2.6.19 */
2004 min_w = min_h = 1;
2005 max_w = max_h = GST_V4L2_MAX_SIZE;
2006 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
2007 &min_h, &interlaced)) {
2008 GST_WARNING_OBJECT (v4l2object->element,
2009 "Could not probe minimum capture size for pixelformat %"
2010 GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
2012 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
2013 &max_h, &interlaced)) {
2014 GST_WARNING_OBJECT (v4l2object->element,
2015 "Could not probe maximum capture size for pixelformat %"
2016 GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
2019 /* Since we can't get framerate directly, try to use the current norm */
2020 if (v4l2object->tv_norm && v4l2object->norms) {
2022 GstTunerNorm *norm = NULL;
2023 GstTunerNorm *current =
2024 gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
2026 for (norms = v4l2object->norms; norms != NULL; norms = norms->next) {
2027 norm = (GstTunerNorm *) norms->data;
2028 if (!strcmp (norm->label, current->label))
2031 /* If it's possible, set framerate to that (discrete) value */
2033 fix_num = gst_value_get_fraction_numerator (&norm->framerate);
2034 fix_denom = gst_value_get_fraction_denominator (&norm->framerate);
2038 tmp = gst_structure_copy (template);
2040 gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2042 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2043 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
2044 /* if norm can't be used, copy the template framerate */
2045 gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2050 gst_structure_set (tmp, "width", G_TYPE_INT, max_w, NULL);
2052 gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2055 gst_structure_set (tmp, "height", G_TYPE_INT, max_h, NULL);
2057 gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2059 if (g_str_equal (gst_structure_get_name (tmp), "video/x-raw"))
2060 gst_structure_set (tmp, "interlace-mode", G_TYPE_STRING,
2061 (interlaced ? "mixed" : "progressive"), NULL);
2062 gst_v4l2_object_add_aspect_ratio (v4l2object, tmp);
2064 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
2070 gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
2071 guint32 pixelformat, gint * width, gint * height, gboolean * interlaced)
2073 struct v4l2_format fmt, prevfmt;
2076 int prevfmt_valid = FALSE;
2077 gboolean ret = FALSE;
2079 g_return_val_if_fail (width != NULL, FALSE);
2080 g_return_val_if_fail (height != NULL, FALSE);
2082 GST_LOG_OBJECT (v4l2object->element,
2083 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2084 *width, *height, GST_FOURCC_ARGS (pixelformat));
2086 fd = v4l2object->video_fd;
2088 memset (&fmt, 0, sizeof (struct v4l2_format));
2089 memset (&prevfmt, 0, sizeof (struct v4l2_format));
2091 /* Some drivers are buggy and will modify the currently set format
2092 when processing VIDIOC_TRY_FMT, so we remember what is currently
2093 set and restore it when done. */
2094 if (!v4l2object->no_initial_format) {
2095 prevfmt.type = v4l2object->type;
2096 prevfmt_valid = (v4l2_ioctl (fd, VIDIOC_G_FMT, &prevfmt) >= 0);
2099 /* get size delimiters */
2100 memset (&fmt, 0, sizeof (fmt));
2101 fmt.type = v4l2object->type;
2102 fmt.fmt.pix.width = *width;
2103 fmt.fmt.pix.height = *height;
2104 fmt.fmt.pix.pixelformat = pixelformat;
2105 fmt.fmt.pix.field = V4L2_FIELD_NONE;
2107 r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
2108 if (r < 0 && errno == EINVAL) {
2109 /* try again with interlaced video */
2110 fmt.fmt.pix.width = *width;
2111 fmt.fmt.pix.height = *height;
2112 fmt.fmt.pix.pixelformat = pixelformat;
2113 fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
2114 r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
2118 /* The driver might not implement TRY_FMT, in which case we will try
2119    S_FMT instead */
2120 if (errno != ENOTTY)
2123 /* Only try S_FMT if we're not actively capturing yet, which we shouldn't
2124 be, because we're still probing */
2125 if (GST_V4L2_IS_ACTIVE (v4l2object))
2128 GST_LOG_OBJECT (v4l2object->element,
2129 "Failed to probe size limit with VIDIOC_TRY_FMT, trying VIDIOC_S_FMT");
2131 fmt.fmt.pix.width = *width;
2132 fmt.fmt.pix.height = *height;
2134 r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
2135 if (r < 0 && errno == EINVAL) {
2136 /* try again with progressive video */
2137 fmt.fmt.pix.width = *width;
2138 fmt.fmt.pix.height = *height;
2139 fmt.fmt.pix.pixelformat = pixelformat;
2140 fmt.fmt.pix.field = V4L2_FIELD_NONE;
2141 r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
2148 GST_LOG_OBJECT (v4l2object->element,
2149 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2151 *width = fmt.fmt.pix.width;
2152 *height = fmt.fmt.pix.height;
2154 switch (fmt.fmt.pix.field) {
2155 case V4L2_FIELD_ANY:
2156 case V4L2_FIELD_NONE:
2157 *interlaced = FALSE;
2159 case V4L2_FIELD_INTERLACED:
2160 case V4L2_FIELD_INTERLACED_TB:
2161 case V4L2_FIELD_INTERLACED_BT:
2165 GST_WARNING_OBJECT (v4l2object->element,
2166 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u",
2167 GST_FOURCC_ARGS (pixelformat), *width, *height);
2175 GST_WARNING_OBJECT (v4l2object->element,
2176 "Unable to try format: %s", g_strerror (errno));
2179 if (v4l2_ioctl (fd, VIDIOC_S_FMT, &prevfmt) < 0) {
2180 GST_WARNING_OBJECT (v4l2object->element,
2181 "Unable to restore format after trying format: %s",
2182 g_strerror (errno));
2189 gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
2193 GST_DEBUG_OBJECT (v4l2object->element, "initializing the capture system");
2195 GST_V4L2_CHECK_OPEN (v4l2object);
2196 GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
2198 /* find transport */
2199 mode = v4l2object->req_mode;
2201 if (v4l2object->vcap.capabilities & V4L2_CAP_READWRITE) {
2202 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2203 mode = GST_V4L2_IO_RW;
2204 } else if (v4l2object->req_mode == GST_V4L2_IO_RW)
2205 goto method_not_supported;
2207 if (v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
2208 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2209 mode = GST_V4L2_IO_MMAP;
2210 } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP)
2211 goto method_not_supported;
2213 /* if still no transport selected, error out */
2214 if (mode == GST_V4L2_IO_AUTO)
2215 goto no_supported_capture_method;
2217 GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode);
2218 v4l2object->mode = mode;
2220 /* Map the buffers */
2221 GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool");
2223 if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
2224 goto buffer_pool_new_failed;
2226 GST_V4L2_SET_ACTIVE (v4l2object);
2231 buffer_pool_new_failed:
2233 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
2234 (_("Could not map buffers from device '%s'"),
2235 v4l2object->videodev),
2236 ("Failed to create buffer pool: %s", g_strerror (errno)));
2239 method_not_supported:
2241 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
2242 (_("The driver of device '%s' does not support the IO method %d"),
2243 v4l2object->videodev, mode), (NULL));
2246 no_supported_capture_method:
2248 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
2249 (_("The driver of device '%s' does not support any known IO "
2250 "method."), v4l2object->videodev), (NULL));
2256 gst_v4l2_object_save_format (GstV4l2Object * v4l2object,
2257 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
2258 GstVideoInfo * info, GstVideoAlignment * align)
2260 const GstVideoFormatInfo *finfo = info->finfo;
2263 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
2264 /* figure out the frame layout */
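/* note: MAX () below guards against drivers that report num_planes == 0 */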
2265 v4l2object->n_v4l2_planes = MAX (1, format->fmt.pix_mp.num_planes);
2266 v4l2object->sizeimage = 0;
2267 for (i = 0; i < format->fmt.pix_mp.num_planes; i++) {
2268 v4l2object->bytesperline[i] =
2269 format->fmt.pix_mp.plane_fmt[i].bytesperline;
2270 v4l2object->sizeimage += format->fmt.pix_mp.plane_fmt[i].sizeimage;
2273 /* only one plane in non-MPLANE mode */
2274 v4l2object->n_v4l2_planes = 1;
2276 /* figure out the frame layout */
2277 for (i = 0; i < finfo->n_planes; i++) {
2278 guint stride = format->fmt.pix.bytesperline;
2280 switch (finfo->format) {
2281 case GST_VIDEO_FORMAT_NV12:
2282 case GST_VIDEO_FORMAT_NV21:
2283 case GST_VIDEO_FORMAT_NV16:
2284 case GST_VIDEO_FORMAT_NV24:
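/* the chroma plane of these semi-planar formats interleaves Cb and Cr,
 * hence the factor of 2 on its byte stride below */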
2285 v4l2object->bytesperline[i] = (i == 0 ? 1 : 2) *
2286 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, stride);
2289 v4l2object->bytesperline[i] =
2290 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, stride);
2294 GST_DEBUG_OBJECT (v4l2object->element,
2295 "Extrapolated stride for plane %d from %d to %d", i, stride,
2296 v4l2object->bytesperline[i]);
2299 v4l2object->sizeimage = format->fmt.pix.sizeimage;
2302 GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %u",
2303 v4l2object->sizeimage);
2305 /* To avoid copies, we need crop_meta if top or left padding is set */
2306 v4l2object->need_crop_meta =
2307 ((align->padding_top + align->padding_left) != 0);
2309 /* ... or video meta if bottom or right padding is set */
2310 v4l2object->need_video_meta = (v4l2object->need_crop_meta ||
2311 ((align->padding_bottom + align->padding_right) != 0));
2313 /* ... or also video meta if stride is non "standard" */
2314 if (GST_VIDEO_INFO_PLANE_STRIDE (info, 0) != v4l2object->bytesperline[0])
2315 v4l2object->need_video_meta = TRUE;
2317 /* ... or also video meta if we use multiple, non-contiguous, planes */
2318 if (v4l2object->n_v4l2_planes > 1)
2319 v4l2object->need_video_meta = TRUE;
2321 gst_video_info_align (info, align);
2322 v4l2object->info = *info;
2323 v4l2object->align = *align;
2324 v4l2object->fmtdesc = fmtdesc;
2326 /* if we have a framerate pre-calculate duration */
2327 if (info->fps_n > 0 && info->fps_d > 0) {
2328 v4l2object->duration = gst_util_uint64_scale_int (GST_SECOND, info->fps_d,
2331 v4l2object->duration = GST_CLOCK_TIME_NONE;
2336 gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
2338 gint fd = v4l2object->video_fd;
2339 struct v4l2_format format;
2340 struct v4l2_streamparm streamparm;
2341 enum v4l2_field field;
2342 guint32 pixelformat;
2343 struct v4l2_fmtdesc *fmtdesc;
2345 GstVideoAlignment align;
2346 gint width, height, fps_n, fps_d;
2349 gboolean is_mplane, format_changed;
2351 GST_V4L2_CHECK_OPEN (v4l2object);
2352 GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
2354 is_mplane = V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type);
2356 gst_video_info_init (&info);
2357 gst_video_alignment_reset (&align);
2359 if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
2362 pixelformat = fmtdesc->pixelformat;
2363 width = GST_VIDEO_INFO_WIDTH (&info);
2364 height = GST_VIDEO_INFO_HEIGHT (&info);
2365 fps_n = GST_VIDEO_INFO_FPS_N (&info);
2366 fps_d = GST_VIDEO_INFO_FPS_D (&info);
2368 /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
2369 * or if contiguous is preferred */
2370 n_v4l_planes = GST_VIDEO_INFO_N_PLANES (&info);
2371 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
2374 if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
2375 GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
2376 /* ideally we would differentiate between types of interlaced video
2377 * but there is not sufficient information in the caps..
2379 field = V4L2_FIELD_INTERLACED;
2381 GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
2382 field = V4L2_FIELD_NONE;
2385 GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
2386 "%" GST_FOURCC_FORMAT " stride: %d", width, height,
2387 GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
2389 memset (&format, 0x00, sizeof (struct v4l2_format));
2390 format.type = v4l2object->type;
2392 if (v4l2object->no_initial_format) {
2393 format_changed = TRUE;
2395 if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0)
2396 goto get_fmt_failed;
2398 /* Note that the first four fields are the same between v4l2_pix_format and
2399 * v4l2_pix_format_mplane, so we don't need to duplicate the checks */
2401 /* If no size in caps, use configured size */
2402 if (width == 0 && height == 0) {
2403 width = format.fmt.pix_mp.width;
2404 height = format.fmt.pix_mp.height;
2407 format_changed = format.type != v4l2object->type ||
2408 format.fmt.pix_mp.width != width ||
2409 format.fmt.pix_mp.height != height ||
2410 format.fmt.pix_mp.pixelformat != pixelformat ||
2411 format.fmt.pix_mp.field != field;
2414 #ifndef GST_DISABLE_GST_DEBUG
2416 GST_DEBUG_OBJECT (v4l2object->element, "Current size is %dx%d, format "
2417 "%" GST_FOURCC_FORMAT " colorspace %d, nb planes %d",
2418 format.fmt.pix_mp.width, format.fmt.pix_mp.height,
2419 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2420 format.fmt.pix_mp.colorspace, format.fmt.pix_mp.num_planes);
2422 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
2423 GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
2424 format.fmt.pix_mp.plane_fmt[i].bytesperline);
2426 GST_DEBUG_OBJECT (v4l2object->element, "Current size is %dx%d, format "
2427 "%" GST_FOURCC_FORMAT " stride %d, colorspace %d",
2428 format.fmt.pix.width, format.fmt.pix.height,
2429 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2430 format.fmt.pix.bytesperline, format.fmt.pix.colorspace);
2434 /* If nothing changed, we are done */
2435 if (!format_changed)
2438 /* something different, set the format */
2439 GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
2440 "%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
2443 format.type = v4l2object->type;
2444 format.fmt.pix_mp.pixelformat = pixelformat;
2445 format.fmt.pix_mp.width = width;
2446 format.fmt.pix_mp.height = height;
2447 format.fmt.pix_mp.field = field;
2448 format.fmt.pix_mp.num_planes = n_v4l_planes;
2450 /* try to ask for our preferred stride, but it's not a failure
2451 * if it is not accepted */
2452 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
2453 format.fmt.pix_mp.plane_fmt[i].bytesperline =
2454 GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
2456 if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
2457 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
2459 format.type = v4l2object->type;
2460 format.fmt.pix.width = width;
2461 format.fmt.pix.height = height;
2462 format.fmt.pix.pixelformat = pixelformat;
2463 format.fmt.pix.field = field;
2464 /* try to ask for our preferred stride */
2465 format.fmt.pix.bytesperline = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
2467 if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
2468 format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
2471 if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
2472 goto set_fmt_failed;
2474 GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format "
2475 "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
2476 format.fmt.pix_mp.height,
2477 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2478 is_mplane ? format.fmt.pix_mp.num_planes : 1);
2480 #ifndef GST_DISABLE_GST_DEBUG
2482 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
2483 GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
2484 format.fmt.pix_mp.plane_fmt[i].bytesperline);
2488 if (format.fmt.pix.pixelformat != pixelformat)
2489 goto invalid_pixelformat;
2491 /* Only negotiate size with raw data.
2492 * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
2493 * in ASF mode for example; there is also no reason for a driver to
2494 * change the size. */
2495 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED) {
2496 /* We can crop larger images */
2497 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
2498 goto invalid_dimensions;
2500 /* Note, this will be adjusted if upstream has non-centered cropping. */
2501 align.padding_top = 0;
2502 align.padding_bottom = format.fmt.pix.height - height;
2503 align.padding_left = 0;
2504 align.padding_right = format.fmt.pix.width - width;
2507 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
2508 goto invalid_planes;
2510 /* Is there a reason we require the caller to always specify a framerate? */
2511 GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
2514 memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
2515 streamparm.type = v4l2object->type;
2517 if (v4l2_ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
2518 goto get_parm_failed;
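/* note: timeperframe holds a frame interval, so its numerator and
 * denominator are swapped when filling in the frame rate below */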
2520 GST_VIDEO_INFO_FPS_N (&info) =
2521 streamparm.parm.capture.timeperframe.denominator;
2522 GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator;
2524 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
2525 || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
2526 GST_DEBUG_OBJECT (v4l2object->element, "Got framerate: %u/%u",
2527 streamparm.parm.capture.timeperframe.denominator,
2528 streamparm.parm.capture.timeperframe.numerator);
2530 /* We used to skip frame rate setup if the camera was already setup
2531 * with the requested frame rate. This breaks some cameras though,
2532 * causing them to not output data (several models of Thinkpad cameras
2533 * have this problem at least).
2534 * So, don't skip. */
2535 GST_LOG_OBJECT (v4l2object->element, "Setting framerate to %u/%u", fps_n,
2537 /* We want to change the frame rate, so check whether we can. Some cheap USB
2538 * cameras don't have the capability */
2539 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
2540 GST_DEBUG_OBJECT (v4l2object->element,
2541 "Not setting framerate (not supported)");
2545 /* Note: V4L2 wants the frame interval, we have the frame rate */
2546 streamparm.parm.capture.timeperframe.numerator = fps_d;
2547 streamparm.parm.capture.timeperframe.denominator = fps_n;
2549 /* some cheap USB cams won't accept any change */
2550 if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
2551 goto set_parm_failed;
2553 /* get new values */
2554 fps_d = streamparm.parm.capture.timeperframe.numerator;
2555 fps_n = streamparm.parm.capture.timeperframe.denominator;
2557 GST_INFO_OBJECT (v4l2object->element, "Set framerate to %u/%u", fps_n,
2560 GST_VIDEO_INFO_FPS_N (&info) = fps_n;
2561 GST_VIDEO_INFO_FPS_D (&info) = fps_d;
2565 gst_v4l2_object_save_format (v4l2object, fmtdesc, &format, &info, &align);
2567 /* now configure the pool */
2568 if (!gst_v4l2_object_setup_pool (v4l2object, caps))
2576 GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT,
2582 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2583 (_("Device '%s' does not support video capture"),
2584 v4l2object->videodev),
2585 ("Call to G_FMT failed: (%s)", g_strerror (errno)));
2590 if (errno == EBUSY) {
2591 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, BUSY,
2592 (_("Device '%s' is busy"), v4l2object->videodev),
2593 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
2594 GST_FOURCC_ARGS (pixelformat), width, height,
2595 g_strerror (errno)));
2597 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2598 (_("Device '%s' cannot capture at %dx%d"),
2599 v4l2object->videodev, width, height),
2600 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
2601 GST_FOURCC_ARGS (pixelformat), width, height,
2602 g_strerror (errno)));
2608 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2609 (_("Device '%s' cannot capture at %dx%d"),
2610 v4l2object->videodev, width, height),
2611 ("Tried to capture at %dx%d, but device returned size %dx%d",
2612 width, height, format.fmt.pix.width, format.fmt.pix.height));
2615 invalid_pixelformat:
2617 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2618 (_("Device '%s' cannot capture in the specified format"),
2619 v4l2object->videodev),
2620 ("Tried to capture in %" GST_FOURCC_FORMAT
2621 ", but device returned format" " %" GST_FOURCC_FORMAT,
2622 GST_FOURCC_ARGS (pixelformat),
2623 GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
2628 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2629 (_("Device '%s' does not support non-contiguous planes"),
2630 v4l2object->videodev),
2631 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
2636 /* it's possible that this call is not supported */
2637 if (errno != EINVAL && errno != ENOTTY) {
2638 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
2639 (_("Could not get parameters on device '%s'"),
2640 v4l2object->videodev), GST_ERROR_SYSTEM);
2646 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
2647 (_("Video device did not accept new frame rate setting.")),
2653 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2654 (_("Video device could not create buffer pool.")), GST_ERROR_SYSTEM);
2660 * gst_v4l2_object_acquire_format:
2661 * @v4l2object the object
2662 * @info a GstVideoInfo to be filled
2664 * Acquire the format chosen by the driver. This is useful in decoder or encoder
2665 * elements where the output format is chosen by the HW.
2667 * Returns: %TRUE on success, %FALSE on failure.
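 *
 * A minimal, hypothetical usage sketch (e.g. from a decoder element that
 * lets the hardware choose the output format):
 * |[
 *   GstVideoInfo info;
 *
 *   if (!gst_v4l2_object_acquire_format (v4l2object, &info))
 *     return FALSE;
 * ]|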
2670 gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info)
2672 struct v4l2_fmtdesc *fmtdesc;
2673 struct v4l2_format fmt;
2674 struct v4l2_crop crop;
2675 GstVideoFormat format;
2676 guint width, height;
2677 GstVideoAlignment align;
2679 gst_video_info_init (info);
2680 gst_video_alignment_reset (&align);
2682 memset (&fmt, 0x00, sizeof (struct v4l2_format));
2683 fmt.type = v4l2object->type;
2684 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
2685 goto get_fmt_failed;
2687 fmtdesc = gst_v4l2_object_get_format_from_fourcc (v4l2object,
2688 fmt.fmt.pix.pixelformat);
2689 if (fmtdesc == NULL)
2690 goto unsupported_format;
2692 /* No need to care about mplane, the first four params are the same */
2693 format = gst_v4l2_object_v4l2fourcc_to_video_format (fmt.fmt.pix.pixelformat);
2695 /* fails if we cannot translate the fmt.pix.pixelformat to a GstVideoFormat */
2696 if (format == GST_VIDEO_FORMAT_UNKNOWN)
2697 goto unsupported_format;
2699 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
2700 goto invalid_dimensions;
2702 width = fmt.fmt.pix.width;
2703 height = fmt.fmt.pix.height;
2705 memset (&crop, 0, sizeof (struct v4l2_crop));
2706 crop.type = v4l2object->type;
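/* if the driver reports a crop rectangle, use the cropped size as the
 * visible size and expose the area outside of it as padding */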
2707 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0) {
2708 align.padding_left = crop.c.left;
2709 align.padding_top = crop.c.top;
2710 align.padding_right = width - crop.c.width - crop.c.left;
2711 align.padding_bottom = height - crop.c.height - crop.c.top;
2712 width = crop.c.width;
2713 height = crop.c.height;
2716 gst_video_info_set_format (info, format, width, height);
2718 switch (fmt.fmt.pix.field) {
2719 case V4L2_FIELD_ANY:
2720 case V4L2_FIELD_NONE:
2721 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
2723 case V4L2_FIELD_INTERLACED:
2724 case V4L2_FIELD_INTERLACED_TB:
2725 case V4L2_FIELD_INTERLACED_BT:
2726 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
2729 goto unsupported_field;
2732 gst_v4l2_object_save_format (v4l2object, fmtdesc, &fmt, info, &align);
2734 /* Shall we set up the pool? */
2740 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
2741 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
2746 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
2747 (_("Video device returned invalid dimensions.")),
2748 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
2749 fmt.fmt.pix.height));
2754 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2755 (_("Video device uses an unsupported interlacing method.")),
2756 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
2761 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2762 (_("Video device uses an unsupported pixel format.")),
2763 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
2764 GST_FOURCC_ARGS (fmt.fmt.pix.pixelformat)));
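/* check whether the given caps are equal to the caps the currently
 * configured buffer pool was created with */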
2770 gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps)
2775 if (!v4l2object->pool)
2778 s = gst_buffer_pool_get_config (GST_BUFFER_POOL_CAST (v4l2object->pool));
2779 gst_buffer_pool_config_get_params (s, &oldcaps, NULL, NULL, NULL);
2781 return oldcaps && gst_caps_is_equal (caps, oldcaps);
2785 gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
2787 GST_LOG_OBJECT (v4l2object->element, "flush poll");
2788 gst_poll_set_flushing (v4l2object->poll, TRUE);
2794 gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
2796 GST_LOG_OBJECT (v4l2object->element, "flush stop poll");
2797 gst_poll_set_flushing (v4l2object->poll, FALSE);
2803 gst_v4l2_object_stop (GstV4l2Object * v4l2object)
2805 GST_DEBUG_OBJECT (v4l2object->element, "stopping");
2807 if (!GST_V4L2_IS_OPEN (v4l2object))
2809 if (!GST_V4L2_IS_ACTIVE (v4l2object))
2812 if (v4l2object->pool) {
2813 GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
2814 gst_buffer_pool_set_active (GST_BUFFER_POOL_CAST (v4l2object->pool), FALSE);
2815 gst_object_unref (v4l2object->pool);
2816 v4l2object->pool = NULL;
2819 GST_V4L2_SET_INACTIVE (v4l2object);
2826 gst_v4l2_object_copy (GstV4l2Object * v4l2object, GstBuffer * dest,
2829 const GstVideoFormatInfo *finfo = v4l2object->info.finfo;
2831 if (finfo && (finfo->format != GST_VIDEO_FORMAT_UNKNOWN &&
2832 finfo->format != GST_VIDEO_FORMAT_ENCODED)) {
2833 GstVideoFrame src_frame, dest_frame;
2835 GST_DEBUG_OBJECT (v4l2object->element, "copy video frame");
2837 /* FIXME This won't work if cropping applies */
2839 /* we have raw video, use videoframe copy to get strides right */
2840 if (!gst_video_frame_map (&src_frame, &v4l2object->info, src, GST_MAP_READ))
2841 goto invalid_buffer;
2843 if (!gst_video_frame_map (&dest_frame, &v4l2object->info, dest,
2845 gst_video_frame_unmap (&src_frame);
2846 goto invalid_buffer;
2849 gst_video_frame_copy (&dest_frame, &src_frame);
2851 gst_video_frame_unmap (&src_frame);
2852 gst_video_frame_unmap (&dest_frame);
2856 GST_DEBUG_OBJECT (v4l2object->element, "copy raw bytes");
2857 gst_buffer_map (src, &map, GST_MAP_READ);
2858 gst_buffer_fill (dest, 0, map.data, gst_buffer_get_size (src));
2859 gst_buffer_unmap (src, &map);
2860 gst_buffer_resize (dest, 0, gst_buffer_get_size (src));
2862 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, v4l2object->element,
2863 "slow copy into buffer %p", dest);
2870 /* could not map the source or destination buffer */
2871 GST_WARNING_OBJECT (v4l2object->element, "could not map image");
2877 gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter)
2883 if (v4l2object->probed_caps == NULL) {
2884 formats = gst_v4l2_object_get_format_list (v4l2object);
2886 ret = gst_caps_new_empty ();
2888 for (walk = formats; walk; walk = walk->next) {
2889 struct v4l2_fmtdesc *format;
2890 GstStructure *template;
2892 format = (struct v4l2_fmtdesc *) walk->data;
2895 gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
2900 tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
2901 format->pixelformat, template);
2903 gst_caps_append (ret, tmp);
2905 gst_structure_free (template);
2907 GST_DEBUG_OBJECT (v4l2object->element, "unknown format %u",
2908 format->pixelformat);
2911 v4l2object->probed_caps = ret;
2915 ret = gst_caps_intersect_full (filter, v4l2object->probed_caps,
2916 GST_CAPS_INTERSECT_FIRST);
2918 ret = gst_caps_ref (v4l2object->probed_caps);
2921 GST_INFO_OBJECT (v4l2object->element, "probed caps: %" GST_PTR_FORMAT, ret);
2922 LOG_CAPS (v4l2object->element, ret);
2928 gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
2930 GstBufferPool *pool;
2931 guint size, min, max;
2933 gboolean has_video_meta, has_crop_meta;
2934 gboolean can_use_own_pool;
2935 struct v4l2_control ctl = { 0, };
2937 GST_DEBUG_OBJECT (obj->element, "decide allocation");
2939 g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2940 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
2942 if (obj->pool == NULL) {
2944 gst_query_parse_allocation (query, &caps, NULL);
2946 if (!gst_v4l2_object_setup_pool (obj, caps))
2950 if (gst_query_get_n_allocation_pools (query) > 0) {
2951 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
2960 GST_DEBUG_OBJECT (obj->element, "allocation: size:%u min:%u max:%u pool:%"
2961 GST_PTR_FORMAT, size, min, max, pool);
2964 /* if there is a min-buffers suggestion, use it. We add 1 because we need one
2965 * extra buffer to capture into while the other buffers are downstream */
2971 /* Certain drivers may expose a minimum through controls */
2972 ctl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2973 if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CTRL, &ctl) >= 0) {
2974 GST_DEBUG_OBJECT (obj->element, "driver requires a minimum of %d buffers",
2976 obj->min_buffers_for_capture = ctl.value;
2980 /* Request a bigger max, if one was suggested but it's too small */
2981 if (max != 0 && max < min)
2985 gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
2987 gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
2990 can_use_own_pool = ((has_crop_meta || !obj->need_crop_meta) &&
2991 (has_video_meta || !obj->need_video_meta));
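/* i.e. our own pool is only usable if downstream supports (or we do not
 * need) every meta required to describe our buffer layout */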
2994 switch (obj->mode) {
2995 case GST_V4L2_IO_RW:
2997 /* in READ/WRITE mode, prefer a downstream pool because our own pool
2998 * doesn't help much; we would have to write into it as well */
2999 GST_DEBUG_OBJECT (obj->element,
3000 "read/write mode: using downstream pool");
3001 /* use the biggest size; when we use our own pool we can't really do any
3002 * other size than what the hardware gives us, but for downstream pools
3004 size = MAX (size, obj->sizeimage);
3005 } else if (can_use_own_pool) {
3006 /* no downstream pool, use our own then */
3007 GST_DEBUG_OBJECT (obj->element,
3008 "read/write mode: no downstream pool, using our own");
3010 size = obj->sizeimage;
3013 case GST_V4L2_IO_MMAP:
3014 case GST_V4L2_IO_USERPTR:
3015 case GST_V4L2_IO_DMABUF:
3016 /* in streaming mode, prefer our own pool */
3017 /* Check if we can use it ... */
3018 if (can_use_own_pool) {
3020 gst_object_unref (pool);
3022 size = obj->sizeimage;
3024 GST_DEBUG_OBJECT (obj->element,
3025 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
3027 GST_DEBUG_OBJECT (obj->element,
3028 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
3030 size = MAX (size, obj->sizeimage);
3033 case GST_V4L2_IO_AUTO:
3035 GST_WARNING_OBJECT (obj->element, "unhandled mode");
3040 GstStructure *config;
3043 config = gst_buffer_pool_get_config (pool);
3044 gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL);
3045 gst_buffer_pool_config_set_params (config, caps, size, min, max);
3047 /* if downstream supports video metadata, add this to the pool config */
3048 if (has_video_meta) {
3049 GST_DEBUG_OBJECT (pool, "activate Video Meta");
3050 gst_buffer_pool_config_add_option (config,
3051 GST_BUFFER_POOL_OPTION_VIDEO_META);
3054 gst_buffer_pool_set_config (pool, config);
3057 /* Our pool may be incompatible, though we'll need the metadata in order to
3058 * copy to a downstream compatible buffer */
3059 if (pool != obj->pool && obj->need_video_meta) {
3060 GstStructure *config;
3063 config = gst_buffer_pool_get_config (obj->pool);
3064 gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL);
3065 gst_buffer_pool_config_set_params (config, caps, obj->sizeimage, min, 0);
3066 gst_buffer_pool_config_add_option (config,
3067 GST_BUFFER_POOL_OPTION_VIDEO_META);
3069 gst_buffer_pool_set_config (obj->pool, config);
3072 if (obj->need_crop_meta)
3073 gst_v4l2_buffer_pool_add_crop_meta (GST_V4L2_BUFFER_POOL (obj->pool),
3074 obj->need_crop_meta);
3077 gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
3079 gst_query_add_allocation_pool (query, pool, size, min, max);
3085 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
3086 (_("Video device could not create buffer pool.")), GST_ERROR_SYSTEM);