3 * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
4 * 2006 Edgard Lima <edgard.lima@indt.org.br>
6 * gstv4l2object.c: base class for V4L2 elements
8 * This library is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU Library General Public License as published
10 * by the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version. This library is distributed in the hope
12 * that it will be useful, but WITHOUT ANY WARRANTY; without even the
13 * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
14 * PURPOSE. See the GNU Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
21 /* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
22 * with newer GLib versions (>= 2.31.0) */
23 #define GLIB_DISABLE_DEPRECATION_WARNINGS
36 #include <gudev/gudev.h>
39 #include "v4l2_calls.h"
40 #include "gstv4l2tuner.h"
41 #include "gstv4l2colorbalance.h"
43 #include "gst/gst-i18n-plugin.h"
45 #include <gst/video/video.h>
47 GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
48 #define GST_CAT_DEFAULT v4l2_debug
50 #define DEFAULT_PROP_DEVICE_NAME NULL
51 #define DEFAULT_PROP_DEVICE_FD -1
52 #define DEFAULT_PROP_FLAGS 0
53 #define DEFAULT_PROP_TV_NORM 0
54 #define DEFAULT_PROP_CHANNEL NULL
55 #define DEFAULT_PROP_FREQUENCY 0
56 #define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
58 #define ENCODED_BUFFER_SIZE (1 * 1024 * 1024)
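/* Fallback size, in bytes, presumably used for buffers that hold encoded
 * (compressed) frames, whose size cannot be derived from width/height and
 * bytes-per-pixel the way it can for raw formats. */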
63 V4L2_STD_OBJECT_PROPS,
67 * common format / caps utilities:
71 GST_V4L2_RAW = 1 << 0,
72 GST_V4L2_CODEC = 1 << 1,
73 GST_V4L2_TRANSPORT = 1 << 2,
74 GST_V4L2_NO_PARSE = 1 << 3,
82 GstV4L2FormatFlags flags;
85 static const GstV4L2FormatDesc gst_v4l2_formats[] = {
86 /* from Linux 2.6.15 videodev2.h */
87 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
88 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
102 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
103 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
105 /* two planes -- one Y, one Cr + Cb interleaved */
106 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
107 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
109 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
110 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
112 /* The following formats are not defined in the V4L2 specification */
113 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
114 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
115 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
116 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
118 /* see http://www.siliconimaging.com/RGB%20Bayer.htm */
119 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC},
121 /* compressed formats */
122 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
123 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
124 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
125 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
126 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
127 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
128 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
129 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
130 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
131 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
132 /* VP8 not parseable */
133 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
135 /* Vendor-specific formats */
136 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
137 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
138 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
139 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
140 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
143 #define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
145 static GSList *gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object);
148 #define GST_TYPE_V4L2_DEVICE_FLAGS (gst_v4l2_device_get_type ())
150 gst_v4l2_device_get_type (void)
152 static GType v4l2_device_type = 0;
154 if (v4l2_device_type == 0) {
155 static const GFlagsValue values[] = {
156 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
157 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
158 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
160 {V4L2_CAP_VBI_CAPTURE, "Device supports VBI capture", "vbi-capture"},
161 {V4L2_CAP_VBI_OUTPUT, "Device supports VBI output", "vbi-output"},
163 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
164 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
170 g_flags_register_static ("GstV4l2DeviceTypeFlags", values);
173 return v4l2_device_type;
176 #define GST_TYPE_V4L2_TV_NORM (gst_v4l2_tv_norm_get_type ())
178 gst_v4l2_tv_norm_get_type (void)
180 static GType v4l2_tv_norm = 0;
183 static const GEnumValue tv_norms[] = {
186 {V4L2_STD_NTSC, "NTSC", "NTSC"},
187 {V4L2_STD_NTSC_M, "NTSC-M", "NTSC-M"},
188 {V4L2_STD_NTSC_M_JP, "NTSC-M-JP", "NTSC-M-JP"},
189 {V4L2_STD_NTSC_M_KR, "NTSC-M-KR", "NTSC-M-KR"},
190 {V4L2_STD_NTSC_443, "NTSC-443", "NTSC-443"},
192 {V4L2_STD_PAL, "PAL", "PAL"},
193 {V4L2_STD_PAL_BG, "PAL-BG", "PAL-BG"},
194 {V4L2_STD_PAL_B, "PAL-B", "PAL-B"},
195 {V4L2_STD_PAL_B1, "PAL-B1", "PAL-B1"},
196 {V4L2_STD_PAL_G, "PAL-G", "PAL-G"},
197 {V4L2_STD_PAL_H, "PAL-H", "PAL-H"},
198 {V4L2_STD_PAL_I, "PAL-I", "PAL-I"},
199 {V4L2_STD_PAL_DK, "PAL-DK", "PAL-DK"},
200 {V4L2_STD_PAL_D, "PAL-D", "PAL-D"},
201 {V4L2_STD_PAL_D1, "PAL-D1", "PAL-D1"},
202 {V4L2_STD_PAL_K, "PAL-K", "PAL-K"},
203 {V4L2_STD_PAL_M, "PAL-M", "PAL-M"},
204 {V4L2_STD_PAL_N, "PAL-N", "PAL-N"},
205 {V4L2_STD_PAL_Nc, "PAL-Nc", "PAL-Nc"},
206 {V4L2_STD_PAL_60, "PAL-60", "PAL-60"},
208 {V4L2_STD_SECAM, "SECAM", "SECAM"},
209 {V4L2_STD_SECAM_B, "SECAM-B", "SECAM-B"},
210 {V4L2_STD_SECAM_G, "SECAM-G", "SECAM-G"},
211 {V4L2_STD_SECAM_H, "SECAM-H", "SECAM-H"},
212 {V4L2_STD_SECAM_DK, "SECAM-DK", "SECAM-DK"},
213 {V4L2_STD_SECAM_D, "SECAM-D", "SECAM-D"},
214 {V4L2_STD_SECAM_K, "SECAM-K", "SECAM-K"},
215 {V4L2_STD_SECAM_K1, "SECAM-K1", "SECAM-K1"},
216 {V4L2_STD_SECAM_L, "SECAM-L", "SECAM-L"},
217 {V4L2_STD_SECAM_LC, "SECAM-Lc", "SECAM-Lc"},
222 v4l2_tv_norm = g_enum_register_static ("V4L2_TV_norms", tv_norms);
229 gst_v4l2_io_mode_get_type (void)
231 static GType v4l2_io_mode = 0;
234 static const GEnumValue io_modes[] = {
235 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
236 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
237 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
238 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
239 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
240 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
245 v4l2_io_mode = g_enum_register_static ("GstV4l2IOMode", io_modes);
251 gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
252 const char *default_device)
254 g_object_class_install_property (gobject_class, PROP_DEVICE,
255 g_param_spec_string ("device", "Device", "Device location",
256 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
257 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
258 g_param_spec_string ("device-name", "Device name",
259 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
260 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
261 g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
262 g_param_spec_int ("device-fd", "File descriptor",
263 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
264 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
265 g_object_class_install_property (gobject_class, PROP_FLAGS,
266 g_param_spec_flags ("flags", "Flags", "Device type flags",
267 GST_TYPE_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
268 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
271 * GstV4l2Src:brightness:
273 * Picture brightness, or more precisely, the black level
275 g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
276 g_param_spec_int ("brightness", "Brightness",
277 "Picture brightness, or more precisely, the black level", G_MININT,
279 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
281 * GstV4l2Src:contrast:
283 * Picture contrast or luma gain
285 g_object_class_install_property (gobject_class, PROP_CONTRAST,
286 g_param_spec_int ("contrast", "Contrast",
287 "Picture contrast or luma gain", G_MININT,
289 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
291 * GstV4l2Src:saturation:
293 * Picture color saturation or chroma gain
295 g_object_class_install_property (gobject_class, PROP_SATURATION,
296 g_param_spec_int ("saturation", "Saturation",
297 "Picture color saturation or chroma gain", G_MININT,
299 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
303 * Hue or color balance
305 g_object_class_install_property (gobject_class, PROP_HUE,
306 g_param_spec_int ("hue", "Hue",
307 "Hue or color balance", G_MININT,
309 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
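  /* These four picture properties are forwarded to the corresponding V4L2
   * controls (V4L2_CID_BRIGHTNESS and friends) via
   * gst_v4l2_object_prop_to_cid() whenever the device is open; see
   * gst_v4l2_object_set_property_helper() further below. */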
316 g_object_class_install_property (gobject_class, PROP_TV_NORM,
317 g_param_spec_enum ("norm", "TV norm",
319 GST_TYPE_V4L2_TV_NORM, DEFAULT_PROP_TV_NORM,
320 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
323 * GstV4l2Src:io-mode:
327 g_object_class_install_property (gobject_class, PROP_IO_MODE,
328 g_param_spec_enum ("io-mode", "IO mode",
330 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
331 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
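  /* Illustrative usage (assumed element and pipeline, not from this file):
   * the io-mode values map to the nicks registered in
   * gst_v4l2_io_mode_get_type() above, e.g.
   *   gst-launch-1.0 v4l2src io-mode=dmabuf ! ...
   * forces GST_V4L2_IO_DMABUF instead of letting "auto" pick rw/mmap. */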
334 * GstV4l2Src:extra-controls:
336 * Additional v4l2 controls for the device. The controls are identified
337 * by the control name (lowercase with '_' for any non-alphanumeric
342 g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
343 g_param_spec_boxed ("extra-controls", "Extra Controls",
344 "Extra v4l2 controls (CIDs) for the device",
345 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
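  /* Illustrative usage (assumed control names, not from this file): the
   * property takes a GstStructure whose field names follow the mangling
   * described above, e.g. from gst-launch:
   *   extra-controls="controls,brightness=128,exposure_auto=1"
   * The structure name ("controls" here) is arbitrary. */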
348 * GstV4l2Src:pixel-aspect-ratio:
350 * The pixel aspect ratio of the device. This overwrites the pixel aspect
351 * ratio queried from the device.
355 g_object_class_install_property (gobject_class, PROP_PIXEL_ASPECT_RATIO,
356 g_param_spec_string ("pixel-aspect-ratio", "Pixel Aspect Ratio",
357 "Overwrite the pixel aspect ratio of the device", "1/1",
358 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
361 * GstV4l2Src:force-aspect-ratio:
363 * When enabled, the pixel aspect ratio queried from the device or set
364 * with the pixel-aspect-ratio property will be enforced.
368 g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
369 g_param_spec_boolean ("force-aspect-ratio", "Force aspect ratio",
370 "When enabled, the pixel aspect ratio will be enforced", TRUE,
371 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
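  /* Illustrative: setting e.g. pixel-aspect-ratio="4/3" replaces the PAR
   * otherwise probed via VIDIOC_CROPCAP (see gst_v4l2_object_add_aspect_ratio
   * below), while force-aspect-ratio=false leaves the probed caps without a
   * pixel-aspect-ratio field at all. */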
376 gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class)
378 g_object_class_install_property (gobject_class, PROP_DEVICE,
379 g_param_spec_string ("device", "Device", "Device location",
380 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
382 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
383 g_param_spec_string ("device-name", "Device name",
384 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
385 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
387 g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
388 g_param_spec_int ("device-fd", "File descriptor",
389 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
390 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
392 g_object_class_install_property (gobject_class, PROP_OUTPUT_IO_MODE,
393 g_param_spec_enum ("output-io-mode", "Output IO mode",
394 "Output side I/O mode (matches sink pad)",
395 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
396 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
398 g_object_class_install_property (gobject_class, PROP_CAPTURE_IO_MODE,
399 g_param_spec_enum ("capture-io-mode", "Capture IO mode",
400 "Capture I/O mode (matches src pad)",
401 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
402 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
404 g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
405 g_param_spec_boxed ("extra-controls", "Extra Controls",
406 "Extra v4l2 controls (CIDs) for the device",
407 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
411 gst_v4l2_object_new (GstElement * element,
412 enum v4l2_buf_type type,
413 const char *default_device,
414 GstV4l2GetInOutFunction get_in_out_func,
415 GstV4l2SetInOutFunction set_in_out_func,
416 GstV4l2UpdateFpsFunction update_fps_func)
418 GstV4l2Object *v4l2object;
421 * some default values
423 v4l2object = g_new0 (GstV4l2Object, 1);
425 v4l2object->type = type;
426 v4l2object->formats = NULL;
428 v4l2object->element = element;
429 v4l2object->get_in_out_func = get_in_out_func;
430 v4l2object->set_in_out_func = set_in_out_func;
431 v4l2object->update_fps_func = update_fps_func;
433 v4l2object->video_fd = -1;
434 v4l2object->active = FALSE;
435 v4l2object->videodev = g_strdup (default_device);
437 v4l2object->norms = NULL;
438 v4l2object->channels = NULL;
439 v4l2object->colors = NULL;
441 v4l2object->xwindow_id = 0;
443 v4l2object->keep_aspect = TRUE;
445 v4l2object->n_v4l2_planes = 0;
447 v4l2object->no_initial_format = FALSE;
452 static gboolean gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object);
456 gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
458 g_return_if_fail (v4l2object != NULL);
460 if (v4l2object->videodev)
461 g_free (v4l2object->videodev);
463 if (v4l2object->channel)
464 g_free (v4l2object->channel);
466 if (v4l2object->formats) {
467 gst_v4l2_object_clear_format_list (v4l2object);
470 if (v4l2object->probed_caps) {
471 gst_caps_unref (v4l2object->probed_caps);
479 gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
481 g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
482 g_slist_free (v4l2object->formats);
483 v4l2object->formats = NULL;
489 gst_v4l2_object_prop_to_cid (guint prop_id)
494 case PROP_BRIGHTNESS:
495 cid = V4L2_CID_BRIGHTNESS;
498 cid = V4L2_CID_CONTRAST;
500 case PROP_SATURATION:
501 cid = V4L2_CID_SATURATION;
507 GST_WARNING ("unmapped property id: %d", prop_id);
514 gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
515 guint prop_id, const GValue * value, GParamSpec * pspec)
519 g_free (v4l2object->videodev);
520 v4l2object->videodev = g_value_dup_string (value);
522 case PROP_BRIGHTNESS:
524 case PROP_SATURATION:
527 gint cid = gst_v4l2_object_prop_to_cid (prop_id);
530 if (GST_V4L2_IS_OPEN (v4l2object)) {
531 gst_v4l2_set_attribute (v4l2object, cid, g_value_get_int (value));
538 v4l2object->tv_norm = g_value_get_enum (value);
542 if (GST_V4L2_IS_OPEN (v4l2object)) {
543 GstTuner *tuner = GST_TUNER (v4l2object->element);
544 GstTunerChannel *channel = gst_tuner_find_channel_by_name (tuner,
545 (gchar *) g_value_get_string (value));
548 /* like gst_tuner_set_channel (tuner, channel)
549 without g_object_notify */
550 gst_v4l2_tuner_set_channel (v4l2object, channel);
553 g_free (v4l2object->channel);
554 v4l2object->channel = g_value_dup_string (value);
558 if (GST_V4L2_IS_OPEN (v4l2object)) {
559 GstTuner *tuner = GST_TUNER (v4l2object->element);
560 GstTunerChannel *channel = gst_tuner_get_channel (tuner);
563 GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
565 gst_tuner_set_frequency (tuner, channel, g_value_get_ulong (value))
566 without g_object_notify */
567 gst_v4l2_tuner_set_frequency (v4l2object, channel,
568 g_value_get_ulong (value));
571 v4l2object->frequency = g_value_get_ulong (value);
577 v4l2object->req_mode = g_value_get_enum (value);
579 case PROP_CAPTURE_IO_MODE:
580 g_return_val_if_fail (!V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
581 v4l2object->req_mode = g_value_get_enum (value);
583 case PROP_OUTPUT_IO_MODE:
584 g_return_val_if_fail (V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
585 v4l2object->req_mode = g_value_get_enum (value);
587 case PROP_EXTRA_CONTROLS:{
588 const GstStructure *s = gst_value_get_structure (value);
590 if (v4l2object->extra_controls)
591 gst_structure_free (v4l2object->extra_controls);
593 v4l2object->extra_controls = s ? gst_structure_copy (s) : NULL;
594 if (GST_V4L2_IS_OPEN (v4l2object))
595 gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
598 case PROP_PIXEL_ASPECT_RATIO:
599 g_free (v4l2object->par);
600 v4l2object->par = g_new0 (GValue, 1);
601 g_value_init (v4l2object->par, GST_TYPE_FRACTION);
602 if (!g_value_transform (value, v4l2object->par)) {
603 g_warning ("Could not transform string to aspect ratio");
604 gst_value_set_fraction (v4l2object->par, 1, 1);
606 GST_DEBUG_OBJECT (v4l2object->element, "set PAR to %d/%d",
607 gst_value_get_fraction_numerator (v4l2object->par),
608 gst_value_get_fraction_denominator (v4l2object->par));
610 case PROP_FORCE_ASPECT_RATIO:
611 v4l2object->keep_aspect = g_value_get_boolean (value);
622 gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object,
623 guint prop_id, GValue * value, GParamSpec * pspec)
627 g_value_set_string (value, v4l2object->videodev);
629 case PROP_DEVICE_NAME:
631 const guchar *new = NULL;
633 if (GST_V4L2_IS_OPEN (v4l2object)) {
634 new = v4l2object->vcap.card;
635 } else if (gst_v4l2_open (v4l2object)) {
636 new = v4l2object->vcap.card;
637 gst_v4l2_close (v4l2object);
639 g_value_set_string (value, (gchar *) new);
644 if (GST_V4L2_IS_OPEN (v4l2object))
645 g_value_set_int (value, v4l2object->video_fd);
647 g_value_set_int (value, DEFAULT_PROP_DEVICE_FD);
654 if (GST_V4L2_IS_OPEN (v4l2object)) {
655 flags |= v4l2object->vcap.capabilities &
656 (V4L2_CAP_VIDEO_CAPTURE |
657 V4L2_CAP_VIDEO_OUTPUT |
658 V4L2_CAP_VIDEO_OVERLAY |
659 V4L2_CAP_VBI_CAPTURE |
660 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
662 if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
663 flags |= V4L2_CAP_VIDEO_CAPTURE;
665 if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
666 flags |= V4L2_CAP_VIDEO_OUTPUT;
668 g_value_set_flags (value, flags);
671 case PROP_BRIGHTNESS:
673 case PROP_SATURATION:
676 gint cid = gst_v4l2_object_prop_to_cid (prop_id);
679 if (GST_V4L2_IS_OPEN (v4l2object)) {
681 if (gst_v4l2_get_attribute (v4l2object, cid, &v)) {
682 g_value_set_int (value, v);
690 g_value_set_enum (value, v4l2object->tv_norm);
693 g_value_set_enum (value, v4l2object->req_mode);
695 case PROP_CAPTURE_IO_MODE:
696 g_return_val_if_fail (!V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
697 g_value_set_enum (value, v4l2object->req_mode);
699 case PROP_OUTPUT_IO_MODE:
700 g_return_val_if_fail (V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
701 g_value_set_enum (value, v4l2object->req_mode);
703 case PROP_EXTRA_CONTROLS:
704 gst_value_set_structure (value, v4l2object->extra_controls);
706 case PROP_PIXEL_ASPECT_RATIO:
708 g_value_transform (v4l2object->par, value);
710 case PROP_FORCE_ASPECT_RATIO:
711 g_value_set_boolean (value, v4l2object->keep_aspect);
721 gst_v4l2_set_defaults (GstV4l2Object * v4l2object)
723 GstTunerNorm *norm = NULL;
724 GstTunerChannel *channel = NULL;
727 if (!GST_IS_TUNER (v4l2object->element))
730 tuner = GST_TUNER (v4l2object->element);
732 if (v4l2object->tv_norm)
733 norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
734 GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
735 "norm=%p", (guint64) v4l2object->tv_norm, norm);
737 gst_tuner_set_norm (tuner, norm);
740 GST_TUNER_NORM (gst_tuner_get_norm (GST_TUNER (v4l2object->element)));
742 v4l2object->tv_norm =
743 gst_v4l2_tuner_get_std_id_by_norm (v4l2object, norm);
744 gst_tuner_norm_changed (tuner, norm);
748 if (v4l2object->channel)
749 channel = gst_tuner_find_channel_by_name (tuner, v4l2object->channel);
751 gst_tuner_set_channel (tuner, channel);
754 GST_TUNER_CHANNEL (gst_tuner_get_channel (GST_TUNER
755 (v4l2object->element)));
757 g_free (v4l2object->channel);
758 v4l2object->channel = g_strdup (channel->label);
759 gst_tuner_channel_changed (tuner, channel);
764 && GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
765 if (v4l2object->frequency != 0) {
766 gst_tuner_set_frequency (tuner, channel, v4l2object->frequency);
768 v4l2object->frequency = gst_tuner_get_frequency (tuner, channel);
769 if (v4l2object->frequency == 0) {
771 gst_tuner_set_frequency (tuner, channel, 1000);
779 gst_v4l2_object_open (GstV4l2Object * v4l2object)
781 if (gst_v4l2_open (v4l2object))
782 gst_v4l2_set_defaults (v4l2object);
790 gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other)
794 ret = gst_v4l2_dup (v4l2object, other);
800 gst_v4l2_object_close (GstV4l2Object * v4l2object)
802 if (!gst_v4l2_close (v4l2object))
805 gst_caps_replace (&v4l2object->probed_caps, NULL);
807 if (v4l2object->formats) {
808 gst_v4l2_object_clear_format_list (v4l2object);
814 static struct v4l2_fmtdesc *
815 gst_v4l2_object_get_format_from_fourcc (GstV4l2Object * v4l2object,
818 struct v4l2_fmtdesc *fmt;
824 walk = gst_v4l2_object_get_format_list (v4l2object);
826 fmt = (struct v4l2_fmtdesc *) walk->data;
827 if (fmt->pixelformat == fourcc)
829 /* special case for jpeg */
830 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
831 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
832 fmt->pixelformat == V4L2_PIX_FMT_PJPG) {
833 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
834 fourcc == V4L2_PIX_FMT_PJPG) {
838 walk = g_slist_next (walk);
846 /* completely made-up ranking; the values themselves are meaningless */
847 /* These ranks MUST be X such that X<<15 fits on a signed int - see
848 the comment at the end of gst_v4l2_object_format_get_rank. */
849 #define YUV_BASE_RANK 1000
850 #define JPEG_BASE_RANK 500
851 #define DV_BASE_RANK 200
852 #define RGB_BASE_RANK 100
853 #define YUV_ODD_BASE_RANK 50
854 #define RGB_ODD_BASE_RANK 25
855 #define BAYER_BASE_RANK 15
856 #define S910_BASE_RANK 10
857 #define GREY_BASE_RANK 5
858 #define PWC_BASE_RANK 1
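/* Worked example of the ranking below: a native YUY2 format gets
 * YUV_BASE_RANK + 10 = 1010, which is then shifted left by 15 for
 * non-emulated formats (see the note near the end of
 * gst_v4l2_object_format_get_rank), so a native format always sorts ahead of
 * an emulated one, whose rank stays below 1 << 15. format_cmp_func compares
 * b against a, giving a descending sort: preferred formats come first in the
 * probed list. */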
861 gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
863 guint32 fourcc = fmt->pixelformat;
864 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
868 case V4L2_PIX_FMT_MJPEG:
869 case V4L2_PIX_FMT_PJPG:
870 rank = JPEG_BASE_RANK;
872 case V4L2_PIX_FMT_JPEG:
873 rank = JPEG_BASE_RANK + 1;
875 case V4L2_PIX_FMT_MPEG: /* MPEG */
876 rank = JPEG_BASE_RANK + 2;
879 case V4L2_PIX_FMT_RGB332:
880 case V4L2_PIX_FMT_RGB555:
881 case V4L2_PIX_FMT_RGB555X:
882 case V4L2_PIX_FMT_RGB565:
883 case V4L2_PIX_FMT_RGB565X:
884 rank = RGB_ODD_BASE_RANK;
887 case V4L2_PIX_FMT_RGB24:
888 case V4L2_PIX_FMT_BGR24:
889 rank = RGB_BASE_RANK - 1;
892 case V4L2_PIX_FMT_RGB32:
893 case V4L2_PIX_FMT_BGR32:
894 rank = RGB_BASE_RANK;
897 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
898 rank = GREY_BASE_RANK;
901 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
902 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
903 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
904 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
905 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
906 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
907 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
908 rank = YUV_ODD_BASE_RANK;
911 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
912 rank = YUV_BASE_RANK + 3;
914 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
915 rank = YUV_BASE_RANK + 2;
917 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
918 rank = YUV_BASE_RANK + 7;
920 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
921 rank = YUV_BASE_RANK + 10;
923 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
924 rank = YUV_BASE_RANK + 6;
926 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
927 rank = YUV_BASE_RANK + 9;
929 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
930 rank = YUV_BASE_RANK + 5;
932 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
933 rank = YUV_BASE_RANK + 4;
935 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
936 rank = YUV_BASE_RANK + 8;
939 case V4L2_PIX_FMT_DV:
943 case V4L2_PIX_FMT_WNVA: /* Winnov hw compression */
947 case V4L2_PIX_FMT_SBGGR8:
948 rank = BAYER_BASE_RANK;
951 case V4L2_PIX_FMT_SN9C10X:
952 rank = S910_BASE_RANK;
955 case V4L2_PIX_FMT_PWC1:
956 rank = PWC_BASE_RANK;
958 case V4L2_PIX_FMT_PWC2:
959 rank = PWC_BASE_RANK;
967 /* All ranks are below 1<<15 so a shift by 15
968 * will a) make all non-emulated formats larger
969 * than emulated and b) will not overflow
980 format_cmp_func (gconstpointer a, gconstpointer b)
982 const struct v4l2_fmtdesc *fa = a;
983 const struct v4l2_fmtdesc *fb = b;
985 if (fa->pixelformat == fb->pixelformat)
988 return gst_v4l2_object_format_get_rank (fb) -
989 gst_v4l2_object_format_get_rank (fa);
992 /******************************************************
993 * gst_v4l2_object_fill_format_list():
994 * create list of supported capture formats
995 * return value: TRUE on success, FALSE on error
996 ******************************************************/
998 gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object,
999 enum v4l2_buf_type type)
1002 struct v4l2_fmtdesc *format;
1004 GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations");
1006 /* format enumeration */
1008 format = g_new0 (struct v4l2_fmtdesc, 1);
1011 format->type = type;
1013 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
1014 if (errno == EINVAL) {
1016 break; /* end of enumeration */
1022 GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index);
1023 GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type);
1024 GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags);
1025 GST_LOG_OBJECT (v4l2object->element, "description: '%s'",
1026 format->description);
1027 GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT,
1028 GST_FOURCC_ARGS (format->pixelformat));
1030 /* sort formats according to our preference; we do this, because caps
1031 * are probed in the order the formats are in the list, and the order of
1032 * formats in the final probed caps matters for things like fixation */
1033 v4l2object->formats = g_slist_insert_sorted (v4l2object->formats, format,
1034 (GCompareFunc) format_cmp_func);
1037 #ifndef GST_DISABLE_GST_DEBUG
1041 GST_INFO_OBJECT (v4l2object->element, "got %d format(s):", n);
1042 for (l = v4l2object->formats; l != NULL; l = l->next) {
1045 GST_INFO_OBJECT (v4l2object->element,
1046 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat),
1047 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1057 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
1058 (_("Failed to enumerate possible video formats device '%s' can work with"), v4l2object->videodev), ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)", n, v4l2object->videodev, errno, g_strerror (errno)));
1065 * Get the list of supported capture formats, a list of
1066 * <code>struct v4l2_fmtdesc</code>.
1069 gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
1071 if (!v4l2object->formats) {
1073 /* check usual way */
1074 gst_v4l2_object_fill_format_list (v4l2object, v4l2object->type);
1076 /* if our driver supports multi-planar
1077 * and if formats are still empty then we can work around a driver bug
1078 * by also looking up formats as if our device was not supporting
1080 if (!v4l2object->formats) {
1081 switch (v4l2object->type) {
1082 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1083 gst_v4l2_object_fill_format_list (v4l2object,
1084 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1087 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1088 gst_v4l2_object_fill_format_list (v4l2object,
1089 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1097 return v4l2object->formats;
1100 static GstVideoFormat
1101 gst_v4l2_object_v4l2fourcc_to_video_format (guint32 fourcc)
1103 GstVideoFormat format;
1106 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1107 format = GST_VIDEO_FORMAT_GRAY8;
1109 case V4L2_PIX_FMT_RGB555:
1110 format = GST_VIDEO_FORMAT_RGB15;
1112 case V4L2_PIX_FMT_RGB565:
1113 format = GST_VIDEO_FORMAT_RGB16;
1115 case V4L2_PIX_FMT_RGB24:
1116 format = GST_VIDEO_FORMAT_RGB;
1118 case V4L2_PIX_FMT_BGR24:
1119 format = GST_VIDEO_FORMAT_BGR;
1121 case V4L2_PIX_FMT_RGB32:
1122 format = GST_VIDEO_FORMAT_xRGB;
1124 case V4L2_PIX_FMT_BGR32:
1125 format = GST_VIDEO_FORMAT_BGRx;
1127 case V4L2_PIX_FMT_NV12:
1128 case V4L2_PIX_FMT_NV12M:
1129 format = GST_VIDEO_FORMAT_NV12;
1131 case V4L2_PIX_FMT_NV12MT:
1132 format = GST_VIDEO_FORMAT_NV12_64Z32;
1134 case V4L2_PIX_FMT_NV21:
1135 case V4L2_PIX_FMT_NV21M:
1136 format = GST_VIDEO_FORMAT_NV21;
1138 case V4L2_PIX_FMT_YVU410:
1139 format = GST_VIDEO_FORMAT_YVU9;
1141 case V4L2_PIX_FMT_YUV410:
1142 format = GST_VIDEO_FORMAT_YUV9;
1144 case V4L2_PIX_FMT_YUV420:
1145 format = GST_VIDEO_FORMAT_I420;
1147 case V4L2_PIX_FMT_YUYV:
1148 format = GST_VIDEO_FORMAT_YUY2;
1150 case V4L2_PIX_FMT_YVU420:
1151 format = GST_VIDEO_FORMAT_YV12;
1153 case V4L2_PIX_FMT_UYVY:
1154 format = GST_VIDEO_FORMAT_UYVY;
1157 case V4L2_PIX_FMT_Y41P:
1158 format = GST_VIDEO_FORMAT_Y41P;
1161 case V4L2_PIX_FMT_YUV411P:
1162 format = GST_VIDEO_FORMAT_Y41B;
1164 case V4L2_PIX_FMT_YUV422P:
1165 format = GST_VIDEO_FORMAT_Y42B;
1167 case V4L2_PIX_FMT_YVYU:
1168 format = GST_VIDEO_FORMAT_YVYU;
1171 format = GST_VIDEO_FORMAT_UNKNOWN;
1178 static GstStructure *
1179 gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc)
1181 GstStructure *structure = NULL;
1184 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1185 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1186 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1187 structure = gst_structure_new_empty ("image/jpeg");
1189 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1190 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1191 /* FIXME: get correct fourccs here */
1193 case V4L2_PIX_FMT_MPEG1:
1194 structure = gst_structure_new ("video/mpeg",
1195 "mpegversion", G_TYPE_INT, 1, NULL);
1197 case V4L2_PIX_FMT_MPEG2:
1198 structure = gst_structure_new ("video/mpeg",
1199 "mpegversion", G_TYPE_INT, 2, NULL);
1201 case V4L2_PIX_FMT_MPEG4:
1202 structure = gst_structure_new ("video/mpeg",
1203 "mpegversion", G_TYPE_INT, 4, "systemstream",
1204 G_TYPE_BOOLEAN, FALSE, NULL);
1206 case V4L2_PIX_FMT_H263:
1207 structure = gst_structure_new ("video/x-h263",
1208 "variant", G_TYPE_STRING, "itu", NULL);
1210 case V4L2_PIX_FMT_H264: /* H.264 */
1211 structure = gst_structure_new ("video/x-h264",
1212 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1213 G_TYPE_STRING, "au", NULL);
1215 case V4L2_PIX_FMT_VP8:
1216 structure = gst_structure_new_empty ("video/x-vp8");
1218 case V4L2_PIX_FMT_RGB332:
1219 case V4L2_PIX_FMT_RGB555X:
1220 case V4L2_PIX_FMT_RGB565X:
1221 /* FIXME: get correct fourccs here */
1223 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1224 case V4L2_PIX_FMT_RGB555:
1225 case V4L2_PIX_FMT_RGB565:
1226 case V4L2_PIX_FMT_RGB24:
1227 case V4L2_PIX_FMT_BGR24:
1228 case V4L2_PIX_FMT_RGB32:
1229 case V4L2_PIX_FMT_BGR32:
1230 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1231 case V4L2_PIX_FMT_NV12M:
1232 case V4L2_PIX_FMT_NV12MT:
1233 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1234 case V4L2_PIX_FMT_NV21M:
1235 case V4L2_PIX_FMT_YVU410:
1236 case V4L2_PIX_FMT_YUV410:
1237 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1238 case V4L2_PIX_FMT_YUYV:
1239 case V4L2_PIX_FMT_YVU420:
1240 case V4L2_PIX_FMT_UYVY:
1242 case V4L2_PIX_FMT_Y41P:
1244 case V4L2_PIX_FMT_YUV422P:
1245 case V4L2_PIX_FMT_YVYU:
1246 case V4L2_PIX_FMT_YUV411P:{
1247 GstVideoFormat format;
1248 format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
1249 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1250 structure = gst_structure_new ("video/x-raw",
1251 "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
1254 case V4L2_PIX_FMT_DV:
1256 gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1259 case V4L2_PIX_FMT_MPEG: /* MPEG */
1260 structure = gst_structure_new ("video/mpegts",
1261 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1263 case V4L2_PIX_FMT_WNVA: /* Winnov hw compression */
1265 case V4L2_PIX_FMT_SBGGR8:
1266 structure = gst_structure_new_empty ("video/x-bayer");
1268 case V4L2_PIX_FMT_SN9C10X:
1269 structure = gst_structure_new_empty ("video/x-sonix");
1271 case V4L2_PIX_FMT_PWC1:
1272 structure = gst_structure_new_empty ("video/x-pwc1");
1274 case V4L2_PIX_FMT_PWC2:
1275 structure = gst_structure_new_empty ("video/x-pwc2");
1278 GST_DEBUG ("Unknown fourcc 0x%08x %" GST_FOURCC_FORMAT,
1279 fourcc, GST_FOURCC_ARGS (fourcc));
1287 gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
1289 GstStructure *template;
1292 template = gst_v4l2_object_v4l2fourcc_to_bare_struct (fourcc);
1294 if (template == NULL)
1297 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
1298 if (gst_v4l2_formats[i].format != fourcc)
1301 if (gst_v4l2_formats[i].dimensions) {
1302 gst_structure_set (template,
1303 "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1304 "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1305 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
1316 gst_v4l2_object_get_caps_helper (GstV4L2FormatFlags flags)
1318 GstStructure *structure;
1322 caps = gst_caps_new_empty ();
1323 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
1325 if ((gst_v4l2_formats[i].flags & flags) == 0)
1329 gst_v4l2_object_v4l2fourcc_to_bare_struct (gst_v4l2_formats[i].format);
1332 GstStructure *alt_s = NULL;
1334 if (gst_v4l2_formats[i].dimensions) {
1335 gst_structure_set (structure,
1336 "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1337 "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1338 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
1341 switch (gst_v4l2_formats[i].format) {
1342 case V4L2_PIX_FMT_RGB32:
1343 alt_s = gst_structure_copy (structure);
1344 gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1346 case V4L2_PIX_FMT_BGR32:
1347 alt_s = gst_structure_copy (structure);
1348 gst_structure_set (alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1353 gst_caps_append_structure (caps, structure);
1356 gst_caps_append_structure (caps, alt_s);
1360 return gst_caps_simplify (caps);
1364 gst_v4l2_object_get_all_caps (void)
1366 static GstCaps *caps = NULL;
1369 caps = gst_v4l2_object_get_caps_helper (GST_V4L2_ALL);
1371 return gst_caps_ref (caps);
1375 gst_v4l2_object_get_raw_caps (void)
1377 static GstCaps *caps = NULL;
1380 caps = gst_v4l2_object_get_caps_helper (GST_V4L2_RAW);
1382 return gst_caps_ref (caps);
1386 gst_v4l2_object_get_codec_caps (void)
1388 static GstCaps *caps = NULL;
1391 caps = gst_v4l2_object_get_caps_helper (GST_V4L2_CODEC);
1393 return gst_caps_ref (caps);
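/* Illustrative use of the three caps helpers above (assumed caller, not in
 * this file): they are typically used to build pad template caps, e.g.
 *   GstCaps *caps = gst_v4l2_object_get_all_caps ();
 *   templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
 */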
1396 /* collect data for the given caps
1397 * @caps: given input caps
1398 * @format: location for the v4l format
1399 * @w/@h: location for width and height
1400 * @fps_n/@fps_d: location for framerate
1401 * @size: location for expected size of the frame or 0 if unknown
1404 gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
1405 struct v4l2_fmtdesc **format, GstVideoInfo * info)
1407 GstStructure *structure;
1408 guint32 fourcc = 0, fourcc_nc = 0;
1409 const gchar *mimetype;
1410 struct v4l2_fmtdesc *fmt = NULL;
1412 structure = gst_caps_get_structure (caps, 0);
1414 mimetype = gst_structure_get_name (structure);
1416 if (!gst_video_info_from_caps (info, caps))
1417 goto invalid_format;
1419 if (g_str_equal (mimetype, "video/x-raw")) {
1420 switch (GST_VIDEO_INFO_FORMAT (info)) {
1421 case GST_VIDEO_FORMAT_I420:
1422 fourcc = V4L2_PIX_FMT_YUV420;
1424 case GST_VIDEO_FORMAT_YUY2:
1425 fourcc = V4L2_PIX_FMT_YUYV;
1428 case GST_VIDEO_FORMAT_Y41P:
1429 fourcc = V4L2_PIX_FMT_Y41P;
1432 case GST_VIDEO_FORMAT_UYVY:
1433 fourcc = V4L2_PIX_FMT_UYVY;
1435 case GST_VIDEO_FORMAT_YV12:
1436 fourcc = V4L2_PIX_FMT_YVU420;
1438 case GST_VIDEO_FORMAT_Y41B:
1439 fourcc = V4L2_PIX_FMT_YUV411P;
1441 case GST_VIDEO_FORMAT_Y42B:
1442 fourcc = V4L2_PIX_FMT_YUV422P;
1444 case GST_VIDEO_FORMAT_NV12:
1445 fourcc = V4L2_PIX_FMT_NV12;
1446 fourcc_nc = V4L2_PIX_FMT_NV12M;
1448 case GST_VIDEO_FORMAT_NV12_64Z32:
1449 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1451 case GST_VIDEO_FORMAT_NV21:
1452 fourcc = V4L2_PIX_FMT_NV21;
1453 fourcc_nc = V4L2_PIX_FMT_NV21M;
1455 case GST_VIDEO_FORMAT_YVYU:
1456 fourcc = V4L2_PIX_FMT_YVYU;
1458 case GST_VIDEO_FORMAT_RGB15:
1459 fourcc = V4L2_PIX_FMT_RGB555;
1461 case GST_VIDEO_FORMAT_RGB16:
1462 fourcc = V4L2_PIX_FMT_RGB565;
1464 case GST_VIDEO_FORMAT_RGB:
1465 fourcc = V4L2_PIX_FMT_RGB24;
1467 case GST_VIDEO_FORMAT_BGR:
1468 fourcc = V4L2_PIX_FMT_BGR24;
1470 case GST_VIDEO_FORMAT_xRGB:
1471 case GST_VIDEO_FORMAT_ARGB:
1472 fourcc = V4L2_PIX_FMT_RGB32;
1474 case GST_VIDEO_FORMAT_BGRx:
1475 case GST_VIDEO_FORMAT_BGRA:
1476 fourcc = V4L2_PIX_FMT_BGR32;
1478 case GST_VIDEO_FORMAT_GRAY8:
1479 fourcc = V4L2_PIX_FMT_GREY;
1484 if (g_str_equal (mimetype, "video/mpegts")) {
1485 fourcc = V4L2_PIX_FMT_MPEG;
1486 } else if (g_str_equal (mimetype, "video/x-dv")) {
1487 fourcc = V4L2_PIX_FMT_DV;
1488 } else if (g_str_equal (mimetype, "image/jpeg")) {
1489 fourcc = V4L2_PIX_FMT_JPEG;
1490 } else if (g_str_equal (mimetype, "video/mpeg")) {
1492 if (gst_structure_get_int (structure, "mpegversion", &version)) {
1495 fourcc = V4L2_PIX_FMT_MPEG1;
1498 fourcc = V4L2_PIX_FMT_MPEG2;
1501 fourcc = V4L2_PIX_FMT_MPEG4;
1507 } else if (g_str_equal (mimetype, "video/x-h263")) {
1508 fourcc = V4L2_PIX_FMT_H263;
1509 } else if (g_str_equal (mimetype, "video/x-h264")) {
1510 fourcc = V4L2_PIX_FMT_H264;
1511 } else if (g_str_equal (mimetype, "video/x-vp8")) {
1512 fourcc = V4L2_PIX_FMT_VP8;
1513 } else if (g_str_equal (mimetype, "video/x-bayer")) {
1514 fourcc = V4L2_PIX_FMT_SBGGR8;
1515 } else if (g_str_equal (mimetype, "video/x-sonix")) {
1516 fourcc = V4L2_PIX_FMT_SN9C10X;
1517 } else if (g_str_equal (mimetype, "video/x-pwc1")) {
1518 fourcc = V4L2_PIX_FMT_PWC1;
1519 } else if (g_str_equal (mimetype, "video/x-pwc2")) {
1520 fourcc = V4L2_PIX_FMT_PWC2;
1525 /* Prefer the non-contiguous format if supported */
1526 v4l2object->prefered_non_contiguous = TRUE;
1529 fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc_nc);
1530 else if (fourcc == 0)
1531 goto unhandled_format;
1534 fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
1535 v4l2object->prefered_non_contiguous = FALSE;
1539 goto unsupported_format;
1548 GST_DEBUG_OBJECT (v4l2object, "invalid format");
1553 GST_DEBUG_OBJECT (v4l2object, "unhandled format");
1558 GST_DEBUG_OBJECT (v4l2object, "unsupported format");
1564 gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
1565 guint32 pixelformat, gint * width, gint * height, gboolean * interlaced);
1568 gst_v4l2_object_add_aspect_ratio (GstV4l2Object * v4l2object, GstStructure * s)
1570 struct v4l2_cropcap cropcap;
1571 int num = 1, den = 1;
1573 if (!v4l2object->keep_aspect)
1576 if (v4l2object->par) {
1577 num = gst_value_get_fraction_numerator (v4l2object->par);
1578 den = gst_value_get_fraction_denominator (v4l2object->par);
1582 memset (&cropcap, 0, sizeof (cropcap));
1584 cropcap.type = v4l2object->type;
1585 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0)
1586 goto cropcap_failed;
1588 num = cropcap.pixelaspect.numerator;
1589 den = cropcap.pixelaspect.denominator;
1591 /* Ignore a PAR of 0/0 */
1596 gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, num, den,
1601 if (errno != ENOTTY)
1602 GST_WARNING_OBJECT (v4l2object->element,
1603 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
1604 g_strerror (errno));
1608 /* returns TRUE if the value was changed in place, otherwise FALSE */
1610 gst_v4l2src_value_simplify (GValue * val)
1612 /* simplify list of one value to one value */
1613 if (GST_VALUE_HOLDS_LIST (val) && gst_value_list_get_size (val) == 1) {
1614 const GValue *list_val;
1615 GValue new_val = G_VALUE_INIT;
1617 list_val = gst_value_list_get_value (val, 0);
1618 g_value_init (&new_val, G_VALUE_TYPE (list_val));
1619 g_value_copy (list_val, &new_val);
1620 g_value_unset (val);
1628 /* The frame interval enumeration code first appeared in Linux 2.6.19. */
1629 static GstStructure *
1630 gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object,
1631 guint32 pixelformat,
1632 guint32 width, guint32 height, const GstStructure * template)
1634 gint fd = v4l2object->video_fd;
1635 struct v4l2_frmivalenum ival;
1638 GValue rates = { 0, };
1639 gboolean interlaced;
1640 gint int_width = width;
1641 gint int_height = height;
1643 if (v4l2object->never_interlaced) {
1646 /* Interlaced detection using VIDIOC_TRY/S_FMT */
1647 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat,
1648 &int_width, &int_height, &interlaced))
1652 memset (&ival, 0, sizeof (struct v4l2_frmivalenum));
1654 ival.pixel_format = pixelformat;
1656 ival.height = height;
1658 GST_LOG_OBJECT (v4l2object->element,
1659 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
1660 GST_FOURCC_ARGS (pixelformat));
1662 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
1663 * fraction to get framerate */
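  /* e.g. a discrete frame interval of 1/30 s becomes a framerate of 30/1 */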
1664 if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
1665 goto enum_frameintervals_failed;
1667 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
1668 GValue rate = { 0, };
1670 g_value_init (&rates, GST_TYPE_LIST);
1671 g_value_init (&rate, GST_TYPE_FRACTION);
1674 num = ival.discrete.numerator;
1675 denom = ival.discrete.denominator;
1677 if (num > G_MAXINT || denom > G_MAXINT) {
1678 /* let us hope we don't get here... */
1683 GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d",
1686 /* swap to get the framerate */
1687 gst_value_set_fraction (&rate, denom, num);
1688 gst_value_list_append_value (&rates, &rate);
1691 } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
1692 } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
1693 GValue min = { 0, };
1694 GValue step = { 0, };
1695 GValue max = { 0, };
1696 gboolean added = FALSE;
1697 guint32 minnum, mindenom;
1698 guint32 maxnum, maxdenom;
1700 g_value_init (&rates, GST_TYPE_LIST);
1702 g_value_init (&min, GST_TYPE_FRACTION);
1703 g_value_init (&step, GST_TYPE_FRACTION);
1704 g_value_init (&max, GST_TYPE_FRACTION);
1707 minnum = ival.stepwise.min.numerator;
1708 mindenom = ival.stepwise.min.denominator;
1709 if (minnum > G_MAXINT || mindenom > G_MAXINT) {
1713 GST_LOG_OBJECT (v4l2object->element, "stepwise min frame interval: %d/%d",
1715 gst_value_set_fraction (&min, minnum, mindenom);
1718 maxnum = ival.stepwise.max.numerator;
1719 maxdenom = ival.stepwise.max.denominator;
1720 if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
1725 GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d",
1727 gst_value_set_fraction (&max, maxnum, maxdenom);
1730 num = ival.stepwise.step.numerator;
1731 denom = ival.stepwise.step.denominator;
1732 if (num > G_MAXINT || denom > G_MAXINT) {
1737 if (num == 0 || denom == 0) {
1738 /* in this case we have a wrong fraction or no step, set the step to max
1739 * so that we only add the min value in the loop below */
1744 /* since we only have gst_value_fraction_subtract and not add, negate the
1746 GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d",
1748 gst_value_set_fraction (&step, -num, denom);
1750 while (gst_value_compare (&min, &max) != GST_VALUE_GREATER_THAN) {
1751 GValue rate = { 0, };
1753 num = gst_value_get_fraction_numerator (&min);
1754 denom = gst_value_get_fraction_denominator (&min);
1755 GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d",
1758 /* invert to get the framerate */
1759 g_value_init (&rate, GST_TYPE_FRACTION);
1760 gst_value_set_fraction (&rate, denom, num);
1761 gst_value_list_append_value (&rates, &rate);
1764 /* we're actually adding because step was negated above. This is because
1765 * there is no _add function... */
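      /* e.g. with a step interval of 1/30 s stored as -1/30, each
       * subtraction below advances min by 1/30 s */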
1766 if (!gst_value_fraction_subtract (&min, &min, &step)) {
1767 GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!");
1772 /* no range was added, leave the default range from the template */
1773 GST_WARNING_OBJECT (v4l2object->element,
1774 "no range added, leaving default");
1775 g_value_unset (&rates);
1777 } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
1778 guint32 maxnum, maxdenom;
1780 g_value_init (&rates, GST_TYPE_FRACTION_RANGE);
1782 num = ival.stepwise.min.numerator;
1783 denom = ival.stepwise.min.denominator;
1784 if (num > G_MAXINT || denom > G_MAXINT) {
1789 maxnum = ival.stepwise.max.numerator;
1790 maxdenom = ival.stepwise.max.denominator;
1791 if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
1796 GST_LOG_OBJECT (v4l2object->element,
1797 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
1800 gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num);
1806 s = gst_structure_copy (template);
1807 gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
1808 "height", G_TYPE_INT, (gint) height, NULL);
1809 gst_v4l2_object_add_aspect_ratio (v4l2object, s);
1810 if (g_str_equal (gst_structure_get_name (s), "video/x-raw"))
1811 gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
1812 (interlaced ? "mixed" : "progressive"), NULL);
1814 if (G_IS_VALUE (&rates)) {
1815 gst_v4l2src_value_simplify (&rates);
1816 /* only change the framerate on the template when we have a valid probed new
1818 gst_structure_take_value (s, "framerate", &rates);
1819 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
1820 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
1821 gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
1827 enum_frameintervals_failed:
1829 GST_DEBUG_OBJECT (v4l2object->element,
1830 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
1831 GST_FOURCC_ARGS (pixelformat), width, height);
1836 /* I don't see how this is actually an error, we ignore the format then */
1837 GST_WARNING_OBJECT (v4l2object->element,
1838 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
1839 GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
1845 sort_by_frame_size (GstStructure * s1, GstStructure * s2)
1849 gst_structure_get_int (s1, "width", &w1);
1850 gst_structure_get_int (s1, "height", &h1);
1851 gst_structure_get_int (s2, "width", &w2);
1852 gst_structure_get_int (s2, "height", &h2);
1854 /* I think it's safe to assume that this won't overflow for a while */
1855 return ((w2 * h2) - (w1 * h1));
1859 gst_v4l2_object_update_and_append (GstV4l2Object * v4l2object,
1860 guint32 format, GstCaps * caps, GstStructure * s)
1862 GstStructure *alt_s = NULL;
1864 /* Encoded streams on the output (sink) side need to be parsed */
1865 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
1866 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
1869 for (; i < GST_V4L2_FORMAT_COUNT; i++) {
1870 if (format == gst_v4l2_formats[i].format &&
1871 gst_v4l2_formats[i].flags & GST_V4L2_CODEC &&
1872 !(gst_v4l2_formats[i].flags & GST_V4L2_NO_PARSE)) {
1873 gst_structure_set (s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
1879 if (v4l2object->has_alpha_component &&
1880 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
1881 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
1883 case V4L2_PIX_FMT_RGB32:
1884 alt_s = gst_structure_copy (s);
1885 gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1887 case V4L2_PIX_FMT_BGR32:
1888 alt_s = gst_structure_copy (s);
1889 gst_structure_set (alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
1896 gst_caps_append_structure (caps, s);
1899 gst_caps_append_structure (caps, alt_s);
1903 gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object,
1904 guint32 pixelformat, const GstStructure * template)
1906 GstCaps *ret = gst_caps_new_empty ();
1908 gint fd = v4l2object->video_fd;
1909 struct v4l2_frmsizeenum size;
1910 GList *results = NULL;
1913 if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G')) {
1914 gst_caps_append_structure (ret, gst_structure_copy (template));
1918 memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
1920 size.pixel_format = pixelformat;
1922 GST_DEBUG_OBJECT (v4l2object->element,
1923 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
1924 GST_FOURCC_ARGS (pixelformat));
1926 if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
1927 goto enum_framesizes_failed;
1929 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
1931 GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d",
1932 size.discrete.width, size.discrete.height);
1934 w = MIN (size.discrete.width, G_MAXINT);
1935 h = MIN (size.discrete.height, G_MAXINT);
1939 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
1940 pixelformat, w, h, template);
1943 results = g_list_prepend (results, tmp);
1947 } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
1948 GST_DEBUG_OBJECT (v4l2object->element,
1949 "done iterating discrete frame sizes");
1950 } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
1951 guint32 maxw, maxh, step_w, step_h;
1953 GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:");
1954 GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
1955 size.stepwise.min_width);
1956 GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
1957 size.stepwise.min_height);
1958 GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
1959 size.stepwise.max_width);
1960 GST_DEBUG_OBJECT (v4l2object->element, "max height: %d",
1961 size.stepwise.max_height);
1962 GST_DEBUG_OBJECT (v4l2object->element, "step width: %d",
1963 size.stepwise.step_width);
1964 GST_DEBUG_OBJECT (v4l2object->element, "step height: %d",
1965 size.stepwise.step_height);
1967 w = MAX (size.stepwise.min_width, 1);
1968 h = MAX (size.stepwise.min_height, 1);
1969 maxw = MIN (size.stepwise.max_width, G_MAXINT);
1970 maxh = MIN (size.stepwise.max_height, G_MAXINT);
1972 step_w = MAX (size.stepwise.step_width, 1);
1973 step_h = MAX (size.stepwise.step_height, 1);
1975 /* FIXME: check for sanity and that min/max are multiples of the steps */
1977 /* we only query details for the max width/height since it's likely the
1978 * most restricted if there are any resolution-dependent restrictions */
1979 tmp = gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
1980 pixelformat, maxw, maxh, template);
1983 GValue step_range = G_VALUE_INIT;
1985 g_value_init (&step_range, GST_TYPE_INT_RANGE);
1986 gst_value_set_int_range_step (&step_range, w, maxw, step_w);
1987 gst_structure_set_value (tmp, "width", &step_range);
1989 gst_value_set_int_range_step (&step_range, h, maxh, step_h);
1990 gst_structure_take_value (tmp, "height", &step_range);
1992 /* no point using the results list here, since there's only one struct */
1993 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
1995 } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
1998 GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:");
1999 GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
2000 size.stepwise.min_width);
2001 GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
2002 size.stepwise.min_height);
2003 GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
2004 size.stepwise.max_width);
2005 GST_DEBUG_OBJECT (v4l2object->element, "max height: %d",
2006 size.stepwise.max_height);
2008 w = MAX (size.stepwise.min_width, 1);
2009 h = MAX (size.stepwise.min_height, 1);
2010 maxw = MIN (size.stepwise.max_width, G_MAXINT);
2011 maxh = MIN (size.stepwise.max_height, G_MAXINT);
2014 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object, pixelformat,
2017 gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
2018 (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
2021 /* no point using the results list here, since there's only one struct */
2022 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
2028 /* we use an intermediary list to store and then sort the results of the
2029 * probing because we can't make any assumptions about the order in which
2030 * the driver will give us the sizes, but we want the final caps to contain
2031 * the results starting with the highest resolution and having the lowest
2032 * resolution last, since order in caps matters for things like fixation. */
2033 results = g_list_sort (results, (GCompareFunc) sort_by_frame_size);
2034 while (results != NULL) {
2035 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret,
2037 results = g_list_delete_link (results, results);
2040 if (gst_caps_is_empty (ret))
2041 goto enum_framesizes_no_results;
2046 enum_framesizes_failed:
2048 /* I don't see how this is actually an error */
2049 GST_DEBUG_OBJECT (v4l2object->element,
2050 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2051 " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
2052 goto default_frame_sizes;
2054 enum_framesizes_no_results:
2056 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2057 * question doesn't actually support it yet */
2058 GST_DEBUG_OBJECT (v4l2object->element,
2059 "No results for pixelformat %" GST_FOURCC_FORMAT
2060 " enumerating frame sizes, trying fallback",
2061 GST_FOURCC_ARGS (pixelformat));
2062 goto default_frame_sizes;
2066 GST_WARNING_OBJECT (v4l2object->element,
2067 "Unknown frame size enum type for pixelformat %" GST_FOURCC_FORMAT
2068 ": %u", GST_FOURCC_ARGS (pixelformat), size.type);
2069 goto default_frame_sizes;
2072 default_frame_sizes:
2074 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2075 gboolean interlaced;
2077 /* This code is for Linux < 2.6.19 */
2079 max_w = max_h = GST_V4L2_MAX_SIZE;
2080 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
2081 &min_h, &interlaced)) {
2082 GST_WARNING_OBJECT (v4l2object->element,
2083 "Could not probe minimum capture size for pixelformat %"
2084 GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
2086 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
2087 &max_h, &interlaced)) {
2088 GST_WARNING_OBJECT (v4l2object->element,
2089 "Could not probe maximum capture size for pixelformat %"
2090 GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
2093 /* Since we can't get framerate directly, try to use the current norm */
2094 if (v4l2object->tv_norm && v4l2object->norms) {
2096 GstTunerNorm *norm = NULL;
2097 GstTunerNorm *current =
2098 gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
2100 for (norms = v4l2object->norms; norms != NULL; norms = norms->next) {
2101 norm = (GstTunerNorm *) norms->data;
2102 if (!strcmp (norm->label, current->label))
2105 /* If it's possible, set framerate to that (discrete) value */
2107 fix_num = gst_value_get_fraction_numerator (&norm->framerate);
2108 fix_denom = gst_value_get_fraction_denominator (&norm->framerate);
2112 tmp = gst_structure_copy (template);
2114 gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2116 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2117 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
2118 /* if norm can't be used, copy the template framerate */
2119 gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2124 gst_structure_set (tmp, "width", G_TYPE_INT, max_w, NULL);
2126 gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2129 gst_structure_set (tmp, "height", G_TYPE_INT, max_h, NULL);
2131 gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2133 if (g_str_equal (gst_structure_get_name (tmp), "video/x-raw"))
2134 gst_structure_set (tmp, "interlace-mode", G_TYPE_STRING,
2135 (interlaced ? "mixed" : "progressive"), NULL);
2136 gst_v4l2_object_add_aspect_ratio (v4l2object, tmp);
2138 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
2144 gst_v4l2_object_get_interlace (int field, gboolean * interlaced)
2147 case V4L2_FIELD_ANY:
2148 case V4L2_FIELD_NONE:
2149 *interlaced = FALSE;
2151 case V4L2_FIELD_INTERLACED:
2152 case V4L2_FIELD_INTERLACED_TB:
2153 case V4L2_FIELD_INTERLACED_BT:
2162 gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
2163 guint32 pixelformat, gint * width, gint * height, gboolean * interlaced)
2165 struct v4l2_format fmt;
2168 gboolean ret = FALSE;
2170 g_return_val_if_fail (width != NULL, FALSE);
2171 g_return_val_if_fail (height != NULL, FALSE);
2173 GST_LOG_OBJECT (v4l2object->element,
2174 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
2175 *width, *height, GST_FOURCC_ARGS (pixelformat));
2177 fd = v4l2object->video_fd;
2179 memset (&fmt, 0, sizeof (struct v4l2_format));
2181 /* get size delimiters */
2182 memset (&fmt, 0, sizeof (fmt));
2183 fmt.type = v4l2object->type;
2184 fmt.fmt.pix.width = *width;
2185 fmt.fmt.pix.height = *height;
2186 fmt.fmt.pix.pixelformat = pixelformat;
2187 fmt.fmt.pix.field = V4L2_FIELD_NONE;
2189 r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
2190 if ((r < 0 && errno == EINVAL) ||
2191 !gst_v4l2_object_get_interlace (fmt.fmt.pix.field, interlaced)) {
2192 /* try again with interlaced video */
2193 memset (&fmt, 0, sizeof (fmt));
2194 fmt.type = v4l2object->type;
2195 fmt.fmt.pix.width = *width;
2196 fmt.fmt.pix.height = *height;
2197 fmt.fmt.pix.pixelformat = pixelformat;
2198 fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
2199 r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
2203 /* The driver might not implement TRY_FMT, in which case we will try
2205 if (errno != ENOTTY)
2208 /* Only try S_FMT if we're not actively capturing yet, which we shouldn't
2209 be, because we're still probing */
2210 if (GST_V4L2_IS_ACTIVE (v4l2object))
2213 GST_LOG_OBJECT (v4l2object->element,
2214 "Failed to probe size limit with VIDIOC_TRY_FMT, trying VIDIOC_S_FMT");
2216 memset (&fmt, 0, sizeof (fmt));
2217 fmt.type = v4l2object->type;
2218 fmt.fmt.pix.width = *width;
2219 fmt.fmt.pix.height = *height;
2220 fmt.fmt.pix.pixelformat = pixelformat;
2221 fmt.fmt.pix.field = V4L2_FIELD_NONE;
2223 r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
2224 if ((r < 0 && errno == EINVAL) ||
2225 !gst_v4l2_object_get_interlace (fmt.fmt.pix.field, interlaced)) {
2226 /* try again with interlaced video */
2227 memset (&fmt, 0, sizeof (fmt));
2228 fmt.type = v4l2object->type;
2229 fmt.fmt.pix.width = *width;
2230 fmt.fmt.pix.height = *height;
2231 fmt.fmt.pix.pixelformat = pixelformat;
2232 fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
2233 r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
2240 GST_LOG_OBJECT (v4l2object->element,
2241 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
2243 *width = fmt.fmt.pix.width;
2244 *height = fmt.fmt.pix.height;
2246 if (!gst_v4l2_object_get_interlace (fmt.fmt.pix.field, interlaced)) {
2247 GST_WARNING_OBJECT (v4l2object->element,
2248 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
2249 GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field);
2257 GST_WARNING_OBJECT (v4l2object->element,
2258 "Unable to try format: %s", g_strerror (errno));
2265 gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
2269 GST_DEBUG_OBJECT (v4l2object->element, "initializing the %s system",
2270 V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? "output" : "capture");
2272 GST_V4L2_CHECK_OPEN (v4l2object);
2273 GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
2275 /* find transport */
2276 mode = v4l2object->req_mode;
2278 if (v4l2object->vcap.capabilities & V4L2_CAP_READWRITE) {
2279 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2280 mode = GST_V4L2_IO_RW;
2281 } else if (v4l2object->req_mode == GST_V4L2_IO_RW)
2282 goto method_not_supported;
2284 if (v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
2285 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
2286 mode = GST_V4L2_IO_MMAP;
2287 } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP)
2288 goto method_not_supported;
2290 /* if still no transport selected, error out */
2291 if (mode == GST_V4L2_IO_AUTO)
2292 goto no_supported_capture_method;
2294 GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode);
2295 v4l2object->mode = mode;
2297 /* Map the buffers */
2298 GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool");
2300 if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
2301 goto buffer_pool_new_failed;
2303 GST_V4L2_SET_ACTIVE (v4l2object);
2308 buffer_pool_new_failed:
2310 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
2311 (_("Could not map buffers from device '%s'"),
2312 v4l2object->videodev),
2313 ("Failed to create buffer pool: %s", g_strerror (errno)));
2316 method_not_supported:
2318 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
2319 (_("The driver of device '%s' does not support the IO method %d"),
2320 v4l2object->videodev, mode), (NULL));
2323 no_supported_capture_method:
2325 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
2326 (_("The driver of device '%s' does not support any known IO "
2327 "method."), v4l2object->videodev), (NULL));
2333 gst_v4l2_object_set_stride (GstVideoInfo * info, GstVideoAlignment * align,
2334 gint plane, gint stride)
2336 const GstVideoFormatInfo *finfo = info->finfo;
2338 if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
2339 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
2342 ws = GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
2343 hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
2344 tile_height = 1 << hs;
2346 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, plane,
2347 info->height + align->padding_top + align->padding_bottom);
2348 padded_height = GST_ROUND_UP_N (padded_height, tile_height);
2350 x_tiles = stride >> ws;
2351 y_tiles = padded_height >> hs;
2352 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE (x_tiles, y_tiles);
2354 info->stride[plane] = stride;
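/* Derive per-plane strides and offsets from the single stride reported by
 * the driver, for planar formats stored in one contiguous v4l2 plane. */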
2359 gst_v4l2_object_extrapolate_info (GstV4l2Object * v4l2object,
2360 GstVideoInfo * info, GstVideoAlignment * align, gint stride)
2362 const GstVideoFormatInfo *finfo = info->finfo;
2363 gint i, estride, padded_height;
2366 g_return_if_fail (v4l2object->n_v4l2_planes == 1);
2368 padded_height = info->height + align->padding_top + align->padding_bottom;
2370 for (i = 0; i < finfo->n_planes; i++) {
2371 switch (finfo->format) {
2372 case GST_VIDEO_FORMAT_NV12:
2373 case GST_VIDEO_FORMAT_NV12_64Z32:
2374 case GST_VIDEO_FORMAT_NV21:
2375 case GST_VIDEO_FORMAT_NV16:
2376 case GST_VIDEO_FORMAT_NV24:
2377 estride = (i == 0 ? 1 : 2) *
2378 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, stride);
2381 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, stride);
2385 gst_v4l2_object_set_stride (info, align, i, estride);
2387 info->offset[i] = offs;
2389 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height);
2391 GST_DEBUG_OBJECT (v4l2object->element,
2392 "Extrapolated for plane %d with base stride %d: "
2393 "stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i],
2399 gst_v4l2_object_save_format (GstV4l2Object * v4l2object,
2400 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
2401 GstVideoInfo * info, GstVideoAlignment * align)
2403 const GstVideoFormatInfo *finfo = info->finfo;
2404 gboolean standard_stride = TRUE;
2405 gint stride, padded_width, padded_height, i;
2407 if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) {
2408 v4l2object->n_v4l2_planes = 1;
2409 info->size = format->fmt.pix.sizeimage;
2413 /* adjust right padding */
2414 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
2415 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
2417 stride = format->fmt.pix.bytesperline;
2419 padded_width = stride / GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
2421 if (padded_width < format->fmt.pix.width)
2422 GST_WARNING_OBJECT (v4l2object->element,
2423 "Driver bug detected, stride is too small for the width");
2425 align->padding_right = padded_width - info->width - align->padding_left;
2427 /* adjust bottom padding */
2428 padded_height = format->fmt.pix.height;
2430 if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
2431 guint hs, tile_height;
2433 hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
2434 tile_height = 1 << hs;
2436 padded_height = GST_ROUND_UP_N (padded_height, tile_height);
2439 align->padding_bottom = padded_height - info->height - align->padding_top;
2441 /* set up the strides and offsets */
2442 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
2443 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
2445 /* figure out the frame layout */
2446 v4l2object->n_v4l2_planes = MAX (1, pix_mp->num_planes);
2448 for (i = 0; i < v4l2object->n_v4l2_planes; i++) {
2449 stride = pix_mp->plane_fmt[i].bytesperline;
2451 if (info->stride[i] != stride)
2452 standard_stride = FALSE;
2454 gst_v4l2_object_set_stride (info, align, i, stride);
2455 info->offset[i] = info->size;
2456 info->size += pix_mp->plane_fmt[i].sizeimage;
2459 /* Extrapolate strides if a planar format is being set in 1 v4l2 plane */
2460 if (v4l2object->n_v4l2_planes < finfo->n_planes) {
2461 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
2462 gst_v4l2_object_extrapolate_info (v4l2object, info, align, stride);
2465 /* only one plane in non-MPLANE mode */
2466 v4l2object->n_v4l2_planes = 1;
2467 info->size = format->fmt.pix.sizeimage;
2468 stride = format->fmt.pix.bytesperline;
2470 if (info->stride[0] != stride)
2471 standard_stride = FALSE;
2473 gst_v4l2_object_extrapolate_info (v4l2object, info, align, stride);
2476 /* adjust the offset to take into account left and top */
2477 if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
2478 if ((align->padding_left + align->padding_top) > 0)
2479 GST_WARNING_OBJECT (v4l2object->element,
2480 "Left and top padding is not permitted for tiled formats");
2482 for (i = 0; i < finfo->n_planes; i++) {
2485 /* FIXME we assume each plane maps to one component, as this is true for
2486 * all formats we currently support. */
2488 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, align->padding_left);
2489 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, align->padding_top);
2491 info->offset[i] += (vedge * info->stride[i]) +
2492 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE (info, i));
2497 GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %" G_GSIZE_FORMAT,
2500 /* to avoid copies we need video meta if top or left padding */
2501 v4l2object->need_video_meta =
2502 ((align->padding_top + align->padding_left) != 0);
2504 /* ... or if stride is non "standard" */
2505 if (!standard_stride)
2506 v4l2object->need_video_meta = TRUE;
2508 /* ... or also video meta if we use multiple, non-contiguous planes */
2509 if (v4l2object->n_v4l2_planes > 1)
2510 v4l2object->need_video_meta = TRUE;
2512 v4l2object->info = *info;
2513 v4l2object->align = *align;
2514 v4l2object->format = *format;
2515 v4l2object->fmtdesc = fmtdesc;
2517 /* if we have a framerate, pre-calculate the duration */
2518 if (info->fps_n > 0 && info->fps_d > 0) {
2519 v4l2object->duration = gst_util_uint64_scale_int (GST_SECOND, info->fps_d,
2522 v4l2object->duration = GST_CLOCK_TIME_NONE;
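/* Negotiate the given caps with the driver: map them to a v4l2 format, set
 * it with S_FMT if it differs from the current format, validate what the
 * driver returned, configure the frame rate via S_PARM where the driver
 * supports it, then save the format and set up the buffer pool. */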
2527 gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
2529 gint fd = v4l2object->video_fd;
2530 struct v4l2_format format;
2531 struct v4l2_streamparm streamparm;
2532 enum v4l2_field field;
2533 guint32 pixelformat;
2534 struct v4l2_fmtdesc *fmtdesc;
2536 GstVideoAlignment align;
2537 gint width, height, fps_n, fps_d;
2540 gboolean is_mplane, format_changed;
2541 enum v4l2_colorspace colorspace = 0;
2543 GST_V4L2_CHECK_OPEN (v4l2object);
2544 GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
2546 is_mplane = V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type);
2548 gst_video_info_init (&info);
2549 gst_video_alignment_reset (&align);
2551 if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
2554 pixelformat = fmtdesc->pixelformat;
2555 width = GST_VIDEO_INFO_WIDTH (&info);
2556 height = GST_VIDEO_INFO_HEIGHT (&info);
2557 fps_n = GST_VIDEO_INFO_FPS_N (&info);
2558 fps_d = GST_VIDEO_INFO_FPS_D (&info);
2560 /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
2561 * or if contiguous planes are preferred */
2562 n_v4l_planes = GST_VIDEO_INFO_N_PLANES (&info);
2563 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
2566 if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
2567 GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
2568 /* ideally we would differentiate between types of interlaced video
2569 * but there is not sufficient information in the caps..
2571 field = V4L2_FIELD_INTERLACED;
2573 GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
2574 field = V4L2_FIELD_NONE;
2577 if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
2578 /* We should set colorspace if we have it */
2579 if (gst_video_colorimetry_matches (&info.colorimetry, "bt601")) {
2580 colorspace = V4L2_COLORSPACE_SMPTE170M;
2581 } else if (gst_video_colorimetry_matches (&info.colorimetry, "bt709")) {
2582 colorspace = V4L2_COLORSPACE_REC709;
2583 } else if (gst_video_colorimetry_matches (&info.colorimetry, "smpte240m")) {
2584 colorspace = V4L2_COLORSPACE_SMPTE240M;
2586 /* Try to guess colorspace according to pixelformat and size */
2587 if (GST_VIDEO_INFO_IS_YUV (&info)) {
2588 /* SD streams likely use SMPTE170M and HD streams REC709 */
2589 if (width <= 720 && height <= 576)
2590 colorspace = V4L2_COLORSPACE_SMPTE170M;
2592 colorspace = V4L2_COLORSPACE_REC709;
2593 } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
2594 colorspace = V4L2_COLORSPACE_SRGB;
2599 GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
2600 "%" GST_FOURCC_FORMAT " stride: %d", width, height,
2601 GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
2603 memset (&format, 0x00, sizeof (struct v4l2_format));
2604 format.type = v4l2object->type;
2606 if (v4l2object->no_initial_format) {
2607 format_changed = TRUE;
2609 if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0)
2610 goto get_fmt_failed;
2612 /* Note that the first four fields are the same between v4l2_pix_format and
2613 * v4l2_pix_format_mplane, so we don't need to duplicate the checks */
2615 /* If no size in caps, use configured size */
2616 if (width == 0 && height == 0) {
2617 width = format.fmt.pix_mp.width;
2618 height = format.fmt.pix_mp.height;
2621 format_changed = format.type != v4l2object->type ||
2622 format.fmt.pix_mp.width != width ||
2623 format.fmt.pix_mp.height != height ||
2624 format.fmt.pix_mp.pixelformat != pixelformat ||
2625 format.fmt.pix_mp.field != field;
2627 if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
2629 format_changed = format_changed ||
2630 format.fmt.pix_mp.colorspace != colorspace;
2632 format_changed = format_changed ||
2633 format.fmt.pix.colorspace != colorspace;
2638 #ifndef GST_DISABLE_GST_DEBUG
2640 GST_DEBUG_OBJECT (v4l2object->element, "Current size is %dx%d, format "
2641 "%" GST_FOURCC_FORMAT " colorspace %d, nb planes %d",
2642 format.fmt.pix_mp.width, format.fmt.pix_mp.height,
2643 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2644 format.fmt.pix_mp.colorspace, format.fmt.pix_mp.num_planes);
2646 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
2647 GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
2648 format.fmt.pix_mp.plane_fmt[i].bytesperline);
2650 GST_DEBUG_OBJECT (v4l2object->element, "Current size is %dx%d, format "
2651 "%" GST_FOURCC_FORMAT " stride %d, colorspace %d",
2652 format.fmt.pix.width, format.fmt.pix.height,
2653 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2654 format.fmt.pix.bytesperline, format.fmt.pix.colorspace);
2658 /* If nothing changed, we are done */
2659 if (!format_changed)
2662 /* something different, set the format */
2663 GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
2664 "%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
2667 format.type = v4l2object->type;
2668 format.fmt.pix_mp.pixelformat = pixelformat;
2669 format.fmt.pix_mp.width = width;
2670 format.fmt.pix_mp.height = height;
2671 format.fmt.pix_mp.field = field;
2672 format.fmt.pix_mp.num_planes = n_v4l_planes;
2674 /* try to ask for our preferred stride, but it's not a failure if not
2676 for (i = 0; i < n_v4l_planes; i++) {
2677 gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
2679 if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
2680 stride = GST_VIDEO_TILE_X_TILES (stride) <<
2681 GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
2683 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
2686 if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
2687 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
2689 gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
2691 format.type = v4l2object->type;
2692 format.fmt.pix.width = width;
2693 format.fmt.pix.height = height;
2694 format.fmt.pix.pixelformat = pixelformat;
2695 format.fmt.pix.field = field;
2697 if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
2698 stride = GST_VIDEO_TILE_X_TILES (stride) <<
2699 GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
2701 /* try to ask for our preferred stride */
2702 format.fmt.pix.bytesperline = stride;
2704 if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
2705 format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
2708 GST_DEBUG_OBJECT (v4l2object->element, "Desired format is %dx%d, format "
2709 "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
2710 format.fmt.pix_mp.height,
2711 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2712 is_mplane ? format.fmt.pix_mp.num_planes : 1);
2714 #ifndef GST_DISABLE_GST_DEBUG
2716 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
2717 GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
2718 format.fmt.pix_mp.plane_fmt[i].bytesperline);
2720 GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
2721 format.fmt.pix.bytesperline);
2725 if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
2727 format.fmt.pix_mp.colorspace = colorspace;
2729 format.fmt.pix.colorspace = colorspace;
2731 GST_DEBUG_OBJECT (v4l2object->element, "Desired colorspace is %d",
2735 if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
2736 goto set_fmt_failed;
2738 GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format "
2739 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d",
2740 format.fmt.pix.width, format.fmt.pix_mp.height,
2741 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
2742 is_mplane ? format.fmt.pix_mp.num_planes : 1,
2743 is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace);
2745 #ifndef GST_DISABLE_GST_DEBUG
2747 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
2748 GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d",
2749 format.fmt.pix_mp.plane_fmt[i].bytesperline,
2750 format.fmt.pix_mp.plane_fmt[i].sizeimage);
2752 GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d",
2753 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
2757 if (format.fmt.pix.pixelformat != pixelformat)
2758 goto invalid_pixelformat;
2760 /* Only negotiate size with raw data.
2761 * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
2762 * in ASF mode for example; there is also no reason for a driver to
2763 * change the size. */
2764 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED) {
2765 /* We can crop larger images */
2766 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
2767 goto invalid_dimensions;
2769 /* Note, this will be adjusted if upstream has non-centered cropping. */
2770 align.padding_top = 0;
2771 align.padding_bottom = format.fmt.pix.height - height;
2772 align.padding_left = 0;
2773 align.padding_right = format.fmt.pix.width - width;
2776 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
2777 goto invalid_planes;
2779 if (GST_VIDEO_INFO_HAS_ALPHA (&info)) {
2780 struct v4l2_control ctl = { 0, };
2781 ctl.id = V4L2_CID_ALPHA_COMPONENT;
2784 if (v4l2_ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
2785 GST_WARNING_OBJECT (v4l2object->element,
2786 "Failed to set alpha component value");
2789 /* Is there a reason we require the caller to always specify a framerate? */
2790 GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
2793 memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
2794 streamparm.type = v4l2object->type;
2796 if (v4l2_ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
2797 goto get_parm_failed;
2799 GST_VIDEO_INFO_FPS_N (&info) =
2800 streamparm.parm.capture.timeperframe.denominator;
2801 GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator;
2803 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
2804 || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
2805 GST_DEBUG_OBJECT (v4l2object->element, "Got framerate: %u/%u",
2806 streamparm.parm.capture.timeperframe.denominator,
2807 streamparm.parm.capture.timeperframe.numerator);
2809 /* We used to skip frame rate setup if the camera was already setup
2810 * with the requested frame rate. This breaks some cameras though,
2811 * causing them to not output data (several models of Thinkpad cameras
2812 * have this problem at least).
2813 * So, don't skip. */
2814 GST_LOG_OBJECT (v4l2object->element, "Setting framerate to %u/%u", fps_n,
2816 /* We want to change the frame rate, so check whether we can. Some cheap USB
2817 * cameras don't have the capability */
2818 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
2819 GST_DEBUG_OBJECT (v4l2object->element,
2820 "Not setting framerate (not supported)");
2824 /* Note: V4L2 wants the frame interval, we have the frame rate */
2825 streamparm.parm.capture.timeperframe.numerator = fps_d;
2826 streamparm.parm.capture.timeperframe.denominator = fps_n;
2828 /* some cheap USB cams won't accept any change */
2829 if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
2830 goto set_parm_failed;
2832 /* get new values */
2833 fps_d = streamparm.parm.capture.timeperframe.numerator;
2834 fps_n = streamparm.parm.capture.timeperframe.denominator;
2836 GST_INFO_OBJECT (v4l2object->element, "Set framerate to %u/%u", fps_n,
2839 GST_VIDEO_INFO_FPS_N (&info) = fps_n;
2840 GST_VIDEO_INFO_FPS_D (&info) = fps_d;
2844 /* add boolean return, so we can fail on driver bugs */
2845 gst_v4l2_object_save_format (v4l2object, fmtdesc, &format, &info, &align);
2847 /* now configure the pool */
2848 if (!gst_v4l2_object_setup_pool (v4l2object, caps))
2856 GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT,
2862 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2863 (_("Device '%s' does not support video capture"),
2864 v4l2object->videodev),
2865 ("Call to G_FMT failed: (%s)", g_strerror (errno)));
2870 if (errno == EBUSY) {
2871 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, BUSY,
2872 (_("Device '%s' is busy"), v4l2object->videodev),
2873 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
2874 GST_FOURCC_ARGS (pixelformat), width, height,
2875 g_strerror (errno)));
2877 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2878 (_("Device '%s' cannot capture at %dx%d"),
2879 v4l2object->videodev, width, height),
2880 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
2881 GST_FOURCC_ARGS (pixelformat), width, height,
2882 g_strerror (errno)));
2888 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2889 (_("Device '%s' cannot capture at %dx%d"),
2890 v4l2object->videodev, width, height),
2891 ("Tried to capture at %dx%d, but device returned size %dx%d",
2892 width, height, format.fmt.pix.width, format.fmt.pix.height));
2895 invalid_pixelformat:
2897 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2898 (_("Device '%s' cannot capture in the specified format"),
2899 v4l2object->videodev),
2900 ("Tried to capture in %" GST_FOURCC_FORMAT
2901 ", but device returned format" " %" GST_FOURCC_FORMAT,
2902 GST_FOURCC_ARGS (pixelformat),
2903 GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
2908 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
2909 (_("Device '%s' does support non-contiguous planes"),
2910 v4l2object->videodev),
2911 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
2916 /* it's possible that this call is not supported */
2917 if (errno != EINVAL && errno != ENOTTY) {
2918 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
2919 (_("Could not get parameters on device '%s'"),
2920 v4l2object->videodev), GST_ERROR_SYSTEM);
2926 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
2927 (_("Video device did not accept new frame rate setting.")),
2933 /* setup_pool already sent the error */
2939 * gst_v4l2_object_acquire_format:
2940 * @v4l2object: the object
2941 * @info: a #GstVideoInfo to be filled
2943 * Acquire the format chosen by the driver. This is useful in decoder or
2944 * encoder elements where the output format is chosen by the hardware.
2946 * Returns: %TRUE on success, %FALSE on failure.
2949 gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info)
2951 struct v4l2_fmtdesc *fmtdesc;
2952 struct v4l2_format fmt;
2953 struct v4l2_crop crop;
2954 GstVideoFormat format;
2955 guint width, height;
2956 GstVideoAlignment align;
2958 gst_video_info_init (info);
2959 gst_video_alignment_reset (&align);
2961 memset (&fmt, 0x00, sizeof (struct v4l2_format));
2962 fmt.type = v4l2object->type;
2963 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
2964 goto get_fmt_failed;
2966 fmtdesc = gst_v4l2_object_get_format_from_fourcc (v4l2object,
2967 fmt.fmt.pix.pixelformat);
2968 if (fmtdesc == NULL)
2969 goto unsupported_format;
2971 /* No need to care about mplane, the first four params are the same */
2972 format = gst_v4l2_object_v4l2fourcc_to_video_format (fmt.fmt.pix.pixelformat);
2974 /* fails if we do not translate the fmt.pix.pixelformat to GstVideoFormat */
2975 if (format == GST_VIDEO_FORMAT_UNKNOWN)
2976 goto unsupported_format;
2978 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
2979 goto invalid_dimensions;
2981 width = fmt.fmt.pix.width;
2982 height = fmt.fmt.pix.height;
2984 memset (&crop, 0, sizeof (struct v4l2_crop));
2985 crop.type = v4l2object->type;
2986 if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0) {
2987 align.padding_left = crop.c.left;
2988 align.padding_top = crop.c.top;
2989 align.padding_right = width - crop.c.width - crop.c.left;
2990 align.padding_bottom = height - crop.c.height - crop.c.top;
2991 width = crop.c.width;
2992 height = crop.c.height;
2995 gst_video_info_set_format (info, format, width, height);
2997 switch (fmt.fmt.pix.field) {
2998 case V4L2_FIELD_ANY:
2999 case V4L2_FIELD_NONE:
3000 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
3002 case V4L2_FIELD_INTERLACED:
3003 case V4L2_FIELD_INTERLACED_TB:
3004 case V4L2_FIELD_INTERLACED_BT:
3005 info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
3008 goto unsupported_field;
3011 gst_v4l2_object_save_format (v4l2object, fmtdesc, &fmt, info, &align);
3013 /* Shall we set up the pool? */
3019 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
3020 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
3025 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
3026 (_("Video device returned invalid dimensions.")),
3027 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
3028 fmt.fmt.pix.height));
3033 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
3034 (_("Video devices uses an unsupported interlacing method.")),
3035 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
3040 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
3041 (_("Video devices uses an unsupported pixel format.")),
3042 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
3043 GST_FOURCC_ARGS (fmt.fmt.pix.pixelformat)));
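/* Apply the padding stored in obj->align as a crop rectangle around the
 * active video area, skipping the ioctl when no padding is needed. */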
3049 gst_v4l2_object_set_crop (GstV4l2Object * obj)
3051 struct v4l2_crop crop = { 0 };
3053 crop.type = obj->type;
3054 crop.c.left = obj->align.padding_left;
3055 crop.c.top = obj->align.padding_top;
3056 crop.c.width = obj->info.width;
3057 crop.c.height = obj->info.height;
3059 if (obj->align.padding_left + obj->align.padding_top +
3060 obj->align.padding_right + obj->align.padding_bottom == 0) {
3061 GST_DEBUG_OBJECT (obj->element, "no cropping needed");
3065 GST_DEBUG_OBJECT (obj->element,
3066 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
3067 crop.c.width, crop.c.height);
3069 if (v4l2_ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
3070 GST_WARNING_OBJECT (obj->element, "VIDIOC_S_CROP failed");
3074 GST_DEBUG_OBJECT (obj->element,
3075 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
3076 crop.c.width, crop.c.height);
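/* Check whether the given caps match the caps the current buffer pool was
 * configured with. */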
3082 gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps)
3084 GstStructure *config;
3088 if (!v4l2object->pool)
3091 config = gst_buffer_pool_get_config (v4l2object->pool);
3092 gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
3094 ret = oldcaps && gst_caps_is_equal (caps, oldcaps);
3096 gst_structure_free (config);
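/* Flush control: unlock sets the pool to flushing so pending operations
 * return early, unlock_stop clears the flushing state again. */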
3102 gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
3104 gboolean ret = TRUE;
3106 GST_LOG_OBJECT (v4l2object->element, "start flushing");
3108 if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
3109 gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
3115 gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
3117 gboolean ret = TRUE;
3119 GST_LOG_OBJECT (v4l2object->element, "stop flushing");
3121 if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
3122 gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
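/* Deactivate and release the buffer pool and mark the object inactive. */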
3128 gst_v4l2_object_stop (GstV4l2Object * v4l2object)
3130 GST_DEBUG_OBJECT (v4l2object->element, "stopping");
3132 if (!GST_V4L2_IS_OPEN (v4l2object))
3134 if (!GST_V4L2_IS_ACTIVE (v4l2object))
3137 if (v4l2object->pool) {
3138 GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
3139 gst_buffer_pool_set_active (v4l2object->pool, FALSE);
3140 gst_object_unref (v4l2object->pool);
3141 v4l2object->pool = NULL;
3144 GST_V4L2_SET_INACTIVE (v4l2object);
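/* Build (and cache) the caps supported by the device by enumerating its
 * formats and probing the sizes and frame rates for each of them, then
 * intersect the result with the optional filter. */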
3151 gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter)
3157 if (v4l2object->probed_caps == NULL) {
3158 formats = gst_v4l2_object_get_format_list (v4l2object);
3160 ret = gst_caps_new_empty ();
3162 for (walk = formats; walk; walk = walk->next) {
3163 struct v4l2_fmtdesc *format;
3164 GstStructure *template;
3166 format = (struct v4l2_fmtdesc *) walk->data;
3169 gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
3174 tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
3175 format->pixelformat, template);
3177 gst_caps_append (ret, tmp);
3179 gst_structure_free (template);
3181 GST_DEBUG_OBJECT (v4l2object->element, "unknown format %u",
3182 format->pixelformat);
3185 v4l2object->probed_caps = ret;
3189 ret = gst_caps_intersect_full (filter, v4l2object->probed_caps,
3190 GST_CAPS_INTERSECT_FIRST);
3192 ret = gst_caps_ref (v4l2object->probed_caps);
3195 GST_INFO_OBJECT (v4l2object->element, "probed caps: %" GST_PTR_FORMAT, ret);
3196 LOG_CAPS (v4l2object->element, ret);
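/* Decide allocation for capture: pick between our own pool and the pool
 * proposed in the query depending on the I/O mode, honour the driver's
 * minimum buffer count (V4L2_CID_MIN_BUFFERS_FOR_CAPTURE), and configure
 * the selected pool(s) accordingly. */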
3202 gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
3205 GstBufferPool *pool = NULL, *other_pool = NULL;
3206 GstStructure *config;
3207 guint size, min, max, own_min = 0;
3209 gboolean has_video_meta;
3210 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
3211 struct v4l2_control ctl = { 0, };
3212 GstAllocator *allocator = NULL;
3213 GstAllocationParams params = { 0 };
3215 GST_DEBUG_OBJECT (obj->element, "decide allocation");
3217 g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
3218 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
3220 gst_query_parse_allocation (query, &caps, NULL);
3222 if (obj->pool == NULL) {
3223 if (!gst_v4l2_object_setup_pool (obj, caps))
3227 if (gst_query_get_n_allocation_params (query) > 0)
3228 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
3230 if (gst_query_get_n_allocation_pools (query) > 0) {
3231 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
3240 GST_DEBUG_OBJECT (obj->element, "allocation: size:%u min:%u max:%u pool:%"
3241 GST_PTR_FORMAT, size, min, max, pool);
3244 gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
3246 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
3248 /* Certain drivers may expose a minimum through controls */
3249 ctl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
3250 if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CTRL, &ctl) >= 0) {
3251 GST_DEBUG_OBJECT (obj->element, "driver requires a minimum of %d buffers",
3253 obj->min_buffers = ctl.value;
3255 obj->min_buffers = 0;
3258 /* We can't share our own pool if it exceeds V4L2 capacity */
3259 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
3260 can_share_own_pool = FALSE;
3263 switch (obj->mode) {
3264 case GST_V4L2_IO_RW:
3266 /* in READ/WRITE mode, prefer a downstream pool because our own pool
3267 * doesn't help much, we have to write to it as well */
3268 GST_DEBUG_OBJECT (obj->element,
3269 "read/write mode: using downstream pool");
3270 /* use the biggest size; when we use our own pool we can't really do any
3271 * other size than what the hardware gives us but for downstream pools
3273 size = MAX (size, obj->info.size);
3274 } else if (can_share_own_pool) {
3275 /* no downstream pool, use our own then */
3276 GST_DEBUG_OBJECT (obj->element,
3277 "read/write mode: no downstream pool, using our own");
3278 pool = gst_object_ref (obj->pool);
3279 size = obj->info.size;
3280 pushing_from_our_pool = TRUE;
3284 case GST_V4L2_IO_USERPTR:
3285 case GST_V4L2_IO_DMABUF_IMPORT:
3286 /* in importing mode, prefer our own pool, and pass the other pool to
3287 * our own, so it can serve itself */
3289 goto no_downstream_pool;
3290 gst_v4l2_buffer_pool_set_other_pool (GST_V4L2_BUFFER_POOL (obj->pool),
3293 gst_object_unref (pool);
3294 pool = gst_object_ref (obj->pool);
3295 size = obj->info.size;
3298 case GST_V4L2_IO_MMAP:
3299 case GST_V4L2_IO_DMABUF:
3300 /* in streaming mode, prefer our own pool */
3301 /* Check if we can use it ... */
3302 if (can_share_own_pool) {
3304 gst_object_unref (pool);
3305 pool = gst_object_ref (obj->pool);
3306 size = obj->info.size;
3307 GST_DEBUG_OBJECT (obj->element,
3308 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
3309 pushing_from_our_pool = TRUE;
3311 GST_DEBUG_OBJECT (obj->element,
3312 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
3315 GST_DEBUG_OBJECT (obj->element,
3316 "streaming mode: no usable pool, copying to generic pool");
3317 size = MAX (size, obj->info.size);
3320 case GST_V4L2_IO_AUTO:
3322 GST_WARNING_OBJECT (obj->element, "unhandled mode");
3329 /* If pushing from our own pool, configure it with queried minimum,
3330 * otherwise use the minimum required */
3331 if (pushing_from_our_pool) {
3332 /* When pushing from our own pool, we need what the downstream one wants,
3333 * to be able to fill the pipeline, plus the minimum required by the decoder
3334 * according to the driver, and 1 more, so we don't end up with everything
3335 * downstream or held by the decoder. */
3336 own_min = min + obj->min_buffers + 1;
3338 /* In this case we'll have to configure two buffer pools. For our buffer
3339 * pool, we'll need what the driver requires, and one more, so we can dequeue */
3340 own_min = obj->min_buffers + 1;
3341 own_min = MAX (own_min, GST_V4L2_MIN_BUFFERS);
3343 /* for the downstream pool, we keep what downstream wants, though ensure
3344 * at least a minimum if downstream didn't suggest anything (we are
3345 * expecting the base class to create a default one for the context) */
3346 min = MAX (min, GST_V4L2_MIN_BUFFERS);
3348 /* To import we need the other pool to hold at least own_min */
3349 if (obj->pool == pool)
3353 /* Request a bigger max, if one was suggested but it's too small */
3355 max = MAX (min, max);
3357 /* First step, configure our own pool */
3358 config = gst_buffer_pool_get_config (obj->pool);
3360 if (obj->need_video_meta || has_video_meta) {
3361 GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
3362 gst_buffer_pool_config_add_option (config,
3363 GST_BUFFER_POOL_OPTION_VIDEO_META);
3366 gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
3367 gst_buffer_pool_config_set_params (config, caps, size, own_min, 0);
3369 GST_DEBUG_OBJECT (obj->element, "setting own pool config to %"
3370 GST_PTR_FORMAT, config);
3372 /* Our pool often needs to adjust the value */
3373 if (!gst_buffer_pool_set_config (obj->pool, config)) {
3374 config = gst_buffer_pool_get_config (obj->pool);
3376 GST_DEBUG_OBJECT (obj->element, "own pool config changed to %"
3377 GST_PTR_FORMAT, config);
3379 /* our pool will adjust the maximum buffer, which we are fine with */
3380 if (!gst_buffer_pool_set_config (obj->pool, config))
3384 /* Now configure the other pool if different */
3385 if (obj->pool != pool)
3389 config = gst_buffer_pool_get_config (other_pool);
3390 gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
3391 gst_buffer_pool_config_set_params (config, caps, size, min, max);
3393 GST_DEBUG_OBJECT (obj->element, "setting other pool config to %"
3394 GST_PTR_FORMAT, config);
3396 /* if downstream supports video metadata, add this to the pool config */
3397 if (has_video_meta) {
3398 GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
3399 gst_buffer_pool_config_add_option (config,
3400 GST_BUFFER_POOL_OPTION_VIDEO_META);
3403 if (!gst_buffer_pool_set_config (other_pool, config)) {
3404 config = gst_buffer_pool_get_config (other_pool);
3406 if (!gst_buffer_pool_config_validate_params (config, caps, size, min,
3408 gst_structure_free (config);
3412 if (!gst_buffer_pool_set_config (other_pool, config))
3418 /* For simplicity, simply read back the active configuration, so our base
3419 * class gets the right information */
3420 config = gst_buffer_pool_get_config (pool);
3421 gst_buffer_pool_config_get_params (config, NULL, &size, &min, &max);
3422 gst_structure_free (config);
3426 gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
3428 gst_query_add_allocation_pool (query, pool, size, min, max);
3431 gst_object_unref (allocator);
3434 gst_object_unref (pool);
3440 /* setup_pool already sent the error */
3445 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
3446 (_("Failed to configure internal buffer pool.")), (NULL));
3451 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
3452 (_("Video device did not suggest any buffer size.")), (NULL));
3458 gst_object_unref (allocator);
3461 gst_object_unref (pool);
3466 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
3467 (_("No downstream pool to import from.")),
3468 ("When importing DMABUF or USERPTR, we need a pool to import from"));
3474 gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
3476 GstBufferPool *pool;
3477 /* we need at least 2 buffers to operate */
3478 guint size, min, max;
3481 struct v4l2_control ctl = { 0, };
3483 /* Set default allocation parameters */
3484 size = obj->info.size;
3485 min = GST_V4L2_MIN_BUFFERS;
3486 max = VIDEO_MAX_FRAME;
3488 gst_query_parse_allocation (query, &caps, &need_pool);
3493 if ((pool = obj->pool))
3494 gst_object_ref (pool);
3498 GstStructure *config;
3500 /* we had a pool, check caps */
3501 config = gst_buffer_pool_get_config (pool);
3502 gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
3504 GST_DEBUG_OBJECT (obj->element,
3505 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
3506 if (!gst_caps_is_equal (caps, pcaps)) {
3507 gst_structure_free (config);
3508 gst_object_unref (pool);
3509 goto different_caps;
3511 gst_structure_free (config);
3514 /* Some devices may expose a minimum */
3515 ctl.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
3516 if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CTRL, &ctl) >= 0) {
3517 GST_DEBUG_OBJECT (obj->element, "driver requires a minimum of %d buffers",
3519 obj->min_buffers = ctl.value;
3521 obj->min_buffers = 0;
3524 min = MAX (obj->min_buffers, GST_V4L2_MIN_BUFFERS);
3526 gst_query_add_allocation_pool (query, pool, size, min, max);
3528 /* we also support various metadata */
3529 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
3532 gst_object_unref (pool);
3539 GST_DEBUG_OBJECT (obj->element, "no caps specified");
3544 /* different caps, we can't use this pool */
3545 GST_DEBUG_OBJECT (obj->element, "pool has different caps");