1 /* GStreamer Video4linux2 video-capture plugin
2 * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
17 * Boston, MA 02111-1307, USA.
26 #include "v4l2src_calls.h"
27 #include "gstv4l2tuner.h"
/* Debug category used by all GST_DEBUG/GST_LOG calls in this file
 * (initialized in gst_v4l2src_get_type below). */
29 GST_DEBUG_CATEGORY (v4l2src_debug);
30 #define GST_CAT_DEFAULT v4l2src_debug
32 /* elementfactory details: long name, description and author shown
 * in gst-inspect and element browsers */
33 static GstElementDetails gst_v4l2src_details = {
34 "Video (video4linux2) Source",
36 "Reads frames (compressed or uncompressed) from a video4linux2 device",
37 "Ronald Bultje <rbultje@ronald.bitfreak.net>"
40 /* V4l2Src signals and args */
/* Table of all V4L2 pixel formats this source knows how to translate
 * into GStreamer caps (see gst_v4l2src_v4l2fourcc_to_caps below).
 * GST_V4L2_FORMAT_COUNT gives the number of entries. */
59 guint32 gst_v4l2_formats[] = {
60 /* from Linux 2.6.0 videodev2.h */
61 V4L2_PIX_FMT_RGB332, /* 8 RGB-3-3-2 */
62 V4L2_PIX_FMT_RGB555, /* 16 RGB-5-5-5 */
63 V4L2_PIX_FMT_RGB565, /* 16 RGB-5-6-5 */
64 V4L2_PIX_FMT_RGB555X, /* 16 RGB-5-5-5 BE */
65 V4L2_PIX_FMT_RGB565X, /* 16 RGB-5-6-5 BE */
66 V4L2_PIX_FMT_BGR24, /* 24 BGR-8-8-8 */
67 V4L2_PIX_FMT_RGB24, /* 24 RGB-8-8-8 */
68 V4L2_PIX_FMT_BGR32, /* 32 BGR-8-8-8-8 */
69 V4L2_PIX_FMT_RGB32, /* 32 RGB-8-8-8-8 */
70 V4L2_PIX_FMT_GREY, /* 8 Greyscale */
71 V4L2_PIX_FMT_YVU410, /* 9 YVU 4:1:0 */
72 V4L2_PIX_FMT_YVU420, /* 12 YVU 4:2:0 */
73 V4L2_PIX_FMT_YUYV, /* 16 YUV 4:2:2 */
74 V4L2_PIX_FMT_UYVY, /* 16 YUV 4:2:2 */
75 V4L2_PIX_FMT_YUV422P, /* 16 YVU422 planar */
76 V4L2_PIX_FMT_YUV411P, /* 16 YVU411 planar */
77 V4L2_PIX_FMT_Y41P, /* 12 YUV 4:1:1 */
78 V4L2_PIX_FMT_NV12, /* 12 Y/CbCr 4:2:0 */
79 V4L2_PIX_FMT_NV21, /* 12 Y/CrCb 4:2:0 */
80 V4L2_PIX_FMT_YUV410, /* 9 YUV 4:1:0 */
81 V4L2_PIX_FMT_YUV420, /* 12 YUV 4:2:0 */
82 V4L2_PIX_FMT_YYUV, /* 16 YUV 4:2:2 */
83 V4L2_PIX_FMT_HI240, /* 8 8-bit color */
84 V4L2_PIX_FMT_MJPEG, /* Motion-JPEG */
85 V4L2_PIX_FMT_JPEG, /* JFIF JPEG */
86 V4L2_PIX_FMT_DV, /* 1394 */
87 V4L2_PIX_FMT_MPEG, /* MPEG */
88 V4L2_PIX_FMT_WNVA /* Winnov hardware compressed */
/* Number of entries in gst_v4l2_formats */
91 #define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
/* Boilerplate pad query helpers: expand to the get_formats /
 * get_query_types functions installed on the src pad in _init. */
93 GST_FORMATS_FUNCTION (GstPad *, gst_v4l2src_get_formats,
94 GST_FORMAT_TIME, GST_FORMAT_DEFAULT);
95 GST_QUERY_TYPE_FUNCTION (GstPad *, gst_v4l2src_get_query_types,
/* GObject/GType plumbing */
99 static void gst_v4l2src_class_init (gpointer g_class, gpointer class_data);
100 static void gst_v4l2src_base_init (gpointer g_class);
101 static void gst_v4l2src_init (GTypeInstance * instance, gpointer g_class);
103 /* signal functions (hooked into the GstV4l2Element open/close vfuncs) */
104 static void gst_v4l2src_open (GstElement * element, const gchar * device);
105 static void gst_v4l2src_close (GstElement * element, const gchar * device);
107 /* pad/buffer functions */
108 static const GstCaps *gst_v4l2src_get_all_caps (void);
109 static GstPadLinkReturn gst_v4l2src_link (GstPad * pad, const GstCaps * caps);
110 static GstCaps *gst_v4l2src_getcaps (GstPad * pad);
111 static GstCaps *gst_v4l2src_fixate (GstPad * pad, const GstCaps * caps);
112 static GstData *gst_v4l2src_get (GstPad * pad);
113 static gboolean gst_v4l2src_src_convert (GstPad * pad,
114 GstFormat src_format,
115 gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
116 static gboolean gst_v4l2src_src_query (GstPad * pad,
117 GstQueryType type, GstFormat * format, gint64 * value);
/* GObject property accessors */
120 static void gst_v4l2src_set_property (GObject * object,
121 guint prop_id, const GValue * value, GParamSpec * pspec);
122 static void gst_v4l2src_get_property (GObject * object,
123 guint prop_id, GValue * value, GParamSpec * pspec);
/* element state handling */
126 static GstElementStateReturn gst_v4l2src_change_state (GstElement * element);
128 /* set_clock function for A/V sync */
129 static void gst_v4l2src_set_clock (GstElement * element, GstClock * clock);
/* Parent class pointer (filled in class_init) and signal id table */
131 static GstElementClass *parent_class = NULL;
132 static guint gst_v4l2src_signals[LAST_SIGNAL] = { 0 };
/* Standard GObject boilerplate: registers the GstV4l2Src type exactly
 * once (derived from GST_TYPE_V4L2ELEMENT) and initializes the
 * "v4l2src" debug category. */
136 gst_v4l2src_get_type (void)
138 static GType v4l2src_type = 0;
141 static const GTypeInfo v4l2src_info = {
142 sizeof (GstV4l2SrcClass),
143 gst_v4l2src_base_init,
145 gst_v4l2src_class_init,
153 v4l2src_type = g_type_register_static (GST_TYPE_V4L2ELEMENT,
154 "GstV4l2Src", &v4l2src_info, 0);
155 GST_DEBUG_CATEGORY_INIT (v4l2src_debug, "v4l2src", 0, "v4l2src element");
/* base_init: install the element details and the always-present "src"
 * pad template (built from the static list of all supported caps). */
161 gst_v4l2src_base_init (gpointer g_class)
163 GstPadTemplate *template;
164 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
166 gst_element_class_set_details (gstelement_class, &gst_v4l2src_details);
/* template takes ownership of the caps copy */
168 template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
169 gst_caps_copy (gst_v4l2src_get_all_caps ()));
171 gst_element_class_add_pad_template (gstelement_class, template);
/* class_init: install properties (num_buffers, buffer_size,
 * use_fixed_fps), the frame-capture/drop/insert/lost signals, the
 * state-change and set_clock vfuncs, and the v4l2 open/close hooks. */
175 gst_v4l2src_class_init (gpointer g_class, gpointer class_data)
177 GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
178 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
179 GstV4l2ElementClass *v4l2_class = GST_V4L2ELEMENT_CLASS (g_class);
181 parent_class = g_type_class_peek_parent (g_class);
183 gobject_class->set_property = gst_v4l2src_set_property;
184 gobject_class->get_property = gst_v4l2src_get_property;
/* number of capture buffers to request (settable only while inactive) */
186 g_object_class_install_property (gobject_class, ARG_NUMBUFS,
187 g_param_spec_int ("num_buffers", "num_buffers", "num_buffers",
188 G_MININT, G_MAXINT, 0, G_PARAM_READWRITE));
/* read-only: size in bytes of one captured frame */
189 g_object_class_install_property (gobject_class, ARG_BUFSIZE,
190 g_param_spec_int ("buffer_size", "buffer_size", "buffer_size",
191 G_MININT, G_MAXINT, 0, G_PARAM_READABLE));
/* NOTE(review): "frabbed" in the blurb below is a typo for "grabbed";
 * left as-is here since the string is part of the public property API. */
193 g_object_class_install_property (gobject_class, ARG_USE_FIXED_FPS,
194 g_param_spec_boolean ("use_fixed_fps", "Use Fixed FPS",
195 "Drop/Insert frames to reach a certain FPS (TRUE) "
196 "or adapt FPS to suit the number of frabbed frames",
197 TRUE, G_PARAM_READWRITE));
/* signals emitted from gst_v4l2src_get while capturing */
200 gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE] =
201 g_signal_new ("frame-capture", G_TYPE_FROM_CLASS (g_class),
202 G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4l2SrcClass, frame_capture), NULL,
203 NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
204 gst_v4l2src_signals[SIGNAL_FRAME_DROP] =
205 g_signal_new ("frame-drop", G_TYPE_FROM_CLASS (g_class),
206 G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4l2SrcClass, frame_drop), NULL,
207 NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
/* NOTE(review): "frame_insert" uses an underscore while the sibling
 * signals use dashes — inconsistent naming, but renaming it would
 * break existing signal connections. */
208 gst_v4l2src_signals[SIGNAL_FRAME_INSERT] =
209 g_signal_new ("frame_insert", G_TYPE_FROM_CLASS (g_class),
210 G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4l2SrcClass, frame_insert), NULL,
211 NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
/* frame-lost carries the number of frames the driver skipped */
212 gst_v4l2src_signals[SIGNAL_FRAME_LOST] =
213 g_signal_new ("frame-lost", G_TYPE_FROM_CLASS (g_class),
214 G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4l2SrcClass, frame_lost), NULL,
215 NULL, g_cclosure_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
217 gstelement_class->change_state = gst_v4l2src_change_state;
/* open/close callbacks invoked by the GstV4l2Element base class */
219 v4l2_class->open = gst_v4l2src_open;
220 v4l2_class->close = gst_v4l2src_close;
222 gstelement_class->set_clock = gst_v4l2src_set_clock;
/* instance init: create the src pad from the class template, install
 * all pad functions, and reset per-instance capture state. */
227 gst_v4l2src_init (GTypeInstance * instance, gpointer g_class)
229 GstV4l2Src *v4l2src = GST_V4L2SRC (instance);
/* hint the scheduler that this element prefers its own thread */
231 GST_FLAG_SET (GST_ELEMENT (v4l2src), GST_ELEMENT_THREAD_SUGGESTED);
234 gst_pad_new_from_template (gst_element_class_get_pad_template
235 (GST_ELEMENT_GET_CLASS (v4l2src), "src"), "src");
236 gst_element_add_pad (GST_ELEMENT (v4l2src), v4l2src->srcpad);
238 gst_pad_set_get_function (v4l2src->srcpad, gst_v4l2src_get);
239 gst_pad_set_link_function (v4l2src->srcpad, gst_v4l2src_link);
240 gst_pad_set_getcaps_function (v4l2src->srcpad, gst_v4l2src_getcaps);
241 gst_pad_set_fixate_function (v4l2src->srcpad, gst_v4l2src_fixate);
242 gst_pad_set_convert_function (v4l2src->srcpad, gst_v4l2src_src_convert);
243 gst_pad_set_formats_function (v4l2src->srcpad, gst_v4l2src_get_formats);
244 gst_pad_set_query_function (v4l2src->srcpad, gst_v4l2src_src_query);
245 gst_pad_set_query_type_function (v4l2src->srcpad,
246 gst_v4l2src_get_query_types);
/* no buffers requested yet */
248 v4l2src->breq.count = 0;
/* format list is filled when the device is opened */
250 v4l2src->formats = NULL;
/* no clock provided yet (set via gst_v4l2src_set_clock) */
253 v4l2src->clock = NULL;
/* default matches the use_fixed_fps property default (TRUE) */
256 v4l2src->use_fixed_fps = TRUE;
/* Device-open hook: enumerate the device's supported pixel formats
 * into v4l2src->formats. */
261 gst_v4l2src_open (GstElement * element, const gchar * device)
263 gst_v4l2src_fill_format_list (GST_V4L2SRC (element));
/* Device-close hook: free the format list built in gst_v4l2src_open. */
268 gst_v4l2src_close (GstElement * element, const gchar * device)
270 gst_v4l2src_clear_format_list (GST_V4L2SRC (element));
/* Determine the capture frame rate. In adaptive-fps mode with a clock
 * and at least one frame handled, derive fps from frames-handled over
 * elapsed clock time; otherwise fall back to the fps of the currently
 * selected video norm (requires an open device). */
275 gst_v4l2src_get_fps (GstV4l2Src * v4l2src)
280 if (!v4l2src->use_fixed_fps && v4l2src->clock != NULL && v4l2src->handled > 0) {
281 /* try to get time from clock master and calculate fps */
282 GstClockTime time = gst_clock_get_time (v4l2src->clock) -
283 v4l2src->substract_time;
284 return v4l2src->handled * GST_SECOND / time;
287 /* if that failed ... fall back to the norm's nominal rate */
289 if (!GST_V4L2_IS_OPEN (GST_V4L2ELEMENT (v4l2src)))
292 if (!gst_v4l2_get_norm (GST_V4L2ELEMENT (v4l2src), &norm))
294 for (item = GST_V4L2ELEMENT (v4l2src)->norms; item != NULL; item = item->next) {
295 GstV4l2TunerNorm *v4l2norm = item->data;
297 if (v4l2norm->index == norm)
298 return GST_TUNER_NORM (v4l2norm)->fps;
/* Pad convert function: translate between GST_FORMAT_TIME and
 * GST_FORMAT_DEFAULT (frame count) using the current fps.
 * Fails when fps cannot be determined (returns 0). */
305 gst_v4l2src_src_convert (GstPad * pad,
306 GstFormat src_format,
307 gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
312 v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
314 if ((fps = gst_v4l2src_get_fps (v4l2src)) == 0)
317 switch (src_format) {
318 case GST_FORMAT_TIME:
319 switch (*dest_format) {
320 case GST_FORMAT_DEFAULT:
/* time -> frames */
321 *dest_value = src_value * fps / GST_SECOND;
328 case GST_FORMAT_DEFAULT:
329 switch (*dest_format) {
330 case GST_FORMAT_TIME:
/* frames -> time */
331 *dest_value = src_value * GST_SECOND / fps;
/* Pad query function: answer position queries in time (handled frames
 * converted via fps) or in frames (handled count directly). */
346 gst_v4l2src_src_query (GstPad * pad,
347 GstQueryType type, GstFormat * format, gint64 * value)
349 GstV4l2Src *v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
353 if ((fps = gst_v4l2src_get_fps (v4l2src)) == 0)
357 case GST_QUERY_POSITION:
359 case GST_FORMAT_TIME:
360 *value = v4l2src->handled * GST_SECOND / fps;
362 case GST_FORMAT_DEFAULT:
363 *value = v4l2src->handled;
/* Map a V4L2 pixelformat fourcc to a GstStructure describing the
 * corresponding GStreamer media type (video/x-jpeg, video/x-raw-rgb
 * with masks/endianness, video/x-raw-yuv with a GStreamer fourcc,
 * video/x-dv, ...). Returns NULL for formats without a mapping. */
378 static GstStructure *
379 gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc)
381 GstStructure *structure = NULL;
384 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
385 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
386 structure = gst_structure_new ("video/x-jpeg", NULL);
388 case V4L2_PIX_FMT_RGB332:
389 case V4L2_PIX_FMT_RGB555:
390 case V4L2_PIX_FMT_RGB555X:
391 case V4L2_PIX_FMT_RGB565:
392 case V4L2_PIX_FMT_RGB565X:
393 case V4L2_PIX_FMT_RGB24:
394 case V4L2_PIX_FMT_BGR24:
395 case V4L2_PIX_FMT_RGB32:
396 case V4L2_PIX_FMT_BGR32:{
/* RGB variants: pick bpp/depth/masks/endianness per format */
397 guint depth = 0, bpp = 0;
399 guint32 r_mask = 0, b_mask = 0, g_mask = 0;
402 case V4L2_PIX_FMT_RGB332:
404 endianness = G_BYTE_ORDER; /* 'like, whatever' */
409 case V4L2_PIX_FMT_RGB555:
410 case V4L2_PIX_FMT_RGB555X:
/* the X suffix marks the big-endian variant */
414 fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
419 case V4L2_PIX_FMT_RGB565:
420 case V4L2_PIX_FMT_RGB565X:
423 fourcc == V4L2_PIX_FMT_RGB565X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
427 case V4L2_PIX_FMT_RGB24:
429 endianness = G_BIG_ENDIAN;
434 case V4L2_PIX_FMT_BGR24:
436 endianness = G_BIG_ENDIAN;
441 case V4L2_PIX_FMT_RGB32:
443 endianness = G_BIG_ENDIAN;
448 case V4L2_PIX_FMT_BGR32:
450 endianness = G_BIG_ENDIAN;
/* unreachable: outer switch already filtered the fourcc */
456 g_assert_not_reached ();
459 structure = gst_structure_new ("video/x-raw-rgb",
460 "bpp", G_TYPE_INT, bpp,
461 "depth", G_TYPE_INT, depth,
462 "red_mask", G_TYPE_INT, r_mask,
463 "green_mask", G_TYPE_INT, g_mask,
464 "blue_mask", G_TYPE_INT, b_mask,
465 "endianness", G_TYPE_INT, endianness, NULL);
/* YUV formats with no GStreamer mapping yet fall through here */
468 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
469 case V4L2_PIX_FMT_YUV422P: /* 16 YVU422 planar */
470 case V4L2_PIX_FMT_YUV411P: /* 16 YVU411 planar */
471 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
472 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
473 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
474 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
475 /* FIXME: get correct fourccs here */
477 case V4L2_PIX_FMT_YVU410:
478 case V4L2_PIX_FMT_YUV410:
479 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
480 case V4L2_PIX_FMT_YUYV:
481 case V4L2_PIX_FMT_YVU420:
482 case V4L2_PIX_FMT_UYVY:
483 case V4L2_PIX_FMT_Y41P:{
/* translate the V4L2 fourcc into the GStreamer YUV fourcc */
487 case V4L2_PIX_FMT_YVU410:
488 fcc = GST_MAKE_FOURCC ('Y', 'V', 'U', '9');
490 case V4L2_PIX_FMT_YUV410:
491 fcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
493 case V4L2_PIX_FMT_YUV420:
494 fcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
496 case V4L2_PIX_FMT_YUYV:
497 fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
499 case V4L2_PIX_FMT_YVU420:
500 fcc = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
502 case V4L2_PIX_FMT_UYVY:
503 fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
505 case V4L2_PIX_FMT_Y41P:
506 fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
509 g_assert_not_reached ();
512 structure = gst_structure_new ("video/x-raw-yuv",
513 "format", GST_TYPE_FOURCC, fcc, NULL);
516 case V4L2_PIX_FMT_DV:
518 gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
521 case V4L2_PIX_FMT_MPEG: /* MPEG */
522 /* someone figure out the MPEG format used... */
524 case V4L2_PIX_FMT_WNVA: /* Winnov hardware compressed */
527 GST_DEBUG ("Unknown fourcc 0x%08x " GST_FOURCC_FORMAT,
528 fourcc, GST_FOURCC_ARGS (fourcc));
/* NOTE(review): the caps/width/height/fps below reference variables
 * not declared in this function — presumably these lines belong to a
 * neighbouring caps-building helper; confirm against full source. */
532 gst_caps_set_simple (caps,
533 "width", G_TYPE_INT, width,
534 "height", G_TYPE_INT, height, "framerate", G_TYPE_DOUBLE, fps, NULL);
/* Look up the device's v4l2_fmtdesc matching the given fourcc.
 * MJPEG and JPEG are treated as interchangeable. */
539 static struct v4l2_fmtdesc *
540 gst_v4l2src_get_format_from_fourcc (GstV4l2Src * v4l2src, guint32 fourcc)
542 struct v4l2_fmtdesc *fmt;
548 walk = v4l2src->formats;
550 fmt = (struct v4l2_fmtdesc *) walk->data;
551 if (fmt->pixelformat == fourcc)
553 /* special case for jpeg: accept either JPEG flavour for the other */
554 if ((fmt->pixelformat == V4L2_PIX_FMT_MJPEG && fourcc == V4L2_PIX_FMT_JPEG)
555 || (fmt->pixelformat == V4L2_PIX_FMT_JPEG
556 && fourcc == V4L2_PIX_FMT_MJPEG)) {
559 walk = g_slist_next (walk);
/* Inverse of gst_v4l2src_v4l2fourcc_to_caps: derive the V4L2
 * pixelformat fourcc from a GstStructure. RGB variants are
 * disambiguated by depth, endianness and red_mask. */
566 gst_v4l2_fourcc_from_structure (GstStructure * structure)
569 const gchar *mimetype = gst_structure_get_name (structure);
571 if (!strcmp (mimetype, "video/x-raw-yuv") ||
572 !strcmp (mimetype, "video/x-raw-rgb")) {
/* use a sentinel fourcc for RGB so one switch handles both */
573 if (!strcmp (mimetype, "video/x-raw-rgb"))
574 fourcc = GST_MAKE_FOURCC ('R', 'G', 'B', ' ');
576 gst_structure_get_fourcc (structure, "format", &fourcc);
579 case GST_MAKE_FOURCC ('I', '4', '2', '0'):
580 case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
581 fourcc = V4L2_PIX_FMT_YUV420;
583 case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
584 fourcc = V4L2_PIX_FMT_YUYV;
586 case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
587 fourcc = V4L2_PIX_FMT_Y41P;
589 case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
590 fourcc = V4L2_PIX_FMT_UYVY;
592 case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
593 fourcc = V4L2_PIX_FMT_YVU420;
595 case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):{
596 gint depth, endianness, r_mask;
598 gst_structure_get_int (structure, "depth", &depth);
599 gst_structure_get_int (structure, "endianness", &endianness);
600 gst_structure_get_int (structure, "red_mask", &r_mask);
604 fourcc = V4L2_PIX_FMT_RGB332;
607 fourcc = (endianness == G_LITTLE_ENDIAN) ?
608 V4L2_PIX_FMT_RGB555 : V4L2_PIX_FMT_RGB555X;
611 fourcc = (endianness == G_LITTLE_ENDIAN) ?
612 V4L2_PIX_FMT_RGB565 : V4L2_PIX_FMT_RGB565X;
/* red in the low byte means the layout is BGR */
615 fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR24 : V4L2_PIX_FMT_RGB24;
618 fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR32 : V4L2_PIX_FMT_RGB32;
625 } else if (strcmp (mimetype, "video/x-dv") == 0) {
626 fourcc = V4L2_PIX_FMT_DV;
627 } else if (strcmp (mimetype, "video/x-jpeg") == 0) {
628 fourcc = V4L2_PIX_FMT_JPEG;
/* Convenience wrapper: caps structure -> V4L2 fourcc -> the device's
 * matching v4l2_fmtdesc (NULL if the device doesn't support it). */
634 static struct v4l2_fmtdesc *
635 gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src * v4l2src, GstStructure * structure)
637 return gst_v4l2src_get_format_from_fourcc (v4l2src,
638 gst_v4l2_fourcc_from_structure (structure));
/* Build (once, cached in a static) the union of caps for every format
 * in gst_v4l2_formats, with fully open width/height/framerate ranges.
 * Used for the pad template before any device is opened. */
641 static const GstCaps *
642 gst_v4l2src_get_all_caps (void)
644 static GstCaps *caps = NULL;
647 GstStructure *structure;
650 caps = gst_caps_new_empty ();
651 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
652 structure = gst_v4l2src_v4l2fourcc_to_caps (gst_v4l2_formats[i]);
654 gst_structure_set (structure,
655 "width", GST_TYPE_INT_RANGE, 1, 4096,
656 "height", GST_TYPE_INT_RANGE, 1, 4096,
657 "framerate", GST_TYPE_DOUBLE_RANGE, (double) 0, G_MAXDOUBLE, NULL);
659 gst_caps_append_structure (caps, structure);
/* Pad fixate function: snap width then height of each caps structure
 * to the nearest supported integer value; returns the fixated copy
 * (or frees it when nothing changed — see full source). */
668 gst_v4l2src_fixate (GstPad * pad, const GstCaps * const_caps)
671 GstStructure *structure;
672 G_GNUC_UNUSED gchar *caps_str;
673 gboolean changed = FALSE;
/* work on a mutable copy of the const caps */
674 GstCaps *caps = gst_caps_copy (const_caps);
676 caps_str = gst_caps_to_string (caps);
677 GST_DEBUG_OBJECT (gst_pad_get_parent (pad), "fixating caps %s", caps_str);
/* first pass: fixate width on every structure */
680 for (i = 0; i < gst_caps_get_size (caps); i++) {
681 structure = gst_caps_get_structure (caps, i);
683 gst_caps_structure_fixate_field_nearest_int (structure, "width",
/* second pass: fixate height */
689 for (i = 0; i < gst_caps_get_size (caps); i++) {
690 structure = gst_caps_get_structure (caps, i);
692 gst_caps_structure_fixate_field_nearest_int (structure, "height",
698 gst_caps_free (caps);
/* Pad link function: tear down any active capture, translate the
 * negotiated caps to a device format, program the device with the
 * requested width/height and initialize capture buffers. */
702 static GstPadLinkReturn
703 gst_v4l2src_link (GstPad * pad, const GstCaps * caps)
706 GstV4l2Element *v4l2element;
707 struct v4l2_fmtdesc *format;
709 GstStructure *structure;
711 v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
712 v4l2element = GST_V4L2ELEMENT (v4l2src);
714 structure = gst_caps_get_structure (caps, 0);
716 /* clean up if we still haven't cleaned up our previous
capture session */
718 if (GST_V4L2_IS_ACTIVE (v4l2element)) {
719 if (!gst_v4l2src_capture_deinit (v4l2src))
720 return GST_PAD_LINK_REFUSED;
/* device not open yet: retry the link later */
721 } else if (!GST_V4L2_IS_OPEN (v4l2element)) {
722 return GST_PAD_LINK_DELAYED;
725 /* we want our own v4l2 type of fourcc codes */
726 if (!(format = gst_v4l2_caps_to_v4l2fourcc (v4l2src, structure))) {
727 return GST_PAD_LINK_REFUSED;
730 gst_structure_get_int (structure, "width", &w);
731 gst_structure_get_int (structure, "height", &h);
733 /* we found the pixelformat! - try it out */
734 if (gst_v4l2src_set_capture (v4l2src, format, w, h)) {
735 if (gst_v4l2src_capture_init (v4l2src)) {
736 return GST_PAD_LINK_OK;
740 return GST_PAD_LINK_REFUSED;
/* Pad getcaps function: ANY before the device is open; afterwards a
 * caps list built from the device's enumerated formats, each bounded
 * by the size limits the driver reports. */
745 gst_v4l2src_getcaps (GstPad * pad)
747 GstV4l2Src *v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
749 struct v4l2_fmtdesc *format;
750 int min_w, max_w, min_h, max_h;
752 GstStructure *structure;
754 if (!GST_V4L2_IS_OPEN (GST_V4L2ELEMENT (v4l2src))) {
755 return gst_caps_new_any ();
758 /* build our own capslist */
759 caps = gst_caps_new_empty ();
760 walk = v4l2src->formats;
762 format = (struct v4l2_fmtdesc *) walk->data;
763 walk = g_slist_next (walk);
765 /* get size delimiters; skip formats the driver can't size */
766 if (!gst_v4l2src_get_size_limits (v4l2src, format,
767 &min_w, &max_w, &min_h, &max_h)) {
772 structure = gst_v4l2src_v4l2fourcc_to_caps (format->pixelformat);
775 gst_structure_set (structure,
776 "width", GST_TYPE_INT_RANGE, min_w, max_w,
777 "height", GST_TYPE_INT_RANGE, min_h, max_h,
778 "framerate", GST_TYPE_DOUBLE_RANGE, (double) 0, G_MAXDOUBLE, NULL);
780 gst_caps_append_structure (caps, structure);
/* Pad get function: grab one frame from the device and return it as a
 * GstBuffer. In fixed-fps mode with a clock, frames are dropped or
 * duplicated (via need_writes / cached_buffer) so the output rate
 * matches the master clock; frame-capture/drop/insert/lost signals
 * are emitted along the way. */
788 gst_v4l2src_get (GstPad * pad)
795 v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
797 if (v4l2src->use_fixed_fps && (fps = gst_v4l2src_get_fps (v4l2src)) == 0) {
798 GST_ELEMENT_ERROR (v4l2src, RESOURCE, SETTINGS, (NULL),
799 ("could not get frame rate for element"));
/* duplicate the previously captured frame if we still owe writes */
803 if (v4l2src->need_writes > 0) {
805 buf = v4l2src->cached_buffer;
806 v4l2src->need_writes--;
810 /* grab a frame from the device */
811 num = gst_v4l2src_grab_frame (v4l2src);
815 /* to check if v4l2 sets the correct time */
816 time = GST_TIMEVAL_TO_TIME (v4l2src->pool->buffers[num].buffer.timestamp);
817 if (v4l2src->clock && v4l2src->use_fixed_fps && time != 0) {
818 gboolean have_frame = FALSE;
821 /* FIXME: isn't this v4l2 timestamp its own clock?! */
822 /* by default, we use the frame once */
823 v4l2src->need_writes = 1;
/* NOTE(review): "substract_time" is a misspelling of "subtract_time"
 * kept for consistency with the struct field name. */
825 g_assert (time >= v4l2src->substract_time);
826 time -= v4l2src->substract_time;
828 /* first check whether we lost any frames according to the device */
829 if (v4l2src->last_seq != 0) {
830 if (v4l2src->pool->buffers[num].buffer.sequence - v4l2src->last_seq >
832 v4l2src->need_writes =
833 v4l2src->pool->buffers[num].buffer.sequence - v4l2src->last_seq;
834 g_signal_emit (G_OBJECT (v4l2src),
835 gst_v4l2src_signals[SIGNAL_FRAME_LOST], 0,
836 v4l2src->need_writes - 1);
839 v4l2src->last_seq = v4l2src->pool->buffers[num].buffer.sequence;
841 /* decide how often we're going to write the frame - set
842 * v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE
843 * if we're going to write it - else, just continue.
845 * time is generally the system or audio clock. Let's
846 * say that we've written one second of audio, then we want
847 * to have written one second of video too, within the same
848 * timeframe. This means that if time - begin_time = X sec,
849 * we want to have written X*fps frames. If we've written
850 * more - drop, if we've written less - dup... */
851 if (v4l2src->handled * (GST_SECOND / fps) - time >
852 1.5 * (GST_SECOND / fps)) {
853 /* yo dude, we've got too many frames here! Drop! DROP! */
854 v4l2src->need_writes--; /* -= (v4l2src->handled - (time / fps)); */
855 g_signal_emit (G_OBJECT (v4l2src),
856 gst_v4l2src_signals[SIGNAL_FRAME_DROP], 0);
857 } else if (v4l2src->handled * (GST_SECOND / fps) - time <
858 -1.5 * (GST_SECOND / fps)) {
859 /* this means we're lagging far behind */
860 v4l2src->need_writes++; /* += ((time / fps) - v4l2src->handled); */
861 g_signal_emit (G_OBJECT (v4l2src),
862 gst_v4l2src_signals[SIGNAL_FRAME_INSERT], 0);
/* if we dropped this frame, requeue it and grab the next one */
865 if (v4l2src->need_writes > 0) {
867 v4l2src->need_writes--;
869 if (!gst_v4l2src_queue_frame (v4l2src, num))
871 num = gst_v4l2src_grab_frame (v4l2src);
875 } while (!have_frame);
878 g_assert (num != -1);
879 GST_LOG_OBJECT (v4l2src, "buffer %d needs %d writes", num,
880 v4l2src->need_writes + 1);
/* payload size: driver-reported bytesused, or full buffer length */
881 i = v4l2src->pool->buffers[num].buffer.bytesused >
882 0 ? v4l2src->pool->buffers[num].buffer.bytesused : v4l2src->pool->
884 /* check if this is the last buffer in the queue. If so do a memcpy to put it back asap
885 to avoid framedrops and deadlocks because of stupid elements */
886 if (gst_atomic_int_read (&v4l2src->pool->refcount) == v4l2src->breq.count) {
887 GST_LOG_OBJECT (v4l2src, "using memcpy'd buffer");
888 buf = gst_buffer_new_and_alloc (i);
889 memcpy (GST_BUFFER_DATA (buf), v4l2src->pool->buffers[num].start, i);
890 if (!gst_v4l2src_queue_frame (v4l2src, num)) {
891 gst_data_unref (GST_DATA (buf));
/* otherwise wrap the mmap'd buffer directly (zero-copy) */
895 GST_LOG_OBJECT (v4l2src, "using mmap'd buffer");
896 buf = gst_buffer_new ();
897 GST_BUFFER_DATA (buf) = v4l2src->pool->buffers[num].start;
898 GST_BUFFER_SIZE (buf) = i;
899 GST_BUFFER_FREE_DATA_FUNC (buf) = gst_v4l2src_free_buffer;
900 GST_BUFFER_PRIVATE (buf) = &v4l2src->pool->buffers[num];
901 /* no need to be careful here, both are > 0, because the element uses them */
902 gst_atomic_int_inc (&v4l2src->pool->buffers[num].refcount);
903 gst_atomic_int_inc (&v4l2src->pool->refcount);
905 GST_BUFFER_MAXSIZE (buf) = v4l2src->pool->buffers[num].length;
/* timestamp from frame count in fixed-fps mode, from the driver's
 * timestamp (relative to substract_time) otherwise */
906 if (v4l2src->use_fixed_fps) {
907 GST_BUFFER_TIMESTAMP (buf) = v4l2src->handled * GST_SECOND / fps;
908 GST_BUFFER_DURATION (buf) = GST_SECOND / fps;
910 /* calculate time based on our own clock */
911 GST_BUFFER_TIMESTAMP (buf) =
912 GST_TIMEVAL_TO_TIME (v4l2src->pool->buffers[num].buffer.timestamp) -
913 v4l2src->substract_time;
/* cache the buffer and take extra refs for pending duplicate writes */
915 if (v4l2src->need_writes > 0) {
916 v4l2src->cached_buffer = buf;
917 for (i = 0; i < v4l2src->need_writes; i++) {
918 gst_data_ref (GST_DATA (buf));
924 g_signal_emit (G_OBJECT (v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE],
927 return GST_DATA (buf);
/* GObject set_property: num_buffers and use_fixed_fps may only be
 * changed while capture is not active. */
931 gst_v4l2src_set_property (GObject * object,
932 guint prop_id, const GValue * value, GParamSpec * pspec)
936 g_return_if_fail (GST_IS_V4L2SRC (object));
937 v4l2src = GST_V4L2SRC (object);
941 if (!GST_V4L2_IS_ACTIVE (GST_V4L2ELEMENT (v4l2src))) {
942 v4l2src->breq.count = g_value_get_int (value);
946 case ARG_USE_FIXED_FPS:
947 if (!GST_V4L2_IS_ACTIVE (GST_V4L2ELEMENT (v4l2src))) {
948 v4l2src->use_fixed_fps = g_value_get_boolean (value);
953 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject get_property: report buffer request count, the driver's
 * image size (sizeimage), and the fixed-fps flag. */
960 gst_v4l2src_get_property (GObject * object,
961 guint prop_id, GValue * value, GParamSpec * pspec)
965 g_return_if_fail (GST_IS_V4L2SRC (object));
966 v4l2src = GST_V4L2SRC (object);
970 g_value_set_int (value, v4l2src->breq.count)
971 ;
974 g_value_set_int (value, v4l2src->format.fmt.pix.sizeimage);
977 case ARG_USE_FIXED_FPS:
978 g_value_set_boolean (value, v4l2src->use_fixed_fps);
982 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Element state handler. Chains up to the parent class first, then:
 * NULL->READY      query device capture parameters
 * READY->PAUSED    reset frame counters and timing state
 * PAUSED->PLAYING  queue buffers, start streaming, note start time
 * PLAYING->PAUSED  note pause time, stop streaming
 * PAUSED->READY    stop capture and unmap buffers */
988 static GstElementStateReturn
989 gst_v4l2src_change_state (GstElement * element)
992 gint transition = GST_STATE_TRANSITION (element);
993 GstElementStateReturn parent_return;
996 g_return_val_if_fail (GST_IS_V4L2SRC (element), GST_STATE_FAILURE);
997 v4l2src = GST_V4L2SRC (element);
/* let the base class (device open/close) run first */
999 if (GST_ELEMENT_CLASS (parent_class)->change_state) {
1000 parent_return = GST_ELEMENT_CLASS (parent_class)->change_state (element);
1001 if (parent_return != GST_STATE_SUCCESS)
1002 return parent_return;
1005 switch (transition) {
1006 case GST_STATE_NULL_TO_READY:
1007 if (!gst_v4l2src_get_capture (v4l2src))
1008 return GST_STATE_FAILURE;
1010 case GST_STATE_READY_TO_PAUSED:
1011 v4l2src->handled = 0;
1012 v4l2src->need_writes = 0;
1013 v4l2src->substract_time = 0;
1014 /* buffer setup moved to capsnego */
1016 case GST_STATE_PAUSED_TO_PLAYING:
1017 /* queue all buffer, start streaming capture */
1018 if (!gst_v4l2src_capture_start (v4l2src))
1019 return GST_STATE_FAILURE;
/* record wall-clock start so timestamps are stream-relative */
1020 g_get_current_time (&time);
1021 v4l2src->substract_time = GST_TIMEVAL_TO_TIME (time) -
1022 v4l2src->substract_time;
1023 v4l2src->last_seq = 0;
1025 case GST_STATE_PLAYING_TO_PAUSED:
/* accumulate paused time into substract_time */
1026 g_get_current_time (&time);
1027 v4l2src->substract_time = GST_TIMEVAL_TO_TIME (time) -
1028 v4l2src->substract_time;
1029 /* de-queue all queued buffers */
1030 if (!gst_v4l2src_capture_stop (v4l2src))
1031 return GST_STATE_FAILURE;
1033 case GST_STATE_PAUSED_TO_READY:
1034 /* stop capturing, unmap all buffers */
1035 if (!gst_v4l2src_capture_deinit (v4l2src))
1036 return GST_STATE_FAILURE;
1038 case GST_STATE_READY_TO_NULL:
1042 return GST_STATE_SUCCESS;
/* Store the pipeline clock used for A/V sync and adaptive-fps
 * calculations in gst_v4l2src_get_fps / gst_v4l2src_get. */
1047 gst_v4l2src_set_clock (GstElement * element, GstClock * clock)
1049 GST_V4L2SRC (element)->clock = clock;