/* V4lMjpegSrc signals and args */
enum {
- /* FILL ME */
+ SIGNAL_FRAME_CAPTURE,
+ SIGNAL_FRAME_DROP,
+ SIGNAL_FRAME_INSERT,
+ SIGNAL_FRAME_LOST,
LAST_SIGNAL
};
ARG_HEIGHT,
ARG_QUALITY,
ARG_NUMBUFS,
- ARG_BUFSIZE
+ ARG_BUFSIZE,
+ ARG_USE_FIXED_FPS
};
gint64 src_value,
GstFormat *dest_format,
gint64 *dest_value);
-static GstPadLinkReturn gst_v4lmjpegsrc_srcconnect (GstPad *pad,
+static GstPadLinkReturn gst_v4lmjpegsrc_srcconnect (GstPad *pad,
GstCaps *caps);
static GstBuffer* gst_v4lmjpegsrc_get (GstPad *pad);
GValue *value,
GParamSpec *pspec);
+/* set_clock function for A/V sync */
+static void gst_v4lmjpegsrc_set_clock (GstElement *element,
+ GstClock *clock);
+
/* state handling */
static GstElementStateReturn gst_v4lmjpegsrc_change_state (GstElement *element);
static GstPadTemplate *src_template;
static GstElementClass *parent_class = NULL;
-/*static guint gst_v4lmjpegsrc_signals[LAST_SIGNAL] = { 0 }; */
+static guint gst_v4lmjpegsrc_signals[LAST_SIGNAL] = { 0 };
GType
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_X_OFFSET,
g_param_spec_int("x_offset","x_offset","x_offset",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_Y_OFFSET,
g_param_spec_int("y_offset","y_offset","y_offset",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_F_WIDTH,
g_param_spec_int("frame_width","frame_width","frame_width",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_F_HEIGHT,
g_param_spec_int("frame_height","frame_height","frame_height",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_H_DECIMATION,
g_param_spec_int("h_decimation","h_decimation","h_decimation",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_V_DECIMATION,
g_param_spec_int("v_decimation","v_decimation","v_decimation",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_WIDTH,
g_param_spec_int("width","width","width",
- G_MININT,G_MAXINT,0,G_PARAM_READABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_READABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_HEIGHT,
g_param_spec_int("height","height","height",
- G_MININT,G_MAXINT,0,G_PARAM_READABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_READABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_QUALITY,
g_param_spec_int("quality","quality","quality",
- G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
+ G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_NUMBUFS,
g_param_spec_int("num_buffers","num_buffers","num_buffers",
- G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+ G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE,
g_param_spec_int("buffer_size","buffer_size","buffer_size",
- G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+ G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+
+ g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_USE_FIXED_FPS,
+ g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS",
+ "Drop/Insert frames to reach a certain FPS (TRUE) "
+					   "or adapt FPS to suit the number of grabbed frames",
+ TRUE, G_PARAM_READWRITE));
+
+ /* signals */
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_CAPTURE] =
+ g_signal_new("frame_capture", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lMjpegSrcClass, frame_capture),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_DROP] =
+ g_signal_new("frame_drop", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lMjpegSrcClass, frame_drop),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_INSERT] =
+ g_signal_new("frame_insert", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lMjpegSrcClass, frame_insert),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_LOST] =
+ g_signal_new("frame_lost", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lMjpegSrcClass, frame_lost),
+ NULL, NULL, g_cclosure_marshal_VOID__INT,
+ G_TYPE_NONE, 1, G_TYPE_INT);
gobject_class->set_property = gst_v4lmjpegsrc_set_property;
gobject_class->get_property = gst_v4lmjpegsrc_get_property;
gstelement_class->change_state = gst_v4lmjpegsrc_change_state;
+
+ gstelement_class->set_clock = gst_v4lmjpegsrc_set_clock;
}
v4lmjpegsrc->numbufs = 64;
v4lmjpegsrc->bufsize = 256;
+
+ /* no clock */
+ v4lmjpegsrc->clock = NULL;
+
+ /* fps */
+ v4lmjpegsrc->use_fixed_fps = TRUE;
}
-static gboolean
-gst_v4lmjpegsrc_srcconvert (GstPad *pad,
- GstFormat src_format,
- gint64 src_value,
- GstFormat *dest_format,
- gint64 *dest_value)
+static gdouble
+gst_v4lmjpegsrc_get_fps (GstV4lMjpegSrc *v4lmjpegsrc)
{
- GstV4lMjpegSrc *v4lmjpegsrc;
gint norm;
gdouble fps;
+
+ if (!v4lmjpegsrc->use_fixed_fps &&
+ v4lmjpegsrc->clock != NULL &&
+ v4lmjpegsrc->handled > 0) {
+ /* try to get time from clock master and calculate fps */
+ GstClockTime time = gst_clock_get_time(v4lmjpegsrc->clock) - v4lmjpegsrc->substract_time;
+ return v4lmjpegsrc->handled * GST_SECOND / time;
+ }
- v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
+ /* if that failed ... */
if (!GST_V4L_IS_OPEN(GST_V4LELEMENT(v4lmjpegsrc)))
- return FALSE;
+ return 0.;
if (!gst_v4l_get_chan_norm(GST_V4LELEMENT(v4lmjpegsrc), NULL, &norm))
- return FALSE;
+ return 0.;
if (norm == VIDEO_MODE_NTSC)
fps = 30000/1001;
else
fps = 25.;
+ return fps;
+}
+
+
+static gboolean
+gst_v4lmjpegsrc_srcconvert (GstPad *pad,
+ GstFormat src_format,
+ gint64 src_value,
+ GstFormat *dest_format,
+ gint64 *dest_value)
+{
+ GstV4lMjpegSrc *v4lmjpegsrc;
+ gdouble fps;
+
+ v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
+
+ if ((fps = gst_v4lmjpegsrc_get_fps(v4lmjpegsrc)) == 0)
+ return FALSE;
+
switch (src_format) {
case GST_FORMAT_TIME:
switch (*dest_format) {
GstV4lMjpegSrc *v4lmjpegsrc;
GstBuffer *buf;
gint num;
+ gdouble fps = 0;
g_return_val_if_fail (pad != NULL, NULL);
v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
+ if (v4lmjpegsrc->use_fixed_fps &&
+ (fps = gst_v4lmjpegsrc_get_fps(v4lmjpegsrc)) == 0)
+ return NULL;
+
buf = gst_buffer_new_from_pool(v4lmjpegsrc->bufferpool, 0, 0);
if (!buf)
{
return NULL;
}
- /* grab a frame from the device */
- if (!gst_v4lmjpegsrc_grab_frame(v4lmjpegsrc, &num, &(GST_BUFFER_SIZE(buf))))
- return NULL;
+ if (v4lmjpegsrc->need_writes > 0) {
+ /* use last frame */
+ num = v4lmjpegsrc->last_frame;
+ v4lmjpegsrc->need_writes--;
+ } else if (v4lmjpegsrc->clock && v4lmjpegsrc->use_fixed_fps) {
+ GstClockTime time;
+ gboolean have_frame = FALSE;
+
+ do {
+ /* by default, we use the frame once */
+ v4lmjpegsrc->need_writes = 1;
+
+ /* grab a frame from the device */
+ if (!gst_v4lmjpegsrc_grab_frame(v4lmjpegsrc, &num, &v4lmjpegsrc->last_size))
+ return NULL;
+
+ v4lmjpegsrc->last_frame = num;
+ time = GST_TIMEVAL_TO_TIME(v4lmjpegsrc->bsync.timestamp) -
+ v4lmjpegsrc->substract_time;
+
+ /* first check whether we lost any frames according to the device */
+ if (v4lmjpegsrc->last_seq != 0) {
+ if (v4lmjpegsrc->bsync.seq - v4lmjpegsrc->last_seq > 1) {
+ v4lmjpegsrc->need_writes = v4lmjpegsrc->bsync.seq - v4lmjpegsrc->last_seq;
+ g_signal_emit(G_OBJECT(v4lmjpegsrc),
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_LOST], 0,
+ v4lmjpegsrc->bsync.seq - v4lmjpegsrc->last_seq - 1);
+ }
+ }
+ v4lmjpegsrc->last_seq = v4lmjpegsrc->bsync.seq;
+
+ /* decide how often we're going to write the frame - set
+ * v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE
+ * if we're going to write it - else, just continue.
+ *
+ * time is generally the system or audio clock. Let's
+ * say that we've written one second of audio, then we want
+ * to have written one second of video too, within the same
+ * timeframe. This means that if time - begin_time = X sec,
+ * we want to have written X*fps frames. If we've written
+ * more - drop, if we've written less - dup... */
+ if (v4lmjpegsrc->handled * fps * GST_SECOND - time > 1.5 * fps * GST_SECOND) {
+ /* yo dude, we've got too many frames here! Drop! DROP! */
+ v4lmjpegsrc->need_writes--; /* -= (v4lmjpegsrc->handled - (time / fps)); */
+ g_signal_emit(G_OBJECT(v4lmjpegsrc),
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_DROP], 0);
+ } else if (v4lmjpegsrc->handled * fps * GST_SECOND - time < - 1.5 * fps * GST_SECOND) {
+ /* this means we're lagging far behind */
+ v4lmjpegsrc->need_writes++; /* += ((time / fps) - v4lmjpegsrc->handled); */
+ g_signal_emit(G_OBJECT(v4lmjpegsrc),
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_INSERT], 0);
+ }
+
+ if (v4lmjpegsrc->need_writes > 0) {
+ have_frame = TRUE;
+ v4lmjpegsrc->use_num_times[num] = v4lmjpegsrc->need_writes;
+ v4lmjpegsrc->need_writes--;
+ } else {
+ gst_v4lmjpegsrc_requeue_frame(v4lmjpegsrc, num);
+ }
+ } while (!have_frame);
+ } else {
+ /* grab a frame from the device */
+ if (!gst_v4lmjpegsrc_grab_frame(v4lmjpegsrc, &num, &v4lmjpegsrc->last_size))
+ return NULL;
+
+ v4lmjpegsrc->use_num_times[num] = 1;
+ }
+
GST_BUFFER_DATA(buf) = gst_v4lmjpegsrc_get_buffer(v4lmjpegsrc, num);
- if (!v4lmjpegsrc->first_timestamp)
- v4lmjpegsrc->first_timestamp = v4lmjpegsrc->bsync.timestamp.tv_sec * GST_SECOND +
- v4lmjpegsrc->bsync.timestamp.tv_usec * GST_SECOND/1000000;
- GST_BUFFER_TIMESTAMP(buf) = v4lmjpegsrc->bsync.timestamp.tv_sec * GST_SECOND +
- v4lmjpegsrc->bsync.timestamp.tv_usec * GST_SECOND/1000000 - v4lmjpegsrc->first_timestamp;
+ GST_BUFFER_SIZE(buf) = v4lmjpegsrc->last_size;
+ if (v4lmjpegsrc->use_fixed_fps)
+ GST_BUFFER_TIMESTAMP(buf) = v4lmjpegsrc->handled * GST_SECOND / fps;
+ else /* calculate time based on our own clock */
+ GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(v4lmjpegsrc->bsync.timestamp) -
+ v4lmjpegsrc->substract_time;
+
+ v4lmjpegsrc->handled++;
+ g_signal_emit(G_OBJECT(v4lmjpegsrc),
+ gst_v4lmjpegsrc_signals[SIGNAL_FRAME_CAPTURE], 0);
return buf;
}
case ARG_BUFSIZE:
v4lmjpegsrc->bufsize = g_value_get_int(value);
break;
+ case ARG_USE_FIXED_FPS:
+ if (!GST_V4L_IS_ACTIVE(GST_V4LELEMENT(v4lmjpegsrc))) {
+ v4lmjpegsrc->use_fixed_fps = g_value_get_boolean(value);
+ }
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case ARG_BUFSIZE:
g_value_set_int(value, v4lmjpegsrc->breq.size);
break;
+ case ARG_USE_FIXED_FPS:
+ g_value_set_boolean(value, v4lmjpegsrc->use_fixed_fps);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
{
GstV4lMjpegSrc *v4lmjpegsrc;
GstElementStateReturn parent_value;
+ GTimeVal time;
g_return_val_if_fail(GST_IS_V4LMJPEGSRC(element), GST_STATE_FAILURE);
case GST_STATE_READY_TO_PAUSED:
/* actual buffer set-up used to be done here - but I moved
* it to capsnego itself */
- v4lmjpegsrc->first_timestamp = 0;
+ v4lmjpegsrc->handled = 0;
+ v4lmjpegsrc->need_writes = 0;
+ v4lmjpegsrc->last_frame = 0;
+ v4lmjpegsrc->substract_time = 0;
break;
case GST_STATE_PAUSED_TO_PLAYING:
/* queue all buffer, start streaming capture */
if (!gst_v4lmjpegsrc_capture_start(v4lmjpegsrc))
return GST_STATE_FAILURE;
+ g_get_current_time(&time);
+ v4lmjpegsrc->substract_time = GST_TIMEVAL_TO_TIME(time) -
+ v4lmjpegsrc->substract_time;
+ v4lmjpegsrc->last_seq = 0;
break;
case GST_STATE_PLAYING_TO_PAUSED:
+ g_get_current_time(&time);
+ v4lmjpegsrc->substract_time = GST_TIMEVAL_TO_TIME(time) -
+ v4lmjpegsrc->substract_time;
/* de-queue all queued buffers */
if (!gst_v4lmjpegsrc_capture_stop(v4lmjpegsrc))
return GST_STATE_FAILURE;
}
+static void
+gst_v4lmjpegsrc_set_clock (GstElement *element,
+ GstClock *clock)
+{
+ GST_V4LMJPEGSRC(element)->clock = clock;
+}
+
+
static GstBuffer*
gst_v4lmjpegsrc_buffer_new (GstBufferPool *pool,
guint64 location,
for (n=0;n<v4lmjpegsrc->breq.count;n++)
if (GST_BUFFER_DATA(buf) == gst_v4lmjpegsrc_get_buffer(v4lmjpegsrc, n))
{
- gst_v4lmjpegsrc_requeue_frame(v4lmjpegsrc, n);
+ v4lmjpegsrc->use_num_times[n]--;
+ if (v4lmjpegsrc->use_num_times[n] <= 0) {
+ gst_v4lmjpegsrc_requeue_frame(v4lmjpegsrc, n);
+ }
break;
}
struct mjpeg_sync bsync;
struct mjpeg_requestbuffers breq;
- /* first timestamp */
- guint64 first_timestamp;
+ /* A/V sync... frame counter and internal cache */
+ gulong handled;
+ gint last_frame;
+ gint last_size;
+ gint need_writes;
+ gulong last_seq;
+
+ /* clock */
+ GstClock *clock;
+
+  /* time to subtract from clock time to get back to timestamp */
+ GstClockTime substract_time;
+
+ /* how often are we going to use each frame? */
+ gint *use_num_times;
+
+ /* how are we going to push buffers? */
+ gboolean use_fixed_fps;
/* caching values */
gint x_offset;
struct _GstV4lMjpegSrcClass {
GstV4lElementClass parent_class;
+
+ void (*frame_capture) (GObject *object);
+ void (*frame_drop) (GObject *object);
+ void (*frame_insert) (GObject *object);
+ void (*frame_lost) (GObject *object,
+ gint num_lost);
};
GType gst_v4lmjpegsrc_get_type(void);
/* V4lSrc signals and args */
enum {
/* FILL ME */
+ SIGNAL_FRAME_CAPTURE,
+ SIGNAL_FRAME_DROP,
+ SIGNAL_FRAME_INSERT,
LAST_SIGNAL
};
ARG_PALETTE,
ARG_PALETTE_NAME,
ARG_NUMBUFS,
- ARG_BUFSIZE
+ ARG_BUFSIZE,
+ ARG_USE_FIXED_FPS
};
gint64 src_value,
GstFormat *dest_format,
gint64 *dest_value);
-static GstPadLinkReturn gst_v4lsrc_srcconnect (GstPad *pad,
+static GstPadLinkReturn gst_v4lsrc_srcconnect (GstPad *pad,
GstCaps *caps);
static GstBuffer* gst_v4lsrc_get (GstPad *pad);
GstBuffer *buf,
gpointer user_data);
+/* set_clock function for A/V sync */
+static void gst_v4lsrc_set_clock (GstElement *element,
+ GstClock *clock);
+
static GstCaps *capslist = NULL;
static GstPadTemplate *src_template;
static GstElementClass *parent_class = NULL;\
-/*static guint gst_v4lsrc_signals[LAST_SIGNAL] = { 0 }; */
+static guint gst_v4lsrc_signals[LAST_SIGNAL] = { 0 };
GType
parent_class = g_type_class_ref(GST_TYPE_V4LELEMENT);
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_WIDTH,
- g_param_spec_int("width","width","width",
- G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+ g_param_spec_int("width", "Width", "Video width",
+ G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_HEIGHT,
- g_param_spec_int("height","height","height",
- G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+ g_param_spec_int("height", "Height", "Video height",
+ G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_PALETTE,
- g_param_spec_int("palette","palette","palette",
- G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+ g_param_spec_int("palette", "Palette", "Video palette",
+ G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_PALETTE_NAME,
- g_param_spec_string("palette_name","palette_name","palette_name",
- NULL, G_PARAM_READABLE));
+ g_param_spec_string("palette_name", "Palette name",
+ "Name of the current video palette",
+ NULL, G_PARAM_READABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_NUMBUFS,
- g_param_spec_int("num_buffers","num_buffers","num_buffers",
- G_MININT,G_MAXINT,0,G_PARAM_READABLE));
+ g_param_spec_int("num_buffers","Num Buffers","Number of buffers",
+ G_MININT,G_MAXINT,0,G_PARAM_READABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE,
- g_param_spec_int("buffer_size","buffer_size","buffer_size",
- G_MININT,G_MAXINT,0,G_PARAM_READABLE));
+ g_param_spec_int("buffer_size","Buffer Size","Size of buffers",
+ G_MININT,G_MAXINT,0,G_PARAM_READABLE));
+ g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_USE_FIXED_FPS,
+ g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS",
+ "Drop/Insert frames to reach a certain FPS (TRUE) "
+					 "or adapt FPS to suit the number of grabbed frames",
+ TRUE, G_PARAM_READWRITE));
+
+ /* signals */
+ gst_v4lsrc_signals[SIGNAL_FRAME_CAPTURE] =
+ g_signal_new("frame_capture", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lSrcClass, frame_capture),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4lsrc_signals[SIGNAL_FRAME_DROP] =
+ g_signal_new("frame_drop", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lSrcClass, frame_drop),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4lsrc_signals[SIGNAL_FRAME_INSERT] =
+ g_signal_new("frame_insert", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4lSrcClass, frame_insert),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
gobject_class->set_property = gst_v4lsrc_set_property;
gobject_class->get_property = gst_v4lsrc_get_property;
gstelement_class->change_state = gst_v4lsrc_change_state;
+
+ gstelement_class->set_clock = gst_v4lsrc_set_clock;
}
v4lsrc->width = 160;
v4lsrc->height = 120;
v4lsrc->buffer_size = 0;
+
+ /* no clock */
+ v4lsrc->clock = NULL;
+
+ /* fps */
+ v4lsrc->use_fixed_fps = TRUE;
}
-static gboolean
-gst_v4lsrc_srcconvert (GstPad *pad,
- GstFormat src_format,
- gint64 src_value,
- GstFormat *dest_format,
- gint64 *dest_value)
+static gdouble
+gst_v4lsrc_get_fps (GstV4lSrc *v4lsrc)
{
- GstV4lSrc *v4lsrc;
gint norm;
gdouble fps;
- v4lsrc = GST_V4LSRC (gst_pad_get_parent (pad));
+ if (!v4lsrc->use_fixed_fps &&
+ v4lsrc->clock != NULL &&
+ v4lsrc->handled > 0) {
+ /* try to get time from clock master and calculate fps */
+ GstClockTime time = gst_clock_get_time(v4lsrc->clock) - v4lsrc->substract_time;
+ return v4lsrc->handled * GST_SECOND / time;
+ }
+
+ /* if that failed ... */
if (!GST_V4L_IS_OPEN(GST_V4LELEMENT(v4lsrc)))
- return FALSE;
+ return 0.;
if (!gst_v4l_get_chan_norm(GST_V4LELEMENT(v4lsrc), NULL, &norm))
- return FALSE;
+ return 0.;
if (norm == VIDEO_MODE_NTSC)
fps = 30000/1001;
else
fps = 25.;
+ return fps;
+}
+
+
+static gboolean
+gst_v4lsrc_srcconvert (GstPad *pad,
+ GstFormat src_format,
+ gint64 src_value,
+ GstFormat *dest_format,
+ gint64 *dest_value)
+{
+ GstV4lSrc *v4lsrc;
+ gdouble fps;
+
+ v4lsrc = GST_V4LSRC (gst_pad_get_parent (pad));
+
+ if ((fps = gst_v4lsrc_get_fps(v4lsrc)) == 0)
+ return FALSE;
+
switch (src_format) {
case GST_FORMAT_TIME:
switch (*dest_format) {
GstV4lSrc *v4lsrc;
GstBuffer *buf;
gint num;
+ gdouble fps = 0.;
g_return_val_if_fail (pad != NULL, NULL);
v4lsrc = GST_V4LSRC (gst_pad_get_parent (pad));
+ if (v4lsrc->use_fixed_fps &&
+ (fps = gst_v4lsrc_get_fps(v4lsrc)) == 0)
+ return NULL;
+
buf = gst_buffer_new_from_pool(v4lsrc->bufferpool, 0, 0);
if (!buf)
{
return NULL;
}
- /* grab a frame from the device */
- if (!gst_v4lsrc_grab_frame(v4lsrc, &num))
- return NULL;
+ if (v4lsrc->need_writes > 0) {
+ /* use last frame */
+ num = v4lsrc->last_frame;
+ v4lsrc->need_writes--;
+ } else if (v4lsrc->clock && v4lsrc->use_fixed_fps) {
+ GstClockTime time;
+ gboolean have_frame = FALSE;
+
+ do {
+ /* by default, we use the frame once */
+ v4lsrc->need_writes = 1;
+
+ /* grab a frame from the device */
+ if (!gst_v4lsrc_grab_frame(v4lsrc, &num))
+ return NULL;
+
+ v4lsrc->last_frame = num;
+ time = v4lsrc->timestamp_soft_sync[num] - v4lsrc->substract_time;
+
+ /* decide how often we're going to write the frame - set
+ * v4lsrc->need_writes to (that-1) and have_frame to TRUE
+ * if we're going to write it - else, just continue.
+ *
+ * time is generally the system or audio clock. Let's
+ * say that we've written one second of audio, then we want
+ * to have written one second of video too, within the same
+ * timeframe. This means that if time - begin_time = X sec,
+ * we want to have written X*fps frames. If we've written
+ * more - drop, if we've written less - dup... */
+ if (v4lsrc->handled * fps * GST_SECOND - time > 1.5 * fps * GST_SECOND) {
+ /* yo dude, we've got too many frames here! Drop! DROP! */
+ v4lsrc->need_writes--; /* -= (v4lsrc->handled - (time / fps)); */
+ g_signal_emit(G_OBJECT(v4lsrc),
+ gst_v4lsrc_signals[SIGNAL_FRAME_DROP], 0);
+ } else if (v4lsrc->handled * fps * GST_SECOND - time < - 1.5 * fps * GST_SECOND) {
+ /* this means we're lagging far behind */
+ v4lsrc->need_writes++; /* += ((time / fps) - v4lsrc->handled); */
+ g_signal_emit(G_OBJECT(v4lsrc),
+ gst_v4lsrc_signals[SIGNAL_FRAME_INSERT], 0);
+ }
+
+ if (v4lsrc->need_writes > 0) {
+ have_frame = TRUE;
+ v4lsrc->use_num_times[num] = v4lsrc->need_writes;
+ v4lsrc->need_writes--;
+ } else {
+ gst_v4lsrc_requeue_frame(v4lsrc, num);
+ }
+ } while (!have_frame);
+ } else {
+ /* grab a frame from the device */
+ if (!gst_v4lsrc_grab_frame(v4lsrc, &num))
+ return NULL;
+
+ v4lsrc->use_num_times[num] = 1;
+ }
+
GST_BUFFER_DATA(buf) = gst_v4lsrc_get_buffer(v4lsrc, num);
GST_BUFFER_SIZE(buf) = v4lsrc->buffer_size;
+ if (v4lsrc->use_fixed_fps)
+ GST_BUFFER_TIMESTAMP(buf) = v4lsrc->handled * GST_SECOND / fps;
+ else /* calculate time based on our own clock */
+ GST_BUFFER_TIMESTAMP(buf) = v4lsrc->timestamp_soft_sync[num] - v4lsrc->substract_time;
- if (!v4lsrc->first_timestamp)
- v4lsrc->first_timestamp =
- GST_TIMEVAL_TO_TIME(v4lsrc->timestamp_soft_sync[num]);
-
- GST_BUFFER_TIMESTAMP(buf) =
- GST_TIMEVAL_TO_TIME(v4lsrc->timestamp_soft_sync[num]) -
- v4lsrc->first_timestamp;
+ v4lsrc->handled++;
+ g_signal_emit(G_OBJECT(v4lsrc),
+ gst_v4lsrc_signals[SIGNAL_FRAME_CAPTURE], 0);
return buf;
}
v4lsrc->palette = g_value_get_int(value);
break;
+ case ARG_USE_FIXED_FPS:
+ if (!GST_V4L_IS_ACTIVE(GST_V4LELEMENT(v4lsrc))) {
+ v4lsrc->use_fixed_fps = g_value_get_boolean(value);
+ }
+ break;
+
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
g_value_set_int(value, v4lsrc->mbuf.size/(v4lsrc->mbuf.frames*1024));
break;
+ case ARG_USE_FIXED_FPS:
+ g_value_set_boolean(value, v4lsrc->use_fixed_fps);
+ break;
+
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
gst_v4lsrc_change_state (GstElement *element)
{
GstV4lSrc *v4lsrc;
+ GTimeVal time;
gint transition = GST_STATE_TRANSITION (element);
g_return_val_if_fail(GST_IS_V4LSRC(element), GST_STATE_FAILURE);
case GST_STATE_NULL_TO_READY:
break;
case GST_STATE_READY_TO_PAUSED:
- v4lsrc->first_timestamp = 0;
+ v4lsrc->handled = 0;
+ v4lsrc->need_writes = 0;
+ v4lsrc->last_frame = 0;
+ v4lsrc->substract_time = 0;
/* buffer setup used to be done here, but I moved it to
* capsnego */
break;
/* queue all buffer, start streaming capture */
if (!gst_v4lsrc_capture_start(v4lsrc))
return GST_STATE_FAILURE;
+ g_get_current_time(&time);
+ v4lsrc->substract_time = GST_TIMEVAL_TO_TIME(time) - v4lsrc->substract_time;
break;
case GST_STATE_PLAYING_TO_PAUSED:
/* de-queue all queued buffers */
if (!gst_v4lsrc_capture_stop(v4lsrc))
return GST_STATE_FAILURE;
+ g_get_current_time(&time);
+ v4lsrc->substract_time = GST_TIMEVAL_TO_TIME(time) - v4lsrc->substract_time;
break;
case GST_STATE_PAUSED_TO_READY:
/* stop capturing, unmap all buffers */
}
if (GST_ELEMENT_CLASS (parent_class)->change_state)
- GST_ELEMENT_CLASS (parent_class)->change_state (element);
-
- /* FIXME, this gives a not supported error on my machine?
- switch (transition) {
- case GST_STATE_NULL_TO_READY:
- if ((GST_V4LELEMENT(v4lsrc)->norm >= VIDEO_MODE_PAL ||
- GST_V4LELEMENT(v4lsrc)->norm < VIDEO_MODE_AUTO) ||
- GST_V4LELEMENT(v4lsrc)->channel < 0)
- if (!gst_v4l_set_chan_norm(GST_V4LELEMENT(v4lsrc),
- 0, GST_V4LELEMENT(v4lsrc)->norm))
- return GST_STATE_FAILURE;
- break;
- }
- */
-
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element);
return GST_STATE_SUCCESS;
}
for (n=0;n<v4lsrc->mbuf.frames;n++)
if (GST_BUFFER_DATA(buf) == gst_v4lsrc_get_buffer(v4lsrc, n))
{
- gst_v4lsrc_requeue_frame(v4lsrc, n);
+ v4lsrc->use_num_times[n]--;
+ if (v4lsrc->use_num_times[n] <= 0) {
+ gst_v4lsrc_requeue_frame(v4lsrc, n);
+ }
break;
}
}
+static void
+gst_v4lsrc_set_clock (GstElement *element,
+ GstClock *clock)
+{
+ GST_V4LSRC(element)->clock = clock;
+}
+
+
static gboolean
plugin_init (GModule *module,
GstPlugin *plugin)
/* a seperate GThread for the sync() thread (improves correctness of timestamps) */
gint8 *isready_soft_sync; /* 1 = ok, 0 = waiting, -1 = error */
- struct timeval *timestamp_soft_sync;
+ GstClockTime *timestamp_soft_sync;
GThread * thread_soft_sync;
GMutex * mutex_soft_sync;
GCond ** cond_soft_sync;
/* True if we want the soft sync thread to stop */
gboolean quit;
- /* first timestamp */
- guint64 first_timestamp;
+ /* A/V sync... frame counter and internal cache */
+ gulong handled;
+ gint last_frame;
+ gint need_writes;
+
+ /* clock */
+ GstClock *clock;
+
+  /* time to subtract from clock time to get back to timestamp */
+ GstClockTime substract_time;
+
+ /* how often are we going to use each frame? */
+ gint *use_num_times;
+
+ /* how are we going to push buffers? */
+ gboolean use_fixed_fps;
/* caching values */
gint width;
struct _GstV4lSrcClass {
GstV4lElementClass parent_class;
+
+ void (*frame_capture) (GObject *object);
+ void (*frame_drop) (GObject *object);
+ void (*frame_insert) (GObject *object);
};
GType gst_v4lsrc_get_type(void);
gst_info("Got %ld buffers of size %ld KB\n",
v4lmjpegsrc->breq.count, v4lmjpegsrc->breq.size/1024);
+ v4lmjpegsrc->use_num_times = (gint *) malloc(sizeof(gint) * v4lmjpegsrc->breq.count);
+ if (!v4lmjpegsrc->use_num_times)
+ {
+ gst_element_error(GST_ELEMENT(v4lmjpegsrc),
+ "Error creating sync-use-time tracker: %s",
+ g_strerror(errno));
+ return FALSE;
+ }
+
/* Map the buffers */
GST_V4LELEMENT(v4lmjpegsrc)->buffer = mmap(0,
v4lmjpegsrc->breq.count * v4lmjpegsrc->breq.size,
munmap(GST_V4LELEMENT(v4lmjpegsrc)->buffer, v4lmjpegsrc->breq.size * v4lmjpegsrc->breq.count);
GST_V4LELEMENT(v4lmjpegsrc)->buffer = NULL;
+ free(v4lmjpegsrc->use_num_times);
+
return TRUE;
}
g_mutex_lock(v4lsrc->mutex_soft_sync);
- gettimeofday(&(v4lsrc->timestamp_soft_sync[num]), NULL);
+ if (v4lsrc->clock) {
+ v4lsrc->timestamp_soft_sync[num] = gst_clock_get_time(v4lsrc->clock);
+ } else {
+ GTimeVal time;
+ g_get_current_time(&time);
+ v4lsrc->timestamp_soft_sync[num] = GST_TIMEVAL_TO_TIME(time);
+ }
v4lsrc->isready_soft_sync[num] = FRAME_SYNCED;
g_cond_broadcast(v4lsrc->cond_soft_sync[num]);
gst_v4lsrc_sync_next_frame (GstV4lSrc *v4lsrc,
gint *num)
{
-
*num = v4lsrc->sync_frame;
+
DEBUG("syncing on next frame (%d)", *num);
+
/* "software sync()" on the frame */
g_mutex_lock(v4lsrc->mutex_soft_sync);
while (v4lsrc->isready_soft_sync[*num] == FRAME_DONE)
return FALSE;
v4lsrc->isready_soft_sync[*num] = FRAME_DONE;
-
v4lsrc->sync_frame = (v4lsrc->sync_frame + 1)%v4lsrc->mbuf.frames;
return TRUE;
v4lsrc->mbuf.frames, palette_name[v4lsrc->mmap.format],
v4lsrc->mbuf.size/(v4lsrc->mbuf.frames*1024));
- /* keep trakc of queued buffers */
+ /* keep track of queued buffers */
v4lsrc->frame_queued = (gint8 *) malloc(sizeof(gint8) * v4lsrc->mbuf.frames);
if (!v4lsrc->frame_queued)
{
g_strerror(errno));
return FALSE;
}
- v4lsrc->timestamp_soft_sync = (struct timeval *)
- malloc(sizeof(struct timeval) * v4lsrc->mbuf.frames);
+ v4lsrc->timestamp_soft_sync = (GstClockTime *)
+ malloc(sizeof(GstClockTime) * v4lsrc->mbuf.frames);
if (!v4lsrc->timestamp_soft_sync)
{
gst_element_error(GST_ELEMENT(v4lsrc),
}
for (n=0;n<v4lsrc->mbuf.frames;n++)
v4lsrc->cond_soft_sync[n] = g_cond_new();
+ v4lsrc->use_num_times = (gint *) malloc(sizeof(gint) * v4lsrc->mbuf.frames);
+ if (!v4lsrc->use_num_times)
+ {
+ gst_element_error(GST_ELEMENT(v4lsrc),
+ "Error creating sync-use-time tracker: %s",
+ g_strerror(errno));
+ return FALSE;
+ }
v4lsrc->mutex_queued_frames = g_mutex_new();
v4lsrc->cond_queued_frames = g_cond_new();
free(v4lsrc->cond_soft_sync);
free(v4lsrc->isready_soft_sync);
free(v4lsrc->timestamp_soft_sync);
+ free(v4lsrc->use_num_times);
/* unmap the buffer */
munmap(GST_V4LELEMENT(v4lsrc)->buffer, v4lsrc->mbuf.size);