* <refsect2>
* <title>Example pipelines</title>
* |[
- * gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videorate ! video/x-raw-yuv,framerate=15/1 ! xvimagesink
+ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videorate ! video/x-raw,framerate=15/1 ! xvimagesink
* ]| Decode an Ogg/Theora file and adjust the framerate to 15 fps before playing.
* To create the test Ogg/Theora file refer to the documentation of theoraenc.
* |[
- * gst-launch -v v4lsrc ! videorate ! video/x-raw-yuv,framerate=25/2 ! theoraenc ! oggmux ! filesink location=v4l.ogg
+ * gst-launch-1.0 -v v4l2src ! videorate ! video/x-raw,framerate=25/2 ! theoraenc ! oggmux ! filesink location=recording.ogg
* ]| Capture video from a V4L device, and adjust the stream to 12.5 fps before
* encoding to Ogg/Theora.
* </refsect2>
#define DEFAULT_DROP_ONLY FALSE
#define DEFAULT_AVERAGE_PERIOD 0
#define DEFAULT_MAX_RATE G_MAXINT
-#define DEFAULT_FORCE_FPS_N -1
-#define DEFAULT_FORCE_FPS_D 1
enum
{
PROP_SKIP_TO_FIRST,
PROP_DROP_ONLY,
PROP_AVERAGE_PERIOD,
- PROP_MAX_RATE,
- PROP_FORCE_FPS
- /* FILL ME */
+ PROP_MAX_RATE
};
static GstStaticPadTemplate gst_video_rate_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-yuv;"
- "video/x-raw-rgb;" "video/x-raw-gray;" "image/jpeg;" "image/png")
+ GST_STATIC_CAPS ("video/x-raw;" "image/jpeg;" "image/png")
);
static GstStaticPadTemplate gst_video_rate_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-yuv;"
- "video/x-raw-rgb;" "video/x-raw-gray;" "image/jpeg;" "image/png")
+ GST_STATIC_CAPS ("video/x-raw;" "image/jpeg;" "image/png")
);
static void gst_video_rate_swap_prev (GstVideoRate * videorate,
GstBuffer * buffer, gint64 time);
-static gboolean gst_video_rate_event (GstBaseTransform * trans,
+static gboolean gst_video_rate_sink_event (GstBaseTransform * trans,
GstEvent * event);
static gboolean gst_video_rate_query (GstBaseTransform * trans,
GstPadDirection direction, GstQuery * query);
GstCaps * in_caps, GstCaps * out_caps);
static GstCaps *gst_video_rate_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static void gst_video_rate_fixate_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
-static GstFlowReturn gst_video_rate_prepare_output_buffer (GstBaseTransform *
- trans, GstBuffer * input, gint size, GstCaps * caps, GstBuffer ** buf);
static GstFlowReturn gst_video_rate_transform_ip (GstBaseTransform * trans,
GstBuffer * buf);
static GParamSpec *pspec_drop = NULL;
static GParamSpec *pspec_duplicate = NULL;
-GST_BOILERPLATE (GstVideoRate, gst_video_rate,
- GstBaseTransform, GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_video_rate_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Video rate adjuster", "Filter/Effect/Video",
- "Drops/duplicates/adjusts timestamps on video frames to make a perfect stream",
- "Wim Taymans <wim@fluendo.com>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_rate_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_rate_src_template);
-}
+#define gst_video_rate_parent_class parent_class
+G_DEFINE_TYPE (GstVideoRate, gst_video_rate, GST_TYPE_BASE_TRANSFORM);
static void
gst_video_rate_class_init (GstVideoRateClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseTransformClass *base_class = GST_BASE_TRANSFORM_CLASS (klass);
- parent_class = g_type_class_peek_parent (klass);
-
object_class->set_property = gst_video_rate_set_property;
object_class->get_property = gst_video_rate_get_property;
base_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_rate_transform_caps);
base_class->transform_ip = GST_DEBUG_FUNCPTR (gst_video_rate_transform_ip);
- base_class->prepare_output_buffer =
- GST_DEBUG_FUNCPTR (gst_video_rate_prepare_output_buffer);
- base_class->event = GST_DEBUG_FUNCPTR (gst_video_rate_event);
+ base_class->sink_event = GST_DEBUG_FUNCPTR (gst_video_rate_sink_event);
base_class->start = GST_DEBUG_FUNCPTR (gst_video_rate_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_video_rate_stop);
base_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_video_rate_fixate_caps);
1, G_MAXINT, DEFAULT_MAX_RATE,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
- /**
- * GstVideoRate:force-fps:
- *
- * Forced output framerate
- *
- * Since: 0.10.36
- */
- g_object_class_install_property (object_class, PROP_FORCE_FPS,
- gst_param_spec_fraction ("force-fps", "Force output framerate",
- "Force output framerate (negative means negotiate via caps)",
- -1, 1, G_MAXINT, 1, DEFAULT_FORCE_FPS_N, DEFAULT_FORCE_FPS_D,
- G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (element_class,
+ "Video rate adjuster", "Filter/Effect/Video",
+ "Drops/duplicates/adjusts timestamps on video frames to make a perfect stream",
+ "Wim Taymans <wim@fluendo.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_video_rate_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_video_rate_src_template));
}
static void
static GstCaps *
gst_video_rate_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstVideoRate *videorate = GST_VIDEO_RATE (trans);
GstCaps *ret;
s = gst_caps_get_structure (ret, 0);
s2 = gst_structure_copy (s);
- if (videorate->force_fps_n >= 0 && videorate->force_fps_d >= 0) {
- if (direction == GST_PAD_SINK) {
- gst_caps_remove_structure (ret, 0);
- gst_structure_set (s2, "framerate", GST_TYPE_FRACTION,
- videorate->force_fps_n, videorate->force_fps_d, NULL);
- } else {
- gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
- G_MAXINT, 1, NULL);
- }
- } else if (videorate->drop_only) {
+ if (videorate->drop_only) {
gint min_num = 0, min_denom = 1;
gint max_num = G_MAXINT, max_denom = 1;
gst_video_rate_setcaps (GstBaseTransform * trans, GstCaps * in_caps,
GstCaps * out_caps)
{
- GstVideoRate *videorate;
+ GstVideoRate *videorate = GST_VIDEO_RATE (trans);
GstStructure *structure;
gboolean ret = TRUE;
gint rate_numerator, rate_denominator;
}
static void
-gst_video_rate_init (GstVideoRate * videorate, GstVideoRateClass * klass)
+gst_video_rate_init (GstVideoRate * videorate)
{
gst_video_rate_reset (videorate);
videorate->silent = DEFAULT_SILENT;
videorate->average_period = DEFAULT_AVERAGE_PERIOD;
videorate->average_period_set = DEFAULT_AVERAGE_PERIOD;
videorate->max_rate = DEFAULT_MAX_RATE;
- videorate->force_fps_n = DEFAULT_FORCE_FPS_N;
- videorate->force_fps_d = DEFAULT_FORCE_FPS_D;
videorate->from_rate_numerator = 0;
videorate->from_rate_denominator = 0;
goto eos_before_buffers;
/* make sure we can write to the metadata */
- outbuf = gst_buffer_make_metadata_writable
- (gst_buffer_ref (videorate->prevbuf));
+ outbuf = gst_buffer_make_writable (gst_buffer_ref (videorate->prevbuf));
GST_BUFFER_OFFSET (outbuf) = videorate->out;
GST_BUFFER_OFFSET_END (outbuf) = videorate->out + 1;
if (videorate->to_rate_numerator) {
/* interpolate next expected timestamp in the segment */
videorate->next_ts =
- videorate->segment.accum + videorate->segment.start +
+ videorate->segment.base + videorate->segment.start +
videorate->base_ts + gst_util_uint64_scale (videorate->out_frame_count,
videorate->to_rate_denominator * GST_SECOND,
videorate->to_rate_numerator);
/* We do not need to update time in VFR (variable frame rate) mode */
if (!videorate->drop_only) {
/* adapt for looping, bring back to time in current segment. */
- GST_BUFFER_TIMESTAMP (outbuf) = push_ts - videorate->segment.accum;
+ GST_BUFFER_TIMESTAMP (outbuf) = push_ts - videorate->segment.base;
}
GST_LOG_OBJECT (videorate,
#define MAGIC_LIMIT 25
static gboolean
-gst_video_rate_event (GstBaseTransform * trans, GstEvent * event)
+gst_video_rate_sink_event (GstBaseTransform * trans, GstEvent * event)
{
GstVideoRate *videorate;
videorate = GST_VIDEO_RATE (trans);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gint64 start, stop, time;
- gdouble rate, arate;
- gboolean update;
- GstFormat format;
+ const GstSegment *segment;
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
+ gst_event_parse_segment (event, &segment);
- if (format != GST_FORMAT_TIME)
+ if (segment->format != GST_FORMAT_TIME)
goto format_error;
GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");
/* close up the previous segment, if appropriate */
- if (!update && videorate->prevbuf) {
+ if (videorate->prevbuf) {
gint count = 0;
GstFlowReturn res;
* regardless, prevent going loopy in strange cases */
while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
- videorate->next_ts - videorate->segment.accum
+ videorate->next_ts - videorate->segment.base
< videorate->segment.stop)
|| count < 1)) {
res = gst_video_rate_flush_prev (videorate, count > 0);
}
/* We just want to update the accumulated stream_time */
- gst_segment_set_newsegment_full (&videorate->segment, update, rate, arate,
- format, start, stop, time);
+ gst_segment_copy_into (segment, &videorate->segment);
GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
&videorate->segment);
* or only send out the stored buffer if there is no specific stop.
* regardless, prevent going loopy in strange cases */
while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
- ((videorate->next_ts - videorate->segment.accum <
+ ((videorate->next_ts - videorate->segment.base <
videorate->segment.stop)
|| count < 1)) {
res = gst_video_rate_flush_prev (videorate, count > 0);
videorate->next_ts + GST_BUFFER_DURATION (videorate->prevbuf);
while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
- ((videorate->next_ts - videorate->segment.accum < end_ts)
+ ((videorate->next_ts - videorate->segment.base < end_ts)
|| count < 1)) {
res = gst_video_rate_flush_prev (videorate, count > 0);
count++;
* can't ask about its latency yet.. */
}
default:
- res = parent_class->query (trans, direction, query);
+ res =
+ GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
+ query);
break;
}
}
static GstFlowReturn
-gst_video_rate_prepare_output_buffer (GstBaseTransform * trans,
- GstBuffer * input, gint size, GstCaps * caps, GstBuffer ** buf)
-{
- if (gst_buffer_is_metadata_writable (input)) {
- gst_buffer_set_caps (input, caps);
- *buf = gst_buffer_ref (input);
- } else {
- *buf = gst_buffer_create_sub (input, 0, GST_BUFFER_SIZE (input));
- gst_buffer_set_caps (*buf, caps);
- }
-
- return GST_FLOW_OK;
-}
-
-static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
GstVideoRate *videorate;
/* the input time is the time in the segment + all previously accumulated
* segments */
- intime = in_ts + videorate->segment.accum;
+ intime = in_ts + videorate->segment.base;
/* we need to have two buffers to compare */
if (videorate->prevbuf == NULL) {
videorate->base_ts = in_ts - videorate->segment.start;
videorate->out_frame_count = 0;
} else {
- videorate->next_ts =
- videorate->segment.start + videorate->segment.accum;
+ videorate->next_ts = videorate->segment.start + videorate->segment.base;
}
}
} else {
g_atomic_int_set (&videorate->max_rate, g_value_get_int (value));
goto reconfigure;
break;
- case PROP_FORCE_FPS:
- videorate->force_fps_n = gst_value_get_fraction_numerator (value);
- videorate->force_fps_d = gst_value_get_fraction_denominator (value);
- goto reconfigure;
- break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_MAX_RATE:
g_value_set_int (value, g_atomic_int_get (&videorate->max_rate));
break;
- case PROP_FORCE_FPS:
- gst_value_set_fraction (value, videorate->force_fps_n,
- videorate->force_fps_d);
- break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;