+2005-06-02 Wim Taymans <wim@fluendo.com>
+
+ * ext/ogg/README:
+ * ext/ogg/gstoggdemux.c: (gst_ogg_pad_submit_packet),
+ (gst_ogg_demux_activate_chain), (gst_ogg_demux_clear_chains):
+ * ext/ogg/gstoggmux.c: (gst_ogg_mux_buffer_from_page):
+ * ext/theora/theoradec.c: (theora_dec_src_query),
+ (theora_handle_data_packet):
+ * ext/theora/theoraenc.c: (theora_buffer_from_packet),
+ (theora_enc_chain):
+ * ext/vorbis/vorbisdec.c: (vorbis_dec_sink_event),
+ (vorbis_handle_data_packet):
+ * gst/audioconvert/bufferframesconvert.c:
+ (buffer_frames_convert_chain):
+ * gst/ffmpegcolorspace/gstffmpegcolorspace.c:
+ (gst_ffmpegcsp_getcaps), (gst_ffmpegcsp_configure_context),
+ (gst_ffmpegcsp_setcaps), (gst_ffmpegcsp_bufferalloc),
+ (gst_ffmpegcsp_chain):
+ * gst/videorate/gstvideorate.c: (gst_videorate_transformcaps),
+ (gst_videorate_getcaps), (gst_videorate_setcaps),
+ (gst_videorate_event), (gst_videorate_chain):
+ * gst/videotestsrc/gstvideotestsrc.c: (gst_videotestsrc_activate),
+ (gst_videotestsrc_src_query), (gst_videotestsrc_loop):
+ * sys/ximage/ximagesink.c: (gst_ximagesink_ximage_new),
+ (gst_ximagesink_setcaps), (gst_ximagesink_buffer_alloc):
+ * sys/xvimage/xvimagesink.c: (gst_xvimage_buffer_destroy),
+ (gst_xvimage_buffer_finalize), (gst_xvimage_buffer_free),
+ (gst_xvimage_buffer_class_init), (gst_xvimage_buffer_get_type),
+ (gst_xvimagesink_xvimage_new), (gst_xvimagesink_xvimage_put),
+ (gst_xvimagesink_show_frame), (gst_xvimagesink_buffer_alloc):
+ Cleanups and buffer alloc: port to the gst_pad_alloc_buffer() API that
+ returns a GstFlowReturn and fills a GstBuffer **, drop the explicit
+ GST_STREAM_LOCK/GST_STREAM_UNLOCK usage, use gst_pad_start_task() and
+ gst_pad_stop_task() in videotestsrc, and document the Ogg media mapping
+ in ext/ogg/README.
+
2005-05-31 Wim Taymans <wim@fluendo.com>
* gst-libs/gst/audio/gstringbuffer.c: (gst_ringbuffer_delay):
- use the OFFSET field in the GstBuffer to store/read the granulepos as
opposed to the OFFSET_END field.
+
+
+Ogg media mapping
+-----------------
+
+Ogg defines a mapping for each media type that it embeds.
+
+For Vorbis:
+
+ - 3 header packets, all on pages with granulepos 0:
+   - 1 page with 1 packet: the identification header
+   - N pages with 2 packets: the comments and the codebooks
+ - granulepos is the sample number of the next page (see the sketch
+   below)
+ - one packet can contain a variable number of samples
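+
+A minimal sketch (ignoring overflow) of how such a page granulepos could
+be turned into a timestamp; the sample rate is assumed to come from the
+Vorbis identification header:
+
+  /* sketch: convert a Vorbis page granulepos (a sample count) into
+   * nanoseconds, given the rate from the identification header */
+  static gint64
+  vorbis_granule_to_time (gint64 granulepos, int rate)
+  {
+    if (granulepos < 0 || rate <= 0)
+      return -1;                /* unknown */
+
+    return granulepos * G_GINT64_CONSTANT (1000000000) / rate;
+  }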
+
+For Theora:
+
+ - 3 header packets, all on pages with granulepos 0:
+   - 1 page with 1 packet: the identification header
+   - N pages with 2 packets: the comments and the codebooks
+ - granulepos is the frame number of the last packet in the page, where
+   the frame number is a combination of the keyframe number and the
+   number of P frames since that keyframe (see the sketch below)
+ - one packet contains 1 frame
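+
+A minimal sketch of how such a granulepos could be split back into a
+frame number; the shift value is not stored in the granulepos itself, it
+is derived from the stream setup (the keyframe frequency) and is only a
+parameter here:
+
+  /* sketch: split a Theora granulepos into the keyframe number and the
+   * number of P frames since that keyframe, and return the absolute
+   * frame number of the last packet finishing on the page */
+  static gint64
+  theora_granule_to_frame (gint64 granulepos, int granuleshift)
+  {
+    gint64 keyframe, pframes;
+
+    if (granulepos < 0)
+      return -1;                /* unknown */
+
+    keyframe = granulepos >> granuleshift;
+    pframes = granulepos - (keyframe << granuleshift);
+
+    return keyframe + pframes;
+  }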
+
+
+
+
+
if (pad->packetno == 0) {
gst_ogg_pad_typefind (pad, packet);
}
-#if 0
- if (ogg->state != OGG_STATE_STREAMING) {
- GST_DEBUG_OBJECT (ogg, "%p collecting headers, state %d", pad, ogg->state);
-
- buf = gst_buffer_new_and_alloc (packet->bytes);
- memcpy (GST_BUFFER_DATA (buf), packet->packet, packet->bytes);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (pad));
- GST_BUFFER_OFFSET (buf) = -1;
- GST_BUFFER_OFFSET_END (buf) = packet->granulepos;
-
- /* we are collecting the chain info, just need to queue the buffers */
- pad->headers = g_list_append (pad->headers, buf);
-
- goto done;
- }
-#endif
/* stream packet to peer plugin */
if (pad->mode == GST_OGG_PAD_MODE_STREAMING) {
- buf =
+ ret =
gst_pad_alloc_buffer (GST_PAD (pad), GST_BUFFER_OFFSET_NONE,
- packet->bytes, GST_PAD_CAPS (pad));
+ packet->bytes, GST_PAD_CAPS (pad), &buf);
GST_DEBUG_OBJECT (ogg,
"%p streaming to peer serial %08lx, packetno %lld", pad, pad->serialno,
pad->packetno);
- if (buf) {
+ if (ret == GST_FLOW_OK) {
memcpy (buf->data, packet->packet, packet->bytes);
pad->offset = packet->granulepos;
}
} else {
/* initialize our internal decoder with packets */
- if (!pad->elem_pad) {
- GST_WARNING_OBJECT (ogg,
- "pad %08lx does not have elem_pad, no decoder ?", pad);
- return GST_FLOW_OK;
- }
+ if (!pad->elem_pad)
+ goto no_decoder;
GST_DEBUG_OBJECT (ogg,
"%p init decoder serial %08lx, packetno %lld", pad, pad->serialno,
ret = gst_pad_chain (pad->elem_pad, buf);
}
-
-#if 0
-done:
-#endif
pad->packetno++;
return ret;
+
+no_decoder:
+ {
+ GST_WARNING_OBJECT (ogg,
+ "pad %08lx does not have elem_pad, no decoder ?", pad);
+ return GST_FLOW_OK;
+ }
}
/* submit a page to an oggpad, this function will then submit all
GstBuffer *buffer;
/* allocate space for header and body */
- buffer = gst_pad_alloc_buffer (mux->srcpad, GST_BUFFER_OFFSET_NONE,
- page->header_len + page->body_len, NULL);
+ buffer = gst_buffer_new_and_alloc (page->header_len + page->body_len);
memcpy (GST_BUFFER_DATA (buffer), page->header, page->header_len);
memcpy (GST_BUFFER_DATA (buffer) + page->header_len,
page->body, page->body_len);
/* now copy over the area contained in offset_x,offset_y,
* frame_width, frame_height */
- out = gst_pad_alloc_buffer (dec->srcpad, GST_BUFFER_OFFSET_NONE, out_size,
- GST_PAD_CAPS (dec->srcpad));
- if (out == NULL)
+ result = gst_pad_alloc_buffer (dec->srcpad, GST_BUFFER_OFFSET_NONE, out_size,
+ GST_PAD_CAPS (dec->srcpad), &out);
+ if (result != GST_FLOW_OK)
goto no_buffer;
/* copy the visible region to the destination. This is actually pretty
* complicated and gstreamer doesn't support all the needed caps to do this
* correctly. For example, when we have an odd offset, we should only combine
* 1 row/column of luma samples with one chroma sample in colorspace conversion.
- * We compensate for this by adding a block border around the image when the
+ * We compensate for this by adding a black border around the image when the
* offset or size is odd (see above).
*/
{
GstClockTime timestamp, GstClockTime duration)
{
GstBuffer *buf;
+ GstFlowReturn ret;
+
+ ret = gst_pad_alloc_buffer (enc->srcpad,
+ GST_BUFFER_OFFSET_NONE, packet->bytes, GST_PAD_CAPS (enc->srcpad), &buf);
+ if (ret != GST_FLOW_OK)
+ goto no_buffer;
- buf = gst_pad_alloc_buffer (enc->srcpad,
- GST_BUFFER_OFFSET_NONE, packet->bytes, GST_PAD_CAPS (enc->srcpad));
memcpy (GST_BUFFER_DATA (buf), packet->packet, packet->bytes);
GST_BUFFER_OFFSET (buf) = enc->bytes_out;
GST_BUFFER_OFFSET_END (buf) = packet->granulepos;
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
-
enc->packetno++;
return buf;
+
+no_buffer:
+ {
+ return NULL;
+ }
}
/* push out the buffer and do internal bookkeeping */
in_time = GST_BUFFER_TIMESTAMP (buffer);
- GST_STREAM_LOCK (pad);
-
/* no packets written yet, setup headers */
if (enc->packetno == 0) {
GstCaps *caps;
dst_y_stride = enc->info_width;
dst_uv_stride = enc->info_width / 2;
- newbuf = gst_pad_alloc_buffer (enc->srcpad,
- GST_BUFFER_OFFSET_NONE, y_size * 3 / 2, GST_PAD_CAPS (enc->srcpad));
+ ret = gst_pad_alloc_buffer (enc->srcpad,
+ GST_BUFFER_OFFSET_NONE, y_size * 3 / 2, GST_PAD_CAPS (enc->srcpad),
+ &newbuf);
+ if (ret != GST_FLOW_OK)
+ goto no_buffer;
dest_y = yuv.y = (guint8 *) GST_BUFFER_DATA (newbuf);
dest_u = yuv.u = yuv.y + y_size;
}
gst_buffer_unref (buffer);
}
- GST_STREAM_UNLOCK (pad);
return ret;
header_push:
{
gst_buffer_unref (buffer);
- GST_STREAM_UNLOCK (pad);
+ return ret;
+ }
+no_buffer:
+ {
+ gst_buffer_unref (buffer);
return ret;
}
data_push:
{
gst_buffer_unref (buffer);
- GST_STREAM_UNLOCK (pad);
return ret;
}
}
if (sample_count > 0) {
GstBuffer *out;
- out = gst_pad_alloc_buffer (vd->srcpad, GST_BUFFER_OFFSET_NONE,
+ result = gst_pad_alloc_buffer (vd->srcpad, GST_BUFFER_OFFSET_NONE,
sample_count * vd->vi.channels * sizeof (float),
- GST_PAD_CAPS (vd->srcpad));
+ GST_PAD_CAPS (vd->srcpad), &out);
- if (out != NULL) {
+ if (result == GST_FLOW_OK) {
float *out_data = (float *) GST_BUFFER_DATA (out);
copy_samples (out_data, pcm, sample_count, vd->vi.channels);
gfloat *data_out;
gint i, samples_in, samples_in_remaining, samples_out_remaining,
out_buffer_samples;
+ GstFlowReturn ret;
+
this = (BufferFramesConvert *) GST_OBJECT_PARENT (pad);
} else {
/* otherwise make a leftover buffer if it's necessary */
if (samples_in_remaining) {
- buf_out =
+ ret =
gst_pad_alloc_buffer (this->srcpad, 0,
- out_buffer_samples * sizeof (gfloat), GST_PAD_CAPS (this->srcpad));
+ out_buffer_samples * sizeof (gfloat), GST_PAD_CAPS (this->srcpad),
+ &buf_out);
+ if (ret != GST_FLOW_OK)
+ goto done;
+
data_out = (gfloat *) GST_BUFFER_DATA (buf_out);
this->buf_out = buf_out;
this->samples_out_remaining = out_buffer_samples - samples_in_remaining;
*(data_out++) = *(data_in++);
}
}
+ ret = GST_FLOW_OK;
+done:
gst_buffer_unref (buf_in);
- return GST_FLOW_OK;
+ return ret;
}
static void gst_ffmpegcsp_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static GstBuffer *gst_ffmpegcsp_bufferalloc (GstPad * pad, guint64 offset,
- guint size, GstCaps * caps);
+static GstFlowReturn gst_ffmpegcsp_bufferalloc (GstPad * pad, guint64 offset,
+ guint size, GstCaps * caps, GstBuffer ** buf);
static GstFlowReturn gst_ffmpegcsp_chain (GstPad * pad, GstBuffer * buffer);
static GstElementStateReturn gst_ffmpegcsp_change_state (GstElement * element);
space->palette = NULL;
}
-static GstBuffer *
+static GstFlowReturn
gst_ffmpegcsp_bufferalloc (GstPad * pad, guint64 offset, guint size,
- GstCaps * caps)
+ GstCaps * caps, GstBuffer ** buf)
{
- GstBuffer *buf;
+ GstFlowReturn ret;
GstFFMpegCsp *space;
space = GST_FFMPEGCSP (GST_PAD_PARENT (pad));
if ((space->from_pixfmt == space->to_pixfmt) &&
space->from_pixfmt != PIX_FMT_NB) {
- buf = gst_pad_alloc_buffer (space->srcpad, offset, size, caps);
+ ret = gst_pad_alloc_buffer (space->srcpad, offset, size, caps, buf);
} else {
- buf = NULL;
+ *buf = NULL;
+ ret = GST_FLOW_OK;
}
- return buf;
+ return ret;
}
static GstFlowReturn
avpicture_get_size (space->to_pixfmt, space->width, space->height);
/* get buffer in prefered format, setcaps will be called when it is different */
- outbuf = gst_pad_alloc_buffer (space->srcpad, GST_BUFFER_OFFSET_NONE, size,
- space->src_prefered);
- if (outbuf == NULL)
+ res = gst_pad_alloc_buffer (space->srcpad, GST_BUFFER_OFFSET_NONE, size,
+ space->src_prefered, &outbuf);
+ if (res != GST_FLOW_OK)
goto no_buffer;
/* fill from with source data */
/* copy timestamps */
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
+ GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buffer);
+ GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_END (buffer);
/* we don't need source anymore */
gst_buffer_unref (buffer);
return res;
/* ERRORS */
-no_buffer:
- {
- gst_buffer_unref (buffer);
- return GST_FLOW_ERROR;
- }
unkown_format:
{
GST_ELEMENT_ERROR (space, CORE, NOT_IMPLEMENTED, (NULL),
gst_buffer_unref (buffer);
return GST_FLOW_NOT_NEGOTIATED;
}
+no_buffer:
+ {
+ gst_buffer_unref (buffer);
+ return res;
+ }
}
static GstElementStateReturn
if (videorate->to_fps == 0)
return GST_FLOW_NOT_NEGOTIATED;
- GST_STREAM_LOCK (pad);
-
/* pull in 2 buffers */
if (videorate->prevbuf == NULL) {
/* We're sure it's a GstBuffer here */
videorate->prevbuf = buffer;
}
done:
- GST_STREAM_UNLOCK (pad);
return res;
}
case GST_ACTIVATE_PULL:
break;
case GST_ACTIVATE_PUSH:
- /* if we have a scheduler we can start the task */
- if (GST_ELEMENT_SCHEDULER (videotestsrc)) {
- GST_STREAM_LOCK (pad);
- GST_RPAD_TASK (pad) =
- gst_scheduler_create_task (GST_ELEMENT_SCHEDULER (videotestsrc),
- (GstTaskFunction) gst_videotestsrc_loop, pad);
-
- gst_task_start (GST_RPAD_TASK (pad));
- GST_STREAM_UNLOCK (pad);
- result = TRUE;
- }
+ result = gst_pad_start_task (pad,
+ (GstTaskFunction) gst_videotestsrc_loop, pad);
break;
case GST_ACTIVATE_NONE:
/* step 1, unblock clock sync (if any) */
/* step 2, make sure streaming finishes */
- GST_STREAM_LOCK (pad);
- /* step 3, stop the task */
- gst_task_stop (GST_RPAD_TASK (pad));
- gst_object_unref (GST_OBJECT (GST_RPAD_TASK (pad)));
- GST_STREAM_UNLOCK (pad);
-
- result = TRUE;
+ result = gst_pad_stop_task (pad);
break;
default:
result = FALSE;
GstVideotestsrc *videotestsrc;
gulong newsize;
GstBuffer *outbuf;
+ GstFlowReturn res;
videotestsrc = GST_VIDEOTESTSRC (GST_PAD_PARENT (pad));
GST_LOG_OBJECT (videotestsrc, "get");
GST_LOG_OBJECT (videotestsrc, "creating buffer of %ld bytes for %dx%d image",
newsize, videotestsrc->width, videotestsrc->height);
- outbuf =
+ res =
gst_pad_alloc_buffer (pad, GST_BUFFER_OFFSET_NONE, newsize,
- GST_RPAD_CAPS (pad));
-
- if (GST_BUFFER_CAPS (outbuf) != GST_PAD_CAPS (pad)) {
- if (!gst_pad_set_caps (pad, GST_BUFFER_CAPS (outbuf))) {
- GST_ELEMENT_ERROR (videotestsrc, CORE, NEGOTIATION, (NULL),
- ("format wasn't accepted"));
- gst_pad_push_event (pad, gst_event_new (GST_EVENT_EOS));
- goto need_pause;
- }
- }
+ GST_PAD_CAPS (pad), &outbuf);
+ if (res != GST_FLOW_OK)
+ goto need_pause;
videotestsrc->make_image (videotestsrc, (void *) GST_BUFFER_DATA (outbuf),
videotestsrc->width, videotestsrc->height);
need_pause:
{
- gst_task_pause (GST_RPAD_TASK (pad));
+ gst_task_pause (GST_PAD_TASK (pad));
}
}
/* if the caps contain pixel-aspect-ratio, they have to match ours,
* otherwise linking should fail */
par = gst_structure_get_value (structure, "pixel-aspect-ratio");
- if (par) {
- if (gst_value_compare (par, ximagesink->par) != GST_VALUE_EQUAL) {
- GST_INFO_OBJECT (ximagesink, "pixel aspect ratio does not match");
- return FALSE;
- }
- }
+ if (par && gst_value_compare (par, ximagesink->par) != GST_VALUE_EQUAL)
+ goto wrong_aspect;
/* Creating our window and our image */
g_assert (GST_VIDEOSINK_WIDTH (ximagesink) > 0);
GST_VIDEOSINK_WIDTH (ximagesink), GST_VIDEOSINK_HEIGHT (ximagesink));
return TRUE;
+
+ /* ERRORS */
+wrong_aspect:
+ {
+ g_mutex_unlock (ximagesink->stream_lock);
+ GST_INFO_OBJECT (ximagesink, "pixel aspect ratio does not match");
+ return FALSE;
+ }
}
static GstElementStateReturn
}
#endif
-static GstBuffer *
+static GstFlowReturn
gst_ximagesink_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size,
- GstCaps * caps)
+ GstCaps * caps, GstBuffer ** buf)
{
GstXImageSink *ximagesink;
GstXImageBuffer *ximage = NULL;
* we should not just reconfigure ourselves yet */
if (caps && caps != GST_PAD_CAPS (GST_VIDEOSINK_PAD (ximagesink))) {
if (!gst_ximagesink_setcaps (bsink, caps)) {
- return NULL;
+ return GST_FLOW_NOT_NEGOTIATED;
}
}
GST_VIDEOSINK_WIDTH (ximagesink), GST_VIDEOSINK_HEIGHT (ximagesink));
}
- return GST_BUFFER (ximage);
+ *buf = GST_BUFFER (ximage);
+
+ return GST_FLOW_OK;
}
/* Interfaces stuff */
}
#endif
-static GstBuffer *
+static GstFlowReturn
gst_xvimagesink_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size,
- GstCaps * caps)
+ GstCaps * caps, GstBuffer ** buf)
{
GstXvImageSink *xvimagesink;
GstXvImageBuffer *xvimage = NULL;
* we should not just reconfigure ourselves yet */
if (caps && caps != GST_PAD_CAPS (GST_VIDEOSINK_PAD (xvimagesink))) {
if (!gst_xvimagesink_setcaps (bsink, caps)) {
- return NULL;
+ return GST_FLOW_NOT_NEGOTIATED;
}
}
if (xvimage) {
gst_buffer_set_caps (GST_BUFFER (xvimage), caps);
}
- return GST_BUFFER (xvimage);
+ *buf = GST_BUFFER (xvimage);
+
+ return GST_FLOW_OK;
}
/* Interfaces stuff */