static void gst_gl_mixer_pad_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_gl_mixer_pad_prepare_frame (GstVideoAggregatorPad * vpad,
- GstVideoAggregator * vagg);
+ GstVideoAggregator * vagg, GstBuffer * buffer,
+ GstVideoFrame * prepared_frame);
static void gst_gl_mixer_pad_clean_frame (GstVideoAggregatorPad * vpad,
- GstVideoAggregator * vagg);
+ GstVideoAggregator * vagg, GstVideoFrame * prepared_frame);
enum
{
static gboolean
gst_gl_mixer_pad_prepare_frame (GstVideoAggregatorPad * vpad,
- GstVideoAggregator * vagg)
+ GstVideoAggregator * vagg, GstBuffer * buffer,
+ GstVideoFrame * prepared_frame)
{
GstGLMixerPad *pad = GST_GL_MIXER_PAD (vpad);
GstGLMixer *mix = GST_GL_MIXER (vagg);
+ GstVideoInfo gl_info;
+ GstGLSyncMeta *sync_meta;
pad->current_texture = 0;
- vpad->aggregated_frame = NULL;
-
- if (vpad->buffer != NULL) {
- GstVideoInfo gl_info;
- GstVideoFrame aggregated_frame;
- GstGLSyncMeta *sync_meta;
-
- gst_video_info_set_format (&gl_info,
- GST_VIDEO_FORMAT_RGBA,
- GST_VIDEO_INFO_WIDTH (&vpad->info),
- GST_VIDEO_INFO_HEIGHT (&vpad->info));
-
- sync_meta = gst_buffer_get_gl_sync_meta (vpad->buffer);
- if (sync_meta)
- gst_gl_sync_meta_wait (sync_meta, GST_GL_BASE_MIXER (mix)->context);
-
- if (!gst_video_frame_map (&aggregated_frame, &gl_info, vpad->buffer,
- GST_MAP_READ | GST_MAP_GL)) {
- GST_ERROR_OBJECT (pad, "Failed to map input frame");
- return FALSE;
- }
- pad->current_texture = *(guint *) aggregated_frame.data[0];
+ gst_video_info_set_format (&gl_info,
+ GST_VIDEO_FORMAT_RGBA,
+ GST_VIDEO_INFO_WIDTH (&vpad->info), GST_VIDEO_INFO_HEIGHT (&vpad->info));
+
+ sync_meta = gst_buffer_get_gl_sync_meta (buffer);
+ if (sync_meta)
+ gst_gl_sync_meta_wait (sync_meta, GST_GL_BASE_MIXER (mix)->context);
- vpad->aggregated_frame = g_slice_new0 (GstVideoFrame);
- *vpad->aggregated_frame = aggregated_frame;
+ if (!gst_video_frame_map (prepared_frame, &gl_info, buffer,
+ GST_MAP_READ | GST_MAP_GL)) {
+ GST_ERROR_OBJECT (pad, "Failed to map input frame");
+ return FALSE;
}
+ pad->current_texture = *(guint *) prepared_frame->data[0];
+
return TRUE;
}
static void
gst_gl_mixer_pad_clean_frame (GstVideoAggregatorPad * vpad,
- GstVideoAggregator * vagg)
+ GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
{
GstGLMixerPad *pad = GST_GL_MIXER_PAD (vpad);
pad->current_texture = 0;
- if (vpad->aggregated_frame) {
- gst_video_frame_unmap (vpad->aggregated_frame);
- g_slice_free (GstVideoFrame, vpad->aggregated_frame);
- vpad->aggregated_frame = NULL;
+ if (prepared_frame->buffer) {
+ gst_video_frame_unmap (prepared_frame);
+ memset (prepared_frame, 0, sizeof (GstVideoFrame));
}
}
static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps);
static gboolean _negotiated_caps (GstAggregator * aggregator, GstCaps * caps);
-gboolean gst_gl_stereo_mix_make_output (GstGLStereoMix * mix);
+static gboolean gst_gl_stereo_mix_make_output (GstGLStereoMix * mix);
static gboolean gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer);
#define DEFAULT_DOWNMIX GST_GL_STEREO_DOWNMIX_ANAGLYPH_GREEN_MAGENTA_DUBOIS
return ret;
}
-gboolean
+static gboolean
gst_gl_stereo_mix_make_output (GstGLStereoMix * mix)
{
GList *walk;
while (walk) {
GstVideoAggregatorPad *vaggpad = walk->data;
GstGLStereoMixPad *pad = walk->data;
+ GstBuffer *buffer = gst_video_aggregator_pad_get_current_buffer (vaggpad);
GST_LOG_OBJECT (mix, "Checking pad %" GST_PTR_FORMAT, vaggpad);
- if (vaggpad->buffer != NULL) {
- pad->current_buffer = vaggpad->buffer;
+ if (buffer != NULL) {
+ pad->current_buffer = buffer;
GST_DEBUG_OBJECT (pad, "Got buffer %" GST_PTR_FORMAT,
pad->current_buffer);
{
GstVideoAffineTransformationMeta *af_meta;
gfloat matrix[16];
+ GstBuffer *buffer =
+ gst_video_aggregator_pad_get_current_buffer (vagg_pad);
- af_meta =
- gst_buffer_get_video_affine_transformation_meta (vagg_pad->buffer);
+ af_meta = gst_buffer_get_video_affine_transformation_meta (buffer);
gst_gl_get_affine_transformation_meta_as_ndc_ext (af_meta, matrix);
gst_gl_shader_set_uniform_matrix_4fv (video_mixer->shader,
"u_transformation", 1, FALSE, matrix);
GST_OBJECT_LOCK (vagg);
for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
GstVideoAggregatorPad *pad = l->data;
+ GstVideoFrame *prepared_frame =
+ gst_video_aggregator_pad_get_prepared_frame (pad);
- if (pad->aggregated_frame != NULL) {
+ if (prepared_frame != NULL) {
if (!ref_frame) {
- ref_frame = pad->aggregated_frame;
+ ref_frame = prepared_frame;
} else {
gboolean res;
gchar *padname = gst_pad_get_name (pad);
- GstVideoFrame *cmp_frame = pad->aggregated_frame;
+ GstVideoFrame *cmp_frame = prepared_frame;
res = compare_frames (self, ref_frame, cmp_frame, outbuf, msg_structure,
padname);
struct _GstVideoAggregatorPadPrivate
{
+ GstBuffer *buffer;
+ GstVideoFrame prepared_frame;
+
/* properties */
guint zorder;
gboolean repeat_after_eos;
GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (aggpad);
gst_video_aggregator_reset_qos (vagg);
- gst_buffer_replace (&pad->buffer, NULL);
+ gst_buffer_replace (&pad->priv->buffer, NULL);
pad->priv->start_time = -1;
pad->priv->end_time = -1;
static gboolean
gst_video_aggregator_pad_prepare_frame (GstVideoAggregatorPad * pad,
- GstVideoAggregator * vagg)
+ GstVideoAggregator * vagg, GstBuffer * buffer,
+ GstVideoFrame * prepared_frame)
{
- guint outsize;
- GstVideoFrame *converted_frame;
- GstBuffer *converted_buf = NULL;
- GstVideoFrame *frame;
- static GstAllocationParams params = { 0, 15, 0, 0, };
+ GstVideoFrame frame;
- if (!pad->buffer)
+ if (!pad->priv->buffer)
return TRUE;
- frame = g_slice_new0 (GstVideoFrame);
-
- if (!gst_video_frame_map (frame, &pad->info, pad->buffer, GST_MAP_READ)) {
+ if (!gst_video_frame_map (&frame, &pad->info, pad->priv->buffer,
+ GST_MAP_READ)) {
GST_WARNING_OBJECT (vagg, "Could not map input buffer");
return FALSE;
}
if (pad->priv->convert) {
+ GstVideoFrame converted_frame;
+ GstBuffer *converted_buf = NULL;
+ static GstAllocationParams params = { 0, 15, 0, 0, };
gint converted_size;
-
- converted_frame = g_slice_new0 (GstVideoFrame);
+ guint outsize;
/* We wait until here to set the conversion infos, in case vagg->info changed */
converted_size = pad->priv->conversion_info.size;
converted_size = converted_size > outsize ? converted_size : outsize;
converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
- if (!gst_video_frame_map (converted_frame, &(pad->priv->conversion_info),
+ if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
converted_buf, GST_MAP_READWRITE)) {
GST_WARNING_OBJECT (vagg, "Could not map converted frame");
- g_slice_free (GstVideoFrame, converted_frame);
- gst_video_frame_unmap (frame);
- g_slice_free (GstVideoFrame, frame);
+ gst_video_frame_unmap (&frame);
return FALSE;
}
- gst_video_converter_frame (pad->priv->convert, frame, converted_frame);
+ gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
pad->priv->converted_buffer = converted_buf;
- gst_video_frame_unmap (frame);
- g_slice_free (GstVideoFrame, frame);
+ gst_video_frame_unmap (&frame);
+ *prepared_frame = converted_frame;
} else {
- converted_frame = frame;
+ *prepared_frame = frame;
}
- pad->aggregated_frame = converted_frame;
-
return TRUE;
}
static void
gst_video_aggregator_pad_clean_frame (GstVideoAggregatorPad * pad,
- GstVideoAggregator * vagg)
+ GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
{
- if (pad->aggregated_frame) {
- gst_video_frame_unmap (pad->aggregated_frame);
- g_slice_free (GstVideoFrame, pad->aggregated_frame);
- pad->aggregated_frame = NULL;
+ if (prepared_frame->buffer) {
+ gst_video_frame_unmap (prepared_frame);
+ memset (prepared_frame, 0, sizeof (GstVideoFrame));
}
if (pad->priv->converted_buffer) {
vaggpad->priv->zorder = DEFAULT_PAD_ZORDER;
vaggpad->priv->repeat_after_eos = DEFAULT_PAD_REPEAT_AFTER_EOS;
vaggpad->priv->converted_buffer = NULL;
- vaggpad->aggregated_frame = NULL;
+ memset (&vaggpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
vaggpad->priv->convert = NULL;
}
+/**
+ * gst_video_aggregator_pad_has_current_buffer:
+ * @pad: a #GstVideoAggregatorPad
+ *
+ * Checks if the pad currently has a buffer queued that is going to be used
+ * for the current output frame.
+ *
+ * This must only be called from the aggregate_frames() virtual method,
+ * or from the prepare_frame() virtual method of the aggregator pads.
+ *
+ * Returns: %TRUE if the pad has currently a buffer queued
+ */
+gboolean
+gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad * pad)
+{
+  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), FALSE);
+
+  /* priv->buffer holds the buffer selected for the current output frame;
+   * it is replaced/cleared by the buffer-collection loop before aggregation */
+  return pad->priv->buffer != NULL;
+}
+
+/**
+ * gst_video_aggregator_pad_get_current_buffer:
+ * @pad: a #GstVideoAggregatorPad
+ *
+ * Returns the currently queued buffer that is going to be used
+ * for the current output frame.
+ *
+ * This must only be called from the aggregate_frames() virtual method,
+ * or from the prepare_frame() virtual method of the aggregator pads.
+ *
+ * The return value is only valid until aggregate_frames() or prepare_frame()
+ * returns.
+ *
+ * Returns: (transfer none): The currently queued buffer
+ */
+GstBuffer *
+gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad * pad)
+{
+  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
+
+  return pad->priv->buffer;
+}
+
+/**
+ * gst_video_aggregator_pad_get_prepared_frame:
+ * @pad: a #GstVideoAggregatorPad
+ *
+ * Returns the currently prepared video frame that has to be aggregated into
+ * the current output frame.
+ *
+ * This must only be called from the aggregate_frames() virtual method,
+ * or from the prepare_frame() virtual method of the aggregator pads.
+ *
+ * The return value is only valid until aggregate_frames() or prepare_frame()
+ * returns.
+ *
+ * Returns: (transfer none): The currently prepared video frame
+ */
+GstVideoFrame *
+gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad * pad)
+{
+  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
+
+  /* prepared_frame.buffer is only non-NULL after a successful map in
+   * prepare_frame(); a zeroed frame means nothing was prepared for this pad */
+  return pad->priv->prepared_frame.buffer ? &pad->priv->prepared_frame : NULL;
+}
+
/**************************************
* GstVideoAggregator implementation *
**************************************/
for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
GstVideoAggregatorPad *p = l->data;
- gst_buffer_replace (&p->buffer, NULL);
+ gst_buffer_replace (&p->priv->buffer, NULL);
p->priv->start_time = -1;
p->priv->end_time = -1;
gboolean need_reconfigure = FALSE;
GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
- /* get a set of buffers into pad->buffer that are within output_start_running_time
+ /* get a set of buffers into pad->priv->buffer that are within output_start_running_time
* and output_end_running_time taking into account finished and unresponsive pads */
GST_OBJECT_LOCK (vagg);
gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
if (start_time >= output_end_running_time) {
- if (pad->buffer) {
+ if (pad->priv->buffer) {
GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
"output_end_running_time. Keeping previous buffer");
} else {
} else if (start_time < output_start_running_time) {
GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time < "
"output_start_running_time. Discarding old buffer");
- gst_buffer_replace (&pad->buffer, buf);
+ gst_buffer_replace (&pad->priv->buffer, buf);
if (pad->priv->pending_vinfo.finfo) {
pad->info = pad->priv->pending_vinfo;
need_reconfigure = TRUE;
}
gst_buffer_unref (buf);
buf = gst_aggregator_pad_pop_buffer (bpad);
- gst_buffer_replace (&pad->buffer, buf);
+ gst_buffer_replace (&pad->priv->buffer, buf);
if (pad->priv->pending_vinfo.finfo) {
pad->info = pad->priv->pending_vinfo;
need_reconfigure = TRUE;
GST_DEBUG_OBJECT (pad,
"Taking new buffer with start time %" GST_TIME_FORMAT,
GST_TIME_ARGS (start_time));
- gst_buffer_replace (&pad->buffer, buf);
+ gst_buffer_replace (&pad->priv->buffer, buf);
if (pad->priv->pending_vinfo.finfo) {
pad->info = pad->priv->pending_vinfo;
need_reconfigure = TRUE;
gst_buffer_unref (buf);
eos = FALSE;
} else {
- gst_buffer_replace (&pad->buffer, buf);
+ gst_buffer_replace (&pad->priv->buffer, buf);
if (pad->priv->pending_vinfo.finfo) {
pad->info = pad->priv->pending_vinfo;
need_reconfigure = TRUE;
GST_DEBUG ("I just need more data");
need_more_data = TRUE;
} else {
- gst_buffer_replace (&pad->buffer, NULL);
+ gst_buffer_replace (&pad->priv->buffer, NULL);
}
} else if (is_eos) {
eos = FALSE;
}
} else if (is_eos) {
- gst_buffer_replace (&pad->buffer, NULL);
+ gst_buffer_replace (&pad->priv->buffer, NULL);
}
}
}
GstClockTime timestamp;
gint64 stream_time;
- if (vpad->buffer == NULL)
+ if (vpad->priv->buffer == NULL)
return TRUE;
- timestamp = GST_BUFFER_TIMESTAMP (vpad->buffer);
+ timestamp = GST_BUFFER_TIMESTAMP (vpad->priv->buffer);
GST_OBJECT_LOCK (bpad);
stream_time = gst_segment_to_stream_time (&bpad->segment, GST_FORMAT_TIME,
timestamp);
GstVideoAggregatorPadClass *vaggpad_class =
GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
- if (vpad->buffer == NULL || !vaggpad_class->prepare_frame)
+ memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
+
+ if (vpad->priv->buffer == NULL || !vaggpad_class->prepare_frame)
return TRUE;
- return vaggpad_class->prepare_frame (vpad, GST_VIDEO_AGGREGATOR_CAST (agg));
+ return vaggpad_class->prepare_frame (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
+ vpad->priv->buffer, &vpad->priv->prepared_frame);
}
static gboolean
GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
if (vaggpad_class->clean_frame)
- vaggpad_class->clean_frame (vpad, vagg);
+ vaggpad_class->clean_frame (vpad, vagg, &vpad->priv->prepared_frame);
+
+ memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
return TRUE;
}
/* Convert to the output segment rate */
if (ABS (agg_segment->rate) != abs_rate) {
- if (ABS (agg_segment->rate) != 1.0 && p->buffer) {
+ if (ABS (agg_segment->rate) != 1.0 && p->priv->buffer) {
p->priv->start_time /= ABS (agg_segment->rate);
p->priv->end_time /= ABS (agg_segment->rate);
}
- if (abs_rate != 1.0 && p->buffer) {
+ if (abs_rate != 1.0 && p->priv->buffer) {
p->priv->start_time *= abs_rate;
p->priv->end_time *= abs_rate;
}
if (last_pad)
gst_video_aggregator_reset (vagg);
- gst_buffer_replace (&vaggpad->buffer, NULL);
+ gst_buffer_replace (&vaggpad->priv->buffer, NULL);
GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->release_pad
(GST_ELEMENT (vagg), pad);
/* read-only, with OBJECT_LOCK */
GstVideoInfo info;
- GstBuffer *buffer;
- GstVideoFrame *aggregated_frame;
-
/* Subclasses can force an alpha channel in the (input thus output)
* colorspace format */
gboolean needs_alpha;
GstVideoInfo * wanted_info);
gboolean (*prepare_frame) (GstVideoAggregatorPad * pad,
- GstVideoAggregator * videoaggregator);
+ GstVideoAggregator * videoaggregator,
+ GstBuffer * buffer,
+ GstVideoFrame * prepared_frame);
void (*clean_frame) (GstVideoAggregatorPad * pad,
- GstVideoAggregator * videoaggregator);
+ GstVideoAggregator * videoaggregator,
+ GstVideoFrame * prepared_frame);
gpointer _gst_reserved[GST_PADDING_LARGE];
};
GST_VIDEO_BAD_API
GType gst_video_aggregator_pad_get_type (void);
+GST_VIDEO_BAD_API
+gboolean gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad *pad);
+
+GST_VIDEO_BAD_API
+GstBuffer * gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad *pad);
+
+GST_VIDEO_BAD_API
+GstVideoFrame * gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad *pad);
+
#define GST_TYPE_VIDEO_AGGREGATOR (gst_video_aggregator_get_type())
#define GST_VIDEO_AGGREGATOR(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR, GstVideoAggregator))
static gboolean
gst_compositor_pad_prepare_frame (GstVideoAggregatorPad * pad,
- GstVideoAggregator * vagg)
+ GstVideoAggregator * vagg, GstBuffer * buffer,
+ GstVideoFrame * prepared_frame)
{
GstCompositor *comp = GST_COMPOSITOR (vagg);
GstCompositorPad *cpad = GST_COMPOSITOR_PAD (pad);
guint outsize;
- GstVideoFrame *converted_frame;
- GstBuffer *converted_buf = NULL;
- GstVideoFrame *frame;
+ GstVideoFrame frame;
static GstAllocationParams params = { 0, 15, 0, 0, };
gint width, height;
gboolean frame_obscured = FALSE;
* Due to the clamping, this is different from the frame width/height above. */
GstVideoRectangle frame_rect;
- if (!pad->buffer)
- return TRUE;
-
/* There's three types of width/height here:
* 1. GST_VIDEO_FRAME_WIDTH/HEIGHT:
* The frame width/height (same as pad->info.height/width;
if (cpad->alpha == 0.0) {
GST_DEBUG_OBJECT (vagg, "Pad has alpha 0.0, not converting frame");
- converted_frame = NULL;
goto done;
}
if (frame_rect.w == 0 || frame_rect.h == 0) {
GST_DEBUG_OBJECT (vagg, "Resulting frame is zero-width or zero-height "
"(w: %i, h: %i), skipping", frame_rect.w, frame_rect.h);
- converted_frame = NULL;
goto done;
}
/* Check if there's a buffer to be aggregated, ensure it can't have an alpha
* channel, then check opacity and frame boundaries */
- if (pad2->buffer && cpad2->alpha == 1.0 &&
- !GST_VIDEO_INFO_HAS_ALPHA (&pad2->info) &&
- is_rectangle_contained (frame_rect, frame2_rect)) {
+ if (gst_video_aggregator_pad_has_current_buffer (pad2)
+ && cpad2->alpha == 1.0 && !GST_VIDEO_INFO_HAS_ALPHA (&pad2->info)
+ && is_rectangle_contained (frame_rect, frame2_rect)) {
frame_obscured = TRUE;
GST_DEBUG_OBJECT (pad, "%ix%i@(%i,%i) obscured by %s %ix%i@(%i,%i) "
"in output of size %ix%i; skipping frame", frame_rect.w, frame_rect.h,
}
GST_OBJECT_UNLOCK (vagg);
- if (frame_obscured) {
- converted_frame = NULL;
+ if (frame_obscured)
goto done;
- }
- frame = g_slice_new0 (GstVideoFrame);
-
- if (!gst_video_frame_map (frame, &pad->info, pad->buffer, GST_MAP_READ)) {
+ if (!gst_video_frame_map (&frame, &pad->info, buffer, GST_MAP_READ)) {
GST_WARNING_OBJECT (vagg, "Could not map input buffer");
return FALSE;
}
if (cpad->convert) {
gint converted_size;
-
- converted_frame = g_slice_new0 (GstVideoFrame);
+ GstVideoFrame converted_frame;
+ GstBuffer *converted_buf = NULL;
/* We wait until here to set the conversion infos, in case vagg->info changed */
converted_size = GST_VIDEO_INFO_SIZE (&cpad->conversion_info);
converted_size = converted_size > outsize ? converted_size : outsize;
converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
- if (!gst_video_frame_map (converted_frame, &(cpad->conversion_info),
+ if (!gst_video_frame_map (&converted_frame, &(cpad->conversion_info),
converted_buf, GST_MAP_READWRITE)) {
GST_WARNING_OBJECT (vagg, "Could not map converted frame");
- g_slice_free (GstVideoFrame, converted_frame);
- gst_video_frame_unmap (frame);
- g_slice_free (GstVideoFrame, frame);
+ gst_video_frame_unmap (&frame);
return FALSE;
}
- gst_video_converter_frame (cpad->convert, frame, converted_frame);
+ gst_video_converter_frame (cpad->convert, &frame, &converted_frame);
cpad->converted_buffer = converted_buf;
- gst_video_frame_unmap (frame);
- g_slice_free (GstVideoFrame, frame);
+ gst_video_frame_unmap (&frame);
+ *prepared_frame = converted_frame;
} else {
- converted_frame = frame;
+ *prepared_frame = frame;
}
done:
- pad->aggregated_frame = converted_frame;
return TRUE;
}
static void
gst_compositor_pad_clean_frame (GstVideoAggregatorPad * pad,
- GstVideoAggregator * vagg)
+ GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
{
GstCompositorPad *cpad = GST_COMPOSITOR_PAD (pad);
- if (pad->aggregated_frame) {
- gst_video_frame_unmap (pad->aggregated_frame);
- g_slice_free (GstVideoFrame, pad->aggregated_frame);
- pad->aggregated_frame = NULL;
+ if (prepared_frame->buffer) {
+ gst_video_frame_unmap (prepared_frame);
+ memset (prepared_frame, 0, sizeof (GstVideoFrame));
}
if (cpad->converted_buffer) {
for (l = GST_ELEMENT (self)->sinkpads; l; l = l->next) {
GstVideoAggregatorPad *pad = l->data;
GstCompositorPad *compo_pad = GST_COMPOSITOR_PAD (pad);
+ GstVideoFrame *prepared_frame =
+ gst_video_aggregator_pad_get_prepared_frame (pad);
- if (compo_pad->crossfade >= 0.0f && pad->aggregated_frame) {
+ if (compo_pad->crossfade >= 0.0f && prepared_frame) {
gfloat alpha = compo_pad->crossfade * compo_pad->alpha;
GstVideoAggregatorPad *npad = l->next ? l->next->data : NULL;
- GstVideoFrame *nframe;
+ GstVideoFrame *next_prepared_frame;
+ GstVideoFrame nframe;
+
+ next_prepared_frame =
+ npad ? gst_video_aggregator_pad_get_prepared_frame (npad) : NULL;
if (!all_crossfading) {
- nframe = g_slice_new0 (GstVideoFrame);
- gst_compositor_fill_transparent (self, outframe, nframe);
+ gst_compositor_fill_transparent (self, outframe, &nframe);
} else {
- nframe = outframe;
+ nframe = *outframe;
}
- self->overlay (pad->aggregated_frame,
+ self->overlay (prepared_frame,
compo_pad->crossfaded ? 0 : compo_pad->xpos,
compo_pad->crossfaded ? 0 : compo_pad->ypos,
- alpha, nframe, COMPOSITOR_BLEND_MODE_ADDITIVE);
+ alpha, &nframe, COMPOSITOR_BLEND_MODE_ADDITIVE);
- if (npad && npad->aggregated_frame) {
+ if (npad && next_prepared_frame) {
GstCompositorPad *next_compo_pad = GST_COMPOSITOR_PAD (npad);
alpha = (1.0 - compo_pad->crossfade) * next_compo_pad->alpha;
- self->overlay (npad->aggregated_frame, next_compo_pad->xpos,
- next_compo_pad->ypos, alpha, nframe,
+ self->overlay (next_prepared_frame, next_compo_pad->xpos,
+ next_compo_pad->ypos, alpha, &nframe,
COMPOSITOR_BLEND_MODE_ADDITIVE);
/* Replace frame with current frame */
- gst_compositor_pad_clean_frame (npad, vagg);
- npad->aggregated_frame = !all_crossfading ? nframe : NULL;
+ gst_compositor_pad_clean_frame (npad, vagg, next_prepared_frame);
+ if (!all_crossfading)
+ *next_prepared_frame = nframe;
next_compo_pad->crossfaded = TRUE;
/* Frame is now consumed, clean it up */
- gst_compositor_pad_clean_frame (pad, vagg);
- pad->aggregated_frame = NULL;
+ gst_compositor_pad_clean_frame (pad, vagg, prepared_frame);
} else {
GST_LOG_OBJECT (self, "Simply fading out as no following pad found");
- gst_compositor_pad_clean_frame (pad, vagg);
- pad->aggregated_frame = !all_crossfading ? nframe : NULL;
+ gst_compositor_pad_clean_frame (pad, vagg, prepared_frame);
+ if (!all_crossfading)
+ *prepared_frame = nframe;
compo_pad->crossfaded = TRUE;
}
}
for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
GstVideoAggregatorPad *pad = l->data;
GstCompositorPad *compo_pad = GST_COMPOSITOR_PAD (pad);
+ GstVideoFrame *prepared_frame =
+ gst_video_aggregator_pad_get_prepared_frame (pad);
- if (pad->aggregated_frame != NULL) {
- composite (pad->aggregated_frame,
+ if (prepared_frame != NULL) {
+ composite (prepared_frame,
compo_pad->crossfaded ? 0 : compo_pad->xpos,
compo_pad->crossfaded ? 0 : compo_pad->ypos, compo_pad->alpha,
outframe, COMPOSITOR_BLEND_MODE_NORMAL);