/gst-libs/gst/tag/mklangtables
/gst-libs/gst/tag/mklicensestables
+/gst-libs/gst/audio/audio-marshal.[ch]
+/gst-libs/gst/video/video-marshal.[ch]
+ /tests/examples/playback/playback-test
tmp-orc.c
gst*orc.h
dnl check if we have ANSI C header files
AC_HEADER_STDC
-dnl used in gst/ffmpegcolorspace/mem.c
-dnl FIXME: could be fixed by redefining av_malloc and av_free to GLib's
-AC_CHECK_HEADERS([malloc.h])
-
ac_cppflags_save="$CPPFLAGS"
CPPFLAGS="`$PKG_CONFIG --cflags libxml-2.0`"
- AC_COMPILE_IFELSE(
- AC_LANG_PROGRAM([
+ AC_COMPILE_IFELSE([
+ AC_LANG_PROGRAM([[
#include <libxml/HTMLparser.h>
- ],[
+ ]],[[
#ifndef LIBXML_HTML_ENABLED
#error libxml2 has no HTML support
#endif /* LIBXML_HTML_ENABLED */
#ifndef TREMOR
- #include <vorbis/codec.h>
-
- typedef float vorbis_sample_t;
- typedef ogg_packet ogg_packet_wrapper;
-
#define GST_VORBIS_DEC_DESCRIPTION "decode raw vorbis streams to float audio"
-#define GST_VORBIS_DEC_SRC_CAPS \
- GST_STATIC_CAPS ("audio/x-raw-float, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 256 ], " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 32")
+#define GST_VORBIS_AUDIO_FORMAT GST_AUDIO_FORMAT_F32
+#define GST_VORBIS_AUDIO_FORMAT_STR GST_AUDIO_NE (F32)
+
+#define GST_VORBIS_DEC_SRC_CAPS \
+ GST_STATIC_CAPS ("audio/x-raw, " \
+ "format = (string) " GST_VORBIS_AUDIO_FORMAT_STR ", " \
+ "rate = (int) [ 1, MAX ], " \
+ "channels = (int) [ 1, 256 ]")
#define GST_VORBIS_DEC_DEFAULT_SAMPLE_WIDTH (32)
#define GST_VORBIS_DEC_GLIB_TYPE_NAME GstVorbisDec
-#define GST_VORBIS_DEC_SRC_CAPS \
- GST_STATIC_CAPS ("audio/x-raw-int, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 6 ], " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) { 16, 32 }, " \
- "depth = (int) 16, " \
- "signed = (boolean) true")
+ #else /* TREMOR */
+
+ #define GST_VORBIS_DEC_DESCRIPTION "decode raw vorbis streams to integer audio"
+
++#define GST_VORBIS_AUDIO_FORMAT GST_AUDIO_FORMAT_S16
++#define GST_VORBIS_AUDIO_FORMAT_STR GST_AUDIO_NE (S16)
++
++#define GST_VORBIS_DEC_SRC_CAPS \
++ GST_STATIC_CAPS ("audio/x-raw, " \
++ "format = (string) " GST_VORBIS_AUDIO_FORMAT_STR ", " \
++ "rate = (int) [ 1, MAX ], " \
++ "channels = (int) [ 1, 6 ]")
+
+ #define GST_VORBIS_DEC_DEFAULT_SAMPLE_WIDTH (16)
+
+ /* we need a different type name here */
+ #define GST_VORBIS_DEC_GLIB_TYPE_NAME GstIVorbisDec
+
+ /* and still have it compile */
+ typedef struct _GstVorbisDec GstIVorbisDec;
+ typedef struct _GstVorbisDecClass GstIVorbisDecClass;
+
+ #endif /* TREMOR */
+
+ #ifndef USE_TREMOLO
+
+ #ifdef TREMOR
+ #include <tremor/ivorbiscodec.h>
+ typedef ogg_int32_t vorbis_sample_t;
+ #else
+ #include <vorbis/codec.h>
+ typedef float vorbis_sample_t;
+ #endif
+
+ typedef ogg_packet ogg_packet_wrapper;
+
static inline guint8 *
gst_ogg_packet_data (ogg_packet * p)
{
  return (guint8 *) p->packet;
}
- #endif
+ #endif /* USE_TREMOLO */
typedef void (*CopySampleFunc)(vorbis_sample_t *out, vorbis_sample_t **in,
- guint samples, gint channels, gint width);
+ guint samples, gint channels);
-CopySampleFunc get_copy_sample_func (gint channels, gint width);
+CopySampleFunc get_copy_sample_func (gint channels);
#endif /* __GST_VORBIS_DEC_LIB_H__ */
static void gst_audio_encoder_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
+static gboolean gst_audio_encoder_sink_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+static GstCaps *gst_audio_encoder_getcaps_default (GstAudioEncoder * enc,
+ GstCaps * filter);
+
+static gboolean gst_audio_encoder_sink_event_default (GstAudioEncoder * enc,
+ GstEvent * event);
+static gboolean gst_audio_encoder_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_audio_encoder_sink_setcaps (GstAudioEncoder * enc,
+ GstCaps * caps);
+static GstFlowReturn gst_audio_encoder_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static gboolean gst_audio_encoder_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_audio_encoder_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static GstStateChangeReturn gst_audio_encoder_change_state (GstElement *
+ element, GstStateChange transition);
-static gboolean gst_audio_encoder_sink_activate_push (GstPad * pad,
- gboolean active);
-
-static gboolean gst_audio_encoder_sink_event (GstPad * pad, GstEvent * event);
-static gboolean gst_audio_encoder_sink_setcaps (GstPad * pad, GstCaps * caps);
-static GstFlowReturn gst_audio_encoder_chain (GstPad * pad, GstBuffer * buffer);
-static gboolean gst_audio_encoder_src_query (GstPad * pad, GstQuery * query);
-static gboolean gst_audio_encoder_sink_query (GstPad * pad, GstQuery * query);
-static const GstQueryType *gst_audio_encoder_get_query_types (GstPad * pad);
-static GstCaps *gst_audio_encoder_sink_getcaps (GstPad * pad);
-
-
static void
gst_audio_encoder_class_init (GstAudioEncoderClass * klass)
{
0, G_MAXINT64, DEFAULT_TOLERANCE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_audio_encoder_change_state);
++
+ klass->getcaps = gst_audio_encoder_getcaps_default;
+ klass->event = gst_audio_encoder_sink_event_default;
}
static void
gboolean (*event) (GstAudioEncoder *enc,
GstEvent *event);
- GstCaps * (*getcaps) (GstAudioEncoder *enc);
+ GstCaps * (*getcaps) (GstAudioEncoder *enc, GstCaps *filter);
+ gboolean (*open) (GstAudioEncoder *enc);
+
+ gboolean (*close) (GstAudioEncoder *enc);
+
/*< private >*/
- gpointer _gst_reserved[GST_PADDING_LARGE];
+ gpointer _gst_reserved[GST_PADDING_LARGE-2];
};
GType gst_audio_encoder_get_type (void);
*_taglist = NULL;
return;
}
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
- taglist = gst_tag_list_new ();
+ gst_buffer_map (buf, &info, GST_MAP_READ);
+
+ taglist = gst_tag_list_new_empty ();
+
+ ptr = info.data;
+ left = info.size;
- while (size > 8) {
- tag = GST_READ_UINT32_LE (data);
- tsize = GST_READ_UINT32_LE (data + 4);
+ while (left > 8) {
+ tag = GST_READ_UINT32_LE (ptr);
+ tsize = GST_READ_UINT32_LE (ptr + 4);
+
- GST_MEMDUMP_OBJECT (element, "tag chunk", data, MIN (tsize + 8, size));
++ GST_MEMDUMP_OBJECT (element, "tag chunk", ptr, MIN (tsize + 8, left));
+
- size -= 8;
- data += 8;
+ left -= 8;
+ ptr += 8;
GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
GST_FOURCC_ARGS (tag), tsize);
- if (tsize > size) {
+ if (tsize > left) {
GST_WARNING_OBJECT (element,
- "Tagsize %d is larger than available data %d", tsize, size);
- tsize = size;
+ "Tagsize %d is larger than available data %" G_GSIZE_FORMAT,
+ tsize, left);
+ tsize = left;
}
+ /* make uppercase */
+ tag = tag & 0xDFDFDFDF;
+
/* find out the type of metadata */
switch (tag) {
case GST_RIFF_INFO_IARL:
static const gchar *env_vars[] = { "GST_AVI_TAG_ENCODING",
"GST_RIFF_TAG_ENCODING", "GST_TAG_ENCODING", NULL
};
+ GType tag_type;
gchar *val;
- val = gst_tag_freeform_string_to_utf8 ((gchar *) data, tsize, env_vars);
+ GST_DEBUG_OBJECT (element, "mapped tag %" GST_FOURCC_FORMAT " to tag %s",
+ GST_FOURCC_ARGS (tag), type);
+
+ tag_type = gst_tag_get_type (type);
+ val = gst_tag_freeform_string_to_utf8 ((gchar *) ptr, tsize, env_vars);
- if (val) {
- gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, type, val, NULL);
+ if (val != NULL) {
+ if (tag_type == G_TYPE_STRING) {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, type, val, NULL);
+ } else {
+ GValue tag_val = { 0, };
+
+ g_value_init (&tag_val, tag_type);
+ if (gst_value_deserialize (&tag_val, val)) {
+ gst_tag_list_add_value (taglist, GST_TAG_MERGE_APPEND, type,
+ &tag_val);
+ } else {
+ GST_WARNING_OBJECT (element, "could not deserialize '%s' into a "
+ "tag %s of type %s", val, type, g_type_name (tag_type));
+ }
+ g_value_unset (&tag_val);
+ }
g_free (val);
} else {
GST_WARNING_OBJECT (element, "could not extract %s tag", type);
return raw;
}
+static GstPadProbeReturn
+sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info,
+ gpointer user_data);
+
static void
-sinkpad_blocked_cb (GstPad * blockedpad, gboolean blocked, gpointer user_data)
+video_set_blocked (GstPlaySink * playsink, gboolean blocked)
{
- GstPlaySink *playsink = (GstPlaySink *) user_data;
- GstPad *pad;
-
- GST_PLAY_SINK_LOCK (playsink);
-
- pad = GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (blockedpad)));
- if (pad == playsink->video_pad) {
- playsink->video_pad_blocked = blocked;
- GST_DEBUG_OBJECT (pad, "Video pad blocked: %d", blocked);
- if (!blocked) {
+ if (playsink->video_pad) {
+ GstPad *opad =
+ GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
+ (playsink->video_pad)));
+ if (blocked && playsink->video_block_id == 0) {
+ playsink->video_block_id =
+ gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- sinkpad_blocked_cb, gst_object_ref (playsink),
- (GDestroyNotify) gst_object_unref);
++ sinkpad_blocked_cb, playsink, NULL);
+ } else if (!blocked && playsink->video_block_id) {
+ gst_pad_remove_probe (opad, playsink->video_block_id);
PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO_RAW);
PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO);
+ playsink->video_block_id = 0;
+ playsink->video_pad_blocked = FALSE;
}
- } else if (pad == playsink->audio_pad) {
- playsink->audio_pad_blocked = blocked;
- GST_DEBUG_OBJECT (pad, "Audio pad blocked: %d", blocked);
- if (!blocked) {
+ gst_object_unref (opad);
+ }
+}
+
+static void
+audio_set_blocked (GstPlaySink * playsink, gboolean blocked)
+{
+ if (playsink->audio_pad) {
+ GstPad *opad =
+ GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
+ (playsink->audio_pad)));
+ if (blocked && playsink->audio_block_id == 0) {
+ playsink->audio_block_id =
+ gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- sinkpad_blocked_cb, gst_object_ref (playsink),
- (GDestroyNotify) gst_object_unref);
++ sinkpad_blocked_cb, playsink, NULL);
+ } else if (!blocked && playsink->audio_block_id) {
+ gst_pad_remove_probe (opad, playsink->audio_block_id);
PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO_RAW);
PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO);
+ playsink->audio_block_id = 0;
+ playsink->audio_pad_blocked = FALSE;
}
- } else if (pad == playsink->text_pad) {
- playsink->text_pad_blocked = blocked;
- GST_DEBUG_OBJECT (pad, "Text pad blocked: %d", blocked);
- if (!blocked)
+ gst_object_unref (opad);
+ }
+}
+
+static void
+text_set_blocked (GstPlaySink * playsink, gboolean blocked)
+{
+ if (playsink->text_pad) {
+ GstPad *opad =
+ GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
+ (playsink->text_pad)));
+ if (blocked && playsink->text_block_id == 0) {
+ playsink->text_block_id =
+ gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- sinkpad_blocked_cb, gst_object_ref (playsink),
- (GDestroyNotify) gst_object_unref);
++ sinkpad_blocked_cb, playsink, NULL);
+ } else if (!blocked && playsink->text_block_id) {
+ gst_pad_remove_probe (opad, playsink->text_block_id);
PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_TEXT);
+ playsink->text_block_id = 0;
+ playsink->text_pad_blocked = FALSE;
+ }
+ gst_object_unref (opad);
}
+}
- if (!blocked) {
- gst_object_unref (pad);
- GST_PLAY_SINK_UNLOCK (playsink);
- return;
+static GstPadProbeReturn
+sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info,
+ gpointer user_data)
+{
+ GstPlaySink *playsink = (GstPlaySink *) user_data;
+ GstPad *pad;
+
+ GST_PLAY_SINK_LOCK (playsink);
+
+ pad = GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (blockedpad)));
+ if (pad == playsink->video_pad) {
+ playsink->video_pad_blocked = TRUE;
+ GST_DEBUG_OBJECT (pad, "Video pad blocked");
+ } else if (pad == playsink->audio_pad) {
+ playsink->audio_pad_blocked = TRUE;
+ GST_DEBUG_OBJECT (pad, "Audio pad blocked");
+ } else if (pad == playsink->text_pad) {
+ playsink->text_pad_blocked = TRUE;
+ GST_DEBUG_OBJECT (pad, "Text pad blocked");
}
/* We reconfigure when for ALL streams:
GstPad *blockpad =
GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (res)));
- gst_pad_set_blocked_async (blockpad, TRUE, sinkpad_blocked_cb, playsink);
+ *block_id =
+ gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- sinkpad_blocked_cb, gst_object_ref (playsink),
- (GDestroyNotify) gst_object_unref);
++ sinkpad_blocked_cb, playsink, NULL);
PENDING_FLAG_SET (playsink, type);
gst_object_unref (blockpad);
}
return ret;
}
- GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, pad_blocked_cb,
- gst_object_ref (self), (GDestroyNotify) gst_object_unref);
+static void
+block_proxypad (GstPlaySinkConvertBin * self)
+{
+ if (self->sink_proxypad_block_id == 0) {
+ self->sink_proxypad_block_id =
+ gst_pad_add_probe (self->sink_proxypad,
++ GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, pad_blocked_cb, self, NULL);
+ }
+}
+
+static void
+unblock_proxypad (GstPlaySinkConvertBin * self)
+{
+ if (self->sink_proxypad_block_id != 0) {
+ gst_pad_remove_probe (self->sink_proxypad, self->sink_proxypad_block_id);
+ self->sink_proxypad_block_id = 0;
+ }
+}
+
static gboolean
-gst_play_sink_convert_bin_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_play_sink_convert_bin_sink_setcaps (GstPlaySinkConvertBin * self,
+ GstCaps * caps)
{
- GstPlaySinkConvertBin *self =
- GST_PLAY_SINK_CONVERT_BIN (gst_pad_get_parent (pad));
- gboolean ret;
GstStructure *s;
const gchar *name;
gboolean reconfigure = FALSE;
caps = DEFAULT_CAPS;
g_object_set (decoder, "caps", caps, NULL);
gst_caps_unref (caps);
- GST_OBJECT_FLAG_SET (decoder, GST_OBJECT_FLOATING);
+ /* make it freshly floating again */
++ g_object_force_floating (G_OBJECT (decoder));
bin->pending_decodebins =
g_slist_prepend (bin->pending_decodebins, decoder);
GST_END_TEST;
- #undef ASSERT_CRITICAL
- #define ASSERT_CRITICAL(code) while(0){} /* nothing */
-
+#if 0
+/* FIXME 0.11: port overlay composition to buffer meta */
GST_START_TEST (test_overlay_composition)
{
GstVideoOverlayComposition *comp1, *comp2;
}
GST_END_TEST;
+
+ GST_START_TEST (test_overlay_composition_premultiplied_alpha)
+ {
+ GstVideoOverlayRectangle *rect1;
+ GstBuffer *pix1, *pix2, *pix3, *pix4, *pix5;
+ GstBuffer *pix6, *pix7, *pix8, *pix9, *pix10;
+ guint8 *data5, *data7;
+ guint w, h, stride, w2, h2, stride2;
+
+ pix1 = gst_buffer_new_and_alloc (200 * sizeof (guint32) * 50);
+ memset (GST_BUFFER_DATA (pix1), 0x80, GST_BUFFER_SIZE (pix1));
+
+ rect1 = gst_video_overlay_rectangle_new_argb (pix1, 200, 50, 200 * 4,
+ 600, 50, 300, 50, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+ gst_buffer_unref (pix1);
+
+ /* same flags, unscaled, should be the same buffer */
+ pix2 = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect1, &w, &h,
+ &stride, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+ fail_unless (pix1 == pix2);
+
+ /* same flags, but scaled */
+ pix3 = gst_video_overlay_rectangle_get_pixels_argb (rect1, &stride,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+ fail_if (pix3 == pix1 || pix3 == pix2);
+
+ /* same again, should hopefully get the same (cached) buffer as before */
+ pix4 = gst_video_overlay_rectangle_get_pixels_argb (rect1, &stride,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+ fail_unless (pix4 == pix3);
+
+ /* just to update the vars */
+ pix2 = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect1, &w, &h,
+ &stride, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+
+ /* now, let's try to get premultiplied alpha from the unpremultiplied input */
+ pix5 = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect1, &w2, &h2,
+ &stride2, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
+ fail_if (pix5 == pix1 || pix5 == pix2 || pix5 == pix3);
+ fail_unless_equals_int (stride, stride2);
+ fail_unless_equals_int (w, w2);
+ fail_unless_equals_int (h, h2);
+ fail_unless_equals_int (GST_BUFFER_SIZE (pix2), GST_BUFFER_SIZE (pix5));
+ data5 = GST_BUFFER_DATA (pix5);
+ fail_if (memcmp (data5, GST_BUFFER_DATA (pix2), GST_BUFFER_SIZE (pix5)) == 0);
+
+ /* make sure it actually did what we expected it to do (input=0x80808080) */
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ /* B - G - R - A */
+ fail_unless_equals_int (data5[0], 0x40);
+ fail_unless_equals_int (data5[1], 0x40);
+ fail_unless_equals_int (data5[2], 0x40);
+ fail_unless_equals_int (data5[3], 0x80);
+ #else
+ /* A - R - G - B */
+ fail_unless_equals_int (data5[0], 0x40);
+ fail_unless_equals_int (data5[1], 0x40);
+ fail_unless_equals_int (data5[2], 0x40);
+ fail_unless_equals_int (data5[3], 0x80);
+ #endif
+
+ /* same again, now we should be getting back the same buffer as before,
+ * as it should have been cached */
+ pix6 = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect1, &w2, &h2,
+ &stride2, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
+ fail_unless (pix6 == pix5);
+
+ /* just to update the stride var */
+ pix3 = gst_video_overlay_rectangle_get_pixels_argb (rect1, &stride,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+ fail_unless (pix3 == pix4);
+
+ /* now try to get scaled premultiplied alpha from unpremultiplied input */
+ pix7 = gst_video_overlay_rectangle_get_pixels_argb (rect1, &stride2,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
+ fail_if (pix7 == pix1 || pix7 == pix2 || pix7 == pix3 || pix7 == pix5);
+ fail_unless_equals_int (stride, stride2);
+
+ data7 = GST_BUFFER_DATA (pix7);
+ /* make sure it actually did what we expected it to do (input=0x80808080)
+ * hoping that the scaling didn't mess up our values */
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ /* B - G - R - A */
+ fail_unless_equals_int (data7[0], 0x40);
+ fail_unless_equals_int (data7[1], 0x40);
+ fail_unless_equals_int (data7[2], 0x40);
+ fail_unless_equals_int (data7[3], 0x80);
+ #else
+ /* A - R - G - B */
+ fail_unless_equals_int (data7[0], 0x40);
+ fail_unless_equals_int (data7[1], 0x40);
+ fail_unless_equals_int (data7[2], 0x40);
+ fail_unless_equals_int (data7[3], 0x80);
+ #endif
+
+ /* and the same again, it should be cached now */
+ pix8 = gst_video_overlay_rectangle_get_pixels_argb (rect1, &stride2,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
+ fail_unless (pix8 == pix7);
+
+ /* make sure other cached stuff is still there */
+ pix9 = gst_video_overlay_rectangle_get_pixels_argb (rect1, &stride,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
+ fail_unless (pix9 == pix3);
+ pix10 = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect1, &w2, &h2,
+ &stride2, GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
+ fail_unless (pix10 == pix5);
+
+ gst_video_overlay_rectangle_unref (rect1);
+ }
+
+ GST_END_TEST;
+#endif
static Suite *
video_suite (void)
tcase_add_test (tc_chain, test_convert_frame);
tcase_add_test (tc_chain, test_convert_frame_async);
tcase_add_test (tc_chain, test_video_size_from_caps);
+#if 0
+ /* FIXME 0.11: port overlay compositions */
tcase_add_test (tc_chain, test_overlay_composition);
+ tcase_add_test (tc_chain, test_overlay_composition_premultiplied_alpha);
+#endif
return s;
}