AG_GST_ARG_ENABLE_EXPERIMENTAL
dnl *** checks for platform ***
+AG_GST_PLATFORM
dnl * hardware/architecture *
gst_tag_get_language_code_iso_639_1
gst_tag_get_language_code_iso_639_2B
gst_tag_get_language_code_iso_639_2T
+gst_tag_check_language_code
</SECTION>
<SECTION>
gst_video_overlay_rectangle_get_seqnum
gst_video_overlay_rectangle_set_render_rectangle
gst_video_overlay_rectangle_copy
+gst_video_overlay_rectangle_get_flags
<SUBSECTION Standard>
GST_TYPE_VIDEO_OVERLAY_COMPOSITION
GST_VIDEO_OVERLAY_COMPOSITION
#include "config.h"
#endif
-#include <gstclockoverlay.h>
+#include "gstclockoverlay.h"
#include <gst/video/video.h>
#include <time.h>
#include <gst/video/video.h>
-#include <gsttimeoverlay.h>
+#include "gsttimeoverlay.h"
#define gst_time_overlay_parent_class parent_class
G_DEFINE_TYPE (GstTimeOverlay, gst_time_overlay, GST_TYPE_BASE_TEXT_OVERLAY);
gboolean do_min, guint64 min, gboolean do_max, guint64 max);
static gboolean gst_app_src_negotiate (GstBaseSrc * basesrc);
-static GstFlowReturn gst_app_src_create (GstBaseSrc * bsrc,
- guint64 offset, guint size, GstBuffer ** buf);
+static GstCaps *gst_app_src_internal_get_caps (GstBaseSrc * bsrc,
+ GstCaps * filter);
+static GstFlowReturn gst_app_src_create (GstBaseSrc * bsrc, guint64 offset,
+ guint size, GstBuffer ** buf);
static gboolean gst_app_src_start (GstBaseSrc * bsrc);
static gboolean gst_app_src_stop (GstBaseSrc * bsrc);
static gboolean gst_app_src_unlock (GstBaseSrc * bsrc);
gst_static_pad_template_get (&gst_app_src_template));
basesrc_class->negotiate = gst_app_src_negotiate;
+ basesrc_class->get_caps = gst_app_src_internal_get_caps;
basesrc_class->create = gst_app_src_create;
basesrc_class->start = gst_app_src_start;
basesrc_class->stop = gst_app_src_stop;
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
+static GstCaps *
+gst_app_src_internal_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
+{
+ return gst_app_src_get_caps (GST_APP_SRC_CAST (bsrc), filter);
+}
+
static void
gst_app_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
GstCaps *caps;
/* we're missing a _take_caps() function to transfer ownership */
- caps = gst_app_src_get_caps (appsrc);
+ caps = gst_app_src_get_caps (appsrc, NULL);
gst_value_set_caps (value, caps);
if (caps)
gst_caps_unref (caps);
* Since: 0.10.22
*/
GstCaps *
-gst_app_src_get_caps (GstAppSrc * appsrc)
+gst_app_src_get_caps (GstAppSrc * appsrc, GstCaps * filter)
{
GstCaps *caps;
GstAppSrcPrivate *priv;
GST_OBJECT_LOCK (appsrc);
if ((caps = priv->caps))
gst_caps_ref (caps);
+
+ if (filter) {
+ GstCaps *intersection =
+ gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = intersection;
+ }
+
GST_DEBUG_OBJECT (appsrc, "getting caps of %" GST_PTR_FORMAT, caps);
GST_OBJECT_UNLOCK (appsrc);
GType gst_app_stream_type_get_type (void);
void gst_app_src_set_caps (GstAppSrc *appsrc, const GstCaps *caps);
-GstCaps* gst_app_src_get_caps (GstAppSrc *appsrc);
+GstCaps* gst_app_src_get_caps (GstAppSrc *appsrc, GstCaps * filter);
void gst_app_src_set_size (GstAppSrc *appsrc, gint64 size);
gint64 gst_app_src_get_size (GstAppSrc *appsrc);
gst_discoverer_info_copy (GstDiscovererInfo * ptr)
{
GstDiscovererInfo *ret;
- GHashTable *stream_map = g_hash_table_new (g_direct_hash, NULL);
+ GHashTable *stream_map;
GList *tmp;
g_return_val_if_fail (ptr != NULL, NULL);
+ stream_map = g_hash_table_new (g_direct_hash, NULL);
+
ret = gst_discoverer_info_new ();
ret->uri = g_strdup (ptr->uri);
return GST_PAD_PROBE_OK;
}
+static GstStaticCaps subtitle_caps = GST_STATIC_CAPS ("text/plain; "
+ "text/x-pango-markup; subpicture/x-pgs; subpicture/x-dvb; "
+ "application/x-subtitle-unknown; application/x-ssa; application/x-ass; "
+ "subtitle/x-kate; application/x-kate; video/x-dvd-subpicture");
+
static gboolean
is_subtitle_caps (const GstCaps * caps)
{
- static GstCaps *subs_caps = NULL;
+ GstCaps *subs_caps;
+ gboolean ret;
- if (!subs_caps) {
- subs_caps = gst_caps_from_string ("text/plain; text/x-pango-markup; "
- "subpicture/x-pgs; subpicture/x-dvb; application/x-subtitle-unknown; "
- "application/x-ssa; application/x-ass; subtitle/x-kate; "
- "application/x-kate; video/x-dvd-subpicture; ");
- }
+ subs_caps = gst_static_caps_get (&subtitle_caps);
+ ret = gst_caps_can_intersect (caps, subs_caps);
+ gst_caps_unref (subs_caps);
- return gst_caps_can_intersect (caps, subs_caps);
+ return ret;
}
static void
gst_rtcp_buffer_validate_data (guint8 * data, guint len)
{
guint16 header_mask;
- guint16 header_len;
+ guint header_len;
guint8 version;
guint data_len;
gboolean padding;
static gchar *
do_resolve (const gchar * host)
{
- static gchar ip[INET6_ADDRSTRLEN];
+ gchar ip[INET6_ADDRSTRLEN];
struct addrinfo *aires, hints;
struct addrinfo *ai;
gint aierr;
/**
* GstRTSPHeaderField:
*
- * Enumeration of rtsp header fields.
+ * Enumeration of rtsp header fields
*/
typedef enum {
- /*< protected >*/
GST_RTSP_HDR_INVALID,
/*
/**
* GstRTSPStatusCode:
*
- * Enumeration of rtsp status codes.
+ * Enumeration of rtsp status codes
*/
typedef enum {
- /*< protected >*/
GST_RTSP_STS_INVALID = 0,
GST_RTSP_STS_CONTINUE = 100,
GST_RTSP_STS_OK = 200,
* </refsect2>
*/
-/* FIXME 0.11: maybe switch to ISO-639-2 everywhere incl. GST_TAG_LANGUAGE? */
-
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
* gst_tag_get_language_name:
* @language_code: two or three-letter ISO-639 language code
*
- * Returns the name of the language given an ISO-639 language code, such
- * as often found in a GST_TAG_LANGUAGE tag. The name will be translated
+ * Returns the name of the language given an ISO-639 language code as
+ * found in a GST_TAG_LANGUAGE_CODE tag. The name will be translated
* according to the current locale (if the library was built against the
* iso-codes package, otherwise the English name will be returned).
*
return c;
}
+
+/**
+ * gst_tag_check_language_code:
+ * @lang_code: ISO-639 language code (e.g. "deu" or "ger" or "de")
+ *
+ * Check if a given string contains a known ISO 639 language code.
+ *
+ * This is useful in situations where it's not clear whether a given
+ * string is a language code (which should be put into a #GST_TAG_LANGUAGE_CODE
+ * tag) or a free-form language name descriptor (which should be put into a
+ * #GST_TAG_LANGUAGE_NAME tag instead).
+ *
+ * Returns: TRUE if the two- or three-letter language code in @lang_code
+ * is a valid ISO-639 language code.
+ *
+ * Since: 0.10.37
+ */
+gboolean
+gst_tag_check_language_code (const gchar * lang_code)
+{
+ return (gst_tag_get_language_code_iso_639_1 (lang_code) != NULL);
+}
const gchar * gst_tag_get_language_code_iso_639_2T (const gchar * lang_code);
+gboolean gst_tag_check_language_code (const gchar * lang_code);
+
/**
* gst_tag_get_language_code:
* @lang_code: ISO-639 language code (e.g. "deu" or "ger" or "de")
/* fallback in case iconv implementation doesn't support windows-1252
* for some reason */
if (err->code == G_CONVERT_ERROR_NO_CONVERSION) {
+ g_free (utf8);
utf8 = g_convert (data, size, "UTF-8", "ISO-8859-1", &bytes_read,
NULL, NULL);
}
}
static void
+matrix_prea_rgb_to_yuv (guint8 * tmpline, guint width)
+{
+ int i;
+ int a, r, g, b;
+ int y, u, v;
+
+ for (i = 0; i < width; i++) {
+ a = tmpline[i * 4 + 0];
+ r = tmpline[i * 4 + 1];
+ g = tmpline[i * 4 + 2];
+ b = tmpline[i * 4 + 3];
+ if (a) {
+ r = (r * 255 + a / 2) / a;
+ g = (g * 255 + a / 2) / a;
+ b = (b * 255 + a / 2) / a;
+ }
+
+ y = (47 * r + 157 * g + 16 * b + 4096) >> 8;
+ u = (-26 * r - 87 * g + 112 * b + 32768) >> 8;
+ v = (112 * r - 102 * g - 10 * b + 32768) >> 8;
+
+ tmpline[i * 4 + 1] = CLAMP (y, 0, 255);
+ tmpline[i * 4 + 2] = CLAMP (u, 0, 255);
+ tmpline[i * 4 + 3] = CLAMP (v, 0, 255);
+ }
+}
+
+static void
matrix_rgb_to_yuv (guint8 * tmpline, guint width)
{
int i;
return FALSE;
}
-#define BLEND(ret, alpha, v0, v1) \
-{ \
+#define BLEND00(ret, alpha, v0, v1) \
+G_STMT_START { \
ret = (v0 * alpha + v1 * (255 - alpha)) / 255; \
-}
+} G_STMT_END
+
+#define BLEND10(ret, alpha, v0, v1) \
+G_STMT_START { \
+ ret = v0 + (v1 * (255 - alpha)) / 255; \
+} G_STMT_END
void
video_blend_scale_linear_RGBA (GstBlendVideoFormatInfo * src,
/* Update src, our reference to the old src->pixels is lost */
video_blend_format_info_init (src, dest_pixels, dest_height, dest_width,
- src->fmt);
+ src->fmt, src->premultiplied_alpha);
g_free (tmpbuf);
}
guint i, j;
guint8 alpha;
GetPutLine getputdest, getputsrc;
+ gint src_stride;
+ guint8 *tmpdestline = NULL, *tmpsrcline = NULL;
+ gboolean src_premultiplied_alpha;
+
+ g_return_val_if_fail (dest, FALSE);
+ g_return_val_if_fail (src, FALSE);
- gint src_stride = src->width * 4;
- guint8 *tmpdestline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
- guint8 *tmpsrcline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
+  /* we do not support writing to premultiplied alpha, though that should
+     just be a matter of adding blenders below (BLEND01 and BLEND11) */
+ g_return_val_if_fail (!dest->premultiplied_alpha, FALSE);
+ src_premultiplied_alpha = src->premultiplied_alpha;
+
+ src_stride = src->width * 4;
+ tmpdestline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
+ tmpsrcline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
ensure_debug_category ();
if (!lookup_getput (&getputsrc, src->fmt))
goto failed;
- if (gst_video_format_is_rgb (src->fmt) != gst_video_format_is_rgb (dest->fmt))
- getputsrc.matrix = gst_video_format_is_rgb (src->fmt) ?
- matrix_rgb_to_yuv : matrix_yuv_to_rgb;
+ if (gst_video_format_is_rgb (src->fmt) != gst_video_format_is_rgb (dest->fmt)) {
+ if (gst_video_format_is_rgb (src->fmt)) {
+ if (src_premultiplied_alpha) {
+ getputsrc.matrix = matrix_prea_rgb_to_yuv;
+ src_premultiplied_alpha = FALSE;
+ } else {
+ getputsrc.matrix = matrix_rgb_to_yuv;
+ }
+ } else {
+ getputsrc.matrix = matrix_yuv_to_rgb;
+ }
+ }
/* adjust src pointers for negative sizes */
if (x < 0) {
/* Here dest and src are both either in AYUV or ARGB
* TODO: Make the orc version working properly*/
- for (j = 0; j < src->width * 4; j += 4) {
- alpha = tmpsrcline[j];
-
- BLEND (tmpdestline[j + 1], alpha, tmpsrcline[j + 1], tmpdestline[j + 1]);
- BLEND (tmpdestline[j + 2], alpha, tmpsrcline[j + 2], tmpdestline[j + 2]);
- BLEND (tmpdestline[j + 3], alpha, tmpsrcline[j + 3], tmpdestline[j + 3]);
+#define BLENDLOOP(blender) \
+ do { \
+ for (j = 0; j < src->width * 4; j += 4) { \
+ alpha = tmpsrcline[j]; \
+ \
+ blender (tmpdestline[j + 1], alpha, tmpsrcline[j + 1], tmpdestline[j + 1]); \
+ blender (tmpdestline[j + 2], alpha, tmpsrcline[j + 2], tmpdestline[j + 2]); \
+ blender (tmpdestline[j + 3], alpha, tmpsrcline[j + 3], tmpdestline[j + 3]); \
+ } \
+ } while(0)
+
+ if (src_premultiplied_alpha && dest->premultiplied_alpha) {
+ /* BLENDLOOP (BLEND11); */
+ } else if (!src_premultiplied_alpha && dest->premultiplied_alpha) {
+ /* BLENDLOOP (BLEND01); */
+ } else if (src_premultiplied_alpha && !dest->premultiplied_alpha) {
+ BLENDLOOP (BLEND10);
+ } else {
+ BLENDLOOP (BLEND00);
}
+#undef BLENDLOOP
+
/* FIXME
* #if G_BYTE_ORDER == LITTLE_ENDIAN
* orc_blend_little (tmpdestline, tmpsrcline, dest->width);
*/
void
video_blend_format_info_init (GstBlendVideoFormatInfo * info,
- guint8 * pixels, guint height, guint width, GstVideoFormat fmt)
+ guint8 * pixels, guint height, guint width, GstVideoFormat fmt,
+ gboolean premultiplied_alpha)
{
guint nb_component = gst_video_format_has_alpha (fmt) ? 4 : 3;
info->height = height;
info->pixels = pixels;
info->fmt = fmt;
+ info->premultiplied_alpha = premultiplied_alpha;
info->size = gst_video_format_get_size (fmt, height, width);
fill_planes (info);
guint8 * pixels;
gsize size;
+ gboolean premultiplied_alpha;
+
/* YUV components: Y=0, U=1, V=2, A=3
* RGB components: R=0, G=1, B=2, A=3 */
gint offset[MAX_VIDEO_PLANES];
void video_blend_format_info_init (GstBlendVideoFormatInfo * info,
guint8 *pixels, guint height,
- guint width, GstVideoFormat fmt);
+ guint width, GstVideoFormat fmt,
+ gboolean premultiplied_alpha);
void video_blend_scale_linear_RGBA (GstBlendVideoFormatInfo * src,
gint dest_height, gint dest_width);
/* The format of the data in pixels */
GstVideoFormat format;
+ /* The flags associated to this rectangle */
+ GstVideoOverlayFormatFlags flags;
+
/* Refcounted blob of memory, no caps or timestamps */
GstBuffer *pixels;
}
video_blend_format_info_init (&video_info, GST_BUFFER_DATA (video_buf),
- h, w, fmt);
+ h, w, fmt, FALSE);
num = comp->num_rectangles;
GST_LOG ("Blending composition %p with %u rectangles onto video buffer %p "
video_blend_format_info_init (&rectangle_info,
GST_BUFFER_DATA (rect->pixels), rect->height, rect->width,
- rect->format);
+ rect->format,
+ ! !(rect->flags & GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA));
needs_scaling = gst_video_overlay_rectangle_needs_scaling (rect);
if (needs_scaling) {
#endif
}
+static inline gboolean
+gst_video_overlay_rectangle_check_flags (GstVideoOverlayFormatFlags flags)
+{
+  /* Check that flags contains only flags we know about */
+ return (flags & ~(GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA)) == 0;
+}
+
+static gboolean
+gst_video_overlay_rectangle_is_same_alpha_type (GstVideoOverlayFormatFlags
+ flags1, GstVideoOverlayFormatFlags flags2)
+{
+ return ((flags1 ^ flags2) & GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA)
+ == 0;
+}
+
+
/**
* gst_video_overlay_rectangle_new_argb:
* @pixels: (transfer none): a #GstBuffer pointing to the pixel memory
* overlay rectangle should be rendered to
* @render_width: the render width of this rectangle on the video
* @render_height: the render height of this rectangle on the video
- * @flags: flags (currently unused)
+ * @flags: flags
*
* Creates a new video overlay rectangle with ARGB pixel data. The layout
* of the components in memory is B-G-R-A on little-endian platforms
* platforms (corresponding to #GST_VIDEO_FORMAT_ARGB). In other words,
* pixels are treated as 32-bit words and the lowest 8 bits then contain
* the blue component value and the highest 8 bits contain the alpha
- * component value. The RGB values are non-premultiplied. This is the
- * format that is used by most hardware, and also many rendering libraries
- * such as Cairo, for example.
+ * component value. Unless specified in the flags, the RGB values are
+ * non-premultiplied. This is the format that is used by most hardware,
+ * and also many rendering libraries such as Cairo, for example.
*
* Returns: (transfer full): a new #GstVideoOverlayRectangle. Unref with
* gst_video_overlay_rectangle_unref() when no longer needed.
g_return_val_if_fail (stride >= (4 * width), NULL);
g_return_val_if_fail (height > 0 && width > 0, NULL);
g_return_val_if_fail (render_height > 0 && render_width > 0, NULL);
- g_return_val_if_fail (flags == 0, NULL);
+ g_return_val_if_fail (gst_video_overlay_rectangle_check_flags (flags), NULL);
rect = (GstVideoOverlayRectangle *)
gst_mini_object_new (GST_TYPE_VIDEO_OVERLAY_RECTANGLE);
#else
rect->format = GST_VIDEO_FORMAT_ARGB;
#endif
+
rect->pixels = gst_buffer_ref (pixels);
rect->width = width;
rect->render_width = render_width;
rect->render_height = render_height;
+ rect->flags = flags;
+
rect->seq_num = gst_video_overlay_get_seqnum ();
GST_LOG ("new rectangle %p: %ux%u => %ux%u @ %u,%u, seq_num %u, format %u, "
- "pixels %p", rect, width, height, render_width, render_height, render_x,
- render_y, rect->seq_num, rect->format, pixels);
+ "flags %x, pixels %p", rect, width, height, render_width, render_height,
+ render_x, render_y, rect->seq_num, rect->format, rect->flags, pixels);
return rect;
}
rectangle->render_height = render_height;
}
-/**
- * gst_video_overlay_rectangle_get_pixels_argb:
- * @rectangle: a #GstVideoOverlayRectangle
- * @stride: (out) (allow-none): address of guint variable where to store the
- * row stride of the ARGB pixel data in the buffer
- * @flags: flags (unused)
- *
- * Returns: (transfer none): a #GstBuffer holding the ARGB pixel data with
- * row stride @stride and width and height of the render dimensions as per
- * gst_video_overlay_rectangle_get_render_rectangle(). This function does
- * not return a reference, the caller should obtain a reference of her own
- * with gst_buffer_ref() if needed.
- *
- * Since: 0.10.36
- */
-GstBuffer *
-gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
- rectangle, guint * stride, GstVideoOverlayFormatFlags flags)
+static void
+gst_video_overlay_rectangle_premultiply (GstBlendVideoFormatInfo * info)
+{
+ int i, j;
+ for (j = 0; j < info->height; ++j) {
+ guint8 *line = info->pixels + info->stride[0] * j;
+ for (i = 0; i < info->width; ++i) {
+ int a = line[0];
+ line[1] = line[1] * a / 255;
+ line[2] = line[2] * a / 255;
+ line[3] = line[3] * a / 255;
+ line += 4;
+ }
+ }
+}
+
+static void
+gst_video_overlay_rectangle_unpremultiply (GstBlendVideoFormatInfo * info)
+{
+ int i, j;
+ for (j = 0; j < info->height; ++j) {
+ guint8 *line = info->pixels + info->stride[0] * j;
+ for (i = 0; i < info->width; ++i) {
+ int a = line[0];
+ if (a) {
+ line[1] = MIN ((line[1] * 255 + a / 2) / a, 255);
+ line[2] = MIN ((line[2] * 255 + a / 2) / a, 255);
+ line[3] = MIN ((line[3] * 255 + a / 2) / a, 255);
+ }
+ line += 4;
+ }
+ }
+}
+
+static GstBuffer *
+gst_video_overlay_rectangle_get_pixels_argb_internal (GstVideoOverlayRectangle *
+ rectangle, guint * stride, GstVideoOverlayFormatFlags flags,
+ gboolean unscaled)
{
GstVideoOverlayRectangle *scaled_rect = NULL;
GstBlendVideoFormatInfo info;
GstBuffer *buf;
GList *l;
+ guint wanted_width = unscaled ? rectangle->width : rectangle->render_width;
+ guint wanted_height = unscaled ? rectangle->height : rectangle->render_height;
g_return_val_if_fail (GST_IS_VIDEO_OVERLAY_RECTANGLE (rectangle), NULL);
- g_return_val_if_fail (flags == 0, NULL);
g_return_val_if_fail (stride != NULL, NULL);
+ g_return_val_if_fail (gst_video_overlay_rectangle_check_flags (flags), NULL);
/* This assumes we don't need to adjust the format */
- if (rectangle->render_width == rectangle->width &&
- rectangle->render_height == rectangle->height) {
+ if (wanted_width == rectangle->width &&
+ wanted_height == rectangle->height &&
+ gst_video_overlay_rectangle_is_same_alpha_type (rectangle->flags,
+ flags)) {
*stride = rectangle->stride;
return rectangle->pixels;
}
for (l = rectangle->scaled_rectangles; l != NULL; l = l->next) {
GstVideoOverlayRectangle *r = l->data;
- if (r->width == rectangle->render_width &&
- r->height == rectangle->render_height) {
+ if (r->width == wanted_width &&
+ r->height == wanted_height &&
+ gst_video_overlay_rectangle_is_same_alpha_type (rectangle->flags,
+ flags)) {
/* we'll keep these rectangles around until finalize, so it's ok not
* to take our own ref here */
scaled_rect = r;
/* not cached yet, do the scaling and put the result into our cache */
video_blend_format_info_init (&info, GST_BUFFER_DATA (rectangle->pixels),
- rectangle->height, rectangle->width, rectangle->format);
+ rectangle->height, rectangle->width, rectangle->format,
+ ! !(rectangle->flags &
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA));
- video_blend_scale_linear_RGBA (&info, rectangle->render_height,
- rectangle->render_width);
+ if (wanted_width != rectangle->width || wanted_height != rectangle->height) {
+ video_blend_scale_linear_RGBA (&info, wanted_height, wanted_width);
+ }
+
+ if (!gst_video_overlay_rectangle_is_same_alpha_type (rectangle->flags, flags)) {
+ if (rectangle->flags & GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA) {
+ gst_video_overlay_rectangle_unpremultiply (&info);
+ } else {
+ gst_video_overlay_rectangle_premultiply (&info);
+ }
+ }
buf = gst_buffer_new ();
GST_BUFFER_DATA (buf) = info.pixels;
GST_BUFFER_SIZE (buf) = info.size;
scaled_rect = gst_video_overlay_rectangle_new_argb (buf,
- rectangle->render_width, rectangle->render_height, info.stride[0],
- 0, 0, rectangle->render_width, rectangle->render_height, 0);
+ wanted_width, wanted_height, info.stride[0],
+ 0, 0, wanted_width, wanted_height, rectangle->flags);
gst_buffer_unref (buf);
}
/**
+ * gst_video_overlay_rectangle_get_pixels_argb:
+ * @rectangle: a #GstVideoOverlayRectangle
+ * @stride: (out) (allow-none): address of guint variable where to store the
+ * row stride of the ARGB pixel data in the buffer
+ * @flags: flags
+ *
+ * Returns: (transfer none): a #GstBuffer holding the ARGB pixel data with
+ * row stride @stride and width and height of the render dimensions as per
+ * gst_video_overlay_rectangle_get_render_rectangle(). This function does
+ * not return a reference, the caller should obtain a reference of her own
+ * with gst_buffer_ref() if needed.
+ *
+ * Since: 0.10.36
+ */
+GstBuffer *
+gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
+ rectangle, guint * stride, GstVideoOverlayFormatFlags flags)
+{
+ return gst_video_overlay_rectangle_get_pixels_argb_internal (rectangle,
+ stride, flags, FALSE);
+}
+
+/**
* gst_video_overlay_rectangle_get_pixels_unscaled_argb:
* @rectangle: a #GstVideoOverlayRectangle
* @width: (out): address where to store the width of the unscaled
* rectangle in pixels
* @stride: (out): address of guint variable where to store the row
* stride of the ARGB pixel data in the buffer
- * @flags: flags for future use (unused)
+ * @flags: flags
*
* Retrieves the pixel data as it is. This is useful if the caller can
* do the scaling itself when handling the overlaying. The rectangle will
g_return_val_if_fail (width != NULL, NULL);
g_return_val_if_fail (height != NULL, NULL);
g_return_val_if_fail (stride != NULL, NULL);
- g_return_val_if_fail (flags == 0, NULL);
*width = rectangle->width;
*height = rectangle->height;
- *stride = rectangle->stride;
+ return gst_video_overlay_rectangle_get_pixels_argb_internal (rectangle,
+ stride, flags, TRUE);
+}
+
+/**
+ * gst_video_overlay_rectangle_get_flags:
+ * @rectangle: a #GstVideoOverlayRectangle
+ *
+ * Retrieves the flags associated with a #GstVideoOverlayRectangle.
+ * This is useful if the caller can handle both premultiplied and
+ * non-premultiplied alpha, for example. By knowing whether the rectangle
+ * uses premultiplied alpha or not, it can request the pixel data in the
+ * format it is stored in, to avoid unnecessary conversion.
+ *
+ * Returns: the #GstVideoOverlayFormatFlags associated with the rectangle.
+ *
+ * Since: 0.10.37
+ */
+GstVideoOverlayFormatFlags
+gst_video_overlay_rectangle_get_flags (GstVideoOverlayRectangle * rectangle)
+{
+ g_return_val_if_fail (GST_IS_VIDEO_OVERLAY_RECTANGLE (rectangle),
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
- return rectangle->pixels;
+ return rectangle->flags;
}
/**
copy = gst_video_overlay_rectangle_new_argb (rectangle->pixels,
rectangle->width, rectangle->height, rectangle->stride,
rectangle->x, rectangle->y,
- rectangle->render_width, rectangle->render_height, 0);
+ rectangle->render_width, rectangle->render_height, rectangle->flags);
return copy;
}
/**
* GstVideoOverlayFormatFlags:
* @GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE: no flags
+ * @GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA: RGB are premultiplied by A/255. Since: 0.10.37
*
* Overlay format flags.
*
* Since: 0.10.36
*/
typedef enum {
- GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE = 0
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE = 0,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA = 1
} GstVideoOverlayFormatFlags;
GType gst_video_overlay_rectangle_get_type (void);
guint * stride,
GstVideoOverlayFormatFlags flags);
+GstVideoOverlayFormatFlags gst_video_overlay_rectangle_get_flags (GstVideoOverlayRectangle * rectangle);
+
/**
* GstVideoOverlayComposition:
*
* #ifdef GDK_WINDOWING_X11
* #include <gdk/gdkx.h> // for GDK_WINDOW_XID
* #endif
+ * #ifdef GDK_WINDOWING_WIN32
+ * #include <gdk/gdkwin32.h> // for GDK_WINDOW_HWND
+ * #endif
* ...
- * static gulong video_window_xid = 0;
+ * static guintptr video_window_handle = 0;
* ...
* static GstBusSyncReply
* bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
* if (!gst_is_video_overlay_prepare_window_handle_message (message))
* return GST_BUS_PASS;
*
- * if (video_window_xid != 0) {
- * GstVideoOverlay *xoverlay;
+ * if (video_window_handle != 0) {
+ * GstXOverlay *xoverlay;
*
* // GST_MESSAGE_SRC (message) will be the video sink element
- * xoverlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
- * gst_video_overlay_set_window_handle (xoverlay, video_window_xid);
+ * xoverlay = GST_X_OVERLAY (GST_MESSAGE_SRC (message));
+ * gst_x_overlay_set_window_handle (xoverlay, video_window_handle);
* } else {
- * g_warning ("Should have obtained video_window_xid by now!");
+ * g_warning ("Should have obtained video_window_handle by now!");
* }
*
* gst_message_unref (message);
* video_widget_realize_cb (GtkWidget * widget, gpointer data)
* {
* #if GTK_CHECK_VERSION(2,18,0)
+ * // Tell Gtk+/Gdk to create a native window for this widget instead of
+ * // drawing onto the parent widget.
* // This is here just for pedagogical purposes, GDK_WINDOW_XID will call
* // it as well in newer Gtk versions
* if (!gdk_window_ensure_native (widget->window))
* #endif
*
* #ifdef GDK_WINDOWING_X11
- * video_window_xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
+ * {
+ * gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
+ * video_window_handle = xid;
+ * }
+ * #endif
+ * #ifdef GDK_WINDOWING_WIN32
+ * {
+ * HWND wnd = GDK_WINDOW_HWND (gtk_widget_get_window (video_window));
+ * video_window_handle = (guintptr) wnd;
+ * }
* #endif
* }
* ...
* gtk_widget_show_all (app_window);
*
* // realize window now so that the video window gets created and we can
- * // obtain its XID before the pipeline is started up and the videosink
- * // asks for the XID of the window to render onto
+ * // obtain its XID/HWND before the pipeline is started up and the videosink
+ * // asks for the XID/HWND of the window to render onto
* gtk_widget_realize (video_window);
*
- * // we should have the XID now
- * g_assert (video_window_xid != 0);
+ * // we should have the XID/HWND now
+ * g_assert (video_window_handle != 0);
* ...
* // set up sync handler for setting the xid once the pipeline is started
* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
if (eclass->send_event)
eclass->send_event (GST_ELEMENT_CAST (basesrc),
gst_event_new_tag (taglist));
+ else
+ gst_tag_list_free (taglist);
src->tags_pushed = TRUE;
}
gst_caps_unref (tmpl);
- if (gst_caps_is_empty (res))
+ if (gst_caps_is_empty (res)) {
+ gst_caps_unref (res);
ret = GST_STATE_CHANGE_FAILURE;
- else
+ } else
smart_encoder->available_caps = res;
GST_DEBUG_OBJECT (smart_encoder, "Done, available_caps:%" GST_PTR_FORMAT,
* @GST_PLAY_FLAG_DOWNLOAD: enable progressice download buffering for selected
* formats.
* @GST_PLAY_FLAG_BUFFERING: enable buffering of the demuxed or parsed data.
+ * @GST_PLAY_FLAG_DEINTERLACE: deinterlace raw video (if native not forced).
*
* Extra flags to configure the behaviour of the sinks.
*/
static GstPad *
gst_stream_get_other_pad_from_pad (GstPad * pad)
{
- GstStreamSynchronizer *self =
- GST_STREAM_SYNCHRONIZER (gst_pad_get_parent (pad));
+ GstObject *parent = gst_pad_get_parent (pad);
+ GstStreamSynchronizer *self;
GstStream *stream;
GstPad *opad = NULL;
+  /* a released pad no longer has a parent */
+ if (!G_LIKELY (parent))
+ goto exit;
+
+ self = GST_STREAM_SYNCHRONIZER (parent);
GST_STREAM_SYNCHRONIZER_LOCK (self);
stream = gst_pad_get_element_private (pad);
if (!stream)
GST_STREAM_SYNCHRONIZER_UNLOCK (self);
gst_object_unref (self);
+exit:
if (!opad)
GST_WARNING_OBJECT (pad, "Trying to get other pad after releasing");
}
done:
+ gst_object_unref (self);
+
return ret;
}
noinst_HEADERS = gstvideotestsrc.h videotestsrc.h
-noinst_PROGRAMS = generate_sine_table
+EXTRA_PROGRAMS = generate_sine_table
generate_sine_table_SOURCES = generate_sine_table.c
generate_sine_table_CFLAGS = $(GST_CFLAGS)
format->paint_setup (p, frame);
y1 = 2 * h / 3;
- y2 = h * 0.75;
+ y2 = 3 * h / 4;
/* color bars */
for (j = 0; j < y1; j++) {
# videoconvert takes too long, so disabled for now
VALGRIND_TO_FIX = \
elements/videoconvert \
+ libs/discoverer \
libs/video
# these tests don't even pass
GstDiscovererResult result;
gchar *uri;
- dc = gst_discoverer_new (GST_SECOND, &err);
+ /* high timeout, in case we're running under valgrind */
+ dc = gst_discoverer_new (5 * GST_SECOND, &err);
fail_unless (dc != NULL);
fail_unless (err == NULL);
GST_END_TEST;
+static void
+test_disco_sync_reuse (const gchar * test_fn, guint num, GstClockTime timeout)
+{
+ GError *err = NULL;
+ GstDiscoverer *dc;
+ GstDiscovererInfo *info;
+ GstDiscovererResult result;
+ gchar *uri, *path;
+ int i;
+
+ dc = gst_discoverer_new (timeout, &err);
+ fail_unless (dc != NULL);
+ fail_unless (err == NULL);
+
+  /* GST_TEST_FILES_PATH comes from makefile CFLAGS */
+ path = g_build_filename (GST_TEST_FILES_PATH, test_fn, NULL);
+ uri = gst_filename_to_uri (path, &err);
+ g_free (path);
+ fail_unless (err == NULL);
+
+ for (i = 0; i < num; ++i) {
+ GST_INFO ("[%02d] discovering uri '%s'", i, uri);
+ info = gst_discoverer_discover_uri (dc, uri, &err);
+ if (info) {
+ result = gst_discoverer_info_get_result (info);
+ GST_INFO ("result: %d", result);
+ gst_discoverer_info_unref (info);
+ }
+ /* in case we don't have some of the elements needed */
+ if (err) {
+ g_error_free (err);
+ err = NULL;
+ }
+ }
+ g_free (uri);
+
+ g_object_unref (dc);
+}
+
+GST_START_TEST (test_disco_sync_reuse_ogg)
+{
+ test_disco_sync_reuse ("theora-vorbis.ogg", 2, 10 * GST_SECOND);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_disco_sync_reuse_mp3)
+{
+ /* this will cause errors because -base doesn't do mp3 parsing or decoding */
+ test_disco_sync_reuse ("test.mp3", 3, 10 * GST_SECOND);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_disco_sync_reuse_timeout)
+{
+ /* set minimum timeout to test that, esp. leakage under valgrind */
+ /* FIXME: should really be even shorter */
+ test_disco_sync_reuse ("theora-vorbis.ogg", 2, GST_SECOND);
+}
+
+GST_END_TEST;
static Suite *
discoverer_suite (void)
suite_add_tcase (s, tc_chain);
tcase_add_test (tc_chain, test_disco_init);
tcase_add_test (tc_chain, test_disco_sync);
+ tcase_add_test (tc_chain, test_disco_sync_reuse_ogg);
+ tcase_add_test (tc_chain, test_disco_sync_reuse_mp3);
+ tcase_add_test (tc_chain, test_disco_sync_reuse_timeout);
return s;
}
ASSERT_STRINGS_EQUAL (gst_tag_get_language_code_iso_639_2B ("de"), "ger");
ASSERT_STRINGS_EQUAL (gst_tag_get_language_code_iso_639_2B ("deu"), "ger");
ASSERT_STRINGS_EQUAL (gst_tag_get_language_code_iso_639_2B ("ger"), "ger");
+
+ fail_unless (gst_tag_check_language_code ("de"));
+ fail_unless (gst_tag_check_language_code ("deu"));
+ fail_unless (gst_tag_check_language_code ("ger"));
+ fail_if (gst_tag_check_language_code ("xxx"));
+ fail_if (gst_tag_check_language_code ("und"));
+ fail_if (gst_tag_check_language_code ("un"));
+ fail_if (gst_tag_check_language_code (""));
+ fail_if (gst_tag_check_language_code ("\377"));
+ fail_if (gst_tag_check_language_code ("deutsch"));
}
GST_END_TEST;
hls.m3u8 \
license-uris \
partialframe.mjpeg \
- test.mp3
+ test.mp3 \
+ theora-vorbis.ogg
char **argv;
} PrivStruct;
-#define my_g_string_append_printf(str, format, ...) \
- g_string_append_printf (str, "%*s" format, 2*depth, " ", ##__VA_ARGS__)
+static void
+my_g_string_append_printf (GString * str, int depth, const gchar * format, ...)
+{
+ va_list args;
+
+ while (depth-- > 0) {
+ g_string_append (str, " ");
+ }
+
+ va_start (args, format);
+ g_string_append_vprintf (str, format, args);
+ va_end (args);
+}
static gchar *
gst_stream_audio_information_to_string (GstDiscovererStreamInfo * info,
s = g_string_sized_new (len);
- my_g_string_append_printf (s, "Codec:\n");
+ my_g_string_append_printf (s, depth, "Codec:\n");
caps = gst_discoverer_stream_info_get_caps (info);
tmp = gst_caps_to_string (caps);
gst_caps_unref (caps);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
- my_g_string_append_printf (s, "Additional info:\n");
+ my_g_string_append_printf (s, depth, "Additional info:\n");
if (gst_discoverer_stream_info_get_misc (info)) {
tmp = gst_structure_to_string (gst_discoverer_stream_info_get_misc (info));
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
- my_g_string_append_printf (s, " None\n");
+ my_g_string_append_printf (s, depth, " None\n");
}
audio_info = (GstDiscovererAudioInfo *) info;
ctmp = gst_discoverer_audio_info_get_language (audio_info);
- my_g_string_append_printf (s, "Language: %s\n", ctmp ? ctmp : "<unknown>");
- my_g_string_append_printf (s, "Channels: %u\n",
+ my_g_string_append_printf (s, depth, "Language: %s\n",
+ ctmp ? ctmp : "<unknown>");
+ my_g_string_append_printf (s, depth, "Channels: %u\n",
gst_discoverer_audio_info_get_channels (audio_info));
- my_g_string_append_printf (s, "Sample rate: %u\n",
+ my_g_string_append_printf (s, depth, "Sample rate: %u\n",
gst_discoverer_audio_info_get_sample_rate (audio_info));
- my_g_string_append_printf (s, "Depth: %u\n",
+ my_g_string_append_printf (s, depth, "Depth: %u\n",
gst_discoverer_audio_info_get_depth (audio_info));
- my_g_string_append_printf (s, "Bitrate: %u\n",
+ my_g_string_append_printf (s, depth, "Bitrate: %u\n",
gst_discoverer_audio_info_get_bitrate (audio_info));
- my_g_string_append_printf (s, "Max bitrate: %u\n",
+ my_g_string_append_printf (s, depth, "Max bitrate: %u\n",
gst_discoverer_audio_info_get_max_bitrate (audio_info));
- my_g_string_append_printf (s, "Tags:\n");
+ my_g_string_append_printf (s, depth, "Tags:\n");
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
tmp = gst_structure_to_string ((GstStructure *) tags);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
- my_g_string_append_printf (s, " None\n");
+ my_g_string_append_printf (s, depth, " None\n");
}
if (verbose)
- my_g_string_append_printf (s, "\n");
+ my_g_string_append_printf (s, depth, "\n");
return g_string_free (s, FALSE);
}
s = g_string_sized_new (len);
- my_g_string_append_printf (s, "Codec:\n");
+ my_g_string_append_printf (s, depth, "Codec:\n");
caps = gst_discoverer_stream_info_get_caps (info);
tmp = gst_caps_to_string (caps);
gst_caps_unref (caps);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
- my_g_string_append_printf (s, "Additional info:\n");
+ my_g_string_append_printf (s, depth, "Additional info:\n");
misc = gst_discoverer_stream_info_get_misc (info);
if (misc) {
tmp = gst_structure_to_string (misc);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
- my_g_string_append_printf (s, " None\n");
+ my_g_string_append_printf (s, depth, " None\n");
}
video_info = (GstDiscovererVideoInfo *) info;
- my_g_string_append_printf (s, "Width: %u\n",
+ my_g_string_append_printf (s, depth, "Width: %u\n",
gst_discoverer_video_info_get_width (video_info));
- my_g_string_append_printf (s, "Height: %u\n",
+ my_g_string_append_printf (s, depth, "Height: %u\n",
gst_discoverer_video_info_get_height (video_info));
- my_g_string_append_printf (s, "Depth: %u\n",
+ my_g_string_append_printf (s, depth, "Depth: %u\n",
gst_discoverer_video_info_get_depth (video_info));
- my_g_string_append_printf (s, "Frame rate: %u/%u\n",
+ my_g_string_append_printf (s, depth, "Frame rate: %u/%u\n",
gst_discoverer_video_info_get_framerate_num (video_info),
gst_discoverer_video_info_get_framerate_denom (video_info));
- my_g_string_append_printf (s, "Pixel aspect ratio: %u/%u\n",
+ my_g_string_append_printf (s, depth, "Pixel aspect ratio: %u/%u\n",
gst_discoverer_video_info_get_par_num (video_info),
gst_discoverer_video_info_get_par_denom (video_info));
- my_g_string_append_printf (s, "Interlaced: %s\n",
+ my_g_string_append_printf (s, depth, "Interlaced: %s\n",
gst_discoverer_video_info_is_interlaced (video_info) ? "true" : "false");
- my_g_string_append_printf (s, "Bitrate: %u\n",
+ my_g_string_append_printf (s, depth, "Bitrate: %u\n",
gst_discoverer_video_info_get_bitrate (video_info));
- my_g_string_append_printf (s, "Max bitrate: %u\n",
+ my_g_string_append_printf (s, depth, "Max bitrate: %u\n",
gst_discoverer_video_info_get_max_bitrate (video_info));
- my_g_string_append_printf (s, "Tags:\n");
+ my_g_string_append_printf (s, depth, "Tags:\n");
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
tmp = gst_structure_to_string ((GstStructure *) tags);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
- my_g_string_append_printf (s, " None\n");
+ my_g_string_append_printf (s, depth, " None\n");
}
if (verbose)
- my_g_string_append_printf (s, "\n");
+ my_g_string_append_printf (s, depth, "\n");
return g_string_free (s, FALSE);
}
s = g_string_sized_new (len);
- my_g_string_append_printf (s, "Codec:\n");
+ my_g_string_append_printf (s, depth, "Codec:\n");
caps = gst_discoverer_stream_info_get_caps (info);
tmp = gst_caps_to_string (caps);
gst_caps_unref (caps);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
- my_g_string_append_printf (s, "Additional info:\n");
+ my_g_string_append_printf (s, depth, "Additional info:\n");
if (gst_discoverer_stream_info_get_misc (info)) {
tmp = gst_structure_to_string (gst_discoverer_stream_info_get_misc (info));
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
- my_g_string_append_printf (s, " None\n");
+ my_g_string_append_printf (s, depth, " None\n");
}
subtitle_info = (GstDiscovererSubtitleInfo *) info;
ctmp = gst_discoverer_subtitle_info_get_language (subtitle_info);
- my_g_string_append_printf (s, "Language: %s\n", ctmp ? ctmp : "<unknown>");
+ my_g_string_append_printf (s, depth, "Language: %s\n",
+ ctmp ? ctmp : "<unknown>");
- my_g_string_append_printf (s, "Tags:\n");
+ my_g_string_append_printf (s, depth, "Tags:\n");
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
tmp = gst_structure_to_string ((GstStructure *) tags);
- my_g_string_append_printf (s, " %s\n", tmp);
+ my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
- my_g_string_append_printf (s, " None\n");
+ my_g_string_append_printf (s, depth, " None\n");
}
if (verbose)
- my_g_string_append_printf (s, "\n");
+ my_g_string_append_printf (s, depth, "\n");
return g_string_free (s, FALSE);
}
g_print ("Analyzing %s\n", uri);
info = gst_discoverer_discover_uri (dc, uri, &err);
print_info (info, err);
+ if (err)
+ g_error_free (err);
gst_discoverer_info_unref (info);
if (st)
gst_structure_free (st);
EXPORTS
+ gst_tag_check_language_code
gst_tag_demux_get_type
gst_tag_demux_result_get_type
gst_tag_freeform_string_to_utf8