/* GStreamer
 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
 * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
 *
 * EffecTV:
 * Copyright (C) 2001-2002 FUKUCHI Kentarou
 *
 * EdgeTV - detects edge and display it in good old computer way
 *
 * EffecTV is free software. This library is free software;
 * you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-edgetv
 *
 * EdgeTV detects edges and displays them in good old low-resolution
 * computer way.
 *
 * <refsect2>
 * <title>Example launch line</title>
 * |[
 * gst-launch-1.0 -v videotestsrc ! edgetv ! videoconvert ! autovideosink
 * ]| This pipeline shows the effect of edgetv on a test stream.
 * </refsect2>
 */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
-#include <gst/video/gstvideofilter.h>
-
#include <string.h>
-#include <gst/video/video.h>
-
-#define GST_TYPE_EDGETV \
- (gst_edgetv_get_type())
-#define GST_EDGETV(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_EDGETV,GstEdgeTV))
-#define GST_EDGETV_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_EDGETV,GstEdgeTVClass))
-#define GST_IS_EDGETV(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_EDGETV))
-#define GST_IS_EDGETV_CLASS(obj) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_EDGETV))
-
-typedef struct _GstEdgeTV GstEdgeTV;
-typedef struct _GstEdgeTVClass GstEdgeTVClass;
-
-struct _GstEdgeTV
-{
- GstVideofilter videofilter;
-
- gint width, height;
- gint map_width, map_height;
- guint32 *map;
- gint video_width_margin;
-};
-
-struct _GstEdgeTVClass
-{
- GstVideofilterClass parent_class;
-};
+#include "gstedge.h"
-GType gst_edgetv_get_type (void);
+#define gst_edgetv_parent_class parent_class
+G_DEFINE_TYPE (GstEdgeTV, gst_edgetv, GST_TYPE_VIDEO_FILTER);
-static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
- "Filter/Effect/Video",
- "Apply edge detect on video",
- "Wim Taymans <wim.taymans@chello.be>");
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
+#endif
static GstStaticPadTemplate gst_edgetv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ GST_STATIC_CAPS (CAPS_STR)
);
static GstStaticPadTemplate gst_edgetv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ GST_STATIC_CAPS (CAPS_STR)
);
-static GstVideofilterClass *parent_class = NULL;
-
static gboolean
-gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_edgetv_set_info (GstVideoFilter * filter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstEdgeTV *edgetv = GST_EDGETV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
-
- if (gst_structure_get_int (structure, "width", &edgetv->width) &&
- gst_structure_get_int (structure, "height", &edgetv->height)) {
- edgetv->map_width = edgetv->width / 4;
- edgetv->map_height = edgetv->height / 4;
- edgetv->video_width_margin = edgetv->width % 4;
-
- g_free (edgetv->map);
- edgetv->map =
- (guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
- sizeof (guint32) * 2);
- memset (edgetv->map, 0,
- edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
- ret = TRUE;
- }
-
- return ret;
-}
-
-static gboolean
-gst_edgetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
- guint * size)
-{
- GstEdgeTV *filter;
- GstStructure *structure;
- gboolean ret = FALSE;
+ GstEdgeTV *edgetv = GST_EDGETV (filter);
+ guint map_size;
gint width, height;
- filter = GST_EDGETV (btrans);
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- structure = gst_caps_get_structure (caps, 0);
+ edgetv->map_width = width / 4;
+ edgetv->map_height = height / 4;
+ edgetv->video_width_margin = width % 4;
- if (gst_structure_get_int (structure, "width", &width) &&
- gst_structure_get_int (structure, "height", &height)) {
- *size = width * height * 32 / 8;
- ret = TRUE;
- GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
- width, height);
- }
+ map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;
- return ret;
+ g_free (edgetv->map);
+ edgetv->map = (guint32 *) g_malloc0 (map_size);
+
+ return TRUE;
}
static GstFlowReturn
-gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_edgetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstEdgeTV *filter;
+ GstEdgeTV *filter = GST_EDGETV (vfilter);
gint x, y, r, g, b;
guint32 *src, *dest;
guint32 p, q;
guint32 v0, v1, v2, v3;
+ gint width, map_height, map_width;
+ gint video_width_margin;
+ guint32 *map;
GstFlowReturn ret = GST_FLOW_OK;
- filter = GST_EDGETV (trans);
-
- gst_buffer_stamp (out, in);
+ map = filter->map;
+ map_height = filter->map_height;
+ map_width = filter->map_width;
+ video_width_margin = filter->video_width_margin;
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
- src += filter->width * 4 + 4;
- dest += filter->width * 4 + 4;
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
- for (y = 1; y < filter->map_height - 1; y++) {
- for (x = 1; x < filter->map_width - 1; x++) {
+ src += width * 4 + 4;
+ dest += width * 4 + 4;
+ for (y = 1; y < map_height - 1; y++) {
+ for (x = 1; x < map_width - 1; x++) {
p = *src;
q = *(src - 4);
- /* difference between the current pixel and right neighbor. */
+ /* difference between the current pixel and left neighbor. */
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8;
b = (p & 0xff) - (q & 0xff);
v2 = (r << 17) | (g << 9) | b;
/* difference between the current pixel and upper neighbor. */
- q = *(src - filter->width * 4);
+ q = *(src - width * 4);
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8;
b = (p & 0xff) - (q & 0xff);
b = 255;
v3 = (r << 17) | (g << 9) | b;
- v0 = filter->map[(y - 1) * filter->map_width * 2 + x * 2];
- v1 = filter->map[y * filter->map_width * 2 + (x - 1) * 2 + 1];
- filter->map[y * filter->map_width * 2 + x * 2] = v2;
- filter->map[y * filter->map_width * 2 + x * 2 + 1] = v3;
+ v0 = map[(y - 1) * map_width * 2 + x * 2];
+ v1 = map[y * map_width * 2 + (x - 1) * 2 + 1];
+ map[y * map_width * 2 + x * 2] = v2;
+ map[y * map_width * 2 + x * 2 + 1] = v3;
r = v0 + v1;
g = r & 0x01010100;
dest[0] = r | (g - (g >> 8));
dest[3] = v3;
r = v2 + v1;
g = r & 0x01010100;
- dest[filter->width] = r | (g - (g >> 8));
+ dest[width] = r | (g - (g >> 8));
r = v2 + v3;
g = r & 0x01010100;
- dest[filter->width + 1] = r | (g - (g >> 8));
- dest[filter->width + 2] = v3;
- dest[filter->width + 3] = v3;
- dest[filter->width * 2] = v2;
- dest[filter->width * 2 + 1] = v2;
- dest[filter->width * 3] = v2;
- dest[filter->width * 3 + 1] = v2;
+ dest[width + 1] = r | (g - (g >> 8));
+ dest[width + 2] = v3;
+ dest[width + 3] = v3;
+ dest[width * 2] = v2;
+ dest[width * 2 + 1] = v2;
+ dest[width * 2 + 2] = 0;
+ dest[width * 2 + 3] = 0;
+ dest[width * 3] = v2;
+ dest[width * 3 + 1] = v2;
+ dest[width * 3 + 2] = 0;
+ dest[width * 3 + 3] = 0;
src += 4;
dest += 4;
}
- src += filter->width * 3 + 8 + filter->video_width_margin;
- dest += filter->width * 3 + 8 + filter->video_width_margin;
+ src += width * 3 + 8 + video_width_margin;
+ dest += width * 3 + 8 + video_width_margin;
}
return ret;
}
-static void
-gst_edgetv_base_init (gpointer g_class)
+static gboolean
+gst_edgetv_start (GstBaseTransform * trans)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details (element_class, &gst_edgetv_details);
+ GstEdgeTV *edgetv = GST_EDGETV (trans);
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_edgetv_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_edgetv_src_template));
+ if (edgetv->map)
+ memset (edgetv->map, 0,
+ edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
+ return TRUE;
}
static void
-gst_edgetv_class_init (gpointer klass, gpointer class_data)
+gst_edgetv_finalize (GObject * object)
{
- GObjectClass *gobject_class;
- GstElementClass *element_class;
- GstBaseTransformClass *trans_class;
-
- gobject_class = (GObjectClass *) klass;
- element_class = (GstElementClass *) klass;
- trans_class = (GstBaseTransformClass *) klass;
+ GstEdgeTV *edgetv = GST_EDGETV (object);
- parent_class = g_type_class_peek_parent (klass);
+ g_free (edgetv->map);
+ edgetv->map = NULL;
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
- trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_edgetv_get_unit_size);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
-gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
+gst_edgetv_class_init (GstEdgeTVClass * klass)
{
- GstEdgeTV *edgetv = GST_EDGETV (instance);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
- edgetv->map = NULL;
+ gobject_class->finalize = gst_edgetv_finalize;
+
+ gst_element_class_set_static_metadata (gstelement_class, "EdgeTV effect",
+ "Filter/Effect/Video",
+ "Apply edge detect on video", "Wim Taymans <wim.taymans@chello.be>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_edgetv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_edgetv_src_template);
+
+ trans_class->start = GST_DEBUG_FUNCPTR (gst_edgetv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_edgetv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_edgetv_transform_frame);
}
-GType
-gst_edgetv_get_type (void)
+static void
+gst_edgetv_init (GstEdgeTV * edgetv)
{
- static GType edgetv_type = 0;
-
- if (!edgetv_type) {
- static const GTypeInfo edgetv_info = {
- sizeof (GstEdgeTVClass),
- gst_edgetv_base_init,
- NULL,
- (GClassInitFunc) gst_edgetv_class_init,
- NULL,
- NULL,
- sizeof (GstEdgeTV),
- 0,
- (GInstanceInitFunc) gst_edgetv_init,
- };
-
- edgetv_type =
- g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
- 0);
- }
- return edgetv_type;
}