*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*/
/**
*
* Sample pipeline:
* |[
- * gst-launch videotestsrc pattern=smpte75 ! alpha method=green ! \
- * videomixer name=mixer ! videoconvert ! autovideosink \
- * videotestsrc pattern=snow ! mixer.
+ * gst-launch-1.0 videotestsrc pattern=snow ! mixer.sink_0 \
+ * videotestsrc pattern=smpte75 ! alpha method=green ! mixer.sink_1 \
+ * videomixer name=mixer sink_0::zorder=0 sink_1::zorder=1 ! \
+ * videoconvert ! autovideosink
 * ]| This pipeline adds an alpha channel to the SMPTE color bars
- * with green as the transparent color and mixes the output with
- * a snow video stream.
+ * with green as the transparent color and overlays the output on
+ * top of a snow video stream.
*/
PROP_NOISE_LEVEL,
PROP_BLACK_SENSITIVITY,
PROP_WHITE_SENSITIVITY,
- PROP_PREFER_PASSTHROUGH,
- PROP_LAST
+ PROP_PREFER_PASSTHROUGH
};
static GstStaticPadTemplate gst_alpha_src_template =
static GstStaticCaps gst_alpha_alpha_caps =
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }"));
+/* FIXME: why do we need our own lock for this? */
#define GST_ALPHA_LOCK(alpha) G_STMT_START { \
GST_LOG_OBJECT (alpha, "Locking alpha from thread %p", g_thread_self ()); \
- g_static_mutex_lock (&alpha->lock); \
+ g_mutex_lock (&alpha->lock); \
GST_LOG_OBJECT (alpha, "Locked alpha from thread %p", g_thread_self ()); \
} G_STMT_END
#define GST_ALPHA_UNLOCK(alpha) G_STMT_START { \
GST_LOG_OBJECT (alpha, "Unlocking alpha from thread %p", g_thread_self ()); \
- g_static_mutex_unlock (&alpha->lock); \
+ g_mutex_unlock (&alpha->lock); \
} G_STMT_END
-static gboolean gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, gsize * size);
static GstCaps *gst_alpha_transform_caps (GstBaseTransform * btrans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
-static gboolean gst_alpha_set_caps (GstBaseTransform * btrans,
- GstCaps * incaps, GstCaps * outcaps);
-static GstFlowReturn gst_alpha_transform (GstBaseTransform * btrans,
- GstBuffer * in, GstBuffer * out);
static void gst_alpha_before_transform (GstBaseTransform * btrans,
GstBuffer * buf);
+static gboolean gst_alpha_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+
+static void gst_alpha_init_params_full (GstAlpha * alpha,
+ const GstVideoFormatInfo * in_info, const GstVideoFormatInfo * out_info);
static void gst_alpha_init_params (GstAlpha * alpha);
-static gboolean gst_alpha_set_process_function (GstAlpha * alpha);
+static void gst_alpha_set_process_function (GstAlpha * alpha);
+static gboolean gst_alpha_set_process_function_full (GstAlpha * alpha,
+ GstVideoInfo * in_info, GstVideoInfo * out_info);
static void gst_alpha_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static GType alpha_method_type = 0;
static const GEnumValue alpha_method[] = {
{ALPHA_METHOD_SET, "Set/adjust alpha channel", "set"},
- {ALPHA_METHOD_GREEN, "Chroma Key green", "green"},
- {ALPHA_METHOD_BLUE, "Chroma Key blue", "blue"},
- {ALPHA_METHOD_CUSTOM, "Chroma Key on target_r/g/b", "custom"},
+ {ALPHA_METHOD_GREEN, "Chroma Key on pure green", "green"},
+ {ALPHA_METHOD_BLUE, "Chroma Key on pure blue", "blue"},
+ {ALPHA_METHOD_CUSTOM, "Chroma Key on custom RGB values", "custom"},
{0, NULL, NULL},
};
GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
"alpha - Element for adding alpha channel to streams");
0.0, 1.0, DEFAULT_ALPHA,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TARGET_R,
- g_param_spec_uint ("target-r", "Target Red", "The Red target", 0, 255,
+ g_param_spec_uint ("target-r", "Target Red",
+ "The red color value for custom RGB chroma keying", 0, 255,
DEFAULT_TARGET_R,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TARGET_G,
- g_param_spec_uint ("target-g", "Target Green", "The Green target", 0, 255,
+ g_param_spec_uint ("target-g", "Target Green",
+ "The green color value for custom RGB chroma keying", 0, 255,
DEFAULT_TARGET_G,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TARGET_B,
- g_param_spec_uint ("target-b", "Target Blue", "The Blue target", 0, 255,
+ g_param_spec_uint ("target-b", "Target Blue",
+ "The blue color value for custom RGB chroma keying", 0, 255,
DEFAULT_TARGET_B,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ANGLE,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass),
PROP_WHITE_SENSITIVITY, g_param_spec_uint ("white-sensitivity",
- "Sensitivity", "Sensitivity to bright colors", 0, 128,
+ "White Sensitivity", "Sensitivity to bright colors", 0, 128,
DEFAULT_WHITE_SENSITIVITY,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass),
DEFAULT_PREFER_PASSTHROUGH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
- gst_element_class_set_details_simple (gstelement_class, "Alpha filter",
+ gst_element_class_set_static_metadata (gstelement_class, "Alpha filter",
"Filter/Effect/Video",
"Adds an alpha channel to video - uniform or via chroma-keying",
"Wim Taymans <wim.taymans@gmail.com>\n"
"Edward Hervey <edward.hervey@collabora.co.uk>\n"
"Jan Schmidt <thaytan@noraisin.net>");
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_alpha_sink_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_alpha_src_template));
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_alpha_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_alpha_src_template);
- btrans_class->transform = GST_DEBUG_FUNCPTR (gst_alpha_transform);
btrans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_alpha_before_transform);
- btrans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_alpha_get_unit_size);
btrans_class->transform_caps = GST_DEBUG_FUNCPTR (gst_alpha_transform_caps);
- btrans_class->set_caps = GST_DEBUG_FUNCPTR (gst_alpha_set_caps);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_alpha_transform_frame);
}
static void
alpha->black_sensitivity = DEFAULT_BLACK_SENSITIVITY;
alpha->white_sensitivity = DEFAULT_WHITE_SENSITIVITY;
- g_static_mutex_init (&alpha->lock);
+ g_mutex_init (&alpha->lock);
}
static void
{
GstAlpha *alpha = GST_ALPHA (object);
- g_static_mutex_free (&alpha->lock);
+ g_mutex_clear (&alpha->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
&& (alpha->prefer_passthrough);
alpha->method = method;
- switch (alpha->method) {
- case ALPHA_METHOD_GREEN:
- alpha->target_r = 0;
- alpha->target_g = 255;
- alpha->target_b = 0;
- break;
- case ALPHA_METHOD_BLUE:
- alpha->target_r = 0;
- alpha->target_g = 0;
- alpha->target_b = 255;
- break;
- default:
- break;
- }
gst_alpha_set_process_function (alpha);
gst_alpha_init_params (alpha);
break;
}
if (reconfigure)
- gst_base_transform_reconfigure (GST_BASE_TRANSFORM_CAST (alpha));
+ gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (alpha));
GST_ALPHA_UNLOCK (alpha);
}
}
}
-static gboolean
-gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, gsize * size)
-{
- GstVideoInfo info;
-
- if (!gst_video_info_from_caps (&info, caps))
- return FALSE;
-
- *size = info.size;
-
- GST_DEBUG_OBJECT (btrans,
- "unit size = %" G_GSIZE_FORMAT " for format %s w %d height %d", *size,
- GST_VIDEO_INFO_NAME (&info), GST_VIDEO_INFO_WIDTH (&info),
- GST_VIDEO_INFO_HEIGHT (&info));
-
- return TRUE;
-}
-
static GstCaps *
gst_alpha_transform_caps (GstBaseTransform * btrans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter)
structure = gst_structure_copy (gst_caps_get_structure (caps, i));
gst_structure_remove_field (structure, "format");
- gst_structure_remove_field (structure, "color-matrix");
+ gst_structure_remove_field (structure, "colorimetry");
gst_structure_remove_field (structure, "chroma-site");
gst_caps_append_structure (tmp, structure);
}
static gboolean
-gst_alpha_set_caps (GstBaseTransform * btrans,
- GstCaps * incaps, GstCaps * outcaps)
+gst_alpha_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info)
{
- GstAlpha *alpha = GST_ALPHA (btrans);
- GstVideoInfo in_info, out_info;
+ GstAlpha *alpha = GST_ALPHA (filter);
gboolean passthrough;
- if (!gst_video_info_from_caps (&in_info, incaps) ||
- !gst_video_info_from_caps (&out_info, outcaps))
- goto invalid_format;
-
GST_ALPHA_LOCK (alpha);
- alpha->in_sdtv = in_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
- alpha->out_sdtv = out_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ alpha->in_sdtv = in_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+ alpha->out_sdtv =
+ out_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
passthrough = alpha->prefer_passthrough &&
- GST_VIDEO_INFO_FORMAT (&in_info) == GST_VIDEO_INFO_FORMAT (&out_info)
+ GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
&& alpha->in_sdtv == alpha->out_sdtv && alpha->method == ALPHA_METHOD_SET
&& alpha->alpha == 1.0;
GST_DEBUG_OBJECT (alpha,
"Setting caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT
" (passthrough: %d)", incaps, outcaps, passthrough);
- gst_base_transform_set_passthrough (btrans, passthrough);
-
- alpha->in_info = in_info;
- alpha->out_info = out_info;
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (filter),
+ passthrough);
- if (!gst_alpha_set_process_function (alpha) && !passthrough)
+ if (!gst_alpha_set_process_function_full (alpha, in_info, out_info)
+ && !passthrough)
goto no_process;
- gst_alpha_init_params (alpha);
+ gst_alpha_init_params_full (alpha, in_info->finfo, out_info->finfo);
GST_ALPHA_UNLOCK (alpha);
return TRUE;
/* ERRORS */
-invalid_format:
- {
- GST_WARNING_OBJECT (alpha,
- "Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps,
- outcaps);
- return FALSE;
- }
no_process:
{
GST_WARNING_OBJECT (alpha,
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
memcpy (matrix,
alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
memcpy (matrix,
alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
gst_alpha_set_planar_yuv_ayuv (const GstVideoFrame * in_frame,
GstVideoFrame * out_frame, GstAlpha * alpha)
{
- const guint8 *src;
guint8 *dest;
gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint y_stride, uv_stride;
gint v_subs, h_subs;
- src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (in_frame);
y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
- srcY_tmp = srcY = src;
- srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
+ srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
+ srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);
switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
gst_alpha_chroma_key_planar_yuv_ayuv (const GstVideoFrame * in_frame,
GstVideoFrame * out_frame, GstAlpha * alpha)
{
- const guint8 *src;
guint8 *dest;
gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
guint8 kfgy_scale = alpha->kfgy_scale;
guint noise_level2 = alpha->noise_level2;
- src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (in_frame);
y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
- srcY_tmp = srcY = src;
- srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
+ srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
+ srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);
switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
gst_alpha_set_planar_yuv_argb (const GstVideoFrame * in_frame,
GstVideoFrame * out_frame, GstAlpha * alpha)
{
- const guint8 *src;
guint8 *dest;
gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint r, g, b;
gint p[4];
- src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
- srcY_tmp = srcY = src;
- srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
+ srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
+ srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);
switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
gst_alpha_chroma_key_planar_yuv_argb (const GstVideoFrame * in_frame,
GstVideoFrame * out_frame, GstAlpha * alpha)
{
- const guint8 *src;
guint8 *dest;
gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint matrix[12];
gint p[4];
- src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (in_frame);
height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
- srcY_tmp = srcY = src;
- srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
+ srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
+ srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);
switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
p[2] = p[0] + 2;
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
if (alpha->in_sdtv != alpha->out_sdtv) {
gint matrix[12];
y = src[p[0]];
u = src[p[1]];
- v = src[p[3]];;
+ v = src[p[3]];
dest[1] = y;
dest[2] = u;
y = src[p[0]];
u = src[p[1]];
- v = src[p[3]];;
+ v = src[p[3]];
dest[1] = y;
dest[2] = u;
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
p[2] = p[0] + 2;
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
o[2] = o[0] + 2;
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
o[2] = o[0] + 2;
- o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
- o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
- p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
- p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
- p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
- p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
/* Protected with the alpha lock */
static void
-gst_alpha_init_params (GstAlpha * alpha)
+gst_alpha_init_params_full (GstAlpha * alpha,
+ const GstVideoFormatInfo * in_info, const GstVideoFormatInfo * out_info)
{
gfloat kgl;
gfloat tmp;
gfloat tmp1, tmp2;
gfloat y;
- const GstVideoFormatInfo *in_info, *out_info;
+ guint target_r = alpha->target_r;
+ guint target_g = alpha->target_g;
+ guint target_b = alpha->target_b;
const gint *matrix;
- in_info = alpha->in_info.finfo;
- out_info = alpha->out_info.finfo;
+ switch (alpha->method) {
+ case ALPHA_METHOD_GREEN:
+ target_r = 0;
+ target_g = 255;
+ target_b = 0;
+ break;
+ case ALPHA_METHOD_BLUE:
+ target_r = 0;
+ target_g = 0;
+ target_b = 255;
+ break;
+ default:
+ break;
+ }
/* RGB->RGB: convert to SDTV YUV, chroma keying, convert back
* YUV->RGB: chroma keying, convert to RGB
(alpha->out_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
cog_rgb_to_ycbcr_matrix_8bit_hdtv;
- y = (matrix[0] * ((gint) alpha->target_r) +
- matrix[1] * ((gint) alpha->target_g) +
- matrix[2] * ((gint) alpha->target_b) + matrix[3]) >> 8;
+ y = (matrix[0] * ((gint) target_r) +
+ matrix[1] * ((gint) target_g) +
+ matrix[2] * ((gint) target_b) + matrix[3]) >> 8;
/* Cb,Cr without offset here because the chroma keying
* works with them being in range [-128,127]
*/
tmp1 =
- (matrix[4] * ((gint) alpha->target_r) +
- matrix[5] * ((gint) alpha->target_g) +
- matrix[6] * ((gint) alpha->target_b)) >> 8;
+ (matrix[4] * ((gint) target_r) +
+ matrix[5] * ((gint) target_g) + matrix[6] * ((gint) target_b)) >> 8;
tmp2 =
- (matrix[8] * ((gint) alpha->target_r) +
- matrix[9] * ((gint) alpha->target_g) +
- matrix[10] * ((gint) alpha->target_b)) >> 8;
+ (matrix[8] * ((gint) target_r) +
+ matrix[9] * ((gint) target_g) + matrix[10] * ((gint) target_b)) >> 8;
kgl = sqrt (tmp1 * tmp1 + tmp2 * tmp2);
alpha->cb = 127 * (tmp1 / kgl);
tmp = MIN (tmp, 255);
alpha->accept_angle_ctg = tmp;
tmp = 1 / (kgl);
- alpha->one_over_kc = 255 * 2 * tmp - 255;
+ alpha->one_over_kc = (gint) (255 * 2 * tmp - 255);
tmp = 15 * y / kgl;
tmp = MIN (tmp, 255);
alpha->kfgy_scale = tmp;
alpha->noise_level2 = alpha->noise_level * alpha->noise_level;
}
+/* Convenience wrapper around gst_alpha_init_params_full() that pulls the
+ * in/out format info from the GstVideoFilter base class state.  Used from
+ * the property setters, which may run before caps negotiation; in that
+ * case the finfo pointers are still NULL and initialization is deferred
+ * until set_info().  Caller must hold the alpha lock. */
+static void
+gst_alpha_init_params (GstAlpha * alpha)
+{
+  const GstVideoFormatInfo *finfo_in, *finfo_out;
+
+  finfo_in = GST_VIDEO_FILTER (alpha)->in_info.finfo;
+  finfo_out = GST_VIDEO_FILTER (alpha)->out_info.finfo;
+
+  if (finfo_in != NULL && finfo_out != NULL) {
+    gst_alpha_init_params_full (alpha, finfo_in, finfo_out);
+  } else {
+    /* negotiation has not happened yet; params are computed in set_info() */
+    GST_DEBUG_OBJECT (alpha, "video formats not set yet");
+  }
+}
+
+
/* Protected with the alpha lock */
static gboolean
-gst_alpha_set_process_function (GstAlpha * alpha)
+gst_alpha_set_process_function_full (GstAlpha * alpha, GstVideoInfo * in_info,
+ GstVideoInfo * out_info)
{
alpha->process = NULL;
switch (alpha->method) {
case ALPHA_METHOD_SET:
- switch (GST_VIDEO_INFO_FORMAT (&alpha->out_info)) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_AYUV:
- switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_set_ayuv_ayuv;
break;
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGRA:
- switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_set_ayuv_argb;
break;
break;
}
break;
- break;
default:
break;
}
case ALPHA_METHOD_GREEN:
case ALPHA_METHOD_BLUE:
case ALPHA_METHOD_CUSTOM:
- switch (GST_VIDEO_INFO_FORMAT (&alpha->out_info)) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_AYUV:
- switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_chroma_key_ayuv_ayuv;
break;
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGRA:
- switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_chroma_key_ayuv_argb;
break;
break;
}
break;
- break;
default:
break;
}
}
static void
+gst_alpha_set_process_function (GstAlpha * alpha)
+{
+ GstVideoInfo *info_in, *info_out;
+
+ info_in = &GST_VIDEO_FILTER (alpha)->in_info;
+ info_out = &GST_VIDEO_FILTER (alpha)->out_info;
+
+ if (info_in->finfo != NULL && info_out->finfo != NULL) {
+ gst_alpha_set_process_function_full (alpha, info_in, info_out);
+ } else {
+ GST_DEBUG_OBJECT (alpha, "video formats not set yet");
+ }
+}
+
+static void
gst_alpha_before_transform (GstBaseTransform * btrans, GstBuffer * buf)
{
GstAlpha *alpha = GST_ALPHA (btrans);
}
static GstFlowReturn
-gst_alpha_transform (GstBaseTransform * btrans, GstBuffer * in, GstBuffer * out)
+gst_alpha_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstAlpha *alpha = GST_ALPHA (btrans);
- GstVideoFrame in_frame, out_frame;
+ GstAlpha *alpha = GST_ALPHA (filter);
GST_ALPHA_LOCK (alpha);
if (G_UNLIKELY (!alpha->process))
goto not_negotiated;
- if (!gst_video_frame_map (&in_frame, &alpha->in_info, in, GST_MAP_READ))
- goto invalid_in;
-
- if (!gst_video_frame_map (&out_frame, &alpha->out_info, out, GST_MAP_WRITE))
- goto invalid_out;
-
- alpha->process (&in_frame, &out_frame, alpha);
-
- gst_video_frame_unmap (&out_frame);
- gst_video_frame_unmap (&in_frame);
+ alpha->process (in_frame, out_frame, alpha);
GST_ALPHA_UNLOCK (alpha);
GST_ALPHA_UNLOCK (alpha);
return GST_FLOW_NOT_NEGOTIATED;
}
-invalid_in:
- {
- GST_ERROR_OBJECT (alpha, "Invalid input frame");
- GST_ALPHA_UNLOCK (alpha);
- return GST_FLOW_OK;
- }
-invalid_out:
- {
- GST_ERROR_OBJECT (alpha, "Invalid output frame");
- gst_video_frame_unmap (&in_frame);
- GST_ALPHA_UNLOCK (alpha);
- return GST_FLOW_OK;
- }
}
static gboolean
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
- "alpha",
+ alpha,
"adds an alpha channel to video - constant or via chroma-keying",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)