*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*/
/*
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch videotestsrc ! videobalance saturation=0.0 ! ffmpegcolorspace ! ximagesink
+ * gst-launch-1.0 videotestsrc ! videobalance saturation=0.0 ! videoconvert ! ximagesink
* ]| This pipeline converts the image to black and white by setting the
* saturation to 0.0.
* </refsect2>
- *
- * Last reviewed on 2010-04-18 (0.10.22)
*/
#ifdef HAVE_CONFIG_H
PROP_SATURATION
};
+/* Raw video formats this element can actually modify in place.  Anything
+ * else matched by the "video/x-raw(ANY)" branch of the pad templates is
+ * only ever handled in passthrough mode (see transform_caps below). */
+#define PROCESSING_CAPS \
+ "{ AYUV, ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, " \
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, " \
+ "I420, YV12, IYUV, Y41B, NV12, NV21 }"
+
+/* Source pad advertises the processable formats plus any raw video caps;
+ * the non-processable caps are restricted to passthrough at negotiation
+ * time by the transform_caps implementation. */
static GstStaticPadTemplate gst_video_balance_src_template =
-GST_STATIC_PAD_TEMPLATE ("src",
+ GST_STATIC_PAD_TEMPLATE ("src",
 GST_PAD_SRC,
 GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
- "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
- "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
- "I420, YV12, IYUV, Y41B }"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
+ "video/x-raw(ANY)")
);
+/* Sink pad template mirrors the source pad: processable formats first,
+ * then a wildcard raw-video branch that is passthrough-only. */
static GstStaticPadTemplate gst_video_balance_sink_template =
-GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_STATIC_PAD_TEMPLATE ("sink",
 GST_PAD_SINK,
 GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
- "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
- "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
- "I420, YV12, IYUV, Y41B }"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
+ "video/x-raw(ANY)")
);
static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
+/* Recompute the passthrough decision and, when processing is needed, the
+ * lookup tables.  Called after any balance property changes. */
static void
gst_video_balance_update_properties (GstVideoBalance * videobalance)
{
- gboolean passthrough = gst_video_balance_is_passthrough (videobalance);
+ gboolean passthrough;
 GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
- base->passthrough = passthrough;
-
+ /* Evaluate the properties and rebuild the tables under the object lock
+ * so a concurrent property change cannot interleave with the rebuild. */
+ GST_OBJECT_LOCK (videobalance);
+ passthrough = gst_video_balance_is_passthrough (videobalance);
 if (!passthrough)
 gst_video_balance_update_tables (videobalance);
+ GST_OBJECT_UNLOCK (videobalance);
+
+ /* Use the basetransform setter instead of poking base->passthrough
+ * directly, so basetransform can synchronize the flag itself. */
+ gst_base_transform_set_passthrough (base, passthrough);
}
static void
}
+/* In-place processing for semi-planar YUV frames (NV12/NV21): plane 0 is
+ * full-resolution luma, plane 1 holds interleaved 2-byte chroma pairs
+ * (U,V order for NV12; V,U for NV21).  Remaps every sample through the
+ * precomputed tables in videobalance (tabley / tableu / tablev). */
static void
+gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+{
+ gint x, y;
+ guint8 *ydata;
+ guint8 *uvdata;
+ gint ystride, uvstride;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+ gint upos, vpos;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ /* Pass 1: remap each luma byte through the 1D table. */
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * ystride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr++;
+ }
+ }
+
+ /* Chroma plane is subsampled; use the component size of plane 1. */
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+
+ /* Byte offsets of U and V within each interleaved pair: NV12 = U,V,
+ * anything else reaching here (NV21) = V,U. */
+ upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
+ vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;
+
+ /* Pass 2: remap each (U,V) pair through the 2D tables; both outputs
+ * depend on both inputs, so read the pair before writing either. */
+ for (y = 0; y < height2; y++) {
+ guint8 *uvptr;
+ guint8 u1, v1;
+
+ uvptr = uvdata + y * uvstride;
+
+ for (x = 0; x < width2; x++) {
+ u1 = uvptr[upos];
+ v1 = uvptr[vpos];
+
+ uvptr[upos] = tableu[u1][v1];
+ uvptr[vpos] = tablev[u1][v1];
+ uvptr += 2;
+ }
+ }
+}
+
+static void
gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
GstVideoFrame * frame)
{
case GST_VIDEO_FORMAT_YVYU:
videobalance->process = gst_video_balance_packed_yuv;
break;
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_NV21:
+ videobalance->process = gst_video_balance_semiplanar_yuv;
+ break;
case GST_VIDEO_FORMAT_ARGB:
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
videobalance->process = gst_video_balance_packed_rgb;
break;
default:
- goto unknown_format;
+ if (!gst_video_balance_is_passthrough (videobalance))
+ goto unknown_format;
break;
}
gst_object_sync_values (GST_OBJECT (balance), stream_time);
}
+/* GstBaseTransform::transform_caps implementation.  When the element is
+ * actively processing, restrict negotiation to the formats we can modify
+ * (PROCESSING_CAPS); in passthrough mode any raw caps may flow through
+ * unchanged.  Returns a new caps ref; consumes nothing from the caller. */
+static GstCaps *
+gst_video_balance_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (trans);
+ GstCaps *ret;
+
+ if (!gst_video_balance_is_passthrough (balance)) {
+ static GstStaticCaps raw_caps =
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS));
+ GstCaps *tmp = gst_static_caps_get (&raw_caps);
+
+ /* gst_caps_intersect returns a new caps; 'caps' now aliases it and
+ * is owned by us (the caller's caps argument is untouched). */
+ caps = gst_caps_intersect (caps, tmp);
+ gst_caps_unref (tmp);
+
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ } else {
+ ret = caps;
+ }
+ } else {
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ } else {
+ /* Extra ref so the caller always receives its own reference. */
+ ret = gst_caps_ref (caps);
+ }
+ }
+
+ return ret;
+}
+
static GstFlowReturn
gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
GstVideoFrame * frame)
if (!videobalance->process)
goto not_negotiated;
- /* if no change is needed, we are done */
- if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (vfilter)))
- goto done;
-
GST_OBJECT_LOCK (videobalance);
videobalance->process (videobalance, frame);
GST_OBJECT_UNLOCK (videobalance);
-done:
return GST_FLOW_OK;
/* ERRORS */
DEFAULT_PROP_SATURATION,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gst_element_class_set_details_simple (gstelement_class, "Video balance",
+ gst_element_class_set_static_metadata (gstelement_class, "Video balance",
"Filter/Effect/Video",
"Adjusts brightness, contrast, hue, saturation on a video stream",
"David Schleef <ds@schleef.org>");
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_video_balance_sink_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_video_balance_src_template));
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_balance_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_balance_src_template);
trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
+ trans_class->transform_ip_on_passthrough = FALSE;
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_video_balance_transform_caps);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
vfilter_class->transform_frame_ip =
changed = new_val != vb->contrast;
vb->contrast = new_val;
}
+ GST_OBJECT_UNLOCK (vb);
if (changed)
gst_video_balance_update_properties (vb);
- GST_OBJECT_UNLOCK (vb);
if (changed) {
gst_color_balance_value_changed (balance, channel,
return value;
}
+/* GstColorBalance::get_balance_type vfunc: this element adjusts the video
+ * in software (lookup tables on the frames), not via hardware controls. */
+static GstColorBalanceType
+gst_video_balance_colorbalance_get_balance_type (GstColorBalance * balance)
+{
+ return GST_COLOR_BALANCE_SOFTWARE;
+}
+
+
+/* Wire up the GstColorBalance interface vtable. */
static void
gst_video_balance_colorbalance_init (GstColorBalanceInterface * iface)
{
- GST_COLOR_BALANCE_TYPE (iface) = GST_COLOR_BALANCE_SOFTWARE;
 iface->list_channels = gst_video_balance_colorbalance_list_channels;
 iface->set_value = gst_video_balance_colorbalance_set_value;
 iface->get_value = gst_video_balance_colorbalance_get_value;
+ /* Replaces the removed GST_COLOR_BALANCE_TYPE field assignment: in the
+ * 1.x API the balance type is reported through this vfunc instead. */
+ iface->get_balance_type = gst_video_balance_colorbalance_get_balance_type;
}
static GstColorBalanceChannel *
break;
}
- gst_video_balance_update_properties (balance);
GST_OBJECT_UNLOCK (balance);
+ gst_video_balance_update_properties (balance);
if (label) {
GstColorBalanceChannel *channel =