2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2003> David Schleef <ds@schleef.org>
4 * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
19 * Boston, MA 02111-1307, USA.
23 * This file was (probably) generated from gstvideobalance.c,
24 * gstvideobalance.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
28 * SECTION:element-videobalance
30 * Adjusts brightness, contrast, hue, saturation on a video stream.
33 * <title>Example launch line</title>
35 * gst-launch videotestsrc ! videobalance saturation=0.0 ! ffmpegcolorspace ! ximagesink
36 * ]| This pipeline converts the image to black and white by setting the
40 * Last reviewed on 2010-04-18 (0.10.22)
47 #include <gst/math-compat.h>
49 #include "gstvideobalance.h"
52 #include <gst/controller/gstcontroller.h>
53 #include <gst/interfaces/colorbalance.h>
/* Private debug category for this element; all GST_DEBUG/GST_ERROR in this
 * file log through it via GST_CAT_DEFAULT. */
55 GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
56 #define GST_CAT_DEFAULT videobalance_debug
58 /* GstVideoBalance properties */
/* Neutral defaults: with contrast/saturation at 1.0 and brightness/hue at
 * 0.0 the element is a no-op (see gst_video_balance_is_passthrough). */
59 #define DEFAULT_PROP_CONTRAST 1.0
60 #define DEFAULT_PROP_BRIGHTNESS 0.0
61 #define DEFAULT_PROP_HUE 0.0
62 #define DEFAULT_PROP_SATURATION 1.0
/* Source pad template: the fixed set of packed/planar YUV and packed RGB
 * formats this element can emit.  Identical to the sink template below,
 * since all processing is done in place. */
73 static GstStaticPadTemplate gst_video_balance_src_template =
74 GST_STATIC_PAD_TEMPLATE ("src",
77 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
78 "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
79 "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
80 "I420, YV12, IYUV, Y41B }"))
/* Sink pad template: must mirror the src template exactly — the element
 * never converts between formats, it only adjusts pixel values in place. */
83 static GstStaticPadTemplate gst_video_balance_sink_template =
84 GST_STATIC_PAD_TEMPLATE ("sink",
87 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
88 "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
89 "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
90 "I420, YV12, IYUV, Y41B }"))
/* Forward declarations needed before the G_DEFINE_TYPE_WITH_CODE macro:
 * the GstColorBalance interface initializer and the GObject property
 * accessors installed in class_init. */
93 static void gst_video_balance_colorbalance_init (GstColorBalanceClass * iface);
95 static void gst_video_balance_set_property (GObject * object, guint prop_id,
96 const GValue * value, GParamSpec * pspec);
97 static void gst_video_balance_get_property (GObject * object, guint prop_id,
98 GValue * value, GParamSpec * pspec);
/* Register GstVideoBalance as a GstVideoFilter subclass that also
 * implements the GstColorBalance interface (so UIs can adjust it through
 * the generic color-balance API as well as through GObject properties). */
100 #define gst_video_balance_parent_class parent_class
101 G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
102 GST_TYPE_VIDEO_FILTER,
103 G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
104 gst_video_balance_colorbalance_init));
/* Rebuild the per-component look-up tables from the current
 * contrast/brightness/hue/saturation values.  tabley[256] maps Y values;
 * tableu/tablev are 256x256 2D tables indexed by (u, v) because hue
 * rotation mixes the two chroma components.
 * NOTE(review): the original file clamps y/u/v to [0,255] before the
 * rint() stores — those lines are elided from this view; verify against
 * upstream before editing. */
107 * look-up tables (LUT).
110 gst_video_balance_update_tables (GstVideoBalance * vb)
113 gdouble y, u, v, hue_cos, hue_sin;
/* Luma: standard video range transform around the 16 black level. */
116 for (i = 0; i < 256; i++) {
117 y = 16 + ((i - 16) * vb->contrast + vb->brightness * 255);
122 vb->tabley[i] = rint (y);
/* Hue is expressed in turns of +-1.0 mapped onto +-pi radians. */
125 hue_cos = cos (G_PI * vb->hue);
126 hue_sin = sin (G_PI * vb->hue);
128 /* U/V lookup tables are 2D, since we need both U/V for each table
/* Chroma: rotate (u, v) by the hue angle and scale by saturation,
 * centred on the 128 neutral value. */
130 for (i = -128; i < 128; i++) {
131 for (j = -128; j < 128; j++) {
132 u = 128 + ((i * hue_cos + j * hue_sin) * vb->saturation);
133 v = 128 + ((-i * hue_sin + j * hue_cos) * vb->saturation);
142 vb->tableu[i + 128][j + 128] = rint (u);
143 vb->tablev[i + 128][j + 128] = rint (v);
/* Returns TRUE when every property sits at its neutral default, i.e. the
 * transform would leave all pixels unchanged. */
149 gst_video_balance_is_passthrough (GstVideoBalance * videobalance)
151 return videobalance->contrast == 1.0 &&
152 videobalance->brightness == 0.0 &&
153 videobalance->hue == 0.0 && videobalance->saturation == 1.0;
/* Re-evaluate passthrough mode and regenerate the LUTs after any property
 * change.  Callers hold the object lock (see colorbalance_set_value and
 * set_property) — presumably required to guard the tables against the
 * processing path; confirm against upstream. */
157 gst_video_balance_update_properties (GstVideoBalance * videobalance)
159 gboolean passthrough = gst_video_balance_is_passthrough (videobalance);
160 GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
/* Writes the basetransform passthrough flag directly rather than via
 * gst_base_transform_set_passthrough(). */
162 base->passthrough = passthrough;
165 gst_video_balance_update_tables (videobalance);
/* In-place processing for planar YUV formats (I420/YV12/Y41B/Y42B/Y444):
 * pass every luma byte through tabley, then every (u, v) chroma pair
 * through the 2D tableu/tablev tables.  The chroma planes are walked at
 * their subsampled component size. */
169 gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
170 GstVideoFrame * frame)
174 guint8 *udata, *vdata;
175 gint ystride, ustride, vstride;
177 gint width2, height2;
178 guint8 *tabley = videobalance->tabley;
179 guint8 **tableu = videobalance->tableu;
180 guint8 **tablev = videobalance->tablev;
182 width = GST_VIDEO_FRAME_WIDTH (frame);
183 height = GST_VIDEO_FRAME_HEIGHT (frame);
185 ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
186 ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
/* Luma plane: simple per-byte LUT. */
188 for (y = 0; y < height; y++) {
191 yptr = ydata + y * ystride;
192 for (x = 0; x < width; x++) {
193 *yptr = tabley[*yptr];
/* Chroma planes: dimensions come from component 1, which matches the
 * format's subsampling. */
198 width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
199 height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
201 udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
202 vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
203 ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
204 vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
206 for (y = 0; y < height2; y++) {
210 uptr = udata + y * ustride;
211 vptr = vdata + y * vstride;
213 for (x = 0; x < width2; x++) {
/* u1/v1 are the current chroma pair (loads elided in this view);
 * both output components depend on both inputs because of the hue
 * rotation baked into the tables. */
217 *uptr++ = tableu[u1][v1];
218 *vptr++ = tablev[u1][v1];
/* In-place processing for packed YUV formats (YUY2/UYVY/AYUV/YVYU).
 * Same LUT scheme as the planar variant, but components are interleaved
 * in one plane, so each component is addressed via its data offset and
 * pixel stride rather than a separate plane. */
224 gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
225 GstVideoFrame * frame)
228 guint8 *ydata, *udata, *vdata;
229 gint yoff, uoff, voff;
231 gint width2, height2;
232 guint8 *tabley = videobalance->tabley;
233 guint8 **tableu = videobalance->tableu;
234 guint8 **tablev = videobalance->tablev;
236 width = GST_VIDEO_FRAME_WIDTH (frame);
237 height = GST_VIDEO_FRAME_HEIGHT (frame);
/* All components live in plane 0; per-component pointers come from
 * COMP_DATA and advance by the component's pixel stride. */
239 stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
240 ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
241 yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
243 for (y = 0; y < height; y++) {
246 yptr = ydata + y * stride;
247 for (x = 0; x < width; x++) {
248 *yptr = tabley[*yptr];
253 width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
254 height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
256 udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
257 vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
258 uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
259 voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
261 for (y = 0; y < height2; y++) {
265 uptr = udata + y * stride;
266 vptr = vdata + y * stride;
268 for (x = 0; x < width2; x++) {
/* u1/v1 loads and the uoff/voff pointer advances are elided from
 * this view; both stores use the combined 2D chroma tables. */
272 *uptr = tableu[u1][v1];
273 *vptr = tablev[u1][v1];
/* 8-bit SDTV (BT.601) color matrices, fixed-point with 8 fractional bits.
 * Each row is { c1, c2, c3, offset }; APPLY_MATRIX computes
 * (c1*v1 + c2*v2 + c3*v3 + offset) >> 8.  Some rows are elided from this
 * view — see upstream for the full 3x4 matrices. */
281 static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
283 298, -100, -208, 34707,
287 static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
289 -38, -74, 112, 32768,
290 112, -94, -18, 32768,
/* o selects the matrix row (0..2); every argument and the expansion are
 * parenthesized by position in the row layout above. */
293 #define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
/* In-place processing for packed RGB formats: for each pixel, convert
 * RGB -> YCbCr (BT.601), run the Y/U/V values through the same LUTs the
 * YUV paths use, then convert back to RGB.  This keeps all four formats'
 * adjustments numerically consistent. */
296 gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
297 GstVideoFrame * frame)
300 gint width, stride, row_wrap;
307 guint8 *tabley = videobalance->tabley;
308 guint8 **tableu = videobalance->tableu;
309 guint8 **tablev = videobalance->tablev;
311 width = GST_VIDEO_FRAME_WIDTH (frame);
312 height = GST_VIDEO_FRAME_HEIGHT (frame);
/* Byte offsets of R, G, B within one pixel — this is what makes the
 * loop format-agnostic across RGB/BGR/xRGB/etc. */
314 offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
315 offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
316 offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
318 data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
319 stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
321 pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
/* Bytes to skip at the end of each row (stride padding). */
322 row_wrap = stride - pixel_stride * width;
324 for (i = 0; i < height; i++) {
325 for (j = 0; j < width; j++) {
326 r = data[offsets[0]];
327 g = data[offsets[1]];
328 b = data[offsets[2]];
330 y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
331 u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
332 v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
/* Clamp before table lookup — the tables are indexed 0..255. */
334 y = CLAMP (y, 0, 255);
335 u_tmp = CLAMP (u_tmp, 0, 255);
336 v_tmp = CLAMP (v_tmp, 0, 255);
/* (The tabley lookup for y is elided from this view.) */
339 u = tableu[u_tmp][v_tmp];
340 v = tablev[u_tmp][v_tmp];
342 r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
343 g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
344 b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
346 data[offsets[0]] = CLAMP (r, 0, 255);
347 data[offsets[1]] = CLAMP (g, 0, 255);
348 data[offsets[2]] = CLAMP (b, 0, 255);
349 data += pixel_stride;
355 /* get notified of caps and plug in the correct process function */
/* GstBaseTransform::set_caps — parses the negotiated caps into a
 * GstVideoInfo, stores it for later frame mapping, and selects the
 * per-format process callback (planar YUV, packed YUV, or packed RGB).
 * Errors jump to the labels at the bottom (goto targets elided here). */
357 gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
360 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
363 GST_DEBUG_OBJECT (videobalance,
364 "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
/* Reset first so a failed negotiation leaves no stale callback. */
366 videobalance->process = NULL;
368 if (!gst_video_info_from_caps (&info, incaps))
371 switch (GST_VIDEO_INFO_FORMAT (&info)) {
372 case GST_VIDEO_FORMAT_I420:
373 case GST_VIDEO_FORMAT_YV12:
374 case GST_VIDEO_FORMAT_Y41B:
375 case GST_VIDEO_FORMAT_Y42B:
376 case GST_VIDEO_FORMAT_Y444:
377 videobalance->process = gst_video_balance_planar_yuv;
379 case GST_VIDEO_FORMAT_YUY2:
380 case GST_VIDEO_FORMAT_UYVY:
381 case GST_VIDEO_FORMAT_AYUV:
382 case GST_VIDEO_FORMAT_YVYU:
383 videobalance->process = gst_video_balance_packed_yuv;
385 case GST_VIDEO_FORMAT_ARGB:
386 case GST_VIDEO_FORMAT_ABGR:
387 case GST_VIDEO_FORMAT_RGBA:
388 case GST_VIDEO_FORMAT_BGRA:
389 case GST_VIDEO_FORMAT_xRGB:
390 case GST_VIDEO_FORMAT_xBGR:
391 case GST_VIDEO_FORMAT_RGBx:
392 case GST_VIDEO_FORMAT_BGRx:
393 case GST_VIDEO_FORMAT_RGB:
394 case GST_VIDEO_FORMAT_BGR:
395 videobalance->process = gst_video_balance_packed_rgb;
/* Keep the parsed info for gst_video_frame_map() in transform_ip. */
402 videobalance->info = info;
/* Error paths (labels elided): caps that fail to parse, and formats the
 * templates allow but the switch does not handle. */
408 GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
413 GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
/* GstBaseTransform::before_transform — converts the buffer timestamp to
 * stream time and syncs GstController-managed property values to it, so
 * animated contrast/brightness/hue/saturation take effect per buffer. */
419 gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
421 GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
422 GstClockTime timestamp, stream_time;
424 timestamp = GST_BUFFER_TIMESTAMP (buf);
426 gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
428 GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
429 GST_TIME_ARGS (timestamp));
/* Buffers without a valid timestamp are skipped — nothing to sync to. */
431 if (GST_CLOCK_TIME_IS_VALID (stream_time))
432 gst_object_sync_values (G_OBJECT (balance), stream_time);
/* GstBaseTransform::transform_ip — maps the buffer as a video frame and
 * runs the format-specific process callback chosen in set_caps, under the
 * object lock so property updates cannot swap the LUTs mid-frame.
 * Returns GST_FLOW_NOT_NEGOTIATED before caps, GST_FLOW_ERROR on a buffer
 * that cannot be mapped. */
436 gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
438 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
441 if (!videobalance->process)
444 /* if no change is needed, we are done */
445 if (base->passthrough)
448 if (!gst_video_frame_map (&frame, &videobalance->info, outbuf,
452 GST_OBJECT_LOCK (videobalance);
453 videobalance->process (videobalance, &frame);
454 GST_OBJECT_UNLOCK (videobalance);
456 gst_video_frame_unmap (&frame);
/* Error labels (goto targets elided from this view): */
464 GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
465 (NULL), ("Invalid buffer received"));
466 return GST_FLOW_ERROR;
470 GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
471 return GST_FLOW_NOT_NEGOTIATED;
/* GObject finalize — releases the single chroma-table allocation (tableu[0]
 * owns the whole 256*256*2 byte slab shared by tableu and tablev, see
 * gst_video_balance_init) and unrefs/frees the color-balance channel list,
 * then chains up. */
476 gst_video_balance_finalize (GObject * object)
478 GList *channels = NULL;
479 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
481 g_free (balance->tableu[0]);
483 channels = balance->channels;
485 GstColorBalanceChannel *channel = channels->data;
487 g_object_unref (channel);
488 channels->data = NULL;
489 channels = g_list_next (channels);
492 if (balance->channels)
493 g_list_free (balance->channels);
495 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initializer: installs the four controllable double properties,
 * element metadata, both pad templates, and the basetransform vfuncs
 * (set_caps / transform_ip / before_transform). */
499 gst_video_balance_class_init (GstVideoBalanceClass * klass)
501 GObjectClass *gobject_class = (GObjectClass *) klass;
502 GstElementClass *gstelement_class = (GstElementClass *) klass;
503 GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
505 GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
508 gobject_class->finalize = gst_video_balance_finalize;
509 gobject_class->set_property = gst_video_balance_set_property;
510 gobject_class->get_property = gst_video_balance_get_property;
/* All four properties are GST_PARAM_CONTROLLABLE so a GstController can
 * animate them (synced in before_transform). */
512 g_object_class_install_property (gobject_class, PROP_CONTRAST,
513 g_param_spec_double ("contrast", "Contrast", "contrast",
514 0.0, 2.0, DEFAULT_PROP_CONTRAST,
515 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
516 g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
517 g_param_spec_double ("brightness", "Brightness", "brightness", -1.0, 1.0,
518 DEFAULT_PROP_BRIGHTNESS,
519 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
520 g_object_class_install_property (gobject_class, PROP_HUE,
521 g_param_spec_double ("hue", "Hue", "hue", -1.0, 1.0, DEFAULT_PROP_HUE,
522 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
523 g_object_class_install_property (gobject_class, PROP_SATURATION,
524 g_param_spec_double ("saturation", "Saturation", "saturation", 0.0, 2.0,
525 DEFAULT_PROP_SATURATION,
526 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
528 gst_element_class_set_details_simple (gstelement_class, "Video balance",
529 "Filter/Effect/Video",
530 "Adjusts brightness, contrast, hue, saturation on a video stream",
531 "David Schleef <ds@schleef.org>");
533 gst_element_class_add_pad_template (gstelement_class,
534 gst_static_pad_template_get (&gst_video_balance_sink_template));
535 gst_element_class_add_pad_template (gstelement_class,
536 gst_static_pad_template_get (&gst_video_balance_src_template));
538 trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_balance_set_caps);
539 trans_class->transform_ip =
540 GST_DEBUG_FUNCPTR (gst_video_balance_transform_ip);
541 trans_class->before_transform =
542 GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
/* Instance initializer: set neutral defaults, allocate the chroma LUT
 * storage, build the initial tables, and create the four GstColorBalance
 * channels exposed through the interface. */
546 gst_video_balance_init (GstVideoBalance * videobalance)
548 const gchar *channels[4] = { "HUE", "SATURATION",
549 "BRIGHTNESS", "CONTRAST"
553 /* Initialize properties */
554 videobalance->contrast = DEFAULT_PROP_CONTRAST;
555 videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
556 videobalance->hue = DEFAULT_PROP_HUE;
557 videobalance->saturation = DEFAULT_PROP_SATURATION;
/* One slab holds both 256x256 chroma tables: tableu rows first, tablev
 * rows in the second half.  Only tableu[0] is freed in finalize. */
559 videobalance->tableu[0] = g_new (guint8, 256 * 256 * 2);
560 for (i = 0; i < 256; i++) {
561 videobalance->tableu[i] =
562 videobalance->tableu[0] + i * 256 * sizeof (guint8);
563 videobalance->tablev[i] =
564 videobalance->tableu[0] + 256 * 256 * sizeof (guint8) +
565 i * 256 * sizeof (guint8);
568 gst_video_balance_update_properties (videobalance);
570 /* Generate the channels list */
571 for (i = 0; i < G_N_ELEMENTS (channels); i++) {
572 GstColorBalanceChannel *channel;
574 channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
575 channel->label = g_strdup (channels[i]);
/* The colorbalance API range [-1000, 1000] is rescaled to each
 * property's range in the set/get_value implementations below. */
576 channel->min_value = -1000;
577 channel->max_value = 1000;
579 videobalance->channels = g_list_append (videobalance->channels, channel);
/* GstColorBalance::list_channels — returns the channel list built in
 * init; the list and its channels remain owned by the element. */
584 gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
586 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);
588 g_return_val_if_fail (videobalance != NULL, NULL);
589 g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);
591 return videobalance->channels;
/* GstColorBalance::set_value — maps the interface's [-1000, 1000] range
 * onto the matching property ([-1, 1] for HUE/BRIGHTNESS, [0, 2] for
 * SATURATION/CONTRAST), updates the tables under both locks, and emits
 * value-changed when the value actually changed.  (Some assignment lines
 * are elided from this view.) */
595 gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
596 GstColorBalanceChannel * channel, gint value)
598 GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
600 gboolean changed = FALSE;
602 g_return_if_fail (vb != NULL);
603 g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
604 g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
605 g_return_if_fail (channel->label != NULL);
/* Transform lock keeps the streaming thread out while properties and
 * LUTs are rewritten; object lock guards the fields themselves. */
607 GST_BASE_TRANSFORM_LOCK (vb);
608 GST_OBJECT_LOCK (vb);
609 if (!g_ascii_strcasecmp (channel->label, "HUE")) {
610 new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
611 changed = new_val != vb->hue;
613 } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
614 new_val = (value + 1000.0) * 2.0 / 2000.0;
615 changed = new_val != vb->saturation;
616 vb->saturation = new_val;
617 } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
618 new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
619 changed = new_val != vb->brightness;
620 vb->brightness = new_val;
621 } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
622 new_val = (value + 1000.0) * 2.0 / 2000.0;
623 changed = new_val != vb->contrast;
624 vb->contrast = new_val;
628 gst_video_balance_update_properties (vb);
629 GST_OBJECT_UNLOCK (vb);
630 GST_BASE_TRANSFORM_UNLOCK (vb);
/* Notify listeners with the re-read (re-quantized) channel value. */
633 gst_color_balance_value_changed (balance, channel,
634 gst_color_balance_get_value (balance, channel));
/* GstColorBalance::get_value — inverse of set_value: rescales the current
 * property value back into the interface's [-1000, 1000] range. */
639 gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
640 GstColorBalanceChannel * channel)
642 GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
645 g_return_val_if_fail (vb != NULL, 0);
646 g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
647 g_return_val_if_fail (channel->label != NULL, 0);
649 if (!g_ascii_strcasecmp (channel->label, "HUE")) {
650 value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
651 } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
652 value = vb->saturation * 2000.0 / 2.0 - 1000.0;
653 } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
654 value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
655 } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
656 value = vb->contrast * 2000.0 / 2.0 - 1000.0;
/* GstColorBalance interface initializer: mark this as a software balance
 * (pixel manipulation, not a hardware control) and wire up the vfuncs. */
663 gst_video_balance_colorbalance_init (GstColorBalanceClass * iface)
665 GST_COLOR_BALANCE_TYPE (iface) = GST_COLOR_BALANCE_SOFTWARE;
666 iface->list_channels = gst_video_balance_colorbalance_list_channels;
667 iface->set_value = gst_video_balance_colorbalance_set_value;
668 iface->get_value = gst_video_balance_colorbalance_get_value;
/* Look up a channel by label (case-insensitive) in the element's channel
 * list; used by set_property to emit value-changed notifications.
 * Returns a borrowed reference (return statements elided from this view). */
671 static GstColorBalanceChannel *
672 gst_video_balance_find_channel (GstVideoBalance * balance, const gchar * label)
676 for (l = balance->channels; l; l = l->next) {
677 GstColorBalanceChannel *channel = l->data;
679 if (g_ascii_strcasecmp (channel->label, label) == 0)
/* GObject set_property — updates one of the four balance properties under
 * both locks, regenerates the tables, and (when the value changed) emits
 * the colorbalance value-changed signal for the matching channel so both
 * APIs stay in sync.  (Several case labels and `label = ...` lines are
 * elided from this view.) */
686 gst_video_balance_set_property (GObject * object, guint prop_id,
687 const GValue * value, GParamSpec * pspec)
689 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
691 const gchar *label = NULL;
693 GST_BASE_TRANSFORM_LOCK (balance);
694 GST_OBJECT_LOCK (balance);
697 d = g_value_get_double (value);
698 GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
699 balance->contrast, d);
/* label is only set when the value actually changes, gating the
 * value-changed emission below. */
700 if (d != balance->contrast)
702 balance->contrast = d;
704 case PROP_BRIGHTNESS:
705 d = g_value_get_double (value);
706 GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
707 balance->brightness, d);
708 if (d != balance->brightness)
709 label = "BRIGHTNESS";
710 balance->brightness = d;
713 d = g_value_get_double (value);
714 GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
716 if (d != balance->hue)
720 case PROP_SATURATION:
721 d = g_value_get_double (value);
722 GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
723 balance->saturation, d);
724 if (d != balance->saturation)
725 label = "SATURATION";
726 balance->saturation = d;
729 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
733 gst_video_balance_update_properties (balance);
734 GST_OBJECT_UNLOCK (balance);
735 GST_BASE_TRANSFORM_UNLOCK (balance);
/* Emitted outside the locks to avoid re-entrancy while holding them. */
738 GstColorBalanceChannel *channel =
739 gst_video_balance_find_channel (balance, label);
740 gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
741 gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
/* GObject get_property — returns the current value of one of the four
 * balance properties.  (switch statement, case labels and breaks are
 * partially elided from this view; the definition continues past it.) */
746 gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
749 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
753 g_value_set_double (value, balance->contrast);
755 case PROP_BRIGHTNESS:
756 g_value_set_double (value, balance->brightness);
759 g_value_set_double (value, balance->hue);
761 case PROP_SATURATION:
762 g_value_set_double (value, balance->saturation);
765 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);