2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2003> David Schleef <ds@schleef.org>
4 * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
19 * Boston, MA 02111-1307, USA.
23 * This file was (probably) generated from gstvideobalance.c,
24 * gstvideobalance.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
28 * SECTION:element-videobalance
30 * Adjusts brightness, contrast, hue, saturation on a video stream.
33 * <title>Example launch line</title>
35 * gst-launch videotestsrc ! videobalance saturation=0.0 ! ffmpegcolorspace ! ximagesink
36 * ]| This pipeline converts the image to black and white by setting the
40 * Last reviewed on 2010-04-18 (0.10.22)
47 #include <gst/math-compat.h>
49 #include "gstvideobalance.h"
52 #include <gst/controller/gstcontroller.h>
53 #include <gst/interfaces/colorbalance.h>
/* File-private debug category; GST_CAT_DEFAULT routes this file's debug
 * output through it. */
55 GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
56 #define GST_CAT_DEFAULT videobalance_debug
58 /* GstVideoBalance properties: default values for the four GObject
 * properties installed in class_init.  1.0 contrast/saturation and
 * 0.0 brightness/hue together mean "identity" (see
 * gst_video_balance_is_passthrough below). */
59 #define DEFAULT_PROP_CONTRAST 1.0
60 #define DEFAULT_PROP_BRIGHTNESS 0.0
61 #define DEFAULT_PROP_HUE 0.0
62 #define DEFAULT_PROP_SATURATION 1.0
/* Source pad template: the packed/planar YUV and RGB formats this element
 * can output.  Identical to the sink template below — the element operates
 * in place and never converts formats. */
73 static GstStaticPadTemplate gst_video_balance_src_template =
74 GST_STATIC_PAD_TEMPLATE ("src",
77 GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
78 GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
79 GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
80 GST_VIDEO_CAPS_YUV ("Y444") ";"
81 GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
82 GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
83 GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
84 GST_VIDEO_CAPS_YUV ("Y42B") ";"
85 GST_VIDEO_CAPS_YUV ("YUY2") ";"
86 GST_VIDEO_CAPS_YUV ("UYVY") ";"
87 GST_VIDEO_CAPS_YUV ("YVYU") ";"
88 GST_VIDEO_CAPS_YUV ("I420") ";"
89 GST_VIDEO_CAPS_YUV ("YV12") ";"
90 GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
/* Sink pad template: accepts exactly the same format list as the source
 * template above (in-place transform, no conversion). */
94 static GstStaticPadTemplate gst_video_balance_sink_template =
95 GST_STATIC_PAD_TEMPLATE ("sink",
98 GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
99 GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
100 GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
101 GST_VIDEO_CAPS_YUV ("Y444") ";"
102 GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
103 GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
104 GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
105 GST_VIDEO_CAPS_YUV ("Y42B") ";"
106 GST_VIDEO_CAPS_YUV ("YUY2") ";"
107 GST_VIDEO_CAPS_YUV ("UYVY") ";"
108 GST_VIDEO_CAPS_YUV ("YVYU") ";"
109 GST_VIDEO_CAPS_YUV ("I420") ";"
110 GST_VIDEO_CAPS_YUV ("YV12") ";"
111 GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
/* Forward declarations: interface-init callbacks used by _do_init below,
 * and the GObject property accessors wired up in class_init. */
115 static void gst_video_balance_colorbalance_init (GstColorBalanceClass * iface);
116 static void gst_video_balance_interface_init (GstImplementsInterfaceClass *
119 static void gst_video_balance_set_property (GObject * object, guint prop_id,
120 const GValue * value, GParamSpec * pspec);
121 static void gst_video_balance_get_property (GObject * object, guint prop_id,
122 GValue * value, GParamSpec * pspec);
/* Type-registration hook run by GST_BOILERPLATE_FULL: registers the
 * GstImplementsInterface and GstColorBalance interfaces on the
 * GstVideoBalance GType so applications can adjust the element via the
 * standard color-balance API. */
125 _do_init (GType video_balance_type)
127 static const GInterfaceInfo iface_info = {
128 (GInterfaceInitFunc) gst_video_balance_interface_init,
132 static const GInterfaceInfo colorbalance_info = {
133 (GInterfaceInitFunc) gst_video_balance_colorbalance_init,
138 g_type_add_interface_static (video_balance_type,
139 GST_TYPE_IMPLEMENTS_INTERFACE, &iface_info);
140 g_type_add_interface_static (video_balance_type, GST_TYPE_COLOR_BALANCE,
/* Defines GstVideoBalance as a subclass of GstVideoFilter, with _do_init
 * run during type registration (0.10 boilerplate macro). */
144 GST_BOILERPLATE_FULL (GstVideoBalance, gst_video_balance, GstVideoFilter,
145 GST_TYPE_VIDEO_FILTER, _do_init);
/* Recomputes the per-component look-up tables from the current
 * contrast/brightness/hue/saturation values.
 * - tabley[i]: 1D luma LUT; applies contrast around the Y=16 black level
 *   plus brightness scaled to the 8-bit range.
 * - tableu/tablev: 2D chroma LUTs indexed by (u,v); hue is a rotation of
 *   the (U,V) plane (cos/sin of G_PI * hue) and saturation scales the
 *   rotated vector, both offset around the 128 chroma midpoint.
 * NOTE(review): clamping of y/u/v to [0,255] before the rint() stores is
 * not visible in this excerpt — presumably done in the elided lines;
 * confirm against the full source.  Caller must hold the object lock
 * (see transform_ip / set_value), since process functions read the LUTs. */
148 * look-up tables (LUT).
151 gst_video_balance_update_tables (GstVideoBalance * vb)
154 gdouble y, u, v, hue_cos, hue_sin;
157 for (i = 0; i < 256; i++) {
158 y = 16 + ((i - 16) * vb->contrast + vb->brightness * 255);
163 vb->tabley[i] = rint (y);
166 hue_cos = cos (G_PI * vb->hue);
167 hue_sin = sin (G_PI * vb->hue);
169 /* U/V lookup tables are 2D, since we need both U/V for each table
171 for (i = -128; i < 128; i++) {
172 for (j = -128; j < 128; j++) {
173 u = 128 + ((i * hue_cos + j * hue_sin) * vb->saturation);
174 v = 128 + ((-i * hue_sin + j * hue_cos) * vb->saturation);
183 vb->tableu[i + 128][j + 128] = rint (u);
184 vb->tablev[i + 128][j + 128] = rint (v);
/* TRUE when all four properties are at their identity values
 * (contrast/saturation 1.0, brightness/hue 0.0), i.e. the transform
 * would be a no-op and basetransform can pass buffers through. */
190 gst_video_balance_is_passthrough (GstVideoBalance * videobalance)
192 return videobalance->contrast == 1.0 &&
193 videobalance->brightness == 0.0 &&
194 videobalance->hue == 0.0 && videobalance->saturation == 1.0;
/* Called after any property change: switches basetransform passthrough
 * on/off and rebuilds the LUTs to match the new property values.
 * NOTE(review): callers (set_property, colorbalance_set_value) take the
 * object lock before calling this. */
198 gst_video_balance_update_properties (GstVideoBalance * videobalance)
200 gboolean passthrough = gst_video_balance_is_passthrough (videobalance);
201 GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
203 base->passthrough = passthrough;
206 gst_video_balance_update_tables (videobalance);
/* In-place process function for planar YUV formats (I420, YV12, Y41B,
 * Y42B, Y444 — see set_caps).  First pass maps every luma byte through
 * tabley; second pass walks the (possibly subsampled) U and V planes in
 * lockstep and maps each (u,v) pair through the 2D chroma LUTs.
 * Plane offsets/strides/sizes come from the gstvideo format helpers for
 * the negotiated format stored on the element.
 * NOTE(review): vstride is taken from component 1 rather than 2 —
 * presumably safe because U and V strides match for all accepted planar
 * formats; confirm against the full source. */
210 gst_video_balance_planar_yuv (GstVideoBalance * videobalance, guint8 * data)
214 guint8 *udata, *vdata;
215 gint ystride, ustride, vstride;
216 GstVideoFormat format;
218 gint width2, height2;
219 guint8 *tabley = videobalance->tabley;
220 guint8 **tableu = videobalance->tableu;
221 guint8 **tablev = videobalance->tablev;
223 format = videobalance->format;
224 width = videobalance->width;
225 height = videobalance->height;
228 data + gst_video_format_get_component_offset (format, 0, width, height);
229 ystride = gst_video_format_get_row_stride (format, 0, width);
231 for (y = 0; y < height; y++) {
234 yptr = ydata + y * ystride;
235 for (x = 0; x < width; x++) {
236 *yptr = tabley[*yptr];
241 width2 = gst_video_format_get_component_width (format, 1, width);
242 height2 = gst_video_format_get_component_height (format, 1, height);
245 data + gst_video_format_get_component_offset (format, 1, width, height);
247 data + gst_video_format_get_component_offset (format, 2, width, height);
248 ustride = gst_video_format_get_row_stride (format, 1, width);
249 vstride = gst_video_format_get_row_stride (format, 1, width);
251 for (y = 0; y < height2; y++) {
255 uptr = udata + y * ustride;
256 vptr = vdata + y * vstride;
258 for (x = 0; x < width2; x++) {
262 *uptr++ = tableu[u1][v1];
263 *vptr++ = tablev[u1][v1];
/* In-place process function for packed YUV formats (YUY2, UYVY, AYUV,
 * YVYU — see set_caps).  Same two-pass LUT scheme as the planar variant,
 * but components are interleaved, so each component additionally carries
 * a pixel stride (yoff/uoff/voff) used to step between samples within a
 * row.  NOTE(review): the uptr/vptr advances by uoff/voff are in the
 * elided lines of the inner loop; confirm against the full source. */
269 gst_video_balance_packed_yuv (GstVideoBalance * videobalance, guint8 * data)
273 guint8 *udata, *vdata;
274 gint ystride, ustride, vstride;
275 gint yoff, uoff, voff;
276 GstVideoFormat format;
278 gint width2, height2;
279 guint8 *tabley = videobalance->tabley;
280 guint8 **tableu = videobalance->tableu;
281 guint8 **tablev = videobalance->tablev;
283 format = videobalance->format;
284 width = videobalance->width;
285 height = videobalance->height;
288 data + gst_video_format_get_component_offset (format, 0, width, height);
289 ystride = gst_video_format_get_row_stride (format, 0, width);
290 yoff = gst_video_format_get_pixel_stride (format, 0);
292 for (y = 0; y < height; y++) {
295 yptr = ydata + y * ystride;
296 for (x = 0; x < width; x++) {
297 *yptr = tabley[*yptr];
302 width2 = gst_video_format_get_component_width (format, 1, width);
303 height2 = gst_video_format_get_component_height (format, 1, height);
306 data + gst_video_format_get_component_offset (format, 1, width, height);
308 data + gst_video_format_get_component_offset (format, 2, width, height);
309 ustride = gst_video_format_get_row_stride (format, 1, width);
310 vstride = gst_video_format_get_row_stride (format, 1, width);
311 uoff = gst_video_format_get_pixel_stride (format, 1);
312 voff = gst_video_format_get_pixel_stride (format, 2);
314 for (y = 0; y < height2; y++) {
318 uptr = udata + y * ustride;
319 vptr = vdata + y * vstride;
321 for (x = 0; x < width2; x++) {
325 *uptr = tableu[u1][v1];
326 *vptr = tablev[u1][v1];
/* 8-bit SDTV (BT.601) color matrices in 8.8 fixed point: one for
 * YCbCr -> RGB, one for RGB -> YCbCr.  Rows are (coef1, coef2, coef3,
 * offset).  NOTE(review): the remaining matrix rows are in elided lines. */
334 static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
336 298, -100, -208, 34707,
340 static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
342 -38, -74, 112, 32768,
343 112, -94, -18, 32768,
/* Applies row o of matrix m to the (v1,v2,v3) triple and shifts the 8.8
 * fixed-point result back to integer.  Arguments are evaluated more than
 * once — pass only simple expressions. */
346 #define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
/* In-place process function for all packed RGB variants (see set_caps).
 * Per pixel: read R/G/B at their per-format byte offsets, convert to
 * YCbCr with the fixed-point SDTV matrix, run the balance LUTs on the
 * YCbCr values, convert back to RGB, clamp, and write back.  Alpha bytes
 * (if any) are never touched because only the three color offsets are
 * read/written.  row_wrap skips stride padding at the end of each row
 * (the `data += row_wrap` is in an elided line after the inner loop).
 * NOTE(review): the visible code stores the luma result via `y =` without
 * an explicit tabley lookup line in this excerpt — the tabley application
 * is presumably in the elided lines around original line 395; confirm. */
349 gst_video_balance_packed_rgb (GstVideoBalance * videobalance, guint8 * data)
352 gint width, row_stride, row_wrap;
358 guint8 *tabley = videobalance->tabley;
359 guint8 **tableu = videobalance->tableu;
360 guint8 **tablev = videobalance->tablev;
362 offsets[0] = gst_video_format_get_component_offset (videobalance->format, 0,
363 videobalance->width, videobalance->height);
364 offsets[1] = gst_video_format_get_component_offset (videobalance->format, 1,
365 videobalance->width, videobalance->height);
366 offsets[2] = gst_video_format_get_component_offset (videobalance->format, 2,
367 videobalance->width, videobalance->height);
370 gst_video_format_get_component_width (videobalance->format, 0,
371 videobalance->width);
373 gst_video_format_get_component_height (videobalance->format, 0,
374 videobalance->height);
376 gst_video_format_get_row_stride (videobalance->format, 0,
377 videobalance->width);
378 pixel_stride = gst_video_format_get_pixel_stride (videobalance->format, 0);
379 row_wrap = row_stride - pixel_stride * width;
381 for (i = 0; i < height; i++) {
382 for (j = 0; j < width; j++) {
383 r = data[offsets[0]];
384 g = data[offsets[1]];
385 b = data[offsets[2]];
387 y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
388 u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
389 v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
391 y = CLAMP (y, 0, 255);
392 u_tmp = CLAMP (u_tmp, 0, 255);
393 v_tmp = CLAMP (v_tmp, 0, 255);
396 u = tableu[u_tmp][v_tmp];
397 v = tablev[u_tmp][v_tmp];
399 r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
400 g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
401 b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
403 data[offsets[0]] = CLAMP (r, 0, 255);
404 data[offsets[1]] = CLAMP (g, 0, 255);
405 data[offsets[2]] = CLAMP (b, 0, 255);
406 data += pixel_stride;
412 /* get notified of caps and plug in the correct process function */
/* GstBaseTransform::set_caps vfunc.  Parses the negotiated format,
 * width and height from incaps, caches the expected buffer size, and
 * selects the matching process function:
 *   planar YUV  -> gst_video_balance_planar_yuv
 *   packed YUV  -> gst_video_balance_packed_yuv
 *   packed RGB  -> gst_video_balance_packed_rgb
 * Returns TRUE only if a process function was chosen; on unparsable caps
 * jumps to the error path at the bottom (label elided) and logs. */
414 gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
417 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
419 GST_DEBUG_OBJECT (videobalance,
420 "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
/* Reset first so a failed negotiation leaves the element unconfigured. */
422 videobalance->process = NULL;
424 if (!gst_video_format_parse_caps (incaps, &videobalance->format,
425 &videobalance->width, &videobalance->height))
429 gst_video_format_get_size (videobalance->format, videobalance->width,
430 videobalance->height);
432 switch (videobalance->format) {
433 case GST_VIDEO_FORMAT_I420:
434 case GST_VIDEO_FORMAT_YV12:
435 case GST_VIDEO_FORMAT_Y41B:
436 case GST_VIDEO_FORMAT_Y42B:
437 case GST_VIDEO_FORMAT_Y444:
438 videobalance->process = gst_video_balance_planar_yuv;
440 case GST_VIDEO_FORMAT_YUY2:
441 case GST_VIDEO_FORMAT_UYVY:
442 case GST_VIDEO_FORMAT_AYUV:
443 case GST_VIDEO_FORMAT_YVYU:
444 videobalance->process = gst_video_balance_packed_yuv;
446 case GST_VIDEO_FORMAT_ARGB:
447 case GST_VIDEO_FORMAT_ABGR:
448 case GST_VIDEO_FORMAT_RGBA:
449 case GST_VIDEO_FORMAT_BGRA:
450 case GST_VIDEO_FORMAT_xRGB:
451 case GST_VIDEO_FORMAT_xBGR:
452 case GST_VIDEO_FORMAT_RGBx:
453 case GST_VIDEO_FORMAT_BGRx:
454 case GST_VIDEO_FORMAT_RGB:
455 case GST_VIDEO_FORMAT_BGR:
456 videobalance->process = gst_video_balance_packed_rgb;
462 return videobalance->process != NULL;
465 GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
/* GstBaseTransform::before_transform vfunc.  Converts the buffer
 * timestamp to stream time and syncs GstController-driven property
 * values to it, so controlled contrast/brightness/etc. follow the
 * stream position.  Skipped when the stream time is invalid. */
470 gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
472 GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
473 GstClockTime timestamp, stream_time;
475 timestamp = GST_BUFFER_TIMESTAMP (buf);
477 gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
479 GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
480 GST_TIME_ARGS (timestamp));
482 if (GST_CLOCK_TIME_IS_VALID (stream_time))
483 gst_object_sync_values (G_OBJECT (balance), stream_time);
/* GstBaseTransform::transform_ip vfunc: applies the selected process
 * function to the buffer in place.
 * Error paths (labels elided between the visible lines):
 *   - no process function set -> GST_FLOW_NOT_NEGOTIATED;
 *   - buffer size != the size cached in set_caps -> element error +
 *     GST_FLOW_ERROR.
 * Passthrough (identity property values) returns early without touching
 * the data.  The object lock is held around ->process so the LUTs cannot
 * be rebuilt (by a property change) mid-frame. */
487 gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
489 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
493 if (!videobalance->process)
496 /* if no change is needed, we are done */
497 if (base->passthrough)
500 data = GST_BUFFER_DATA (outbuf);
501 size = GST_BUFFER_SIZE (outbuf);
503 if (size != videobalance->size)
506 GST_OBJECT_LOCK (videobalance);
507 videobalance->process (videobalance, data);
508 GST_OBJECT_UNLOCK (videobalance);
516 GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
517 (NULL), ("Invalid buffer size %d, expected %d", size,
518 videobalance->size));
519 return GST_FLOW_ERROR;
522 GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
523 return GST_FLOW_NOT_NEGOTIATED;
/* GST_BOILERPLATE base_init: registers the element metadata (name,
 * classification, description, author) and both static pad templates. */
527 gst_video_balance_base_init (gpointer g_class)
529 GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
531 gst_element_class_set_details_simple (element_class, "Video balance",
532 "Filter/Effect/Video",
533 "Adjusts brightness, contrast, hue, saturation on a video stream",
534 "David Schleef <ds@schleef.org>");
536 gst_element_class_add_static_pad_template (element_class,
537 &gst_video_balance_sink_template);
538 gst_element_class_add_static_pad_template (element_class,
539 &gst_video_balance_src_template);
/* GObject finalize: releases everything allocated in instance init —
 * the single LUT slab (tableu[0] owns the whole allocation, see init)
 * and every GstColorBalanceChannel in the channels list, then the list
 * itself — before chaining up to the parent finalize. */
543 gst_video_balance_finalize (GObject * object)
545 GList *channels = NULL;
546 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
/* tableu[0] is the base pointer of the one contiguous LUT allocation;
 * the other tableu/tablev entries point into it and need no free. */
548 g_free (balance->tableu[0]);
550 channels = balance->channels;
552 GstColorBalanceChannel *channel = channels->data;
554 g_object_unref (channel);
555 channels->data = NULL;
556 channels = g_list_next (channels);
559 if (balance->channels)
560 g_list_free (balance->channels);
562 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: wires the GObject vfuncs (finalize, set/get_property),
 * installs the four controllable double properties, and plugs the
 * basetransform vfuncs (set_caps, transform_ip, before_transform).
 * Property ranges: contrast/saturation [0,2] with identity 1.0;
 * brightness/hue [-1,1] with identity 0.0. */
566 gst_video_balance_class_init (GstVideoBalanceClass * klass)
568 GObjectClass *gobject_class = (GObjectClass *) klass;
569 GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
571 GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
574 gobject_class->finalize = gst_video_balance_finalize;
575 gobject_class->set_property = gst_video_balance_set_property;
576 gobject_class->get_property = gst_video_balance_get_property;
578 g_object_class_install_property (gobject_class, PROP_CONTRAST,
579 g_param_spec_double ("contrast", "Contrast", "contrast",
580 0.0, 2.0, DEFAULT_PROP_CONTRAST,
581 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
582 g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
583 g_param_spec_double ("brightness", "Brightness", "brightness", -1.0, 1.0,
584 DEFAULT_PROP_BRIGHTNESS,
585 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
586 g_object_class_install_property (gobject_class, PROP_HUE,
587 g_param_spec_double ("hue", "Hue", "hue", -1.0, 1.0, DEFAULT_PROP_HUE,
588 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
589 g_object_class_install_property (gobject_class, PROP_SATURATION,
590 g_param_spec_double ("saturation", "Saturation", "saturation", 0.0, 2.0,
591 DEFAULT_PROP_SATURATION,
592 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
594 trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_balance_set_caps);
595 trans_class->transform_ip =
596 GST_DEBUG_FUNCPTR (gst_video_balance_transform_ip);
597 trans_class->before_transform =
598 GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
/* Instance init: sets the identity property defaults, allocates the LUT
 * storage, initializes passthrough/LUT state via update_properties, and
 * builds the GstColorBalance channel list (HUE, SATURATION, BRIGHTNESS,
 * CONTRAST, each with range [-1000, 1000] per the colorbalance API). */
602 gst_video_balance_init (GstVideoBalance * videobalance,
603 GstVideoBalanceClass * klass)
605 const gchar *channels[4] = { "HUE", "SATURATION",
606 "BRIGHTNESS", "CONTRAST"
610 /* Initialize properties */
611 videobalance->contrast = DEFAULT_PROP_CONTRAST;
612 videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
613 videobalance->hue = DEFAULT_PROP_HUE;
614 videobalance->saturation = DEFAULT_PROP_SATURATION;
/* One contiguous slab holds both 256x256 chroma LUTs: first half is the
 * U table, second half the V table.  tableu[0] keeps the base pointer
 * freed in finalize; the row pointers below index into the slab. */
616 videobalance->tableu[0] = g_new (guint8, 256 * 256 * 2);
617 for (i = 0; i < 256; i++) {
618 videobalance->tableu[i] =
619 videobalance->tableu[0] + i * 256 * sizeof (guint8);
620 videobalance->tablev[i] =
621 videobalance->tableu[0] + 256 * 256 * sizeof (guint8) +
622 i * 256 * sizeof (guint8);
625 gst_video_balance_update_properties (videobalance);
627 /* Generate the channels list */
628 for (i = 0; i < G_N_ELEMENTS (channels); i++) {
629 GstColorBalanceChannel *channel;
631 channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
632 channel->label = g_strdup (channels[i]);
633 channel->min_value = -1000;
634 channel->max_value = 1000;
636 videobalance->channels = g_list_append (videobalance->channels, channel);
/* GstImplementsInterface::supported — this element only advertises the
 * GstColorBalance interface, asserted here. */
641 gst_video_balance_interface_supported (GstImplementsInterface * iface,
644 g_assert (type == GST_TYPE_COLOR_BALANCE);
/* GstImplementsInterface vtable setup: plug in the supported() check. */
649 gst_video_balance_interface_init (GstImplementsInterfaceClass * klass)
651 klass->supported = gst_video_balance_interface_supported;
/* GstColorBalance::list_channels — returns the element-owned channel
 * list built in init (caller must not free or modify it). */
655 gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
657 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);
659 g_return_val_if_fail (videobalance != NULL, NULL);
660 g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);
662 return videobalance->channels;
/* GstColorBalance::set_value — maps the interface's [-1000, 1000] range
 * onto the matching property range ([-1,1] for HUE/BRIGHTNESS, [0,2]
 * for SATURATION/CONTRAST), stores the value, and refreshes
 * passthrough/LUT state under the transform + object locks.  After
 * unlocking, emits value-changed if the value actually changed (the
 * `if (changed)` guard around the final emit is in an elided line —
 * NOTE(review): confirm against the full source). */
666 gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
667 GstColorBalanceChannel * channel, gint value)
669 GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
671 gboolean changed = FALSE;
673 g_return_if_fail (vb != NULL);
674 g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
675 g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
676 g_return_if_fail (channel->label != NULL);
678 GST_BASE_TRANSFORM_LOCK (vb);
679 GST_OBJECT_LOCK (vb);
680 if (!g_ascii_strcasecmp (channel->label, "HUE")) {
681 new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
682 changed = new_val != vb->hue;
684 } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
685 new_val = (value + 1000.0) * 2.0 / 2000.0;
686 changed = new_val != vb->saturation;
687 vb->saturation = new_val;
688 } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
689 new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
690 changed = new_val != vb->brightness;
691 vb->brightness = new_val;
692 } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
693 new_val = (value + 1000.0) * 2.0 / 2000.0;
694 changed = new_val != vb->contrast;
695 vb->contrast = new_val;
699 gst_video_balance_update_properties (vb);
700 GST_OBJECT_UNLOCK (vb);
701 GST_BASE_TRANSFORM_UNLOCK (vb);
704 gst_color_balance_value_changed (balance, channel,
705 gst_color_balance_get_value (balance, channel));
/* GstColorBalance::get_value — inverse of set_value's mapping: converts
 * the stored property value back to the interface's [-1000, 1000] range
 * for the requested channel.  Returns 0 on precondition failure. */
710 gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
711 GstColorBalanceChannel * channel)
713 GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
716 g_return_val_if_fail (vb != NULL, 0);
717 g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
718 g_return_val_if_fail (channel->label != NULL, 0);
720 if (!g_ascii_strcasecmp (channel->label, "HUE")) {
721 value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
722 } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
723 value = vb->saturation * 2000.0 / 2.0 - 1000.0;
724 } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
725 value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
726 } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
727 value = vb->contrast * 2000.0 / 2.0 - 1000.0;
/* GstColorBalance vtable setup: mark the balance as software-implemented
 * and plug in the list/get/set channel functions. */
734 gst_video_balance_colorbalance_init (GstColorBalanceClass * iface)
736 GST_COLOR_BALANCE_TYPE (iface) = GST_COLOR_BALANCE_SOFTWARE;
737 iface->list_channels = gst_video_balance_colorbalance_list_channels;
738 iface->set_value = gst_video_balance_colorbalance_set_value;
739 iface->get_value = gst_video_balance_colorbalance_get_value;
/* Looks up a channel in the element's list by case-insensitive label
 * (e.g. "BRIGHTNESS"); used by set_property to emit value-changed.
 * The returned channel is owned by the element.  NOTE(review): the
 * NULL-on-miss return is in an elided line. */
742 static GstColorBalanceChannel *
743 gst_video_balance_find_channel (GstVideoBalance * balance, const gchar * label)
747 for (l = balance->channels; l; l = l->next) {
748 GstColorBalanceChannel *channel = l->data;
750 if (g_ascii_strcasecmp (channel->label, label) == 0)
/* GObject set_property — stores the new double value under the
 * transform + object locks, refreshes passthrough/LUT state, and, if
 * the value actually changed, records the matching colorbalance channel
 * label so value-changed can be emitted after unlocking (the `if (label)`
 * guard and the switch/case/brace lines around the visible statements
 * are in elided lines). */
757 gst_video_balance_set_property (GObject * object, guint prop_id,
758 const GValue * value, GParamSpec * pspec)
760 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
762 const gchar *label = NULL;
764 GST_BASE_TRANSFORM_LOCK (balance);
765 GST_OBJECT_LOCK (balance);
768 d = g_value_get_double (value);
769 GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
770 balance->contrast, d);
771 if (d != balance->contrast)
773 balance->contrast = d;
775 case PROP_BRIGHTNESS:
776 d = g_value_get_double (value);
777 GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
778 balance->brightness, d);
779 if (d != balance->brightness)
780 label = "BRIGHTNESS";
781 balance->brightness = d;
784 d = g_value_get_double (value);
785 GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
787 if (d != balance->hue)
791 case PROP_SATURATION:
792 d = g_value_get_double (value);
793 GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
794 balance->saturation, d);
795 if (d != balance->saturation)
796 label = "SATURATION";
797 balance->saturation = d;
800 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
804 gst_video_balance_update_properties (balance);
805 GST_OBJECT_UNLOCK (balance);
806 GST_BASE_TRANSFORM_UNLOCK (balance);
/* Emitted outside the locks to avoid re-entering them via handlers. */
809 GstColorBalanceChannel *channel =
810 gst_video_balance_find_channel (balance, label);
811 gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
812 gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
/* GObject get_property — copies the requested double out of the
 * instance; warns on unknown property ids (switch/case framing lines
 * are elided). */
817 gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
820 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
824 g_value_set_double (value, balance->contrast);
826 case PROP_BRIGHTNESS:
827 g_value_set_double (value, balance->brightness);
830 g_value_set_double (value, balance->hue);
832 case PROP_SATURATION:
833 g_value_set_double (value, balance->saturation);
836 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);