2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2003> David Schleef <ds@schleef.org>
4 * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19 * Boston, MA 02110-1301, USA.
23 * This file was (probably) generated from gstvideobalance.c,
24 * gstvideobalance.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
28 * SECTION:element-videobalance
29 * @title: videobalance
31 * Adjusts brightness, contrast, hue, saturation on a video stream.
 * ## Example launch line
 * |[
 * gst-launch-1.0 videotestsrc ! videobalance saturation=0.0 ! videoconvert ! ximagesink
 * ]| This pipeline converts the image to black and white by setting the
45 #include <gst/math-compat.h>
47 #include "gstvideobalance.h"
50 #include <gst/video/colorbalance.h>
GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
#define GST_CAT_DEFAULT videobalance_debug

/* GstVideoBalance property defaults; with all four at their defaults the
 * element operates in passthrough mode. */
#define DEFAULT_PROP_CONTRAST		1.0
#define DEFAULT_PROP_BRIGHTNESS		0.0
#define DEFAULT_PROP_HUE		0.0
#define DEFAULT_PROP_SATURATION		1.0
/* 8-bit YUV (planar, semi-planar, packed) and RGB formats the element can
 * modify in place; any other raw video format is passthrough-only. */
#define PROCESSING_CAPS \
    "{ AYUV, ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, " \
    "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, " \
    "I420, YV12, IYUV, Y41B, NV12, NV21 }"
75 static GstStaticPadTemplate gst_video_balance_src_template =
76 GST_STATIC_PAD_TEMPLATE ("src",
79 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
83 static GstStaticPadTemplate gst_video_balance_sink_template =
84 GST_STATIC_PAD_TEMPLATE ("sink",
87 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
91 static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
94 static void gst_video_balance_set_property (GObject * object, guint prop_id,
95 const GValue * value, GParamSpec * pspec);
96 static void gst_video_balance_get_property (GObject * object, guint prop_id,
97 GValue * value, GParamSpec * pspec);
99 #define gst_video_balance_parent_class parent_class
100 G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
101 GST_TYPE_VIDEO_FILTER,
102 G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
103 gst_video_balance_colorbalance_init));
104 GST_ELEMENT_REGISTER_DEFINE (videobalance, "videobalance",
105 GST_RANK_NONE, GST_TYPE_VIDEO_BALANCE);
108 * look-up tables (LUT).
111 gst_video_balance_update_tables (GstVideoBalance * vb)
114 gdouble y, u, v, hue_cos, hue_sin;
117 for (i = 0; i < 256; i++) {
118 y = 16 + ((i - 16) * vb->contrast + vb->brightness * 255);
123 vb->tabley[i] = rint (y);
126 hue_cos = cos (G_PI * vb->hue);
127 hue_sin = sin (G_PI * vb->hue);
129 /* U/V lookup tables are 2D, since we need both U/V for each table
131 for (i = -128; i < 128; i++) {
132 for (j = -128; j < 128; j++) {
133 u = 128 + ((i * hue_cos + j * hue_sin) * vb->saturation);
134 v = 128 + ((-i * hue_sin + j * hue_cos) * vb->saturation);
143 vb->tableu[i + 128][j + 128] = rint (u);
144 vb->tablev[i + 128][j + 128] = rint (v);
150 gst_video_balance_is_passthrough (GstVideoBalance * videobalance)
152 return videobalance->contrast == 1.0 &&
153 videobalance->brightness == 0.0 &&
154 videobalance->hue == 0.0 && videobalance->saturation == 1.0;
158 gst_video_balance_update_properties (GstVideoBalance * videobalance)
160 gboolean passthrough;
161 GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
163 GST_OBJECT_LOCK (videobalance);
164 passthrough = gst_video_balance_is_passthrough (videobalance);
166 gst_video_balance_update_tables (videobalance);
167 GST_OBJECT_UNLOCK (videobalance);
169 gst_base_transform_set_passthrough (base, passthrough);
173 gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
174 GstVideoFrame * frame)
178 guint8 *udata, *vdata;
179 gint ystride, ustride, vstride;
181 gint width2, height2;
182 guint8 *tabley = videobalance->tabley;
183 guint8 **tableu = videobalance->tableu;
184 guint8 **tablev = videobalance->tablev;
186 width = GST_VIDEO_FRAME_WIDTH (frame);
187 height = GST_VIDEO_FRAME_HEIGHT (frame);
189 ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
190 ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
192 for (y = 0; y < height; y++) {
195 yptr = ydata + y * ystride;
196 for (x = 0; x < width; x++) {
197 *yptr = tabley[*yptr];
202 width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
203 height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
205 udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
206 vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
207 ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
208 vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
210 for (y = 0; y < height2; y++) {
214 uptr = udata + y * ustride;
215 vptr = vdata + y * vstride;
217 for (x = 0; x < width2; x++) {
221 *uptr++ = tableu[u1][v1];
222 *vptr++ = tablev[u1][v1];
228 gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
229 GstVideoFrame * frame)
234 gint ystride, uvstride;
236 gint width2, height2;
237 guint8 *tabley = videobalance->tabley;
238 guint8 **tableu = videobalance->tableu;
239 guint8 **tablev = videobalance->tablev;
242 width = GST_VIDEO_FRAME_WIDTH (frame);
243 height = GST_VIDEO_FRAME_HEIGHT (frame);
245 ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
246 ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
248 for (y = 0; y < height; y++) {
251 yptr = ydata + y * ystride;
252 for (x = 0; x < width; x++) {
253 *yptr = tabley[*yptr];
258 width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
259 height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
261 uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
262 uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
264 upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
265 vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;
267 for (y = 0; y < height2; y++) {
271 uvptr = uvdata + y * uvstride;
273 for (x = 0; x < width2; x++) {
277 uvptr[upos] = tableu[u1][v1];
278 uvptr[vpos] = tablev[u1][v1];
285 gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
286 GstVideoFrame * frame)
289 guint8 *ydata, *udata, *vdata;
290 gint yoff, uoff, voff;
292 gint width2, height2;
293 guint8 *tabley = videobalance->tabley;
294 guint8 **tableu = videobalance->tableu;
295 guint8 **tablev = videobalance->tablev;
297 width = GST_VIDEO_FRAME_WIDTH (frame);
298 height = GST_VIDEO_FRAME_HEIGHT (frame);
300 stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
301 ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
302 yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
304 for (y = 0; y < height; y++) {
307 yptr = ydata + y * stride;
308 for (x = 0; x < width; x++) {
309 *yptr = tabley[*yptr];
314 width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
315 height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
317 udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
318 vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
319 uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
320 voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
322 for (y = 0; y < height2; y++) {
326 uptr = udata + y * stride;
327 vptr = vdata + y * stride;
329 for (x = 0; x < width2; x++) {
333 *uptr = tableu[u1][v1];
334 *vptr = tablev[u1][v1];
342 static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
344 298, -100, -208, 34707,
348 static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
350 -38, -74, 112, 32768,
351 112, -94, -18, 32768,
354 #define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
357 gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
358 GstVideoFrame * frame)
361 gint width, stride, row_wrap;
368 guint8 *tabley = videobalance->tabley;
369 guint8 **tableu = videobalance->tableu;
370 guint8 **tablev = videobalance->tablev;
372 width = GST_VIDEO_FRAME_WIDTH (frame);
373 height = GST_VIDEO_FRAME_HEIGHT (frame);
375 offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
376 offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
377 offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
379 data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
380 stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
382 pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
383 row_wrap = stride - pixel_stride * width;
385 for (i = 0; i < height; i++) {
386 for (j = 0; j < width; j++) {
387 r = data[offsets[0]];
388 g = data[offsets[1]];
389 b = data[offsets[2]];
391 y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
392 u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
393 v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
395 y = CLAMP (y, 0, 255);
396 u_tmp = CLAMP (u_tmp, 0, 255);
397 v_tmp = CLAMP (v_tmp, 0, 255);
400 u = tableu[u_tmp][v_tmp];
401 v = tablev[u_tmp][v_tmp];
403 r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
404 g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
405 b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
407 data[offsets[0]] = CLAMP (r, 0, 255);
408 data[offsets[1]] = CLAMP (g, 0, 255);
409 data[offsets[2]] = CLAMP (b, 0, 255);
410 data += pixel_stride;
416 /* get notified of caps and plug in the correct process function */
418 gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
419 GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
421 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
423 GST_DEBUG_OBJECT (videobalance,
424 "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
426 videobalance->process = NULL;
428 switch (GST_VIDEO_INFO_FORMAT (in_info)) {
429 case GST_VIDEO_FORMAT_I420:
430 case GST_VIDEO_FORMAT_YV12:
431 case GST_VIDEO_FORMAT_Y41B:
432 case GST_VIDEO_FORMAT_Y42B:
433 case GST_VIDEO_FORMAT_Y444:
434 videobalance->process = gst_video_balance_planar_yuv;
436 case GST_VIDEO_FORMAT_YUY2:
437 case GST_VIDEO_FORMAT_UYVY:
438 case GST_VIDEO_FORMAT_AYUV:
439 case GST_VIDEO_FORMAT_YVYU:
440 videobalance->process = gst_video_balance_packed_yuv;
442 case GST_VIDEO_FORMAT_NV12:
443 case GST_VIDEO_FORMAT_NV21:
444 videobalance->process = gst_video_balance_semiplanar_yuv;
446 case GST_VIDEO_FORMAT_ARGB:
447 case GST_VIDEO_FORMAT_ABGR:
448 case GST_VIDEO_FORMAT_RGBA:
449 case GST_VIDEO_FORMAT_BGRA:
450 case GST_VIDEO_FORMAT_xRGB:
451 case GST_VIDEO_FORMAT_xBGR:
452 case GST_VIDEO_FORMAT_RGBx:
453 case GST_VIDEO_FORMAT_BGRx:
454 case GST_VIDEO_FORMAT_RGB:
455 case GST_VIDEO_FORMAT_BGR:
456 videobalance->process = gst_video_balance_packed_rgb;
459 if (!gst_video_balance_is_passthrough (videobalance))
469 GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
475 gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
477 GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
478 GstClockTime timestamp, stream_time;
480 timestamp = GST_BUFFER_TIMESTAMP (buf);
482 gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
484 GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
485 GST_TIME_ARGS (timestamp));
487 if (GST_CLOCK_TIME_IS_VALID (stream_time))
488 gst_object_sync_values (GST_OBJECT (balance), stream_time);
492 gst_video_balance_transform_caps (GstBaseTransform * trans,
493 GstPadDirection direction, GstCaps * caps, GstCaps * filter)
495 GstVideoBalance *balance = GST_VIDEO_BALANCE (trans);
498 if (!gst_video_balance_is_passthrough (balance)) {
499 static GstStaticCaps raw_caps =
500 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS));
501 GstCaps *tmp = gst_static_caps_get (&raw_caps);
503 caps = gst_caps_intersect (caps, tmp);
504 gst_caps_unref (tmp);
507 ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
508 gst_caps_unref (caps);
514 ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
516 ret = gst_caps_ref (caps);
524 gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
525 GstVideoFrame * frame)
527 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
529 if (!videobalance->process)
532 GST_OBJECT_LOCK (videobalance);
533 videobalance->process (videobalance, frame);
534 GST_OBJECT_UNLOCK (videobalance);
541 GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
542 return GST_FLOW_NOT_NEGOTIATED;
547 gst_video_balance_finalize (GObject * object)
549 GList *channels = NULL;
550 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
552 g_free (balance->tableu[0]);
554 channels = balance->channels;
556 GstColorBalanceChannel *channel = channels->data;
558 g_object_unref (channel);
559 channels->data = NULL;
560 channels = g_list_next (channels);
563 if (balance->channels)
564 g_list_free (balance->channels);
566 G_OBJECT_CLASS (parent_class)->finalize (object);
570 gst_video_balance_class_init (GstVideoBalanceClass * klass)
572 GObjectClass *gobject_class = (GObjectClass *) klass;
573 GstElementClass *gstelement_class = (GstElementClass *) klass;
574 GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
575 GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
577 GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
580 gobject_class->finalize = gst_video_balance_finalize;
581 gobject_class->set_property = gst_video_balance_set_property;
582 gobject_class->get_property = gst_video_balance_get_property;
584 g_object_class_install_property (gobject_class, PROP_CONTRAST,
585 g_param_spec_double ("contrast", "Contrast", "contrast",
586 0.0, 2.0, DEFAULT_PROP_CONTRAST,
587 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
588 g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
589 g_param_spec_double ("brightness", "Brightness", "brightness", -1.0, 1.0,
590 DEFAULT_PROP_BRIGHTNESS,
591 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
592 g_object_class_install_property (gobject_class, PROP_HUE,
593 g_param_spec_double ("hue", "Hue", "hue", -1.0, 1.0, DEFAULT_PROP_HUE,
594 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
595 g_object_class_install_property (gobject_class, PROP_SATURATION,
596 g_param_spec_double ("saturation", "Saturation", "saturation", 0.0, 2.0,
597 DEFAULT_PROP_SATURATION,
598 GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
600 gst_element_class_set_static_metadata (gstelement_class, "Video balance",
601 "Filter/Effect/Video",
602 "Adjusts brightness, contrast, hue, saturation on a video stream",
603 "David Schleef <ds@schleef.org>");
605 gst_element_class_add_static_pad_template (gstelement_class,
606 &gst_video_balance_sink_template);
607 gst_element_class_add_static_pad_template (gstelement_class,
608 &gst_video_balance_src_template);
610 trans_class->before_transform =
611 GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
612 trans_class->transform_ip_on_passthrough = FALSE;
613 trans_class->transform_caps =
614 GST_DEBUG_FUNCPTR (gst_video_balance_transform_caps);
616 vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
617 vfilter_class->transform_frame_ip =
618 GST_DEBUG_FUNCPTR (gst_video_balance_transform_frame_ip);
622 gst_video_balance_init (GstVideoBalance * videobalance)
624 const gchar *channels[4] = { "HUE", "SATURATION",
625 "BRIGHTNESS", "CONTRAST"
629 /* Initialize propertiews */
630 videobalance->contrast = DEFAULT_PROP_CONTRAST;
631 videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
632 videobalance->hue = DEFAULT_PROP_HUE;
633 videobalance->saturation = DEFAULT_PROP_SATURATION;
635 videobalance->tableu[0] = g_new (guint8, 256 * 256 * 2);
636 for (i = 0; i < 256; i++) {
637 videobalance->tableu[i] =
638 videobalance->tableu[0] + i * 256 * sizeof (guint8);
639 videobalance->tablev[i] =
640 videobalance->tableu[0] + 256 * 256 * sizeof (guint8) +
641 i * 256 * sizeof (guint8);
644 gst_video_balance_update_properties (videobalance);
646 /* Generate the channels list */
647 for (i = 0; i < G_N_ELEMENTS (channels); i++) {
648 GstColorBalanceChannel *channel;
650 channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
651 channel->label = g_strdup (channels[i]);
652 channel->min_value = -1000;
653 channel->max_value = 1000;
655 videobalance->channels = g_list_append (videobalance->channels, channel);
660 gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
662 GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);
664 g_return_val_if_fail (videobalance != NULL, NULL);
665 g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);
667 return videobalance->channels;
671 gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
672 GstColorBalanceChannel * channel, gint value)
674 GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
676 gboolean changed = FALSE;
678 g_return_if_fail (vb != NULL);
679 g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
680 g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
681 g_return_if_fail (channel->label != NULL);
683 GST_OBJECT_LOCK (vb);
684 if (!g_ascii_strcasecmp (channel->label, "HUE")) {
685 new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
686 changed = new_val != vb->hue;
688 } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
689 new_val = (value + 1000.0) * 2.0 / 2000.0;
690 changed = new_val != vb->saturation;
691 vb->saturation = new_val;
692 } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
693 new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
694 changed = new_val != vb->brightness;
695 vb->brightness = new_val;
696 } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
697 new_val = (value + 1000.0) * 2.0 / 2000.0;
698 changed = new_val != vb->contrast;
699 vb->contrast = new_val;
701 GST_OBJECT_UNLOCK (vb);
704 gst_video_balance_update_properties (vb);
707 gst_color_balance_value_changed (balance, channel,
708 gst_color_balance_get_value (balance, channel));
713 gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
714 GstColorBalanceChannel * channel)
716 GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
719 g_return_val_if_fail (vb != NULL, 0);
720 g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
721 g_return_val_if_fail (channel->label != NULL, 0);
723 if (!g_ascii_strcasecmp (channel->label, "HUE")) {
724 value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
725 } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
726 value = vb->saturation * 2000.0 / 2.0 - 1000.0;
727 } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
728 value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
729 } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
730 value = vb->contrast * 2000.0 / 2.0 - 1000.0;
736 static GstColorBalanceType
737 gst_video_balance_colorbalance_get_balance_type (GstColorBalance * balance)
739 return GST_COLOR_BALANCE_SOFTWARE;
743 gst_video_balance_colorbalance_init (GstColorBalanceInterface * iface)
745 iface->list_channels = gst_video_balance_colorbalance_list_channels;
746 iface->set_value = gst_video_balance_colorbalance_set_value;
747 iface->get_value = gst_video_balance_colorbalance_get_value;
748 iface->get_balance_type = gst_video_balance_colorbalance_get_balance_type;
751 static GstColorBalanceChannel *
752 gst_video_balance_find_channel (GstVideoBalance * balance, const gchar * label)
756 for (l = balance->channels; l; l = l->next) {
757 GstColorBalanceChannel *channel = l->data;
759 if (g_ascii_strcasecmp (channel->label, label) == 0)
766 gst_video_balance_set_property (GObject * object, guint prop_id,
767 const GValue * value, GParamSpec * pspec)
769 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
771 const gchar *label = NULL;
773 GST_OBJECT_LOCK (balance);
776 d = g_value_get_double (value);
777 GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
778 balance->contrast, d);
779 if (d != balance->contrast)
781 balance->contrast = d;
783 case PROP_BRIGHTNESS:
784 d = g_value_get_double (value);
785 GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
786 balance->brightness, d);
787 if (d != balance->brightness)
788 label = "BRIGHTNESS";
789 balance->brightness = d;
792 d = g_value_get_double (value);
793 GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
795 if (d != balance->hue)
799 case PROP_SATURATION:
800 d = g_value_get_double (value);
801 GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
802 balance->saturation, d);
803 if (d != balance->saturation)
804 label = "SATURATION";
805 balance->saturation = d;
808 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
812 GST_OBJECT_UNLOCK (balance);
813 gst_video_balance_update_properties (balance);
816 GstColorBalanceChannel *channel =
817 gst_video_balance_find_channel (balance, label);
818 gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
819 gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
824 gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
827 GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
831 g_value_set_double (value, balance->contrast);
833 case PROP_BRIGHTNESS:
834 g_value_set_double (value, balance->brightness);
837 g_value_set_double (value, balance->hue);
839 case PROP_SATURATION:
840 g_value_set_double (value, balance->saturation);
843 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);