1 /* cairo - a vector graphics library with display and print output
3 * Copyright © 2009 Eric Anholt
4 * Copyright © 2009 Chris Wilson
5 * Copyright © 2005,2010 Red Hat, Inc
6 * Copyright © 2011 Linaro Limited
7 * Copyright © 2011 Samsung Electronics
9 * This library is free software; you can redistribute it and/or
10 * modify it either under the terms of the GNU Lesser General Public
11 * License version 2.1 as published by the Free Software Foundation
12 * (the "LGPL") or, at your option, under the terms of the Mozilla
13 * Public License Version 1.1 (the "MPL"). If you do not alter this
14 * notice, a recipient may use your version of this file under either
15 * the MPL or the LGPL.
17 * You should have received a copy of the LGPL along with this library
18 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
20 * You should have received a copy of the MPL along with this library
21 * in the file COPYING-MPL-1.1
23 * The contents of this file are subject to the Mozilla Public License
24 * Version 1.1 (the "License"); you may not use this file except in
25 * compliance with the License. You may obtain a copy of the License at
26 * http://www.mozilla.org/MPL/
28 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
29 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
30 * the specific language governing rights and limitations.
32 * The Original Code is the cairo graphics library.
34 * The Initial Developer of the Original Code is Red Hat, Inc.
37 * Benjamin Otte <otte@gnome.org>
38 * Carl Worth <cworth@cworth.org>
39 * Chris Wilson <chris@chris-wilson.co.uk>
40 * Eric Anholt <eric@anholt.net>
41 * Alexandros Frantzis <alexandros.frantzis@linaro.org>
42 * Henry Song <hsong@sisa.samsung.com>
43 * Martin Robinson <mrobinson@igalia.com>
48 #include "cairo-gl-private.h"
50 #include "cairo-composite-rectangles-private.h"
51 #include "cairo-clip-private.h"
52 #include "cairo-error-private.h"
53 #include "cairo-image-surface-private.h"
/* Replace the setup's source operand with one derived from PATTERN.
 * Any previously-set source operand is destroyed first, then the new
 * operand is initialized against the destination surface; returns the
 * status from _cairo_gl_operand_init.
 * NOTE(review): return type and braces are elided in this view. */
61 _cairo_gl_composite_set_source (cairo_gl_composite_t *setup,
62 const cairo_pattern_t *pattern,
63 const cairo_rectangle_int_t *sample,
64 const cairo_rectangle_int_t *extents,
65 cairo_bool_t use_color_attribute)
67 _cairo_gl_operand_destroy (&setup->src);
68 return _cairo_gl_operand_init (&setup->src, pattern, setup->dst,
69 sample, extents, use_color_attribute);
/* Replace the setup's source operand with a copy of an already-built
 * GL operand, destroying the previous one first. */
73 _cairo_gl_composite_set_source_operand (cairo_gl_composite_t *setup,
74 const cairo_gl_operand_t *source)
76 _cairo_gl_operand_destroy (&setup->src);
77 _cairo_gl_operand_copy (&setup->src, source);
/* Replace the setup's source operand with a solid-color operand. */
81 _cairo_gl_composite_set_solid_source (cairo_gl_composite_t *setup,
82 const cairo_color_t *color)
84 _cairo_gl_operand_destroy (&setup->src);
85 _cairo_gl_solid_operand_init (&setup->src, color);
/* Replace the setup's mask operand with one derived from PATTERN.
 * The previous mask operand is destroyed first.  For CLEAR with a
 * non-opaque pattern, component alpha is forced on for the mask
 * texture (see the XXX comment below).
 * NOTE(review): early-return path for a NULL pattern appears elided
 * between the destroy and the SUCCESS return in this view. */
89 _cairo_gl_composite_set_mask (cairo_gl_composite_t *setup,
90 const cairo_pattern_t *pattern,
91 const cairo_rectangle_int_t *sample,
92 const cairo_rectangle_int_t *extents)
94 cairo_int_status_t status;
96 _cairo_gl_operand_destroy (&setup->mask);
98 return CAIRO_STATUS_SUCCESS;
100 /* XXX: shoot me - we need to set component_alpha to be true
101 if op is CAIRO_OPERATOR_CLEAR AND pattern is a surface_pattern
103 status = _cairo_gl_operand_init (&setup->mask, pattern, setup->dst,
104 sample, extents, FALSE);
105 if (unlikely (status))
108 if (setup->op == CAIRO_OPERATOR_CLEAR &&
109 ! _cairo_pattern_is_opaque (pattern, sample))
110 setup->mask.texture.attributes.has_component_alpha = TRUE;
/* Replace the setup's mask operand with a copy of an already-built
 * GL operand, destroying the previous one first. */
116 _cairo_gl_composite_set_mask_operand (cairo_gl_composite_t *setup,
117 const cairo_gl_operand_t *mask)
119 _cairo_gl_operand_destroy (&setup->mask);
121 _cairo_gl_operand_copy (&setup->mask, mask);
/* Enable span (coverage) rendering for this composite setup.
 * NOTE(review): the body is elided in this view — presumably it sets
 * a spans flag on SETUP; confirm against the full file. */
125 _cairo_gl_composite_set_spans (cairo_gl_composite_t *setup)
/* Record a region-based clip on the setup.  The pointer is stored
 * without taking a reference here; ownership stays with the caller. */
131 _cairo_gl_composite_set_clip_region (cairo_gl_composite_t *setup,
132 cairo_region_t *clip_region)
134 setup->clip_region = clip_region;
/* Record a path-based clip on the setup.
 * NOTE(review): parameter list tail and body are elided in this view. */
138 _cairo_gl_composite_set_clip (cairo_gl_composite_t *setup,
/* Upload the MVP matrix and bind the source and mask operands'
 * uniforms/textures to the currently-active shader. */
145 _cairo_gl_composite_bind_to_shader (cairo_gl_context_t *ctx,
146 cairo_gl_composite_t *setup)
148 _cairo_gl_shader_bind_matrix4f(ctx, "ModelViewProjectionMatrix",
149 ctx->modelviewprojection_matrix);
150 _cairo_gl_operand_bind_to_shader (ctx, &setup->src, CAIRO_GL_TEX_SOURCE);
151 _cairo_gl_operand_bind_to_shader (ctx, &setup->mask, CAIRO_GL_TEX_MASK);
/* Map a cairo filter onto GL min/mag texture filters on TARGET:
 * FAST/NEAREST -> GL_NEAREST; GOOD/BEST/BILINEAR -> GL_LINEAR.
 * NOTE(review): the switch header, break statements and the
 * GAUSSIAN case body are elided in this view. */
155 _cairo_gl_texture_set_filter (cairo_gl_context_t *ctx,
157 cairo_filter_t filter)
160 case CAIRO_FILTER_FAST:
161 case CAIRO_FILTER_NEAREST:
162 glTexParameteri (target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
163 glTexParameteri (target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
165 case CAIRO_FILTER_GOOD:
166 case CAIRO_FILTER_BEST:
167 case CAIRO_FILTER_BILINEAR:
168 glTexParameteri (target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
169 glTexParameteri (target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
172 case CAIRO_FILTER_GAUSSIAN:
/* Map a cairo extend mode onto a GL wrap mode and apply it to both
 * the S and T coordinates of TARGET.  NONE uses CLAMP_TO_BORDER on
 * desktop GL but CLAMP_TO_EDGE on GLES (no border support); REPEAT/
 * REFLECT fall back to CLAMP_TO_EDGE when NPOT repeat is unavailable.
 * The assert enforces that repeat modes are never requested on
 * devices restricted to power-of-two textures. */
178 _cairo_gl_texture_set_extend (cairo_gl_context_t *ctx,
180 cairo_extend_t extend,
181 cairo_bool_t use_atlas)
184 assert (! _cairo_gl_device_requires_power_of_two_textures (&ctx->base) ||
185 (extend != CAIRO_EXTEND_REPEAT && extend != CAIRO_EXTEND_REFLECT));
188 case CAIRO_EXTEND_NONE:
189 if (ctx->gl_flavor == CAIRO_GL_FLAVOR_ES)
190 wrap_mode = GL_CLAMP_TO_EDGE;
192 wrap_mode = GL_CLAMP_TO_BORDER;
194 case CAIRO_EXTEND_PAD:
195 wrap_mode = GL_CLAMP_TO_EDGE;
197 case CAIRO_EXTEND_REPEAT:
198 if (ctx->has_npot_repeat)
199 wrap_mode = GL_REPEAT;
201 wrap_mode = GL_CLAMP_TO_EDGE;
203 case CAIRO_EXTEND_REFLECT:
204 if (ctx->has_npot_repeat)
205 wrap_mode = GL_MIRRORED_REPEAT;
207 wrap_mode = GL_CLAMP_TO_EDGE;
/* wrap_mode of 0 means "leave the current wrap state untouched". */
213 if (likely (wrap_mode)) {
214 glTexParameteri (target, GL_TEXTURE_WRAP_S, wrap_mode);
215 glTexParameteri (target, GL_TEXTURE_WRAP_T, wrap_mode);
/* Install OPERAND into the context's per-texture-unit state and set up
 * the matching vertex attribute arrays.  If the vertex layout changed
 * (different vertex_size) or the operand differs from the cached one,
 * pending geometry is flushed and the old operand torn down before the
 * new one is copied into ctx->operands[tex_unit].
 *
 * Per operand type:
 *  - CONSTANT with color attribute: 4 normalized ubyte colors per vertex.
 *  - TEXTURE: bind the texture (via the cached active-texture state),
 *    apply extend/filter, and point a 2-float texcoord attribute at the
 *    VBO; atlas textures get two extra 2-float attributes (start/stop
 *    coords) immediately after the texcoords.
 *  - gradients: bind the gradient LUT texture with BILINEAR filtering
 *    and a 2-float texcoord attribute. */
221 _cairo_gl_context_setup_operand (cairo_gl_context_t *ctx,
222 cairo_gl_tex_t tex_unit,
223 cairo_gl_operand_t *operand,
224 unsigned int vertex_size,
225 unsigned int vertex_offset)
227 cairo_gl_dispatch_t *dispatch = &ctx->dispatch;
228 cairo_bool_t needs_setup;
229 cairo_bool_t needs_flush = TRUE;
231 /* XXX: we need to do setup when switching from shaders
232 * to no shaders (or back) */
233 needs_setup = ctx->vertex_size != vertex_size;
234 needs_setup |= _cairo_gl_operand_needs_setup (&ctx->operands[tex_unit],
239 if (needs_setup && needs_flush) {
240 _cairo_gl_composite_flush (ctx);
241 _cairo_gl_context_destroy_operand (ctx, tex_unit);
/* Cache the operand (shallow copy) and remember where its vertex
 * data lives within each vertex. */
244 memcpy (&ctx->operands[tex_unit], operand, sizeof (cairo_gl_operand_t));
245 ctx->operands[tex_unit].vertex_offset = vertex_offset;
250 switch (operand->type) {
252 case CAIRO_GL_OPERAND_COUNT:
254 case CAIRO_GL_OPERAND_NONE:
257 case CAIRO_GL_OPERAND_CONSTANT:
258 if (operand->use_color_attribute) {
259 dispatch->VertexAttribPointer (CAIRO_GL_COLOR_ATTRIB_INDEX, 4,
260 GL_UNSIGNED_BYTE, GL_TRUE, vertex_size,
261 ctx->vb + vertex_offset);
262 dispatch->EnableVertexAttribArray (CAIRO_GL_COLOR_ATTRIB_INDEX);
265 case CAIRO_GL_OPERAND_TEXTURE:
/* Avoid redundant glActiveTexture calls via the states cache. */
266 if (ctx->states_cache.active_texture != GL_TEXTURE0 + tex_unit) {
267 glActiveTexture (GL_TEXTURE0 + tex_unit);
268 ctx->states_cache.active_texture = GL_TEXTURE0 + tex_unit;
270 glBindTexture (ctx->tex_target, operand->texture.tex);
271 _cairo_gl_texture_set_extend (ctx, ctx->tex_target,
272 operand->texture.attributes.extend,
273 operand->texture.use_atlas);
274 _cairo_gl_texture_set_filter (ctx, ctx->tex_target,
275 operand->texture.attributes.filter);
277 dispatch->VertexAttribPointer (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit, 2,
278 GL_FLOAT, GL_FALSE, vertex_size,
279 ctx->vb + vertex_offset);
280 dispatch->EnableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
282 if (operand->texture.use_atlas) {
283 dispatch->VertexAttribPointer (CAIRO_GL_START_COORD0_ATTRIB_INDEX + tex_unit,
284 2, GL_FLOAT, GL_FALSE,
286 ctx->vb + vertex_offset + 2 * sizeof (float));
287 dispatch->EnableVertexAttribArray (CAIRO_GL_START_COORD0_ATTRIB_INDEX + tex_unit);
288 dispatch->VertexAttribPointer (CAIRO_GL_STOP_COORD0_ATTRIB_INDEX + tex_unit,
289 2, GL_FLOAT, GL_FALSE,
291 ctx->vb + vertex_offset + 4 * sizeof (float));
292 dispatch->EnableVertexAttribArray (CAIRO_GL_STOP_COORD0_ATTRIB_INDEX + tex_unit);
295 case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
296 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
297 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
298 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
299 if(ctx->states_cache.active_texture != GL_TEXTURE0 + tex_unit) {
300 glActiveTexture (GL_TEXTURE0 + tex_unit);
301 ctx->states_cache.active_texture = GL_TEXTURE0 + tex_unit;
303 glBindTexture (ctx->tex_target, operand->gradient.gradient->tex);
304 _cairo_gl_texture_set_extend (ctx, ctx->tex_target,
305 operand->gradient.extend, FALSE);
306 _cairo_gl_texture_set_filter (ctx, ctx->tex_target, CAIRO_FILTER_BILINEAR);
308 dispatch->VertexAttribPointer (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit, 2,
309 GL_FLOAT, GL_FALSE, vertex_size,
310 ctx->vb + vertex_offset);
311 dispatch->EnableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
/* Point the coverage vertex attribute at the span-coverage bytes in
 * the VBO (4 normalized ubytes per vertex) and enable the array. */
317 _cairo_gl_context_setup_spans (cairo_gl_context_t *ctx,
318 unsigned int vertex_size,
319 unsigned int vertex_offset)
321 cairo_gl_dispatch_t *dispatch = &ctx->dispatch;
323 dispatch->VertexAttribPointer (CAIRO_GL_COVERAGE_ATTRIB_INDEX, 4,
324 GL_UNSIGNED_BYTE, GL_TRUE, vertex_size,
325 ctx->vb + vertex_offset);
326 dispatch->EnableVertexAttribArray (CAIRO_GL_COVERAGE_ATTRIB_INDEX);
/* Tear down the cached operand on TEX_UNIT: flush any unflushed
 * geometry first, disable the vertex attribute arrays the operand
 * type had enabled (mirror of _cairo_gl_context_setup_operand), then
 * zero the cached operand slot. */
331 _cairo_gl_context_destroy_operand (cairo_gl_context_t *ctx,
332 cairo_gl_tex_t tex_unit)
334 cairo_gl_dispatch_t *dispatch = &ctx->dispatch;
336 if (!_cairo_gl_context_is_flushed (ctx))
337 _cairo_gl_composite_flush (ctx);
339 switch (ctx->operands[tex_unit].type) {
341 case CAIRO_GL_OPERAND_COUNT:
343 case CAIRO_GL_OPERAND_NONE:
346 case CAIRO_GL_OPERAND_CONSTANT:
347 if (ctx->operands[tex_unit].use_color_attribute)
348 ctx->dispatch.DisableVertexAttribArray (CAIRO_GL_COLOR_ATTRIB_INDEX);
350 case CAIRO_GL_OPERAND_TEXTURE:
351 dispatch->DisableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
352 if (ctx->operands[tex_unit].texture.use_atlas) {
353 dispatch->DisableVertexAttribArray (CAIRO_GL_START_COORD0_ATTRIB_INDEX + tex_unit);
354 dispatch->DisableVertexAttribArray (CAIRO_GL_STOP_COORD0_ATTRIB_INDEX + tex_unit);
357 case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
358 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
359 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
360 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
361 dispatch->DisableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
365 memset (&ctx->operands[tex_unit], 0, sizeof (cairo_gl_operand_t));
/* Program the GL blend state for cairo operator OP.  A table maps each
 * operator to (src, dst) blend factors.  Flushes pending geometry when
 * the operator changes.  Fix-ups:
 *  - COLOR-content targets are backed by RGBA textures (needed for
 *    CLAMP_TO_BORDER filtering), so dst-alpha reads are rewritten as
 *    if dst.A were 1 (ONE_MINUS_DST_ALPHA -> ZERO, DST_ALPHA -> ONE,
 *    second rewrite partially elided in this view).
 *  - component alpha turns SRC_ALPHA factors into per-channel
 *    SRC_COLOR factors.
 * glBlendFunc/glBlendFuncSeparate calls go through the states cache
 * to skip redundant GL state changes; ALPHA-content targets blend only
 * the alpha channel, COLOR-content targets force alpha factors to ONE. */
369 _cairo_gl_set_operator (cairo_gl_context_t *ctx,
371 cairo_bool_t component_alpha)
376 } blend_factors[] = {
377 { GL_ZERO, GL_ZERO }, /* Clear */
378 { GL_ONE, GL_ZERO }, /* Source */
379 { GL_ONE, GL_ONE_MINUS_SRC_ALPHA }, /* Over */
380 { GL_DST_ALPHA, GL_ZERO }, /* In */
381 { GL_ONE_MINUS_DST_ALPHA, GL_ZERO }, /* Out */
382 { GL_DST_ALPHA, GL_ONE_MINUS_SRC_ALPHA }, /* Atop */
384 { GL_ZERO, GL_ONE }, /* Dest */
385 { GL_ONE_MINUS_DST_ALPHA, GL_ONE }, /* DestOver */
386 { GL_ZERO, GL_SRC_ALPHA }, /* DestIn */
387 { GL_ZERO, GL_ONE_MINUS_SRC_ALPHA }, /* DestOut */
388 { GL_ONE_MINUS_DST_ALPHA, GL_SRC_ALPHA }, /* DestAtop */
390 { GL_ONE_MINUS_DST_ALPHA, GL_ONE_MINUS_SRC_ALPHA }, /* Xor */
391 { GL_ONE, GL_ONE }, /* Add */
393 GLenum src_factor, dst_factor;
395 assert (op < ARRAY_LENGTH (blend_factors));
396 /* different dst and component_alpha changes cause flushes elsewhere */
397 if (ctx->current_operator != op)
398 _cairo_gl_composite_flush (ctx);
399 ctx->current_operator = op;
401 src_factor = blend_factors[op].src;
402 dst_factor = blend_factors[op].dst;
404 /* Even when the user requests CAIRO_CONTENT_COLOR, we use GL_RGBA
405 * due to texture filtering of GL_CLAMP_TO_BORDER. So fix those
408 if (ctx->current_target->base.content == CAIRO_CONTENT_COLOR) {
409 if (src_factor == GL_ONE_MINUS_DST_ALPHA)
410 src_factor = GL_ZERO;
411 if (src_factor == GL_DST_ALPHA)
415 if (component_alpha) {
416 if (dst_factor == GL_ONE_MINUS_SRC_ALPHA)
417 dst_factor = GL_ONE_MINUS_SRC_COLOR;
418 if (dst_factor == GL_SRC_ALPHA)
419 dst_factor = GL_SRC_COLOR;
422 if (ctx->current_target->base.content == CAIRO_CONTENT_ALPHA) {
423 /* cache glBlendFunc, src factor and dst factor, alpha factor */
424 if (ctx->states_cache.src_color_factor != GL_ZERO ||
425 ctx->states_cache.dst_color_factor != GL_ZERO ||
426 ctx->states_cache.src_alpha_factor != src_factor ||
427 ctx->states_cache.dst_alpha_factor != dst_factor) {
428 glBlendFuncSeparate (GL_ZERO, GL_ZERO, src_factor, dst_factor);
429 ctx->states_cache.src_color_factor = GL_ZERO;
430 ctx->states_cache.dst_color_factor = GL_ZERO;
431 ctx->states_cache.src_alpha_factor = src_factor;
432 ctx->states_cache.dst_alpha_factor = dst_factor;
434 } else if (ctx->current_target->base.content == CAIRO_CONTENT_COLOR) {
435 if (ctx->states_cache.src_color_factor != src_factor ||
436 ctx->states_cache.dst_color_factor != dst_factor ||
437 ctx->states_cache.src_alpha_factor != GL_ONE ||
438 ctx->states_cache.dst_alpha_factor != GL_ONE) {
439 glBlendFuncSeparate (src_factor, dst_factor, GL_ONE, GL_ONE);
440 ctx->states_cache.src_color_factor = src_factor;
441 ctx->states_cache.dst_color_factor = dst_factor;
442 ctx->states_cache.src_alpha_factor = GL_ONE;
443 ctx->states_cache.dst_alpha_factor = GL_ONE;
/* Fall-through case (COLOR_ALPHA content): plain glBlendFunc. */
446 if (ctx->states_cache.src_color_factor != src_factor ||
447 ctx->states_cache.dst_color_factor != dst_factor) {
448 glBlendFunc (src_factor, dst_factor);
449 ctx->states_cache.src_color_factor = src_factor;
450 ctx->states_cache.dst_color_factor = dst_factor;
/* Prepare the two-pass component-alpha rendering trick:
 *  - CLEAR is rewritten as an opaque-white source with DEST_OUT.
 *  - OVER is rewritten as DEST_OUT (pre-shader, source-alpha pass)
 *    followed by ADD (main shader), per the derivation in the long
 *    comment below.
 * Flushes when the pre-shader changes so passes don't mix. */
455 static cairo_status_t
456 _cairo_gl_composite_begin_component_alpha (cairo_gl_context_t *ctx,
457 cairo_gl_composite_t *setup)
459 cairo_gl_shader_t *pre_shader = NULL;
460 cairo_status_t status;
462 /* For CLEAR, cairo's rendering equation (quoting Owen's description in:
463 * http://lists.cairographics.org/archives/cairo/2005-August/004992.html)
465 * mask IN clip ? src OP dest : dest
467 * mask IN CLIP ? 0 : dest
469 * where the ternary operator A ? B : C is (A * B) + ((1 - A) * C).
471 * The model we use in _cairo_gl_set_operator() is Render's:
472 * src IN mask IN clip OP dest
473 * which would boil down to:
474 * 0 (bounded by the extents of the drawing).
476 * However, we can do a Render operation using an opaque source
477 * and DEST_OUT to produce:
478 * 1 IN mask IN clip DEST_OUT dest
480 * mask IN clip ? 0 : dest
482 if (setup->op == CAIRO_OPERATOR_CLEAR) {
483 _cairo_gl_solid_operand_init (&setup->src, CAIRO_COLOR_WHITE);
484 setup->op = CAIRO_OPERATOR_DEST_OUT;
488 * implements component-alpha %CAIRO_OPERATOR_OVER using two passes of
489 * the simpler operations %CAIRO_OPERATOR_DEST_OUT and %CAIRO_OPERATOR_ADD.
491 * From http://anholt.livejournal.com/32058.html:
493 * The trouble is that component-alpha rendering requires two different sources
494 * for blending: one for the source value to the blender, which is the
495 * per-channel multiplication of source and mask, and one for the source alpha
496 * for multiplying with the destination channels, which is the multiplication
497 * of the source channels by the mask alpha. So the equation for Over is:
499 * dst.A = src.A * mask.A + (1 - (src.A * mask.A)) * dst.A
500 * dst.R = src.R * mask.R + (1 - (src.A * mask.R)) * dst.R
501 * dst.G = src.G * mask.G + (1 - (src.A * mask.G)) * dst.G
502 * dst.B = src.B * mask.B + (1 - (src.A * mask.B)) * dst.B
504 * But we can do some simpler operations, right? How about PictOpOutReverse,
505 * which has a source factor of 0 and dest factor of (1 - source alpha). We
506 * can get the source alpha value (srca.X = src.A * mask.X) out of the texture
507 * blenders pretty easily. So we can do a component-alpha OutReverse, which
510 * dst.A = 0 + (1 - (src.A * mask.A)) * dst.A
511 * dst.R = 0 + (1 - (src.A * mask.R)) * dst.R
512 * dst.G = 0 + (1 - (src.A * mask.G)) * dst.G
513 * dst.B = 0 + (1 - (src.A * mask.B)) * dst.B
515 * OK. And if an op doesn't use the source alpha value for the destination
516 * factor, then we can do the channel multiplication in the texture blenders
517 * to get the source value, and ignore the source alpha that we wouldn't use.
518 * We've supported this in the Radeon driver for a long time. An example would
519 * be PictOpAdd, which does:
521 * dst.A = src.A * mask.A + dst.A
522 * dst.R = src.R * mask.R + dst.R
523 * dst.G = src.G * mask.G + dst.G
524 * dst.B = src.B * mask.B + dst.B
526 * Hey, this looks good! If we do a PictOpOutReverse and then a PictOpAdd right
529 * dst.A = src.A * mask.A + ((1 - (src.A * mask.A)) * dst.A)
530 * dst.R = src.R * mask.R + ((1 - (src.A * mask.R)) * dst.R)
531 * dst.G = src.G * mask.G + ((1 - (src.A * mask.G)) * dst.G)
532 * dst.B = src.B * mask.B + ((1 - (src.A * mask.B)) * dst.B)
534 * This two-pass trickery could be avoided using a new GL extension that
535 * lets two values come out of the shader and into the blend unit.
537 if (setup->op == CAIRO_OPERATOR_OVER) {
538 setup->op = CAIRO_OPERATOR_ADD;
539 status = _cairo_gl_get_shader_by_type (ctx,
543 CAIRO_GL_SHADER_IN_CA_SOURCE_ALPHA,
545 if (unlikely (status))
/* Changing the pre-shader requires flushing queued geometry so the
 * earlier pass isn't drawn with the wrong state. */
549 if (ctx->pre_shader != pre_shader)
550 _cairo_gl_composite_flush (ctx);
551 ctx->pre_shader = pre_shader;
553 return CAIRO_STATUS_SUCCESS;
/* Enable a GL scissor covering the rectangle (x1,y1)-(x2,y2) in
 * surface coordinates.  Non-texture (window-system) surfaces are
 * y-flipped relative to GL's bottom-left origin, hence the height
 * adjustment.  NOTE(review): the `height` computation line is elided
 * in this view. */
557 _scissor_to_doubles (cairo_gl_surface_t *surface,
558 double x1, double y1,
559 double x2, double y2)
564 if (_cairo_gl_surface_is_texture (surface) == FALSE)
565 y1 = surface->height - (y1 + height);
566 glScissor (x1, y1, x2 - x1, height);
567 glEnable (GL_SCISSOR_TEST);
/* Convenience wrapper: scissor to an integer rectangle. */
571 _cairo_gl_scissor_to_rectangle (cairo_gl_surface_t *surface,
572 const cairo_rectangle_int_t *r)
574 _scissor_to_doubles (surface, r->x, r->y, r->x+r->width, r->y+r->height);
/* Convenience wrapper: scissor to a fixed-point cairo_box_t. */
578 _scissor_to_box (cairo_gl_surface_t *surface,
579 const cairo_box_t *box)
581 double x1, y1, x2, y2;
582 _cairo_box_to_doubles (box, &x1, &y1, &x2, &y2);
583 _scissor_to_doubles (surface, x1, y1, x2, y2);
/* Configure the vertex-position attribute for a new per-vertex size.
 * Flushes first if the stride changed; the attribute pointer is only
 * rebound when the context is in the flushed state. */
587 _cairo_gl_composite_setup_vbo (cairo_gl_context_t *ctx,
588 unsigned int size_per_vertex)
590 if (ctx->vertex_size != size_per_vertex)
591 _cairo_gl_composite_flush (ctx);
593 if (_cairo_gl_context_is_flushed (ctx)) {
594 ctx->dispatch.VertexAttribPointer (CAIRO_GL_VERTEX_ATTRIB_INDEX, 2,
595 GL_FLOAT, GL_FALSE, size_per_vertex,
597 ctx->dispatch.EnableVertexAttribArray (CAIRO_GL_VERTEX_ATTRIB_INDEX);
599 ctx->vertex_size = size_per_vertex;
/* Turn off GL stencil testing if it is currently enabled. */
603 _disable_stencil_buffer (void)
605 if (glIsEnabled (GL_STENCIL_TEST))
606 glDisable (GL_STENCIL_TEST);
/* Turn off GL scissor testing if it is currently enabled. */
610 _disable_scissor_buffer (void)
612 if (glIsEnabled (GL_SCISSOR_TEST))
613 glDisable (GL_SCISSOR_TEST);
/* Apply the setup's path clip.  A single-box clip becomes a scissor;
 * anything else is rasterized into the stencil buffer: previous cached
 * stencil contents are cleared (scoped to the old clip's extents), the
 * clip polygon is drawn with color writes off and GL_REPLACE, then the
 * stencil func is left as EQUAL so subsequent drawing is masked.
 * Returns UNSUPPORTED when no stencil buffer can be ensured. */
616 static cairo_int_status_t
617 _cairo_gl_composite_setup_painted_clipping (cairo_gl_composite_t *setup,
618 cairo_gl_context_t *ctx,
620 cairo_bool_t equal_clip)
622 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
624 cairo_gl_surface_t *dst = setup->dst;
625 cairo_clip_t *clip = setup->clip;
626 cairo_clip_t *cached_clip = ctx->clip;
628 if (_cairo_gl_can_use_scissor_for_clip (clip)) {
629 _scissor_to_box (dst, &clip->boxes[0]);
630 goto disable_stencil_buffer_and_return;
633 if (! _cairo_gl_ensure_stencil (ctx, setup->dst)) {
634 status = CAIRO_INT_STATUS_UNSUPPORTED;
635 goto disable_stencil_buffer_and_return;
638 /* The clip is not rectangular, so use the stencil buffer. */
639 if (! ctx->states_cache.depth_mask ) {
640 glDepthMask (GL_TRUE);
641 ctx->states_cache.depth_mask = TRUE;
643 glEnable (GL_STENCIL_TEST);
/* NOTE(review): an early-success path (presumably when the cached
 * clip equals the new clip) appears elided just above this return. */
646 return CAIRO_INT_STATUS_SUCCESS;
648 /* Clear the stencil buffer of previous cached clip */
650 _cairo_gl_scissor_to_rectangle (dst, _cairo_clip_get_extents (cached_clip));
652 glClear (GL_STENCIL_BUFFER_BIT);
653 _disable_scissor_buffer ();
656 glStencilOp (GL_REPLACE, GL_REPLACE, GL_REPLACE);
657 glStencilFunc (GL_EQUAL, 1, 0xffffffff);
658 glColorMask (0, 0, 0, 0);
660 status = _cairo_gl_msaa_compositor_draw_clip (ctx, setup, clip);
662 if (unlikely (status)) {
663 glColorMask (1, 1, 1, 1);
664 goto disable_stencil_buffer_and_return;
667 /* We want to only render to the stencil buffer, so draw everything now.
668 Flushing also unbinds the VBO, which we want to rebind for regular
670 _cairo_gl_composite_flush (ctx);
671 _cairo_gl_composite_setup_vbo (ctx, vertex_size);
673 glColorMask (1, 1, 1, 1);
674 glStencilOp (GL_KEEP, GL_KEEP, GL_KEEP);
675 glStencilFunc (GL_EQUAL, 1, 0xffffffff);
676 return CAIRO_INT_STATUS_SUCCESS;
678 disable_stencil_buffer_and_return:
679 _disable_stencil_buffer ();
/* Dispatch clipping setup: flush when the clip state differs from the
 * cached one, cache the new clip region/clip, then either enable the
 * scissor (region clip), delegate to the painted (stencil/scissor)
 * path for a path clip, or disable both tests when unclipped. */
683 static cairo_int_status_t
684 _cairo_gl_composite_setup_clipping (cairo_gl_composite_t *setup,
685 cairo_gl_context_t *ctx,
688 cairo_int_status_t status;
689 cairo_bool_t same_clip;
691 if (! ctx->clip && ! setup->clip && ! ctx->clip_region)
694 same_clip = _cairo_clip_equal (ctx->clip, setup->clip);
695 if (! _cairo_gl_context_is_flushed (ctx) &&
696 (! cairo_region_equal (ctx->clip_region, setup->clip_region) ||
698 _cairo_gl_composite_flush (ctx);
700 cairo_region_destroy (ctx->clip_region);
701 ctx->clip_region = cairo_region_reference (setup->clip_region);
703 assert (!setup->clip_region || !setup->clip);
705 if (ctx->clip_region) {
706 _disable_stencil_buffer ();
707 glEnable (GL_SCISSOR_TEST);
708 return CAIRO_INT_STATUS_SUCCESS;
712 status = _cairo_gl_composite_setup_painted_clipping (setup,
/* Cache a copy of the clip so the next call can detect "same clip". */
717 _cairo_clip_destroy (ctx->clip);
718 ctx->clip = _cairo_clip_copy (setup->clip);
725 _disable_stencil_buffer ();
726 _disable_scissor_buffer ();
727 return CAIRO_INT_STATUS_SUCCESS;
/* Bring the GL context into the state described by SETUP: choose the
 * shader (component-alpha variants included), compute the per-vertex
 * layout (position + source + mask [+ coverage]), install operands and
 * span attributes, program the blend operator, and bind the shader's
 * uniforms when starting from a flushed state. */
731 _cairo_gl_set_operands_and_operator (cairo_gl_composite_t *setup,
732 cairo_gl_context_t *ctx,
733 cairo_bool_t multisampling)
735 unsigned int dst_size, src_size, mask_size, vertex_size;
736 cairo_status_t status;
737 cairo_gl_shader_t *shader;
738 cairo_bool_t component_alpha;
739 cairo_operator_t op = setup->op;
740 cairo_surface_t *mask_surface = NULL;
743 setup->mask.type == CAIRO_GL_OPERAND_TEXTURE &&
744 setup->mask.texture.attributes.has_component_alpha;
746 /* Do various magic for component alpha */
747 if (component_alpha) {
748 status = _cairo_gl_composite_begin_component_alpha (ctx, setup);
749 if (unlikely (status))
752 if (ctx->pre_shader) {
753 _cairo_gl_composite_flush (ctx);
754 ctx->pre_shader = NULL;
758 status = _cairo_gl_get_shader_by_type (ctx,
763 CAIRO_GL_SHADER_IN_CA_SOURCE :
764 CAIRO_GL_SHADER_IN_NORMAL,
766 if (unlikely (status)) {
767 ctx->pre_shader = NULL;
770 if (ctx->current_shader != shader)
771 _cairo_gl_composite_flush (ctx);
773 status = CAIRO_STATUS_SUCCESS;
775 dst_size = 2 * sizeof (GLfloat);
776 src_size = _cairo_gl_operand_get_vertex_size (&setup->src);
777 mask_size = _cairo_gl_operand_get_vertex_size (&setup->mask);
778 vertex_size = dst_size + src_size + mask_size;
/* Span rendering appends one extra float of coverage per vertex. */
781 vertex_size += sizeof (GLfloat);
783 _cairo_gl_composite_setup_vbo (ctx, vertex_size);
785 _cairo_gl_context_setup_operand (ctx, CAIRO_GL_TEX_SOURCE, &setup->src, vertex_size, dst_size);
786 _cairo_gl_context_setup_operand (ctx, CAIRO_GL_TEX_MASK, &setup->mask, vertex_size, dst_size + src_size);
788 _cairo_gl_context_setup_spans (ctx, vertex_size, dst_size + src_size + mask_size);
790 ctx->dispatch.DisableVertexAttribArray (CAIRO_GL_COVERAGE_ATTRIB_INDEX);
794 /* XXX: Shoot me - we have converted CLEAR to DEST_OUT,
795 so the dst_factor would be GL_ONE_MINUS_SRC_ALPHA, if the
796 mask is a surface and mask content not content_alpha, we want to use
797 GL_ONE_MINUS_SRC_COLOR, otherwise, we use GL_ONE_MINUS_SRC_ALPHA
799 if (setup->mask.type == CAIRO_GL_OPERAND_TEXTURE)
800 mask_surface = &setup->mask.texture.surface->base;
801 if (op == CAIRO_OPERATOR_CLEAR &&
803 mask_surface != NULL &&
804 cairo_surface_get_content (mask_surface) == CAIRO_CONTENT_ALPHA)
805 component_alpha = FALSE;
807 _cairo_gl_set_operator (ctx, setup->op, component_alpha);
809 if (_cairo_gl_context_is_flushed (ctx)) {
810 if (ctx->pre_shader) {
811 _cairo_gl_set_shader (ctx, ctx->pre_shader);
812 _cairo_gl_composite_bind_to_shader (ctx, setup);
814 _cairo_gl_set_shader (ctx, shader);
815 _cairo_gl_composite_bind_to_shader (ctx, setup);
/* Acquire the GL context, target the destination surface (optionally
 * MSAA), enable blending via the states cache, set operands/operator
 * and clipping.  On failure the context is released before returning;
 * on success *CTX_OUT holds the acquired context. */
822 _cairo_gl_composite_begin_multisample (cairo_gl_composite_t *setup,
823 cairo_gl_context_t **ctx_out,
824 cairo_bool_t multisampling)
826 cairo_gl_context_t *ctx;
827 cairo_status_t status;
831 status = _cairo_gl_context_acquire (setup->dst->base.device, &ctx);
832 if (unlikely (status))
835 _cairo_gl_context_set_destination (ctx, setup->dst, multisampling);
836 if (ctx->states_cache.blend_enabled == FALSE) {
838 ctx->states_cache.blend_enabled = TRUE;
840 _cairo_gl_set_operands_and_operator (setup, ctx, multisampling);
842 status = _cairo_gl_composite_setup_clipping (setup, ctx, ctx->vertex_size);
843 if (unlikely (status))
849 if (unlikely (status))
850 status = _cairo_gl_context_release (ctx, status);
/* Non-multisampled convenience wrapper around
 * _cairo_gl_composite_begin_multisample. */
856 _cairo_gl_composite_begin (cairo_gl_composite_t *setup,
857 cairo_gl_context_t **ctx_out)
859 return _cairo_gl_composite_begin_multisample (setup, ctx_out, FALSE);
/* Draw the queued triangle-strip indices.  With a pre-shader active
 * (component-alpha OVER), draw twice: DEST_OUT with the pre-shader,
 * then ADD with the previous shader.  The index array is truncated
 * afterwards. */
863 _cairo_gl_composite_draw_tristrip (cairo_gl_context_t *ctx)
865 cairo_array_t* indices = &ctx->tristrip_indices;
866 const unsigned short *indices_array = _cairo_array_index_const (indices, 0);
868 if (ctx->pre_shader) {
869 cairo_gl_shader_t *prev_shader = ctx->current_shader;
871 _cairo_gl_set_shader (ctx, ctx->pre_shader);
872 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_DEST_OUT, TRUE);
873 glDrawElements (GL_TRIANGLE_STRIP, _cairo_array_num_elements (indices), GL_UNSIGNED_SHORT, indices_array);
875 _cairo_gl_set_shader (ctx, prev_shader);
876 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_ADD, TRUE);
879 glDrawElements (GL_TRIANGLE_STRIP, _cairo_array_num_elements (indices), GL_UNSIGNED_SHORT, indices_array);
880 _cairo_array_truncate (indices, 0);
/* Draw the queued indices as GL_LINE_STRIP (or GL_LINES when the draw
 * mode says so).  Same two-pass pre-shader handling as the tristrip
 * path; the index array is truncated afterwards. */
884 _cairo_gl_composite_draw_line (cairo_gl_context_t *ctx)
886 GLenum type = GL_LINE_STRIP;
887 cairo_array_t* indices = &ctx->tristrip_indices;
888 const unsigned short *indices_array = _cairo_array_index_const (indices, 0);
890 if (ctx->draw_mode == CAIRO_GL_LINES)
893 if (ctx->pre_shader) {
894 cairo_gl_shader_t *prev_shader = ctx->current_shader;
896 _cairo_gl_set_shader (ctx, ctx->pre_shader);
897 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_DEST_OUT, TRUE);
898 glDrawElements (type, _cairo_array_num_elements (indices), GL_UNSIGNED_SHORT, indices_array);
900 _cairo_gl_set_shader (ctx, prev_shader);
901 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_ADD, TRUE);
904 glDrawElements (type, _cairo_array_num_elements (indices), GL_UNSIGNED_SHORT, indices_array);
905 _cairo_array_truncate (indices, 0);
/* Draw COUNT vertices as triangles from the bound VBO; two-pass
 * (DEST_OUT then ADD) when a pre-shader is active. */
909 _cairo_gl_composite_draw_triangles (cairo_gl_context_t *ctx,
912 if (! ctx->pre_shader) {
913 glDrawArrays (GL_TRIANGLES, 0, count);
915 cairo_gl_shader_t *prev_shader = ctx->current_shader;
917 _cairo_gl_set_shader (ctx, ctx->pre_shader);
918 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_DEST_OUT, TRUE);
919 glDrawArrays (GL_TRIANGLES, 0, count);
921 _cairo_gl_set_shader (ctx, prev_shader);
922 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_ADD, TRUE);
923 glDrawArrays (GL_TRIANGLES, 0, count);
/* Draw COUNT triangle vertices once per clip-region rectangle, with
 * the scissor set to each rectangle; without a clip region, draw
 * once unscissored. */
928 _cairo_gl_composite_draw_triangles_with_clip_region (cairo_gl_context_t *ctx,
931 int i, num_rectangles;
933 if (!ctx->clip_region) {
934 _cairo_gl_composite_draw_triangles (ctx, count);
938 num_rectangles = cairo_region_num_rectangles (ctx->clip_region);
939 for (i = 0; i < num_rectangles; i++) {
940 cairo_rectangle_int_t rect;
942 cairo_region_get_rectangle (ctx->clip_region, i, &rect);
944 _cairo_gl_scissor_to_rectangle (ctx->current_target, &rect);
945 _cairo_gl_composite_draw_triangles (ctx, count);
/* Unmap/finalize the vertex buffer before drawing.
 * NOTE(review): body elided in this view — confirm against full file. */
950 _cairo_gl_composite_unmap_vertex_buffer (cairo_gl_context_t *ctx)
/* Submit all queued geometry: unmap the VBO, then dispatch to the
 * line/tristrip/triangle draw path according to the current primitive
 * and draw mode.  Afterwards unlock the glyph caches and image cache
 * that were pinned while vertices referenced them.  No-op when the
 * context is already flushed. */
956 _cairo_gl_composite_flush (cairo_gl_context_t *ctx)
961 if (_cairo_gl_context_is_flushed (ctx))
964 count = ctx->vb_offset / ctx->vertex_size;
965 _cairo_gl_composite_unmap_vertex_buffer (ctx);
967 if (ctx->primitive_type == CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS) {
968 if (ctx->draw_mode == CAIRO_GL_LINE_STRIP ||
969 ctx->draw_mode == CAIRO_GL_LINES)
970 _cairo_gl_composite_draw_line (ctx);
972 _cairo_gl_composite_draw_tristrip (ctx);
974 assert (ctx->primitive_type == CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
975 _cairo_gl_composite_draw_triangles_with_clip_region (ctx, count);
/* NOTE(review): ARRAY_LENGTH over &ctx->glyph_cache (pointer to the
 * whole array) looks suspicious vs. ARRAY_LENGTH (ctx->glyph_cache);
 * verify against the ARRAY_LENGTH macro definition. */
978 for (i = 0; i < ARRAY_LENGTH (&ctx->glyph_cache); i++)
979 _cairo_gl_glyph_cache_unlock (&ctx->glyph_cache[i]);
981 _cairo_gl_image_cache_unlock (ctx);
/* Ensure the VBO can accept N_VERTICES of the current vertex size for
 * PRIMITIVE_TYPE: flush on primitive-type change and when the buffer
 * would overflow CAIRO_GL_VBO_SIZE. */
985 _cairo_gl_composite_prepare_buffer (cairo_gl_context_t *ctx,
986 unsigned int n_vertices,
987 cairo_gl_primitive_type_t primitive_type)
989 if (ctx->primitive_type != primitive_type) {
990 _cairo_gl_composite_flush (ctx);
991 ctx->primitive_type = primitive_type;
994 if (ctx->vb_offset + n_vertices * ctx->vertex_size > CAIRO_GL_VBO_SIZE)
995 _cairo_gl_composite_flush (ctx);
/* Append the per-vertex data for OPERAND at device position (x, y) to
 * *VB, advancing the write pointer:
 *  - CONSTANT (color attribute): 4 ubytes packed via the fi union.
 *  - gradient types: (x, y) transformed by the gradient matrix.
 *  - TEXTURE: (x, y) transformed by the surface attributes' matrix;
 *    atlas textures also emit the p1/p2 start/stop coordinates.
 * NOTE(review): the lines that store the transformed s/t into *vb are
 * elided in this view. */
999 _cairo_gl_composite_operand_emit (cairo_gl_operand_t *operand,
1004 switch (operand->type) {
1006 case CAIRO_GL_OPERAND_COUNT:
1008 case CAIRO_GL_OPERAND_NONE:
1010 case CAIRO_GL_OPERAND_CONSTANT:
1011 if (operand->use_color_attribute) {
1012 fi.bytes[0] = operand->constant.color[0] * 255;
1013 fi.bytes[1] = operand->constant.color[1] * 255;
1014 fi.bytes[2] = operand->constant.color[2] * 255;
1015 fi.bytes[3] = operand->constant.color[3] * 255;
1019 case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
1020 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
1021 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
1022 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
1027 cairo_matrix_transform_point (&operand->gradient.m, &s, &t);
1033 case CAIRO_GL_OPERAND_TEXTURE:
1035 cairo_surface_attributes_t *src_attributes = &operand->texture.attributes;
1039 cairo_matrix_transform_point (&src_attributes->matrix, &s, &t);
1043 if (operand->texture.use_atlas) {
1044 *(*vb)++ = operand->texture.p1.x;
1045 *(*vb)++ = operand->texture.p1.y;
1046 *(*vb)++ = operand->texture.p2.x;
1047 *(*vb)++ = operand->texture.p2.y;
/* Append one vertex at (x, y) to the VBO: position, then source and
 * mask operand data, then the packed coverage alpha byte; advances
 * vb_offset by one vertex stride.  NOTE(review): the position-store
 * lines are elided in this view. */
1055 _cairo_gl_composite_emit_vertex (cairo_gl_context_t *ctx,
1060 GLfloat *vb = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];
1065 _cairo_gl_composite_operand_emit (&ctx->operands[CAIRO_GL_TEX_SOURCE], &vb, x, y);
1066 _cairo_gl_composite_operand_emit (&ctx->operands[CAIRO_GL_TEX_MASK ], &vb, x, y);
1073 fi.bytes[3] = alpha;
1077 ctx->vb_offset += ctx->vertex_size;
/* Emit a vertex at a fixed-point cairo point, converting to double. */
1081 _cairo_gl_composite_emit_point (cairo_gl_context_t *ctx,
1082 const cairo_point_t *point,
1085 _cairo_gl_composite_emit_vertex (ctx,
1086 _cairo_fixed_to_double (point->x),
1087 _cairo_fixed_to_double (point->y),
/* Emit an axis-aligned rectangle as two triangles (6 vertices), all
 * with coverage ALPHA.  Switches the draw mode back to plain vertex
 * emission (flushing if needed) and reserves VBO space first. */
1092 _cairo_gl_composite_emit_rect (cairo_gl_context_t *ctx,
1099 if (ctx->draw_mode != CAIRO_GL_VERTEX) {
1100 _cairo_gl_composite_flush (ctx);
1101 ctx->draw_mode = CAIRO_GL_VERTEX;
1104 _cairo_gl_composite_prepare_buffer (ctx, 6,
1105 CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
1107 _cairo_gl_composite_emit_vertex (ctx, x1, y1, alpha);
1108 _cairo_gl_composite_emit_vertex (ctx, x2, y1, alpha);
1109 _cairo_gl_composite_emit_vertex (ctx, x1, y2, alpha);
1111 _cairo_gl_composite_emit_vertex (ctx, x2, y1, alpha);
1112 _cairo_gl_composite_emit_vertex (ctx, x2, y2, alpha);
1113 _cairo_gl_composite_emit_vertex (ctx, x1, y2, alpha);
/* Append one glyph vertex: device position plus source operand data;
 * glyph-cache texture coordinates are supplied directly rather than
 * derived from a matrix.  NOTE(review): the position/texcoord store
 * lines are elided in this view. */
1117 _cairo_gl_composite_emit_glyph_vertex (cairo_gl_context_t *ctx,
1123 GLfloat *vb = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];
1128 _cairo_gl_composite_operand_emit (&ctx->operands[CAIRO_GL_TEX_SOURCE], &vb, x, y);
1133 ctx->vb_offset += ctx->vertex_size;
/* Emit one glyph quad as two triangles (6 vertices), pairing device
 * coordinates with glyph-cache texture coordinates.  Resets the draw
 * mode to vertex emission and reserves VBO space first. */
1137 _cairo_gl_composite_emit_glyph (cairo_gl_context_t *ctx,
1147 if (ctx->draw_mode != CAIRO_GL_VERTEX) {
1148 _cairo_gl_composite_flush (ctx);
1149 ctx->draw_mode = CAIRO_GL_VERTEX;
1152 _cairo_gl_composite_prepare_buffer (ctx, 6,
1153 CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
1155 _cairo_gl_composite_emit_glyph_vertex (ctx, x1, y1, glyph_x1, glyph_y1);
1156 _cairo_gl_composite_emit_glyph_vertex (ctx, x2, y1, glyph_x2, glyph_y1);
1157 _cairo_gl_composite_emit_glyph_vertex (ctx, x1, y2, glyph_x1, glyph_y2);
1159 _cairo_gl_composite_emit_glyph_vertex (ctx, x2, y1, glyph_x2, glyph_y1);
1160 _cairo_gl_composite_emit_glyph_vertex (ctx, x2, y2, glyph_x2, glyph_y2);
1161 _cairo_gl_composite_emit_glyph_vertex (ctx, x1, y2, glyph_x1, glyph_y2);
1165 _cairo_gl_composite_fini (cairo_gl_composite_t *setup)
1167 _cairo_gl_operand_destroy (&setup->src);
1168 _cairo_gl_operand_destroy (&setup->mask);
1172 _cairo_gl_composite_set_operator (cairo_gl_composite_t *setup,
1173 cairo_operator_t op,
1174 cairo_bool_t assume_component_alpha)
1176 if (assume_component_alpha) {
1177 if (op != CAIRO_OPERATOR_CLEAR &&
1178 op != CAIRO_OPERATOR_OVER &&
1179 op != CAIRO_OPERATOR_ADD)
1180 return UNSUPPORTED ("unsupported component alpha operator");
1182 if (! _cairo_gl_operator_is_supported (op))
1183 return UNSUPPORTED ("unsupported operator");
1187 return CAIRO_STATUS_SUCCESS;
1191 _cairo_gl_composite_init (cairo_gl_composite_t *setup,
1192 cairo_operator_t op,
1193 cairo_gl_surface_t *dst,
1194 cairo_bool_t assume_component_alpha)
1196 cairo_status_t status;
1198 memset (setup, 0, sizeof (cairo_gl_composite_t));
1200 status = _cairo_gl_composite_set_operator (setup, op,
1201 assume_component_alpha);
1206 setup->clip_region = dst->clip_region;
1208 return CAIRO_STATUS_SUCCESS;
1211 static cairo_int_status_t
1212 _cairo_gl_composite_append_vertex_indices (cairo_gl_context_t *ctx,
1213 int number_of_new_indices,
1214 cairo_bool_t is_connected)
1216 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
1217 cairo_array_t *indices = &ctx->tristrip_indices;
1218 int number_of_indices = _cairo_array_num_elements (indices);
1219 unsigned short current_vertex_index = 0;
1222 assert (number_of_new_indices > 0);
1224 /* If any preexisting triangle triangle strip indices exist on this
1225 context, we insert a set of degenerate triangles from the last
1226 preexisting vertex to our first one. */
1227 if (number_of_indices > 0 && is_connected) {
1228 const unsigned short *indices_array = _cairo_array_index_const (indices, 0);
1229 current_vertex_index = indices_array[number_of_indices - 1];
1231 status = _cairo_array_append (indices, ¤t_vertex_index);
1232 if (unlikely (status))
1235 current_vertex_index++;
1236 status =_cairo_array_append (indices, ¤t_vertex_index);
1237 if (unlikely (status))
1240 current_vertex_index = (unsigned short) number_of_indices;
1242 for (i = 0; i < number_of_new_indices; i++) {
1243 status = _cairo_array_append (indices, ¤t_vertex_index);
1244 current_vertex_index++;
1245 if (unlikely (status))
1249 return CAIRO_STATUS_SUCCESS;
1253 _cairo_gl_composite_emit_quad_as_tristrip (cairo_gl_context_t *ctx,
1254 cairo_gl_composite_t *setup,
1255 const cairo_point_t quad[4])
1257 if (ctx->draw_mode != CAIRO_GL_VERTEX) {
1258 _cairo_gl_composite_flush (ctx);
1259 ctx->draw_mode = CAIRO_GL_VERTEX;
1262 _cairo_gl_composite_prepare_buffer (ctx, 4,
1263 CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS);
1265 _cairo_gl_composite_emit_point (ctx, &quad[0], 0);
1266 _cairo_gl_composite_emit_point (ctx, &quad[1], 0);
1268 /* Cairo stores quad vertices in counter-clockwise order, but we need to
1269 emit them from top to bottom in the triangle strip, so we need to reverse
1270 the order of the last two vertices. */
1271 _cairo_gl_composite_emit_point (ctx, &quad[3], 0);
1272 _cairo_gl_composite_emit_point (ctx, &quad[2], 0);
1274 return _cairo_gl_composite_append_vertex_indices (ctx, 4, TRUE);
1278 _cairo_gl_composite_emit_triangle_as_tristrip (cairo_gl_context_t *ctx,
1279 cairo_gl_composite_t *setup,
1280 const cairo_point_t triangle[3])
1282 if (ctx->draw_mode != CAIRO_GL_VERTEX) {
1283 _cairo_gl_composite_flush (ctx);
1284 ctx->draw_mode = CAIRO_GL_VERTEX;
1287 _cairo_gl_composite_prepare_buffer (ctx, 3,
1288 CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS);
1290 _cairo_gl_composite_emit_point (ctx, &triangle[0], 0);
1291 _cairo_gl_composite_emit_point (ctx, &triangle[1], 0);
1292 _cairo_gl_composite_emit_point (ctx, &triangle[2], 0);
1293 return _cairo_gl_composite_append_vertex_indices (ctx, 3, TRUE);
1297 _cairo_gl_composite_emit_point_as_single_line (cairo_gl_context_t *ctx,
1298 const cairo_point_t point[2])
1300 int num_indices = 2;
1301 if (ctx->draw_mode != CAIRO_GL_LINES)
1302 _cairo_gl_composite_flush (ctx);
1304 ctx->draw_mode = CAIRO_GL_LINES;
1306 _cairo_gl_composite_prepare_buffer (ctx, 2,
1307 CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS);
1309 _cairo_gl_composite_emit_point (ctx, &point[0], 0);
1310 _cairo_gl_composite_emit_point (ctx, &point[1], 0);
1311 return _cairo_gl_composite_append_vertex_indices (ctx, num_indices, FALSE);