1 /* cairo - a vector graphics library with display and print output
3 * Copyright © 2009 Eric Anholt
4 * Copyright © 2009 Chris Wilson
5 * Copyright © 2005,2010 Red Hat, Inc
7 * This library is free software; you can redistribute it and/or
8 * modify it either under the terms of the GNU Lesser General Public
9 * License version 2.1 as published by the Free Software Foundation
10 * (the "LGPL") or, at your option, under the terms of the Mozilla
11 * Public License Version 1.1 (the "MPL"). If you do not alter this
12 * notice, a recipient may use your version of this file under either
13 * the MPL or the LGPL.
15 * You should have received a copy of the LGPL along with this library
16 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
18 * You should have received a copy of the MPL along with this library
19 * in the file COPYING-MPL-1.1
21 * The contents of this file are subject to the Mozilla Public License
22 * Version 1.1 (the "License"); you may not use this file except in
23 * compliance with the License. You may obtain a copy of the License at
24 * http://www.mozilla.org/MPL/
26 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
27 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
28 * the specific language governing rights and limitations.
30 * The Original Code is the cairo graphics library.
32 * The Initial Developer of the Original Code is Red Hat, Inc.
35 * Benjamin Otte <otte@gnome.org>
36 * Carl Worth <cworth@cworth.org>
37 * Chris Wilson <chris@chris-wilson.co.uk>
38 * Eric Anholt <eric@anholt.net>
43 #include "cairo-gl-private.h"
45 #include "cairo-composite-rectangles-private.h"
46 #include "cairo-compositor-private.h"
47 #include "cairo-default-context-private.h"
48 #include "cairo-error-private.h"
49 #include "cairo-image-surface-inline.h"
50 #include "cairo-surface-backend-private.h"
51 #include "cairo-surface-shadow-private.h"
52 #include "cairo-surface-scale-translate-private.h"
53 #include "cairo-ttrace.h"
/* Forward declarations: the GL surface backend vtable (defined at the
 * bottom of this file) and the flush entry point, both referenced
 * before their definitions. */
55 static const cairo_surface_backend_t _cairo_gl_surface_backend;
58 _cairo_gl_surface_flush (void *abstract_surface, unsigned flags);
/* Return TRUE iff @surface was created by this GL backend (identified
 * by its backend vtable pointer). */
60 static cairo_bool_t _cairo_surface_is_gl (cairo_surface_t *surface)
62 return surface->backend == &_cairo_gl_surface_backend;
/* Return a scratch GL surface suitable for rendering a shadow of a
 * width x height region, reusing the per-context cache slot
 * ctx->shadow_scratch_surfaces[0] when its size fits.  A cached
 * surface that is too small (or, presumably, wastefully large) is
 * destroyed and replaced.  On success the returned surface carries an
 * extra reference owned by the caller; *width_out/*height_out report
 * the effective shadow extents (clamped against the scratch size).
 * NOTE(review): has_blur appears to influence the sizing policy —
 * confirm against the full implementation. */
65 static cairo_surface_t *
66 _cairo_gl_surface_shadow_surface (void *surface,
67 const cairo_bool_t has_blur,
68 int width, int height,
69 int *width_out, int *height_out)
71 CAIRO_TRACE_BEGIN (__func__);
72 int shadow_width, shadow_height;
73 cairo_gl_surface_t *shadow_surface = NULL;
75 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
76 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
78 CAIRO_TRACE_END (__func__);
/* Try the cached scratch surface first. */
82 shadow_surface = ctx->shadow_scratch_surfaces[0];
85 shadow_width = shadow_surface->width;
86 shadow_height = shadow_surface->height;
/* Cached surface large enough in both dimensions: reuse it as-is. */
89 if(shadow_width >= width &&
90 shadow_height >= height) {
93 CAIRO_TRACE_END (__func__);
94 return cairo_surface_reference (&shadow_surface->base);
97 cairo_surface_destroy (&shadow_surface->base);
98 shadow_surface = NULL;
/* Cached surface much too small (and still below the scratch cap):
 * discard it so a bigger one is allocated below. */
102 if (shadow_width * 2 < width &&
103 shadow_height * 2 < height) {
104 if (shadow_width < MAX_SCRATCH_SIZE ||
105 shadow_height < MAX_SCRATCH_SIZE) {
106 cairo_surface_destroy (&shadow_surface->base);
107 shadow_surface = NULL;
/* Cached surface much too large: discard it to reclaim memory. */
110 else if (shadow_width > 4 * width &&
111 shadow_height > 4 * height) {
112 cairo_surface_destroy (&shadow_surface->base);
113 shadow_surface = NULL;
/* No usable cached surface: grow a size from MIN_SCRATCH_SIZE,
 * clamped to MAX_SCRATCH_SIZE in each dimension. */
118 if (! shadow_surface) {
119 shadow_width = shadow_height = MIN_SCRATCH_SIZE;
121 while (shadow_width * 2 < width) {
123 if (shadow_width == MAX_SCRATCH_SIZE)
125 else if (shadow_width > MAX_SCRATCH_SIZE) {
130 while (shadow_height * 2 < height) {
132 if (shadow_height == MAX_SCRATCH_SIZE)
134 else if (shadow_height > MAX_SCRATCH_SIZE) {
135 shadow_height *= 0.5;
141 while (shadow_width < width) {
143 if (shadow_width == MAX_SCRATCH_SIZE)
145 else if (shadow_width > MAX_SCRATCH_SIZE) {
150 while (shadow_height < height) {
152 if (shadow_height == MAX_SCRATCH_SIZE)
154 else if (shadow_height > MAX_SCRATCH_SIZE) {
155 shadow_height *= 0.5;
/* Allocate the new scratch surface and cache it on the context. */
162 shadow_surface = (cairo_gl_surface_t *)
163 _cairo_gl_surface_create_scratch (ctx,
164 CAIRO_CONTENT_COLOR_ALPHA,
167 if (unlikely (shadow_surface->base.status)) {
168 cairo_surface_destroy (&shadow_surface->base);
169 CAIRO_TRACE_END (__func__);
/* Drop the device reference so the cached scratch does not keep the
 * device alive on its own. */
173 _cairo_surface_release_device_reference (&shadow_surface->base);
176 ctx->shadow_scratch_surfaces[0] = shadow_surface;
178 shadow_surface->needs_to_cache = FALSE;
179 shadow_surface->force_no_cache = TRUE;
/* Report the effective shadow extents, clamped to the scratch size. */
182 *height_out = height;
185 while (*width_out > shadow_width) {
189 while (*height_out > shadow_height) {
194 if (*width_out > MAX_SCRATCH_SIZE)
196 if (*height_out > MAX_SCRATCH_SIZE)
200 CAIRO_TRACE_END (__func__);
201 return cairo_surface_reference (&shadow_surface->base);
/* Return a width x height mask surface for shadow rendering, cached in
 * ctx->shadow_masks[index].  Unlike the scratch surfaces above, a mask
 * is only reused when its size matches exactly; otherwise it is
 * destroyed and recreated.  The caller owns the returned reference. */
204 static cairo_surface_t *
205 _cairo_gl_surface_shadow_mask_surface (void *surface,
206 int width, int height,
209 CAIRO_TRACE_BEGIN (__func__);
210 cairo_gl_surface_t *mask_surface = NULL;
212 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
213 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
215 CAIRO_TRACE_END (__func__);
220 CAIRO_TRACE_END (__func__);
224 mask_surface = ctx->shadow_masks[index];
/* Exact-size match required for reuse; evict otherwise. */
227 if (mask_surface->width != width ||
228 mask_surface->height != height) {
229 cairo_surface_destroy (&mask_surface->base);
231 ctx->shadow_masks[index] = NULL;
235 if (! mask_surface) {
236 mask_surface = (cairo_gl_surface_t *)
237 _cairo_gl_surface_create_scratch (ctx,
238 CAIRO_CONTENT_COLOR_ALPHA,
241 if (unlikely (mask_surface->base.status)) {
242 cairo_surface_destroy (&mask_surface->base);
243 CAIRO_TRACE_END (__func__);
/* Avoid a self-keeping device reference for the cached mask. */
246 _cairo_surface_release_device_reference (&mask_surface->base);
249 ctx->shadow_masks[index] = mask_surface;
251 mask_surface->needs_to_cache = FALSE;
252 mask_surface->force_no_cache = TRUE;
253 CAIRO_TRACE_END (__func__);
254 return cairo_surface_reference (&mask_surface->base);
/* Return a scratch surface for glyph shadow rendering.  Two cache
 * slots are used — ctx->shadow_scratch_surfaces[1] and [2] — selected
 * by @for_source (source vs. non-source use, per the slot assignment
 * below).  A cached surface is reused when at least width x height;
 * the caller owns the returned reference. */
257 static cairo_surface_t *
258 _cairo_gl_surface_glyph_shadow_surface (void *surface,
259 int width, int height,
260 cairo_bool_t for_source)
262 CAIRO_TRACE_BEGIN (__func__);
263 int shadow_width, shadow_height;
264 cairo_gl_surface_t *shadow_surface = NULL;
266 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
267 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
269 CAIRO_TRACE_END (__func__);
274 shadow_surface = ctx->shadow_scratch_surfaces[1];
276 shadow_surface = ctx->shadow_scratch_surfaces[2];
/* Reuse only if the cached surface covers the requested extents. */
278 if (shadow_surface) {
279 shadow_width = shadow_surface->width;
280 shadow_height = shadow_surface->height;
282 if (shadow_width < width ||
283 shadow_height < height) {
284 cairo_surface_destroy (&shadow_surface->base);
285 shadow_surface = NULL;
289 if (! shadow_surface) {
290 shadow_surface = (cairo_gl_surface_t *)
291 _cairo_gl_surface_create_scratch (ctx,
292 CAIRO_CONTENT_COLOR_ALPHA,
294 if (unlikely (shadow_surface->base.status)) {
295 cairo_surface_destroy (&shadow_surface->base);
296 CAIRO_TRACE_END (__func__);
/* Cached scratch must not hold its own device reference. */
299 _cairo_surface_release_device_reference (&shadow_surface->base);
303 ctx->shadow_scratch_surfaces[1] = shadow_surface;
305 ctx->shadow_scratch_surfaces[2] = shadow_surface;
307 shadow_surface->needs_to_cache = FALSE;
308 shadow_surface->force_no_cache = TRUE;
310 CAIRO_TRACE_END (__func__);
311 return cairo_surface_reference (&shadow_surface->base);
/* Return a width x height mask surface for glyph shadows, cached in
 * ctx->shadow_masks[index + 2] (slots 0/1 are used by the non-glyph
 * variant above).  Exact size match is required for reuse; the caller
 * owns the returned reference. */
314 static cairo_surface_t *
315 _cairo_gl_surface_glyph_shadow_mask_surface (void *surface,
316 int width, int height,
319 CAIRO_TRACE_BEGIN (__func__);
320 cairo_gl_surface_t *mask_surface = NULL;
322 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
323 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
325 CAIRO_TRACE_END (__func__);
330 CAIRO_TRACE_END (__func__);
334 mask_surface = ctx->shadow_masks[index + 2];
/* Exact-size match required for reuse; evict otherwise. */
337 if (mask_surface->width != width ||
338 mask_surface->height != height) {
339 cairo_surface_destroy (&mask_surface->base);
341 ctx->shadow_masks[index + 2] = NULL;
345 if (! mask_surface) {
346 mask_surface = (cairo_gl_surface_t *)
347 _cairo_gl_surface_create_scratch (ctx,
351 if (unlikely (mask_surface->base.status)) {
352 cairo_surface_destroy (&mask_surface->base);
353 CAIRO_TRACE_END (__func__);
356 _cairo_surface_release_device_reference (&mask_surface->base);
359 ctx->shadow_masks[index + 2] = mask_surface;
361 mask_surface->needs_to_cache = FALSE;
362 mask_surface->force_no_cache = TRUE;
364 CAIRO_TRACE_END (__func__);
365 return cairo_surface_reference (&mask_surface->base);
/* Map a pixman format to the GLES2 internal format / format / type
 * triple used for texture upload.  Outputs:
 *   *has_alpha  — whether the format carries an alpha channel
 *   *needs_swap — whether bytes must be swapped before upload
 *                 (GLES2 has no GL_UNPACK_SWAP_BYTES, so byte order
 *                 is resolved host-side relative to endianness).
 * Presumably returns a cairo_bool_t indicating whether the format is
 * supported at all — confirm against the full signature. */
369 _cairo_gl_get_image_format_and_type_gles2 (pixman_format_code_t pixman_format,
370 GLenum *internal_format, GLenum *format,
371 GLenum *type, cairo_bool_t *has_alpha,
372 cairo_bool_t *needs_swap)
374 cairo_bool_t is_little_endian = _cairo_is_little_endian ();
/* Cast to int silences -Wswitch for pixman formats not in the enum. */
378 switch ((int) pixman_format) {
379 case PIXMAN_a8r8g8b8:
380 *internal_format = GL_BGRA;
382 *type = GL_UNSIGNED_BYTE;
383 *needs_swap = !is_little_endian;
386 case PIXMAN_x8r8g8b8:
387 *internal_format = GL_BGRA;
389 *type = GL_UNSIGNED_BYTE;
391 *needs_swap = !is_little_endian;
394 case PIXMAN_a8b8g8r8:
395 *internal_format = GL_RGBA;
397 *type = GL_UNSIGNED_BYTE;
398 *needs_swap = !is_little_endian;
401 case PIXMAN_x8b8g8r8:
402 *internal_format = GL_RGBA;
404 *type = GL_UNSIGNED_BYTE;
406 *needs_swap = !is_little_endian;
409 case PIXMAN_b8g8r8a8:
410 *internal_format = GL_BGRA;
412 *type = GL_UNSIGNED_BYTE;
413 *needs_swap = is_little_endian;
416 case PIXMAN_b8g8r8x8:
417 *internal_format = GL_BGRA;
419 *type = GL_UNSIGNED_BYTE;
421 *needs_swap = is_little_endian;
425 *internal_format = GL_RGB;
427 *type = GL_UNSIGNED_BYTE;
428 *needs_swap = is_little_endian;
432 *internal_format = GL_RGB;
434 *type = GL_UNSIGNED_BYTE;
435 *needs_swap = !is_little_endian;
439 *internal_format = GL_RGB;
441 *type = GL_UNSIGNED_SHORT_5_6_5;
446 *internal_format = GL_RGB;
448 *type = GL_UNSIGNED_SHORT_5_6_5;
452 case PIXMAN_a1b5g5r5:
453 *internal_format = GL_RGBA;
455 *type = GL_UNSIGNED_SHORT_5_5_5_1;
459 case PIXMAN_x1b5g5r5:
460 *internal_format = GL_RGBA;
462 *type = GL_UNSIGNED_SHORT_5_5_5_1;
468 *internal_format = GL_ALPHA;
470 *type = GL_UNSIGNED_BYTE;
/* Map a pixman format to the desktop-GL internal format / format /
 * type triple for texture upload.  Desktop GL offers packed/REV types
 * (e.g. GL_UNSIGNED_INT_8_8_8_8_REV), so unlike the GLES2 variant no
 * host-side byte swapping decisions based on endianness are visible
 * here.  The trailing fall-through case list enumerates formats that
 * are not supported by this path. */
480 _cairo_gl_get_image_format_and_type_gl (pixman_format_code_t pixman_format,
481 GLenum *internal_format, GLenum *format,
482 GLenum *type, cairo_bool_t *has_alpha,
483 cairo_bool_t *needs_swap)
488 switch (pixman_format) {
489 case PIXMAN_a8r8g8b8:
490 *internal_format = GL_RGBA;
492 *type = GL_UNSIGNED_INT_8_8_8_8_REV;
494 case PIXMAN_x8r8g8b8:
495 *internal_format = GL_RGB;
497 *type = GL_UNSIGNED_INT_8_8_8_8_REV;
500 case PIXMAN_a8b8g8r8:
501 *internal_format = GL_RGBA;
503 *type = GL_UNSIGNED_INT_8_8_8_8_REV;
505 case PIXMAN_x8b8g8r8:
506 *internal_format = GL_RGB;
508 *type = GL_UNSIGNED_INT_8_8_8_8_REV;
511 case PIXMAN_b8g8r8a8:
512 *internal_format = GL_RGBA;
514 *type = GL_UNSIGNED_INT_8_8_8_8;
516 case PIXMAN_b8g8r8x8:
517 *internal_format = GL_RGB;
519 *type = GL_UNSIGNED_INT_8_8_8_8;
523 *internal_format = GL_RGB;
525 *type = GL_UNSIGNED_BYTE;
528 *internal_format = GL_RGB;
530 *type = GL_UNSIGNED_BYTE;
533 *internal_format = GL_RGB;
535 *type = GL_UNSIGNED_SHORT_5_6_5;
538 *internal_format = GL_RGB;
540 *type = GL_UNSIGNED_SHORT_5_6_5_REV;
542 case PIXMAN_a1r5g5b5:
543 *internal_format = GL_RGBA;
545 *type = GL_UNSIGNED_SHORT_1_5_5_5_REV;
547 case PIXMAN_x1r5g5b5:
548 *internal_format = GL_RGB;
550 *type = GL_UNSIGNED_SHORT_1_5_5_5_REV;
553 case PIXMAN_a1b5g5r5:
554 *internal_format = GL_RGBA;
556 *type = GL_UNSIGNED_SHORT_1_5_5_5_REV;
558 case PIXMAN_x1b5g5r5:
559 *internal_format = GL_RGB;
561 *type = GL_UNSIGNED_SHORT_1_5_5_5_REV;
565 *internal_format = GL_ALPHA;
567 *type = GL_UNSIGNED_BYTE;
/* Unsupported formats: fall through to the failure path. */
570 #if PIXMAN_VERSION >= PIXMAN_VERSION_ENCODE(0,27,2)
571 case PIXMAN_a8r8g8b8_sRGB:
573 case PIXMAN_a2b10g10r10:
574 case PIXMAN_x2b10g10r10:
575 case PIXMAN_a4r4g4b4:
576 case PIXMAN_x4r4g4b4:
577 case PIXMAN_a4b4g4r4:
578 case PIXMAN_x4b4g4r4:
581 case PIXMAN_a2r2g2b2:
582 case PIXMAN_a2b2g2r2:
585 /* case PIXMAN_x4c4: */
590 case PIXMAN_a1r1g1b1:
591 case PIXMAN_a1b1g1r1:
598 case PIXMAN_x2r10g10b10:
599 case PIXMAN_a2r10g10b10:
600 case PIXMAN_r8g8b8x8:
601 case PIXMAN_r8g8b8a8:
602 case PIXMAN_x14r6g6b6:
609 * Extracts pixel data from an image surface.
/* Copies a width x height sub-rectangle (starting at x, y — the offset
 * variables are declared in elided lines) of @image into a freshly
 * malloc'd, tightly packed buffer (row stride == width * cpp).
 * Returns CAIRO_STATUS_NO_MEMORY if the allocation fails; the caller
 * owns and must free the buffer. */
611 static cairo_status_t
612 _cairo_gl_surface_extract_image_data (cairo_image_surface_t *image,
614 int width, int height,
/* Bytes per pixel derived from the pixman format. */
617 int cpp = PIXMAN_FORMAT_BPP (image->pixman_format) / 8;
618 char *data = _cairo_malloc_ab (width * height, cpp);
620 unsigned char *src = image->data + y * image->stride + x * cpp;
623 if (unlikely (data == NULL))
624 return CAIRO_STATUS_NO_MEMORY;
/* Copy row by row, advancing the source by its (possibly padded) stride. */
626 for (i = 0; i < height; i++) {
627 memcpy (dst, src, width * cpp);
628 src += image->stride;
634 return CAIRO_STATUS_SUCCESS;
/* Dispatch to the desktop-GL or GLES2 format-mapping helper based on
 * the context's GL flavor. */
638 _cairo_gl_get_image_format_and_type (cairo_gl_flavor_t flavor,
639 pixman_format_code_t pixman_format,
640 GLenum *internal_format, GLenum *format,
641 GLenum *type, cairo_bool_t *has_alpha,
642 cairo_bool_t *needs_swap)
644 if (flavor == CAIRO_GL_FLAVOR_DESKTOP)
645 return _cairo_gl_get_image_format_and_type_gl (pixman_format,
646 internal_format, format,
/* Non-desktop flavors (ES2/ES3) use the GLES2 mapping. */
650 return _cairo_gl_get_image_format_and_type_gles2 (pixman_format,
651 internal_format, format,
/* The GL backend supports only the Porter-Duff operators ordered
 * before CAIRO_OPERATOR_SATURATE in the cairo_operator_t enum. */
658 _cairo_gl_operator_is_supported (cairo_operator_t op)
660 return op < CAIRO_OPERATOR_SATURATE;
/* Initialize the surface's embedded GL operand so the surface can be
 * used directly as a texture source: zero the operand, point it at the
 * surface's texture, and set a matrix mapping surface coordinates to
 * [0,1] texture space (identity when the device requires
 * power-of-two textures — presumably normalization happens elsewhere
 * in that case). */
664 _cairo_gl_surface_embedded_operand_init (cairo_gl_surface_t *surface)
666 cairo_gl_operand_t *operand = &surface->operand;
667 cairo_surface_attributes_t *attributes = &operand->texture.attributes;
669 memset (operand, 0, sizeof (cairo_gl_operand_t));
671 operand->type = CAIRO_GL_OPERAND_TEXTURE;
672 operand->texture.surface = surface;
673 operand->texture.tex = surface->tex;
676 if (_cairo_gl_device_requires_power_of_two_textures (surface->base.device)) {
677 cairo_matrix_init_identity (&attributes->matrix);
/* Scale device coordinates into normalized [0,1] texture coordinates. */
679 cairo_matrix_init_scale (&attributes->matrix,
680 1.0 / surface->width,
681 1.0 / surface->height);
684 attributes->extend = CAIRO_EXTEND_NONE;
685 attributes->filter = CAIRO_FILTER_NEAREST;
/* Common initialization for every GL surface: run the generic
 * cairo_surface init with this backend's vtable, record the extents,
 * reset all cache/sync bookkeeping flags to their defaults, and set up
 * the embedded texture operand.  Width and height must be positive. */
689 _cairo_gl_surface_init (cairo_device_t *device,
690 cairo_gl_surface_t *surface,
691 cairo_content_t content,
692 int width, int height)
694 CAIRO_TRACE_BEGIN (__func__);
695 assert (width > 0 && height > 0);
697 _cairo_surface_init (&surface->base,
698 &_cairo_gl_surface_backend,
702 surface->width = width;
703 surface->height = height;
704 surface->needs_update = FALSE;
705 surface->size_changed = FALSE;
706 surface->needs_to_cache = FALSE;
707 surface->image_node = NULL;
708 surface->force_no_cache = FALSE;
/* Image-content scale defaults to 1:1 until a cached image differs. */
710 surface->image_content_scale_x = 1.0;
711 surface->image_content_scale_y = 1.0;
712 surface->blur_stage = CAIRO_GL_BLUR_STAGE_NONE;
714 surface->clip_on_stencil_buffer = NULL;
716 surface->content_synced = TRUE;
717 surface->content_cleared = FALSE;
719 _cairo_gl_surface_embedded_operand_init (surface);
720 CAIRO_TRACE_END (__func__);
/* TRUE iff width x height is positive and within the context's
 * maximum framebuffer size in both dimensions. */
724 _cairo_gl_surface_size_valid_for_context (cairo_gl_context_t *ctx,
725 int width, int height)
727 return width > 0 && height > 0 &&
728 width <= ctx->max_framebuffer_size &&
729 height <= ctx->max_framebuffer_size;
/* Convenience wrapper: validate width x height against the limits of
 * the surface's own GL context. */
733 _cairo_gl_surface_size_valid (cairo_gl_surface_t *surface,
734 int width, int height)
736 cairo_gl_context_t *ctx = (cairo_gl_context_t *)surface->base.device;
737 return _cairo_gl_surface_size_valid_for_context (ctx, width, height);
/* Allocate a cairo_gl_surface_t wrapping an existing GL texture
 * (texture name supplied via elided parameters): zero-allocate the
 * struct, run common init, inherit the context's MSAA/stencil
 * capabilities, then bind the texture and set NEAREST min/mag filters.
 * Returns a nil surface on allocation failure. */
740 static cairo_surface_t *
741 _cairo_gl_surface_create_scratch_for_texture (cairo_gl_context_t *ctx,
742 cairo_content_t content,
747 CAIRO_TRACE_BEGIN (__func__);
748 cairo_gl_surface_t *surface;
750 surface = calloc (1, sizeof (cairo_gl_surface_t));
751 if (unlikely (surface == NULL)) {
752 CAIRO_TRACE_END (__func__);
753 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_NO_MEMORY));
757 _cairo_gl_surface_init (&ctx->base, surface, content, width, height);
759 surface->supports_msaa = ctx->supports_msaa;
760 surface->num_samples = ctx->num_samples;
761 surface->supports_stencil = TRUE;
763 /* Create the texture used to store the surface's data. */
764 _cairo_gl_context_activate (ctx, CAIRO_GL_TEX_TEMP);
765 ctx->dispatch.BindTexture (ctx->tex_target, surface->tex);
766 ctx->dispatch.TexParameteri (ctx->tex_target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
767 ctx->dispatch.TexParameteri (ctx->tex_target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
769 CAIRO_TRACE_END (__func__);
770 return &surface->base;
/* Create a scratch GL surface backed by a freshly generated texture.
 * Chooses the GL texture format from @content (see the per-case
 * comments), allocates texture storage with TexImage2D, and marks the
 * surface as owning its texture.  @for_caching selects GL_ALPHA for
 * ALPHA content on pre-3.3 contexts, where upload-only use is safe
 * even though compositing with GL_ALPHA is not. */
773 static cairo_surface_t *
774 _create_scratch_internal (cairo_gl_context_t *ctx,
775 cairo_content_t content,
778 cairo_bool_t for_caching)
780 CAIRO_TRACE_BEGIN (__func__);
781 cairo_gl_surface_t *surface;
785 ctx->dispatch.GenTextures (1, &tex);
786 surface = (cairo_gl_surface_t *)
787 _cairo_gl_surface_create_scratch_for_texture (ctx, content,
789 if (unlikely (surface->base.status)) {
790 CAIRO_TRACE_END (__func__);
791 return &surface->base;
/* This surface owns the texture and must delete it on finish. */
794 surface->owns_tex = TRUE;
796 /* adjust the texture size after setting our real extents */
805 case CAIRO_CONTENT_COLOR_ALPHA:
808 case CAIRO_CONTENT_ALPHA:
809 /* When using GL_ALPHA, compositing doesn't work properly, but for
810 * caching surfaces, we are just uploading pixel data, so it isn't
812 if (for_caching && !ctx->is_gl33)
817 case CAIRO_CONTENT_COLOR:
818 /* GL_RGB is almost what we want here -- sampling 1 alpha when
819 * texturing, using 1 as destination alpha factor in blending,
820 * etc. However, when filtering with GL_CLAMP_TO_BORDER, the
821 * alpha channel of the border color will also be clamped to
822 * 1, when we actually want the border color we explicitly
823 * specified. So, we have to store RGBA, and fill the alpha
824 * channel with 1 when blending.
/* Allocate (uninitialized) texture storage for the chosen format. */
830 ctx->dispatch.TexImage2D (ctx->tex_target, 0, format,
832 format, GL_UNSIGNED_BYTE, NULL);
834 CAIRO_TRACE_END (__func__);
835 return &surface->base;
/* Create a scratch surface for compositing (for_caching == FALSE). */
839 _cairo_gl_surface_create_scratch (cairo_gl_context_t *ctx,
840 cairo_content_t content,
844 return _create_scratch_internal (ctx, content, width, height, FALSE);
/* Create a scratch surface for pixel-data caching (for_caching ==
 * TRUE, permitting GL_ALPHA storage for ALPHA content). */
848 _cairo_gl_surface_create_scratch_for_caching (cairo_gl_context_t *ctx,
849 cairo_content_t content,
853 return _create_scratch_internal (ctx, content, width, height, TRUE);
/* Clear @surface to @color using glClear.  Acquires the GL context,
 * flushes any pending composite aimed at this surface, selects the
 * clear target (renderbuffer on GLES flavors — see the FIXME below),
 * premultiplies the color, updates the cached clear color only when it
 * changed, and clears color (plus stencil/depth on the non-desktop
 * path, which also invalidates the cached stencil clip).  Marks the
 * surface clear/changed/unsynced before releasing the context. */
856 static cairo_status_t
857 _cairo_gl_surface_clear (cairo_gl_surface_t *surface,
858 const cairo_color_t *color)
860 CAIRO_TRACE_BEGIN (__func__);
861 cairo_gl_context_t *ctx;
862 cairo_status_t status;
865 status = _cairo_gl_context_acquire (surface->base.device, &ctx);
866 if (unlikely (status)) {
867 CAIRO_TRACE_END (__func__);
/* Flush pending geometry targeting this surface before clearing. */
871 if (ctx->current_target == surface)
872 _cairo_gl_composite_flush (ctx);
874 /* FIXME: for glesv3 and glesv2 with ANGLE extension of multisample
875 supports, it is much more expensive to paint texture back to
876 multisample renderbuffer. Therefore, instead of clear
877 texture, we clear the renderbuffer.
878 In case, the next draw render target is texture, it will
879 blit renderbuffer back to texture */
880 if (ctx->gl_flavor != CAIRO_GL_FLAVOR_DESKTOP)
881 _cairo_gl_context_set_destination (ctx, surface, TRUE);
883 _cairo_gl_context_set_destination (ctx, surface, surface->msaa_active);
/* Premultiply the clear color; alpha-only surfaces skip the RGB part. */
884 if (surface->base.content & CAIRO_CONTENT_COLOR) {
885 r = color->red * color->alpha;
886 g = color->green * color->alpha;
887 b = color->blue * color->alpha;
891 if (surface->base.content & CAIRO_CONTENT_ALPHA) {
897 _disable_scissor_buffer (ctx);
/* Only re-issue ClearColor when it differs from the cached state. */
898 if (ctx->states_cache.clear_red != r ||
899 ctx->states_cache.clear_green != g ||
900 ctx->states_cache.clear_blue != b ||
901 ctx->states_cache.clear_alpha != a) {
903 ctx->states_cache.clear_red = r;
904 ctx->states_cache.clear_green = g;
905 ctx->states_cache.clear_blue = b;
906 ctx->states_cache.clear_alpha = a;
908 ctx->dispatch.ClearColor (r, g, b, a);
911 /* optimize for mobile gl driver with deferred rendering */
912 if (ctx->gl_flavor == CAIRO_GL_FLAVOR_DESKTOP)
913 ctx->dispatch.Clear (GL_COLOR_BUFFER_BIT);
/* Non-desktop: stencil is cleared too, so the cached stencil clip is
 * no longer valid and must be dropped. */
915 if (surface->clip_on_stencil_buffer) {
916 _cairo_clip_destroy (surface->clip_on_stencil_buffer);
917 surface->clip_on_stencil_buffer = NULL;
919 ctx->dispatch.Clear (GL_COLOR_BUFFER_BIT | GL_STENCIL_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
923 surface->base.is_clear = TRUE;
925 surface->content_changed = TRUE;
926 surface->content_synced = FALSE;
927 surface->content_cleared = TRUE;
928 CAIRO_TRACE_END (__func__);
929 return _cairo_gl_context_release (ctx, status);
/* Create a scratch surface and clear it to transparent, matching the
 * cairo convention that new surfaces start out fully transparent.
 * Returns an error surface (and destroys the scratch) if either step
 * fails. */
932 static cairo_surface_t *
933 _cairo_gl_surface_create_and_clear_scratch (cairo_gl_context_t *ctx,
934 cairo_content_t content,
938 CAIRO_TRACE_BEGIN (__func__);
939 cairo_gl_surface_t *surface;
940 cairo_int_status_t status;
942 surface = (cairo_gl_surface_t *)
943 _cairo_gl_surface_create_scratch (ctx, content, width, height);
944 if (unlikely (surface->base.status)) {
945 CAIRO_TRACE_END (__func__);
946 return &surface->base;
949 /* Cairo surfaces start out initialized to transparent (black) */
950 status = _cairo_gl_surface_clear (surface, CAIRO_COLOR_TRANSPARENT);
951 if (unlikely (status)) {
952 cairo_surface_destroy (&surface->base);
953 CAIRO_TRACE_END (__func__);
954 return _cairo_surface_create_in_error (status);
957 CAIRO_TRACE_END (__func__);
958 return &surface->base;
/* Public API: create a GL surface of the given content and size on
 * @abstract_device.  Validates content, device presence/status/type
 * and size; falls back to an image surface when no device is given.
 * Never returns NULL — errors yield a nil surface whose status can be
 * inspected with cairo_surface_status(). */
962 cairo_gl_surface_create (cairo_device_t *abstract_device,
963 cairo_content_t content,
967 CAIRO_TRACE_BEGIN (__func__);
968 cairo_gl_context_t *ctx;
969 cairo_gl_surface_t *surface;
970 cairo_status_t status;
972 if (! CAIRO_CONTENT_VALID (content)) {
973 CAIRO_TRACE_END (__func__);
974 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_INVALID_CONTENT));
/* NULL device: degrade gracefully to a CPU image surface. */
977 if (abstract_device == NULL) {
978 CAIRO_TRACE_END (__func__);
979 return _cairo_image_surface_create_with_content (content, width, height);
982 if (abstract_device->status) {
983 CAIRO_TRACE_END (__func__);
984 return _cairo_surface_create_in_error (abstract_device->status);
987 if (abstract_device->backend->type != CAIRO_DEVICE_TYPE_GL) {
988 CAIRO_TRACE_END (__func__);
989 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_SURFACE_TYPE_MISMATCH));
992 status = _cairo_gl_context_acquire (abstract_device, &ctx);
993 if (unlikely (status)) {
994 CAIRO_TRACE_END (__func__);
995 return _cairo_surface_create_in_error (status);
998 if (! _cairo_gl_surface_size_valid_for_context (ctx, width, height)) {
999 status = _cairo_gl_context_release (ctx, status);
1000 CAIRO_TRACE_END (__func__);
1001 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_INVALID_SIZE));
1004 surface = (cairo_gl_surface_t *)
1005 _cairo_gl_surface_create_and_clear_scratch (ctx, content, width, height);
1006 if (unlikely (surface->base.status)) {
1007 status = _cairo_gl_context_release (ctx, surface->base.status);
1008 cairo_surface_destroy (&surface->base);
1009 CAIRO_TRACE_END (__func__);
1010 return _cairo_surface_create_in_error (status);
/* Context release can itself fail; treat that as surface creation
 * failure too. */
1013 status = _cairo_gl_context_release (ctx, status);
1014 if (unlikely (status)) {
1015 cairo_surface_destroy (&surface->base);
1016 CAIRO_TRACE_END (__func__);
1017 return _cairo_surface_create_in_error (status);
1020 CAIRO_TRACE_END (__func__);
1021 return &surface->base;
1023 slim_hidden_def (cairo_gl_surface_create);
1026 * cairo_gl_surface_create_for_texture:
1027 * @content: type of content in the surface
1028 * @tex: name of texture to use for storage of surface pixels
1029 * @width: width of the surface, in pixels
1030 * @height: height of the surface, in pixels
1032 * Creates a GL surface for the specified texture with the specified
1033 * content and dimensions. The texture must be kept around until the
1034 * #cairo_surface_t is destroyed or cairo_surface_finish() is called
1035 * on the surface. The initial contents of @tex will be used as the
1036 * initial image contents; you must explicitly clear the buffer,
1037 * using, for example, cairo_rectangle() and cairo_fill() if you want
1038 * it cleared. The format of @tex should be compatible with @content,
1039 * in the sense that it must have the color components required by
1042 * Return value: a pointer to the newly created surface. The caller
1043 * owns the surface and should call cairo_surface_destroy() when done
1046 * This function always returns a valid pointer, but it will return a
1047 * pointer to a "nil" surface if an error such as out of memory
1048 * occurs. You can use cairo_surface_status() to check for this.
/* Public API: wrap an existing GL texture (see the doc comment above)
 * in a cairo surface.  Validation mirrors cairo_gl_surface_create,
 * except a NULL device is an error here (CAIRO_STATUS_NULL_POINTER)
 * rather than an image-surface fallback, and the device-type mismatch
 * reports CAIRO_STATUS_DEVICE_TYPE_MISMATCH.  Never returns NULL. */
1053 cairo_gl_surface_create_for_texture (cairo_device_t *abstract_device,
1054 cairo_content_t content,
1059 CAIRO_TRACE_BEGIN (__func__);
1060 cairo_gl_context_t *ctx;
1061 cairo_gl_surface_t *surface;
1062 cairo_status_t status;
1064 if (! CAIRO_CONTENT_VALID (content)) {
1065 CAIRO_TRACE_END (__func__);
1066 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_INVALID_CONTENT));
1069 if (abstract_device == NULL) {
1070 CAIRO_TRACE_END (__func__);
1071 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_NULL_POINTER));
1074 if (abstract_device->status) {
1075 CAIRO_TRACE_END (__func__);
1076 return _cairo_surface_create_in_error (abstract_device->status);
1079 if (abstract_device->backend->type != CAIRO_DEVICE_TYPE_GL) {
1080 CAIRO_TRACE_END (__func__);
1081 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_DEVICE_TYPE_MISMATCH));
1084 status = _cairo_gl_context_acquire (abstract_device, &ctx);
1085 if (unlikely (status)) {
1086 CAIRO_TRACE_END (__func__);
1087 return _cairo_surface_create_in_error (status);
1090 if (! _cairo_gl_surface_size_valid_for_context (ctx, width, height)) {
1091 status = _cairo_gl_context_release (ctx, status);
1092 CAIRO_TRACE_END (__func__);
1093 return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_INVALID_SIZE));
1096 surface = (cairo_gl_surface_t *)
1097 _cairo_gl_surface_create_scratch_for_texture (ctx, content,
1098 tex, width, height);
1099 status = _cairo_gl_context_release (ctx, status);
1100 CAIRO_TRACE_END (__func__);
1101 return &surface->base;
1103 slim_hidden_def (cairo_gl_surface_create_for_texture);
/* Public API: update the recorded size of a window-backed (non-texture)
 * GL surface after the underlying drawable was resized.  Errors are
 * recorded on the surface via _cairo_surface_set_error: finished
 * surfaces, non-GL surfaces, and texture-backed surfaces are rejected.
 * The size_changed flag defers the actual GL-side resize. */
1107 cairo_gl_surface_set_size (cairo_surface_t *abstract_surface,
1111 cairo_gl_surface_t *surface = (cairo_gl_surface_t *) abstract_surface;
1113 if (unlikely (abstract_surface->status))
1115 if (unlikely (abstract_surface->finished)) {
1116 _cairo_surface_set_error (abstract_surface,
1117 _cairo_error (CAIRO_STATUS_SURFACE_FINISHED));
/* Only non-texture GL surfaces (windows/fbos) may be resized. */
1121 if (! _cairo_surface_is_gl (abstract_surface) ||
1122 _cairo_gl_surface_is_texture (surface)) {
1123 _cairo_surface_set_error (abstract_surface,
1124 _cairo_error (CAIRO_STATUS_SURFACE_TYPE_MISMATCH));
1128 if (surface->width != width || surface->height != height) {
1129 surface->size_changed = TRUE;
1130 surface->width = width;
1131 surface->height = height;
/* Public API: width in pixels of a GL surface (the elided non-GL
 * branch presumably returns 0 — confirm against the full source). */
1136 cairo_gl_surface_get_width (cairo_surface_t *abstract_surface)
1138 cairo_gl_surface_t *surface = (cairo_gl_surface_t *) abstract_surface;
1140 if (! _cairo_surface_is_gl (abstract_surface))
1143 return surface->width;
/* Public API: height in pixels of a GL surface (the elided non-GL
 * branch presumably returns 0 — confirm against the full source). */
1147 cairo_gl_surface_get_height (cairo_surface_t *abstract_surface)
1149 cairo_gl_surface_t *surface = (cairo_gl_surface_t *) abstract_surface;
1151 if (! _cairo_surface_is_gl (abstract_surface))
1154 return surface->height;
/* Public API: present a window-backed GL surface by swapping buffers.
 * Validates status/finished/type, then — for non-texture surfaces —
 * acquires the context, flushes pending compositing, binds the surface
 * as destination (needed for EGL swaps), calls the platform swap hook,
 * and invalidates the cached stencil clip (EGL 1.4 does not preserve
 * the stencil buffer across eglSwapBuffers). */
1158 cairo_gl_surface_swapbuffers (cairo_surface_t *abstract_surface)
1160 cairo_gl_surface_t *surface = (cairo_gl_surface_t *) abstract_surface;
1162 if (unlikely (abstract_surface->status)) {
1163 CAIRO_TRACE_END (__func__);
1166 if (unlikely (abstract_surface->finished)) {
1167 _cairo_surface_set_error (abstract_surface,
1168 _cairo_error (CAIRO_STATUS_SURFACE_FINISHED));
1169 CAIRO_TRACE_END (__func__);
1173 if (! _cairo_surface_is_gl (abstract_surface)) {
1174 _cairo_surface_set_error (abstract_surface,
1175 CAIRO_STATUS_SURFACE_TYPE_MISMATCH);
1177 CAIRO_TRACE_END (__func__);
/* Texture-backed surfaces have no window buffers to swap. */
1181 if (! _cairo_gl_surface_is_texture (surface)) {
1182 cairo_gl_context_t *ctx;
1183 cairo_status_t status;
1185 status = _cairo_gl_context_acquire (surface->base.device, &ctx);
1186 if (unlikely (status)) {
1187 CAIRO_TRACE_END (__func__);
1191 /* And in any case we should flush any pending operations. */
1192 _cairo_gl_composite_flush (ctx);
1194 /* For swapping on EGL, at least, we need a valid context/target. */
1195 _cairo_gl_context_set_destination (ctx, surface, FALSE);
1197 ctx->swap_buffers (ctx, surface);
1199 /* according to khronos specs on egl 1.4, stencil buffer is
1200 * not preserved after eglSwapBuffers */
1201 if (surface->clip_on_stencil_buffer) {
1202 _cairo_clip_destroy (surface->clip_on_stencil_buffer);
1203 surface->clip_on_stencil_buffer = NULL;
1206 status = _cairo_gl_context_release (ctx, status);
1208 status = _cairo_surface_set_error (abstract_surface, status);
1210 CAIRO_TRACE_END (__func__);
/* Backend create_similar hook: make a cleared GL scratch surface of
 * the requested content/size on the same device.  Oversized requests
 * fall back to a CPU image surface rather than failing. */
1213 static cairo_surface_t *
1214 _cairo_gl_surface_create_similar (void *abstract_surface,
1215 cairo_content_t content,
1219 CAIRO_TRACE_BEGIN (__func__);
1220 cairo_surface_t *surface = abstract_surface;
1221 cairo_gl_context_t *ctx;
1222 cairo_status_t status;
/* Too big for this GL context: hand back an image surface instead. */
1224 if (! _cairo_gl_surface_size_valid (abstract_surface, width, height)) {
1225 CAIRO_TRACE_END (__func__);
1226 return _cairo_image_surface_create_with_content (content, width, height);
1229 status = _cairo_gl_context_acquire (surface->device, &ctx);
1230 if (unlikely (status)) {
1231 CAIRO_TRACE_END (__func__);
1232 return _cairo_surface_create_in_error (status);
1235 surface = _cairo_gl_surface_create_and_clear_scratch (ctx, content, width, height);
1237 status = _cairo_gl_context_release (ctx, status);
1238 if (unlikely (status)) {
1239 cairo_surface_destroy (surface);
1240 CAIRO_TRACE_END (__func__);
1241 return _cairo_surface_create_in_error (status);
1244 CAIRO_TRACE_END (__func__);
/* Fill the alpha channel of a rectangle in @dst with 1.0: mask writes
 * to alpha only (ColorMask A-only), composite a solid opaque-black
 * source over the rect, then restore the full color mask.  Used after
 * uploading RGB data into RGBA storage (see the CAIRO_CONTENT_COLOR
 * note in _create_scratch_internal). */
1248 static cairo_int_status_t
1249 _cairo_gl_surface_fill_alpha_channel (cairo_gl_surface_t *dst,
1250 cairo_gl_context_t *ctx,
1252 int width, int height)
1254 CAIRO_TRACE_BEGIN (__func__);
1255 cairo_gl_composite_t setup;
1256 cairo_status_t status;
/* Restrict writes to the alpha channel only. */
1258 _cairo_gl_composite_flush (ctx);
1259 ctx->dispatch.ColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_TRUE);
1261 status = _cairo_gl_composite_init (&setup, CAIRO_OPERATOR_SOURCE,
1263 if (unlikely (status))
1266 _cairo_gl_composite_set_solid_source (&setup, CAIRO_COLOR_BLACK);
1268 status = _cairo_gl_composite_begin (&setup, &ctx);
1269 if (unlikely (status))
1272 _cairo_gl_context_emit_rect (ctx, x, y, x + width, y + height);
1274 status = _cairo_gl_context_release (ctx, status);
1277 _cairo_gl_composite_fini (&setup);
/* Restore the full RGBA color mask before returning. */
1279 _cairo_gl_composite_flush (ctx);
1280 ctx->dispatch.ColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
1282 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_draw_image:
 *
 * Upload a width x height rectangle of CPU-side pixels from @src (read at
 * (src_x, src_y)) into the GL surface @dst at (dst_x, dst_y).
 *
 * Visible flow: acquire the GL context; on GLES2/GLES3 (and on GL 3.3 for
 * ALPHA content) possibly paint @src into an RGBA pixman clone first; pick a
 * GL format/type pair; then either TexSubImage2D directly into @dst's
 * texture, or bounce through a scratch GL surface plus a clipped SOURCE
 * paint when @dst is not a texture.
 *
 * NOTE(review): this listing is an excerpt with elided continuation lines
 * (several argument lists and statements are truncated), so the comments
 * below only describe what is visible here.  The @force_flush parameter's
 * use is not visible in this excerpt.
 */
1287 _cairo_gl_surface_draw_image (cairo_gl_surface_t *dst,
1288 cairo_image_surface_t *src,
1289 int src_x, int src_y,
1290 int width, int height,
1291 int dst_x, int dst_y,
1292 cairo_bool_t force_flush)
1294 CAIRO_TRACE_BEGIN (__func__);
1295 GLenum internal_format, format, type;
1296 cairo_bool_t has_alpha, needs_swap;
1297 cairo_image_surface_t *clone = NULL;
1298 cairo_gl_context_t *ctx;
1300 cairo_image_surface_t *rgba_clone = NULL;
1301 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
1303 status = _cairo_gl_context_acquire (dst->base.device, &ctx);
1304 if (unlikely (status)) {
1305 CAIRO_TRACE_END (__func__);
/* GLES path: decide whether @src must be converted to a native-endian RGBA
 * pixman format (or a1 -> a8) before upload. */
1309 if (_cairo_gl_get_flavor (&ctx->dispatch) == CAIRO_GL_FLAVOR_ES2 ||
1310 _cairo_gl_get_flavor (&ctx->dispatch) == CAIRO_GL_FLAVOR_ES3) {
1311 pixman_format_code_t pixman_format;
1312 cairo_surface_pattern_t pattern;
1313 cairo_bool_t require_conversion = FALSE;
1314 pixman_format = _cairo_is_little_endian () ? PIXMAN_a8b8g8r8 : PIXMAN_r8g8b8a8;
1316 if (src->base.content != CAIRO_CONTENT_ALPHA) {
1317 if (src->pixman_format != pixman_format)
1318 require_conversion = TRUE;
1320 else if (dst->base.content != CAIRO_CONTENT_ALPHA)
1321 require_conversion = TRUE;
/* 1-bit alpha cannot be uploaded directly; widen to a8. */
1323 if (src->pixman_format == PIXMAN_a1) {
1324 pixman_format = PIXMAN_a8;
1325 require_conversion = TRUE;
1329 if (require_conversion) {
1330 src->base.is_clear = FALSE;
/* Convert by painting @src into a freshly allocated clone with the
 * chosen pixman format (creation arguments elided in this excerpt). */
1331 rgba_clone = (cairo_image_surface_t *)
1332 _cairo_image_surface_create_with_pixman_format (NULL,
1337 if (unlikely (rgba_clone->base.status))
1340 _cairo_pattern_init_for_surface (&pattern, &src->base);
1341 status = _cairo_surface_paint (&rgba_clone->base,
1342 CAIRO_OPERATOR_SOURCE,
1343 &pattern.base, NULL);
1344 _cairo_pattern_fini (&pattern.base);
1345 if (unlikely (status))
1350 } else if (ctx->is_gl33 && src->base.content == CAIRO_CONTENT_ALPHA) {
1351 /* use RGBA for ALPHA */
1352 pixman_format_code_t pixman_format;
1353 cairo_surface_pattern_t pattern;
1354 src->base.is_clear = FALSE;
1355 pixman_format = _cairo_is_little_endian () ? PIXMAN_a8b8g8r8 : PIXMAN_r8g8b8a8;
1357 rgba_clone = (cairo_image_surface_t *)
1358 _cairo_image_surface_create_with_pixman_format (NULL,
1363 if (unlikely (rgba_clone->base.status))
1366 _cairo_pattern_init_for_surface (&pattern, &src->base);
1367 status = _cairo_surface_paint (&rgba_clone->base,
1368 CAIRO_OPERATOR_SOURCE,
1369 &pattern.base, NULL);
1370 _cairo_pattern_fini (&pattern.base);
1371 if (unlikely (status))
/* If the (possibly cloned) source's pixman format has no direct GL
 * format/type mapping, coerce to a supported image format and retry. */
1377 if (! _cairo_gl_get_image_format_and_type (ctx->gl_flavor,
1385 cairo_bool_t is_supported;
1387 clone = _cairo_image_surface_coerce (src);
1388 if (unlikely (status = clone->base.status))
1392 _cairo_gl_get_image_format_and_type (ctx->gl_flavor,
1393 clone->pixman_format,
/* A coerced image must always map to a GL format without byte swapping. */
1399 assert (is_supported);
1400 assert (!needs_swap);
1404 cpp = PIXMAN_FORMAT_BPP (src->pixman_format) / 8;
1407 status = _cairo_gl_surface_flush (&dst->base, 0);
1408 if (unlikely (status))
1412 if (_cairo_gl_surface_is_texture (dst)) {
1413 void *data_start = src->data + src_y * src->stride + src_x * cpp;
1414 void *data_start_gles2 = NULL;
1417 * Due to GL_UNPACK_ROW_LENGTH missing in GLES2 we have to extract the
1418 * image data ourselves in some cases. In particular, we must extract
1420 * a. we don't want full-length lines or
1421 * b. the row stride cannot be handled by GL itself using a 4 byte
1422 * alignment constraint
1424 if (src->stride < 0 ||
1425 (ctx->gl_flavor == CAIRO_GL_FLAVOR_ES2 &&
1426 (src->width * cpp < src->stride - 3 ||
1427 width != src->width)))
1429 ctx->dispatch.PixelStorei (GL_UNPACK_ALIGNMENT, 1);
/* Copies the sub-rectangle into a tightly packed buffer
 * (data_start_gles2); further arguments elided in this excerpt. */
1430 status = _cairo_gl_surface_extract_image_data (src, src_x, src_y,
1433 if (unlikely (status))
1436 data_start = data_start_gles2;
1440 /* When cpp != 4, setting GL_UNPACK_ALIGNMENT to cpp
1441 causes many failures in cairo tests with respect to
1443 ctx->dispatch.PixelStorei (GL_UNPACK_ALIGNMENT, 4);
1444 if (ctx->gl_flavor == CAIRO_GL_FLAVOR_DESKTOP ||
1445 ctx->gl_flavor == CAIRO_GL_FLAVOR_ES3)
1446 ctx->dispatch.PixelStorei (GL_UNPACK_ROW_LENGTH, src->stride / cpp);
1449 /* we must resolve the renderbuffer to texture before we
1451 status = _cairo_gl_surface_resolve_multisampling (dst);
1452 if (unlikely (status)) {
/* free(NULL) is a no-op, so this is safe on the non-extracted path. */
1453 free (data_start_gles2);
1457 _cairo_gl_context_activate (ctx, CAIRO_GL_TEX_TEMP);
1458 ctx->dispatch.BindTexture (ctx->tex_target, dst->tex);
1459 ctx->dispatch.TexParameteri (ctx->tex_target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
1460 ctx->dispatch.TexParameteri (ctx->tex_target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
1461 ctx->dispatch.TexSubImage2D (ctx->tex_target, 0,
1462 dst_x, dst_y, width, height,
1463 format, type, data_start);
1465 free (data_start_gles2);
1467 /* If we just treated some rgb-only data as rgba, then we have to
1468 * go back and fix up the alpha channel where we filled in this
1472 _cairo_gl_surface_fill_alpha_channel (dst, ctx,
1477 dst->content_synced = FALSE;
/* Non-texture destination: upload into a scratch GL surface, then paint
 * it onto @dst with OPERATOR_SOURCE under a rectangle clip. */
1479 cairo_surface_t *tmp;
1481 tmp = _cairo_gl_surface_create_scratch (ctx,
1484 if (unlikely (tmp->status))
1487 status = _cairo_gl_surface_draw_image ((cairo_gl_surface_t *) tmp,
1492 if (status == CAIRO_INT_STATUS_SUCCESS) {
1493 cairo_surface_pattern_t tmp_pattern;
1494 cairo_rectangle_int_t r;
1497 _cairo_pattern_init_for_surface (&tmp_pattern, tmp);
1498 cairo_matrix_init_translate (&tmp_pattern.base.matrix,
1500 tmp_pattern.base.filter = CAIRO_FILTER_NEAREST;
1501 tmp_pattern.base.extend = CAIRO_EXTEND_NONE;
1507 clip = _cairo_clip_intersect_rectangle (NULL, &r);
1508 status = _cairo_surface_paint (&dst->base,
1509 CAIRO_OPERATOR_SOURCE,
1512 _cairo_clip_destroy (clip);
1513 _cairo_pattern_fini (&tmp_pattern.base);
1516 cairo_surface_destroy (tmp);
1520 status = _cairo_gl_context_release (ctx, status);
/* Drop the temporary CPU-side copies, if any were made. */
1523 cairo_surface_destroy (&clone->base);
1526 cairo_surface_destroy (&rgba_clone->base);
1528 if (status == CAIRO_INT_STATUS_SUCCESS) {
1529 dst->content_changed = TRUE;
1530 dst->content_synced = FALSE;
1533 CAIRO_TRACE_END (__func__);
1538 /* to avoid warning : defined but not used [-Wunused-function] */
1539 static int _cairo_gl_surface_flavor (cairo_gl_surface_t *surface)
1541 cairo_gl_context_t *ctx = (cairo_gl_context_t *)surface->base.device;
1542 return ctx->gl_flavor;
/*
 * _cairo_gl_surface_finish:
 *
 * Backend finish hook.  Detaches this surface from the GL context's cached
 * operands and current-target pointer, then deletes every GL object the
 * surface owns: framebuffer, depth/stencil renderbuffer, texture (only if
 * owned), the MSAA framebuffer/renderbuffers, its image-cache node, and any
 * stencil-based clip.  Finally releases the context.
 *
 * NOTE(review): excerpt has elided lines (e.g. braces and an apparent
 * "if (surface->fb)"-style guard before line 1572); comments describe only
 * what is visible.
 */
1546 static cairo_status_t
1547 _cairo_gl_surface_finish (void *abstract_surface)
1549 CAIRO_TRACE_BEGIN (__func__);
1550 cairo_gl_surface_t *surface = abstract_surface;
1551 cairo_status_t status;
1552 cairo_gl_context_t *ctx;
1554 status = _cairo_gl_context_acquire (surface->base.device, &ctx);
1555 if (unlikely (status)) {
1556 CAIRO_TRACE_END (__func__);
/* If the context still references this surface as a source or mask
 * operand, drop that reference before the texture is deleted. */
1560 if ((ctx->operands[CAIRO_GL_TEX_SOURCE].type == CAIRO_GL_OPERAND_TEXTURE ||
1561 ctx->operands[CAIRO_GL_TEX_SOURCE].type == CAIRO_GL_OPERAND_GAUSSIAN) &&
1562 ctx->operands[CAIRO_GL_TEX_SOURCE].texture.surface == surface)
1563 _cairo_gl_context_destroy_operand (ctx, CAIRO_GL_TEX_SOURCE)
1564 if ((ctx->operands[CAIRO_GL_TEX_MASK].type == CAIRO_GL_OPERAND_TEXTURE ||
1565 ctx->operands[CAIRO_GL_TEX_MASK].type == CAIRO_GL_OPERAND_GAUSSIAN) &&
1566 ctx->operands[CAIRO_GL_TEX_MASK].texture.surface == surface)
1567 _cairo_gl_context_destroy_operand (ctx, CAIRO_GL_TEX_MASK);
1568 if (ctx->current_target == surface)
1569 ctx->current_target = NULL;
/* Delete GL objects; the owns_tex flag prevents destroying a texture that
 * was bound externally (see cairo_gl_surface_set_binding_texture). */
1572 ctx->dispatch.DeleteFramebuffers (1, &surface->fb);
1573 if (surface->depth_stencil)
1574 ctx->dispatch.DeleteRenderbuffers (1, &surface->depth_stencil);
1575 if (surface->owns_tex)
1576 ctx->dispatch.DeleteTextures (1, &surface->tex);
1578 if (surface->msaa_depth_stencil)
1579 ctx->dispatch.DeleteRenderbuffers (1, &surface->msaa_depth_stencil);
1580 if (surface->msaa_fb)
1581 ctx->dispatch.DeleteFramebuffers (1, &surface->msaa_fb);
1582 if (surface->msaa_rb)
1583 ctx->dispatch.DeleteRenderbuffers (1, &surface->msaa_rb);
/* Unpin and remove this surface's node from the context's image cache. */
1585 if (surface->image_node) {
1586 surface->image_node->node.pinned = FALSE;
1587 _cairo_rtree_node_remove (&ctx->image_cache->rtree,
1588 &surface->image_node->node);
1591 if (surface->clip_on_stencil_buffer)
1592 _cairo_clip_destroy (surface->clip_on_stencil_buffer);
1594 CAIRO_TRACE_END (__func__);
1595 return _cairo_gl_context_release (ctx, status);
/*
 * _cairo_gl_surface_map_to_image:
 *
 * Download the @extents rectangle of the GL surface into a newly created
 * image surface (returned; errors are reported via an in-error image
 * surface, never NULL).  Chooses a pixman/GL format pair from the surface
 * content, falls back to byte-order RGBA when BGRA reads are unavailable,
 * reads pixels with glReadPixels, and manually flips non-texture (window)
 * surfaces unless GL_MESA_pack_invert can do it for us.
 *
 * NOTE(review): excerpt has elided lines (braces, some arguments, the loop
 * header of the flip at lines ~1739-1740); comments describe only what is
 * visible.
 */
1598 static cairo_image_surface_t *
1599 _cairo_gl_surface_map_to_image (void *abstract_surface,
1600 const cairo_rectangle_int_t *extents)
1602 CAIRO_TRACE_BEGIN (__func__);
1603 cairo_gl_surface_t *surface = abstract_surface;
1604 cairo_image_surface_t *image;
1605 cairo_gl_context_t *ctx;
1606 GLenum format, type;
1607 pixman_format_code_t pixman_format;
1609 cairo_bool_t flipped, mesa_invert;
1610 cairo_status_t status;
1613 status = _cairo_gl_context_acquire (surface->base.device, &ctx);
1614 if (unlikely (status)) {
1615 CAIRO_TRACE_END (__func__);
1616 return _cairo_image_surface_create_in_error (status);
1619 /* Want to use a switch statement here but the compiler gets whiny. */
1620 if (surface->base.content == CAIRO_CONTENT_COLOR_ALPHA) {
1622 pixman_format = PIXMAN_a8r8g8b8;
1623 type = GL_UNSIGNED_INT_8_8_8_8_REV;
1625 } else if (surface->base.content == CAIRO_CONTENT_COLOR) {
1627 pixman_format = PIXMAN_x8r8g8b8;
1628 type = GL_UNSIGNED_INT_8_8_8_8_REV;
1630 } else if (surface->base.content == CAIRO_CONTENT_ALPHA) {
1632 pixman_format = PIXMAN_a8;
1633 type = GL_UNSIGNED_BYTE;
/* Unknown content: bail out (return on the elided following line). */
1637 CAIRO_TRACE_END (__func__);
1641 /*if (_cairo_gl_surface_flavor (surface) == CAIRO_GL_FLAVOR_ES2 ||
1642 _cairo_gl_surface_flavor (surface) == CAIRO_GL_FLAVOR_ES3)*/ {
1643 /* If only RGBA is supported, we must download data in a compatible
1644 * format. This means that pixman will convert the data on the CPU when
1645 * interacting with other image surfaces. For ALPHA, GLES2 does not
1646 * support GL_PACK_ROW_LENGTH anyway, and this makes sure that the
1647 * pixman image that is created has row_stride = row_width * bpp. */
1648 if (surface->base.content == CAIRO_CONTENT_ALPHA || !ctx->can_read_bgra) {
1649 cairo_bool_t little_endian = _cairo_is_little_endian ();
1652 if (surface->base.content == CAIRO_CONTENT_COLOR) {
1653 pixman_format = little_endian ?
1654 PIXMAN_x8b8g8r8 : PIXMAN_r8g8b8x8;
1656 pixman_format = little_endian ?
1657 PIXMAN_a8b8g8r8 : PIXMAN_r8g8b8a8;
1661 /* GLES2 only supports GL_UNSIGNED_BYTE. */
1662 type = GL_UNSIGNED_BYTE;
1666 image = (cairo_image_surface_t*)
1667 _cairo_image_surface_create_with_pixman_format (NULL,
1672 if (unlikely (image->base.status)) {
1673 status = _cairo_gl_context_release (ctx, status);
1674 CAIRO_TRACE_END (__func__);
/* Map device coordinates of the extents origin to (0, 0) of the image. */
1678 cairo_surface_set_device_offset (&image->base, -extents->x, -extents->y);
1680 /* If the original surface has not been modified or
1681 * is clear, we can avoid downloading data. */
1682 if (surface->base.is_clear || surface->base.serial == 0) {
1683 status = _cairo_gl_context_release (ctx, status);
1684 CAIRO_TRACE_END (__func__);
1688 /* This is inefficient, as we'd rather just read the thing without making
1689 * it the destination. But then, this is the fallback path, so let's not
1690 * fall back instead.
1692 _cairo_gl_composite_flush (ctx);
1694 _cairo_gl_context_set_destination (ctx, surface, FALSE);
/* Window-system (non-texture) surfaces are stored bottom-up in GL. */
1696 flipped = ! _cairo_gl_surface_is_texture (surface);
1697 mesa_invert = flipped && ctx->has_mesa_pack_invert;
1699 ctx->dispatch.PixelStorei (GL_PACK_ALIGNMENT, cpp);
1700 if (ctx->gl_flavor == CAIRO_GL_FLAVOR_DESKTOP ||
1701 ctx->gl_flavor == CAIRO_GL_FLAVOR_ES3)
1702 ctx->dispatch.PixelStorei (GL_PACK_ROW_LENGTH, image->stride / cpp);
1705 ctx->dispatch.PixelStorei (GL_PACK_INVERT_MESA, 1);
/* For a flipped read, re-express the y origin in GL's bottom-up space. */
1709 y = surface->height - extents->y - extents->height;
1711 ctx->dispatch.ReadPixels (extents->x, y,
1712 extents->width, extents->height,
1713 format, type, image->data);
1716 ctx->dispatch.PixelStorei (GL_PACK_INVERT_MESA, 0);
1718 status = _cairo_gl_context_release (ctx, status);
1719 if (unlikely (status)) {
1720 cairo_surface_destroy (&image->base);
1721 CAIRO_TRACE_END (__func__);
1722 return _cairo_image_surface_create_in_error (status);
1725 /* We must invert the image manually if we lack GL_MESA_pack_invert */
1726 if (flipped && ! mesa_invert) {
/* Swap rows top<->bottom through a scratch row; use the stack buffer
 * when a row fits, otherwise a heap allocation. */
1727 uint8_t stack[1024], *row = stack;
1728 uint8_t *top = image->data;
1729 uint8_t *bot = image->data + (image->height-1)*image->stride;
1731 if (image->stride > (int)sizeof(stack)) {
1732 row = malloc (image->stride);
1733 if (unlikely (row == NULL)) {
1734 cairo_surface_destroy (&image->base);
1735 CAIRO_TRACE_END (__func__);
1736 return _cairo_image_surface_create_in_error (_cairo_error (CAIRO_STATUS_NO_MEMORY));
1741 memcpy (row, top, image->stride);
1742 memcpy (top, bot, image->stride);
1743 memcpy (bot, row, image->stride);
1744 top += image->stride;
1745 bot -= image->stride;
/* Pixels were just downloaded, so the image is definitely not clear. */
1752 image->base.is_clear = FALSE;
1754 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_source:
 *
 * Backend source hook: the GL surface acts as its own source.  When
 * @extents is provided it is filled with the full surface bounds.
 *
 * NOTE(review): lines 1763-1764 are elided in this excerpt — presumably an
 * "if (extents)" guard around the assignments below; verify against the
 * full file before assuming @extents may be NULL here.
 */
1758 static cairo_surface_t *
1759 _cairo_gl_surface_source (void *abstract_surface,
1760 cairo_rectangle_int_t *extents)
1762 cairo_gl_surface_t *surface = abstract_surface;
1765 extents->x = extents->y = 0;
1766 extents->width = surface->width;
1767 extents->height = surface->height;
1770 return &surface->base;
/*
 * _cairo_gl_surface_acquire_source_image:
 *
 * Backend hook: provide a CPU-readable image copy of the whole surface by
 * mapping its full extents.  @image_extra is unused (set to NULL); the
 * matching release hook simply destroys the image.  Returns the mapped
 * image's status (the map call reports errors via an in-error surface).
 */
1773 static cairo_status_t
1774 _cairo_gl_surface_acquire_source_image (void *abstract_surface,
1775 cairo_image_surface_t **image_out,
1778 cairo_gl_surface_t *surface = abstract_surface;
1779 cairo_rectangle_int_t extents;
1781 *image_extra = NULL;
1783 extents.x = extents.y = 0;
1784 extents.width = surface->width;
1785 extents.height = surface->height;
1787 *image_out = (cairo_image_surface_t *)
1788 _cairo_gl_surface_map_to_image (surface, &extents);
1789 return (*image_out)->base.status;
/*
 * _cairo_gl_surface_release_source_image:
 *
 * Counterpart of acquire_source_image: the acquired image is a private
 * copy, so releasing it just destroys that copy.  image_extra (parameter
 * line elided in this excerpt) is not used.
 */
1793 _cairo_gl_surface_release_source_image (void *abstract_surface,
1794 cairo_image_surface_t *image,
1797 cairo_surface_destroy (&image->base);
/*
 * _cairo_gl_surface_unmap_image:
 *
 * Backend unmap hook: write the mapped image back into the GL surface at
 * the position recorded in the image's inverse device transform, then
 * finish and destroy the mapped image.  (The src_x/src_y and trailing
 * arguments of the draw_image call are on lines elided in this excerpt.)
 */
1800 static cairo_int_status_t
1801 _cairo_gl_surface_unmap_image (void *abstract_surface,
1802 cairo_image_surface_t *image)
1804 CAIRO_TRACE_BEGIN (__func__);
1805 cairo_int_status_t status;
1807 status = _cairo_gl_surface_draw_image (abstract_surface, image,
1809 image->width, image->height,
1810 image->base.device_transform_inverse.x0,
1811 image->base.device_transform_inverse.y0,
/* The mapped image is owned by the map/unmap pair: tear it down here. */
1814 cairo_surface_finish (&image->base);
1815 cairo_surface_destroy (&image->base);
1817 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_get_extents:
 *
 * Backend hook reporting the surface bounds.  NOTE(review): the lines
 * setting rectangle->x and rectangle->y (and the return value) are elided
 * in this excerpt — presumably x = y = 0 and return TRUE, matching the
 * other extent helpers in this file; confirm against the full source.
 */
1822 _cairo_gl_surface_get_extents (void *abstract_surface,
1823 cairo_rectangle_int_t *rectangle)
1825 cairo_gl_surface_t *surface = abstract_surface;
1829 rectangle->width = surface->width;
1830 rectangle->height = surface->height;
/*
 * _cairo_gl_surface_flush:
 *
 * Backend flush hook: flush any pending composite batch that references
 * this surface (as source, mask, or render target), resolve MSAA, and
 * issue glFlush unless multisample-to-texture makes it unnecessary.
 *
 * NOTE(review): the condition guarding the early-success return at lines
 * 1844-1845 is elided in this excerpt (likely a test of @flags); comments
 * describe only what is visible.
 */
1835 static cairo_status_t
1836 _cairo_gl_surface_flush (void *abstract_surface, unsigned flags)
1838 CAIRO_TRACE_BEGIN (__func__);
1839 cairo_gl_surface_t *surface = abstract_surface;
1840 cairo_status_t status;
1841 cairo_gl_context_t *ctx;
/* Early-out path (guard condition elided in this excerpt). */
1844 CAIRO_TRACE_END (__func__);
1845 return CAIRO_STATUS_SUCCESS;
1848 status = _cairo_gl_context_acquire (surface->base.device, &ctx);
1849 if (unlikely (status)) {
1850 CAIRO_TRACE_END (__func__);
/* Only flush the batched composite state if this surface participates in
 * it — as the bound source operand, mask operand, or current target. */
1854 if (((ctx->operands[CAIRO_GL_TEX_SOURCE].type == CAIRO_GL_OPERAND_TEXTURE ||
1855 ctx->operands[CAIRO_GL_TEX_SOURCE].type == CAIRO_GL_OPERAND_GAUSSIAN) &&
1856 ctx->operands[CAIRO_GL_TEX_SOURCE].texture.surface == surface) ||
1857 ((ctx->operands[CAIRO_GL_TEX_MASK].type == CAIRO_GL_OPERAND_TEXTURE ||
1858 ctx->operands[CAIRO_GL_TEX_MASK].type == CAIRO_GL_OPERAND_GAUSSIAN) &&
1859 ctx->operands[CAIRO_GL_TEX_MASK].texture.surface == surface) ||
1860 (ctx->current_target == surface))
1861 _cairo_gl_composite_flush (ctx);
1863 status = _cairo_gl_surface_resolve_multisampling (surface);
1865 if (ctx->msaa_type != CAIRO_GL_NONE_MULTISAMPLE_TO_TEXTURE)
1866 ctx->dispatch.Flush ();
1869 CAIRO_TRACE_END (__func__);
1870 return _cairo_gl_context_release (ctx, status);
/*
 * _cairo_gl_surface_resolve_multisampling:
 *
 * Resolve the surface's multisampled contents into its single-sampled
 * texture/framebuffer by re-binding it as the draw destination (the resolve
 * happens inside _cairo_gl_context_set_destination).  Several cases need no
 * explicit resolve and return success immediately; the content_cleared flag
 * forces a resolve pass and is reset afterwards.
 */
1874 _cairo_gl_surface_resolve_multisampling (cairo_gl_surface_t *surface)
1876 cairo_gl_context_t *ctx;
1877 cairo_int_status_t status;
/* A finished/deviceless surface has nothing to resolve. */
1879 if (surface->base.device == NULL)
1880 return CAIRO_INT_STATUS_SUCCESS;
1882 if (! surface->content_cleared) {
1883 /* GLES surfaces do not need explicit resolution. */
1884 if (! surface->msaa_active)
1885 return CAIRO_INT_STATUS_SUCCESS;
1886 else if (((cairo_gl_context_t *) surface->base.device)->gl_flavor == CAIRO_GL_FLAVOR_ES2 &&
1887 !((cairo_gl_context_t *) surface->base.device)->has_angle_multisampling)
1888 return CAIRO_INT_STATUS_SUCCESS;
1889 else if (! _cairo_gl_surface_is_texture (surface))
1890 return CAIRO_INT_STATUS_SUCCESS;
1893 status = _cairo_gl_context_acquire (surface->base.device, &ctx);
1894 if (unlikely (status))
/* Binding with multisampling=FALSE performs the actual resolve blit. */
1897 _cairo_gl_context_set_destination (ctx, surface, FALSE);
1899 status = _cairo_gl_context_release (ctx, status);
1901 surface->content_cleared = FALSE;
1906 static const cairo_compositor_t *
1907 get_compositor (cairo_gl_surface_t *surface)
1909 cairo_gl_context_t *ctx = (cairo_gl_context_t *)surface->base.device;
1910 return ctx->compositor;
/*
 * _cairo_gl_surface_paint:
 *
 * Backend paint hook.  Order of operations visible here: acquire the
 * device; let the shadow machinery paint any drop shadow first; return
 * early if only the shadow was requested; special-case full-surface clears
 * (OPERATOR_CLEAR, or an opaque/SOURCE solid) via _cairo_gl_surface_clear;
 * otherwise delegate to the context's compositor.  On success the surface
 * is marked changed/unsynced.
 *
 * NOTE(review): excerpt has elided lines (braces, returns, and trailing
 * call arguments); comments describe only what is visible.
 */
1913 static cairo_int_status_t
1914 _cairo_gl_surface_paint (void *surface,
1915 cairo_operator_t op,
1916 const cairo_pattern_t *source,
1917 const cairo_clip_t *clip)
1919 CAIRO_TRACE_BEGIN (__func__);
1920 cairo_int_status_t status;
1921 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
1922 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
1924 status = cairo_device_acquire (dst->base.device);
1925 if (unlikely (status)) {
1926 CAIRO_TRACE_END (__func__);
/* Draw the pattern's drop shadow (if any) before the pattern itself. */
1930 status = _cairo_surface_shadow_paint (surface, op, source, clip,
1932 ctx->source_scratch_in_use = FALSE;
1933 if (unlikely (status)) {
1934 cairo_device_release (dst->base.device);
1935 CAIRO_TRACE_END (__func__);
/* Shadow-only mode: we are done after the shadow pass. */
1939 if (source->shadow.draw_shadow_only) {
1940 if (status == CAIRO_INT_STATUS_SUCCESS) {
1941 dst->content_changed = TRUE;
1942 dst->content_synced = FALSE;
1945 ctx->source_scratch_in_use = FALSE;
1946 cairo_device_release (dst->base.device);
1947 CAIRO_TRACE_END (__func__);
1951 /* simplify the common case of clearing the surface */
1953 if (op == CAIRO_OPERATOR_CLEAR) {
1954 status = _cairo_gl_surface_clear (surface, CAIRO_COLOR_TRANSPARENT);
1955 cairo_device_release (dst->base.device);
1956 CAIRO_TRACE_END (__func__);
/* SOURCE with any solid, or OVER with an opaque solid, is a plain fill. */
1959 else if (source->type == CAIRO_PATTERN_TYPE_SOLID &&
1960 (op == CAIRO_OPERATOR_SOURCE ||
1961 (op == CAIRO_OPERATOR_OVER && _cairo_pattern_is_opaque_solid (source)))) {
1962 status = _cairo_gl_surface_clear (surface,
1963 &((cairo_solid_pattern_t *) source)->color);
1964 cairo_device_release (dst->base.device);
1965 CAIRO_TRACE_END (__func__);
/* General path: hand off to the compositor. */
1970 status = _cairo_compositor_paint (get_compositor (surface), surface,
1972 if (status == CAIRO_INT_STATUS_SUCCESS) {
1973 dst->content_changed = TRUE;
1974 dst->content_synced = FALSE;
1977 ctx->source_scratch_in_use = FALSE;
1978 cairo_device_release (dst->base.device);
1979 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_mask:
 *
 * Backend mask hook: paint the shadow (if any), handle shadow-only mode,
 * then delegate the masked composite to the context's compositor.
 *
 * NOTE(review): lines 2031-2033 are elided in this excerpt, and two
 * near-identical _cairo_compositor_mask blocks follow (orig. 2022-2030 and
 * 2034-2043).  Presumably the elided lines make the first block conditional
 * (e.g. an if/else branch); if not, the mask would be composited twice —
 * verify against the full source before changing anything here.
 */
1983 static cairo_int_status_t
1984 _cairo_gl_surface_mask (void *surface,
1985 cairo_operator_t op,
1986 const cairo_pattern_t *source,
1987 const cairo_pattern_t *mask,
1988 const cairo_clip_t *clip)
1990 CAIRO_TRACE_BEGIN (__func__);
1991 cairo_int_status_t status;
1992 cairo_gl_surface_t *dst = (cairo_gl_surface_t *) surface;
1993 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
1995 status = cairo_device_acquire (dst->base.device);
1996 if (unlikely (status)) {
1997 CAIRO_TRACE_END (__func__);
/* Shadow pass first (trailing argument elided in this excerpt). */
2001 status = _cairo_surface_shadow_mask (surface, op, source, mask, clip,
2003 ctx->source_scratch_in_use = FALSE;
2004 if (unlikely (status)) {
2005 cairo_device_release (dst->base.device);
2006 CAIRO_TRACE_END (__func__);
2010 if (source->shadow.draw_shadow_only) {
2011 if (status == CAIRO_INT_STATUS_SUCCESS) {
2012 dst->content_changed = TRUE;
2013 dst->content_synced = FALSE;
2016 ctx->source_scratch_in_use = FALSE;
2017 cairo_device_release (dst->base.device);
2018 CAIRO_TRACE_END (__func__);
/* First compositor-mask block (see NOTE above about the elided guard). */
2022 status = _cairo_compositor_mask (get_compositor (surface), surface,
2023 op, source, mask, clip);
2024 if (status == CAIRO_INT_STATUS_SUCCESS) {
2025 dst->content_changed = TRUE;
2026 dst->content_synced = FALSE;
2028 ctx->source_scratch_in_use = FALSE;
2029 cairo_device_release (dst->base.device);
2030 CAIRO_TRACE_END (__func__);
/* Second compositor-mask block (see NOTE above). */
2034 status = _cairo_compositor_mask (get_compositor (surface), surface,
2035 op, source, mask, clip);
2036 if (status == CAIRO_INT_STATUS_SUCCESS) {
2037 dst->content_changed = TRUE;
2038 dst->content_synced = FALSE;
2041 ctx->source_scratch_in_use = FALSE;
2042 cairo_device_release (dst->base.device);
2043 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_stroke:
 *
 * Backend stroke hook.  Shadow ordering visible here: non-INSET shadows are
 * drawn before the stroke, INSET shadows after it.  The stroke itself is
 * delegated to the compositor unless draw_shadow_only is set.  The surface
 * is marked changed/unsynced once the shadow pass succeeds.
 *
 * NOTE(review): excerpt has elided lines (braces, returns, the tolerance
 * parameter declaration at orig. 2055, trailing call arguments).
 */
2047 static cairo_int_status_t
2048 _cairo_gl_surface_stroke (void *surface,
2049 cairo_operator_t op,
2050 const cairo_pattern_t *source,
2051 const cairo_path_fixed_t *path,
2052 const cairo_stroke_style_t *style,
2053 const cairo_matrix_t *ctm,
2054 const cairo_matrix_t *ctm_inverse,
2056 cairo_antialias_t antialias,
2057 const cairo_clip_t *clip)
2059 CAIRO_TRACE_BEGIN (__func__);
2060 cairo_int_status_t status;
2061 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
2062 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
2063 cairo_shadow_type_t shadow_type = source->shadow.type;
2065 status = cairo_device_acquire (dst->base.device);
2066 if (unlikely (status)) {
2067 CAIRO_TRACE_END (__func__);
/* Drop/other shadows are painted underneath, i.e. before the stroke. */
2071 if (shadow_type != CAIRO_SHADOW_INSET)
2072 status = _cairo_surface_shadow_stroke (surface, op, source, path,
2073 style, ctm, ctm_inverse,
2074 tolerance, antialias,
2075 clip, &source->shadow);
2077 ctx->source_scratch_in_use = FALSE;
2078 if (unlikely (status)) {
2079 cairo_device_release (dst->base.device);
2080 CAIRO_TRACE_END (__func__);
2084 dst->content_changed = TRUE;
2085 dst->content_synced = FALSE;
/* Shadow-only drop shadow: nothing further to draw. */
2087 if (shadow_type == CAIRO_SHADOW_DROP &&
2088 source->shadow.draw_shadow_only) {
2089 ctx->source_scratch_in_use = FALSE;
2090 cairo_device_release (dst->base.device);
2091 CAIRO_TRACE_END (__func__);
2095 ctx->source_scratch_in_use = FALSE;
2097 if (! source->shadow.draw_shadow_only)
2098 status = _cairo_compositor_stroke (get_compositor (surface), surface,
2099 op, source, path, style,
2100 ctm, ctm_inverse, tolerance,
2102 if (unlikely (status)) {
2103 ctx->source_scratch_in_use = FALSE;
2104 cairo_device_release (dst->base.device);
2105 CAIRO_TRACE_END (__func__);
2109 ctx->source_scratch_in_use = FALSE;
/* INSET shadows sit on top of the stroke, so they are painted last. */
2111 if (shadow_type == CAIRO_SHADOW_INSET)
2112 status = _cairo_surface_shadow_stroke (surface, op, source, path,
2113 style, ctm, ctm_inverse,
2114 tolerance, antialias,
2115 clip, &source->shadow);
2117 ctx->source_scratch_in_use = FALSE;
2118 cairo_device_release (dst->base.device);
2119 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_fill:
 *
 * Backend fill hook.  Mirrors the stroke hook: non-INSET shadows first,
 * then the fill via the compositor (or a whole-area paint when the shadow
 * machinery says the path is a spread-fill), then INSET shadows last.
 *
 * NOTE(review): excerpt has elided lines (braces, returns, the tolerance
 * parameter declaration at orig. 2129, trailing call arguments).
 */
2123 static cairo_int_status_t
2124 _cairo_gl_surface_fill (void *surface,
2125 cairo_operator_t op,
2126 const cairo_pattern_t *source,
2127 const cairo_path_fixed_t*path,
2128 cairo_fill_rule_t fill_rule,
2130 cairo_antialias_t antialias,
2131 const cairo_clip_t *clip)
2133 CAIRO_TRACE_BEGIN (__func__);
2134 cairo_status_t status;
2135 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
2136 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
2137 cairo_shadow_type_t shadow_type = source->shadow.type;
2139 status = cairo_device_acquire (dst->base.device);
2140 if (unlikely (status)) {
2141 CAIRO_TRACE_END (__func__);
/* Drop/other shadows are painted underneath, i.e. before the fill. */
2145 if (shadow_type != CAIRO_SHADOW_INSET)
2146 status = _cairo_surface_shadow_fill (surface, op, source, path,
2147 fill_rule, tolerance, antialias,
2148 clip, &source->shadow);
2150 ctx->source_scratch_in_use = FALSE;
2151 if (unlikely (status)) {
2152 cairo_device_release (dst->base.device);
2153 CAIRO_TRACE_END (__func__);
2157 dst->content_changed = TRUE;
2158 dst->content_synced = FALSE;
/* Shadow-only drop shadow: nothing further to draw. */
2160 if (shadow_type == CAIRO_SHADOW_DROP &&
2161 source->shadow.draw_shadow_only) {
2162 ctx->source_scratch_in_use = FALSE;
2163 cairo_device_release (dst->base.device);
2164 CAIRO_TRACE_END (__func__);
2168 ctx->source_scratch_in_use = FALSE;
2170 if (! source->shadow.draw_shadow_only) {
/* A spread-fill INSET path degenerates to a plain paint; otherwise use
 * the regular compositor fill (some call arguments elided here). */
2171 if (! source->shadow.path_is_fill_with_spread ||
2172 source->shadow.type != CAIRO_SHADOW_INSET)
2173 status = _cairo_compositor_fill (get_compositor (surface),
2176 fill_rule, tolerance,
2180 status = _cairo_compositor_paint (get_compositor (surface),
2181 surface, op, source,
2185 if (unlikely (status)) {
2186 ctx->source_scratch_in_use = FALSE;
2187 cairo_device_release (dst->base.device);
2188 CAIRO_TRACE_END (__func__);
2192 ctx->source_scratch_in_use = FALSE;
/* INSET shadows sit on top of the fill, so they are painted last. */
2194 if (shadow_type == CAIRO_SHADOW_INSET)
2195 status = _cairo_surface_shadow_fill (surface, op, source, path,
2196 fill_rule, tolerance, antialias,
2197 clip, &source->shadow);
2199 ctx->source_scratch_in_use = FALSE;
2200 cairo_device_release (dst->base.device);
2202 CAIRO_TRACE_END (__func__);
/*
 * _cairo_gl_surface_glyphs:
 *
 * Backend show-glyphs hook.  Same shadow ordering as stroke/fill: non-INSET
 * shadows before the glyphs, the glyph run via the compositor, INSET
 * shadows afterwards.
 *
 * NOTE(review): excerpt has elided lines (braces, returns, the num_glyphs
 * parameter declaration at orig. 2211, and glyph/font arguments of the
 * shadow calls).
 */
2206 static cairo_int_status_t
2207 _cairo_gl_surface_glyphs (void *surface,
2208 cairo_operator_t op,
2209 const cairo_pattern_t *source,
2210 cairo_glyph_t *glyphs,
2212 cairo_scaled_font_t *font,
2213 const cairo_clip_t *clip)
2215 CAIRO_TRACE_BEGIN (__func__);
2216 cairo_int_status_t status;
2217 cairo_gl_surface_t *dst = (cairo_gl_surface_t *)surface;
2218 cairo_gl_context_t *ctx = (cairo_gl_context_t *)dst->base.device;
2219 cairo_shadow_type_t shadow_type = source->shadow.type;
2221 status = cairo_device_acquire (dst->base.device);
2222 if (unlikely (status)) {
2223 CAIRO_TRACE_END (__func__);
/* Drop/other shadows are painted underneath, i.e. before the glyphs. */
2227 if (shadow_type != CAIRO_SHADOW_INSET)
2228 status = _cairo_surface_shadow_glyphs (surface, op, source,
2231 clip, &source->shadow);
2233 ctx->source_scratch_in_use = FALSE;
2234 if (unlikely (status)) {
2235 cairo_device_release (dst->base.device);
2236 CAIRO_TRACE_END (__func__);
2240 dst->content_changed = TRUE;
2241 dst->content_synced = FALSE;
/* Shadow-only drop shadow: nothing further to draw. */
2243 if (shadow_type == CAIRO_SHADOW_DROP &&
2244 source->shadow.draw_shadow_only) {
2245 ctx->source_scratch_in_use = FALSE;
2246 cairo_device_release (dst->base.device);
2247 CAIRO_TRACE_END (__func__);
2251 ctx->source_scratch_in_use = FALSE;
2253 if (! source->shadow.draw_shadow_only)
2254 status = _cairo_compositor_glyphs (get_compositor (surface), surface,
2255 op, source, glyphs, num_glyphs,
2258 if (unlikely (status)) {
2259 ctx->source_scratch_in_use = FALSE;
2260 cairo_device_release (dst->base.device);
2261 CAIRO_TRACE_END (__func__);
2265 ctx->source_scratch_in_use = FALSE;
/* INSET shadows sit on top of the glyphs, so they are painted last. */
2267 if (shadow_type == CAIRO_SHADOW_INSET)
2268 status = _cairo_surface_shadow_glyphs (surface, op, source,
2271 clip, &source->shadow);
2273 ctx->source_scratch_in_use = FALSE;
2274 cairo_device_release (dst->base.device);
2275 CAIRO_TRACE_END (__func__);
/*
 * Virtual-function table wiring the GL surface into cairo's generic
 * cairo_surface_t machinery.  NULL slots indicate the operation is not
 * provided by this backend.  Slot order must match cairo_surface_backend_t
 * (some entries of the full table are elided in this excerpt).
 */
2279 static const cairo_surface_backend_t _cairo_gl_surface_backend = {
2280 CAIRO_SURFACE_TYPE_GL,
2281 _cairo_gl_surface_finish,
2282 _cairo_default_context_create,
2284 _cairo_gl_surface_create_similar,
2285 NULL, /* similar image */
2286 _cairo_gl_surface_map_to_image,
2287 _cairo_gl_surface_unmap_image,
2289 _cairo_gl_surface_source,
2290 _cairo_gl_surface_acquire_source_image,
2291 _cairo_gl_surface_release_source_image,
2292 NULL, /* snapshot */
2294 NULL, /* copy_page */
2295 NULL, /* show_page */
2297 _cairo_gl_surface_get_extents,
2298 _cairo_image_surface_get_font_options,
2300 _cairo_gl_surface_flush,
2301 NULL, /* mark_dirty_rectangle */
2303 _cairo_gl_surface_paint,
2304 _cairo_gl_surface_mask,
2305 _cairo_gl_surface_stroke,
2306 _cairo_gl_surface_fill,
2307 NULL, /* fill/stroke */
2308 _cairo_gl_surface_glyphs,
2309 NULL, /* has_text_glyphs */
2310 NULL, /* show_text_glyphs */
2311 NULL, /* get_supported_mime_types */
2312 _cairo_gl_surface_shadow_surface,
2313 _cairo_gl_surface_glyph_shadow_surface,
2314 _cairo_gl_surface_shadow_mask_surface,
2315 _cairo_gl_surface_glyph_shadow_mask_surface,
/*
 * cairo_gl_surface_set_binding_texture:
 *
 * Public entry point: associate an externally created GL texture id with
 * this surface (recorded in both bounded_tex and the texture operand).
 * Rejects non-GL surfaces with SURFACE_TYPE_MISMATCH; a second condition of
 * that guard is on a line elided in this excerpt (orig. 2325) — presumably
 * an error/finished check; verify against the full source.
 */
2319 cairo_gl_surface_set_binding_texture (cairo_surface_t *abstract_surface,
2320 unsigned int texture)
2322 cairo_gl_surface_t *surface = (cairo_gl_surface_t *) abstract_surface;
2324 if ((cairo_surface_get_type (&surface->base) != CAIRO_SURFACE_TYPE_GL) ||
2326 return CAIRO_STATUS_SURFACE_TYPE_MISMATCH;
2328 surface->bounded_tex = texture;
2329 surface->operand.texture.tex = texture;
2331 return CAIRO_STATUS_SUCCESS;