1 /* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
2 /* cairo - a vector graphics library with display and print output
4 * Copyright © 2003 University of Southern California
5 * Copyright © 2009,2010,2011 Intel Corporation
7 * This library is free software; you can redistribute it and/or
8 * modify it either under the terms of the GNU Lesser General Public
9 * License version 2.1 as published by the Free Software Foundation
10 * (the "LGPL") or, at your option, under the terms of the Mozilla
11 * Public License Version 1.1 (the "MPL"). If you do not alter this
12 * notice, a recipient may use your version of this file under either
13 * the MPL or the LGPL.
15 * You should have received a copy of the LGPL along with this library
16 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
18 * You should have received a copy of the MPL along with this library
19 * in the file COPYING-MPL-1.1
21 * The contents of this file are subject to the Mozilla Public License
22 * Version 1.1 (the "License"); you may not use this file except in
23 * compliance with the License. You may obtain a copy of the License at
24 * http://www.mozilla.org/MPL/
26 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
27 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
28 * the specific language governing rights and limitations.
30 * The Original Code is the cairo graphics library.
32 * The Initial Developer of the Original Code is University of Southern
36 * Carl D. Worth <cworth@cworth.org>
37 * Chris Wilson <chris@chris-wilson.co.uk>
40 /* The primarily reason for keeping a traps-compositor around is
41 * for validating cairo-xlib (which currently also uses traps).
46 #include "cairo-image-surface-private.h"
48 #include "cairo-compositor-private.h"
49 #include "cairo-spans-compositor-private.h"
51 #include "cairo-region-private.h"
52 #include "cairo-traps-private.h"
53 #include "cairo-tristrip-private.h"
55 static pixman_image_t *
56 to_pixman_image (cairo_surface_t *s)
58 return ((cairo_image_surface_t *)s)->pixman_image;
61 static cairo_int_status_t
62 acquire (void *abstract_dst)
64 return CAIRO_STATUS_SUCCESS;
67 static cairo_int_status_t
68 release (void *abstract_dst)
70 return CAIRO_STATUS_SUCCESS;
73 static cairo_int_status_t
74 set_clip_region (void *_surface,
75 cairo_region_t *region)
77 cairo_image_surface_t *surface = _surface;
78 pixman_region32_t *rgn = region ? ®ion->rgn : NULL;
80 if (! pixman_image_set_clip_region32 (surface->pixman_image, rgn))
81 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
83 return CAIRO_STATUS_SUCCESS;
86 static cairo_int_status_t
87 draw_image_boxes (void *_dst,
88 cairo_image_surface_t *image,
92 cairo_image_surface_t *dst = _dst;
93 struct _cairo_boxes_chunk *chunk;
96 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
98 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
99 for (i = 0; i < chunk->count; i++) {
100 cairo_box_t *b = &chunk->base[i];
101 int x = _cairo_fixed_integer_part (b->p1.x);
102 int y = _cairo_fixed_integer_part (b->p1.y);
103 int w = _cairo_fixed_integer_part (b->p2.x) - x;
104 int h = _cairo_fixed_integer_part (b->p2.y) - y;
105 if (dst->pixman_format != image->pixman_format ||
106 ! pixman_blt ((uint32_t *)image->data, (uint32_t *)dst->data,
107 image->stride / sizeof (uint32_t),
108 dst->stride / sizeof (uint32_t),
109 PIXMAN_FORMAT_BPP (image->pixman_format),
110 PIXMAN_FORMAT_BPP (dst->pixman_format),
115 pixman_image_composite32 (PIXMAN_OP_SRC,
116 image->pixman_image, NULL, dst->pixman_image,
124 return CAIRO_STATUS_SUCCESS;
127 static inline uint32_t
128 color_to_uint32 (const cairo_color_t *color)
131 (color->alpha_short >> 8 << 24) |
132 (color->red_short >> 8 << 16) |
133 (color->green_short & 0xff00) |
134 (color->blue_short >> 8);
137 static inline cairo_bool_t
138 color_to_pixel (const cairo_color_t *color,
139 pixman_format_code_t format,
144 if (!(format == PIXMAN_a8r8g8b8 ||
145 format == PIXMAN_x8r8g8b8 ||
146 format == PIXMAN_a8b8g8r8 ||
147 format == PIXMAN_x8b8g8r8 ||
148 format == PIXMAN_b8g8r8a8 ||
149 format == PIXMAN_b8g8r8x8 ||
150 format == PIXMAN_r5g6b5 ||
151 format == PIXMAN_b5g6r5 ||
152 format == PIXMAN_a8))
157 c = color_to_uint32 (color);
159 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_ABGR) {
160 c = ((c & 0xff000000) >> 0) |
161 ((c & 0x00ff0000) >> 16) |
162 ((c & 0x0000ff00) >> 0) |
163 ((c & 0x000000ff) << 16);
166 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_BGRA) {
167 c = ((c & 0xff000000) >> 24) |
168 ((c & 0x00ff0000) >> 8) |
169 ((c & 0x0000ff00) << 8) |
170 ((c & 0x000000ff) << 24);
173 if (format == PIXMAN_a8) {
175 } else if (format == PIXMAN_r5g6b5 || format == PIXMAN_b5g6r5) {
176 c = ((((c) >> 3) & 0x001f) |
177 (((c) >> 5) & 0x07e0) |
178 (((c) >> 8) & 0xf800));
186 _pixman_operator (cairo_operator_t op)
189 case CAIRO_OPERATOR_CLEAR:
190 return PIXMAN_OP_CLEAR;
192 case CAIRO_OPERATOR_SOURCE:
193 return PIXMAN_OP_SRC;
194 case CAIRO_OPERATOR_OVER:
195 return PIXMAN_OP_OVER;
196 case CAIRO_OPERATOR_IN:
198 case CAIRO_OPERATOR_OUT:
199 return PIXMAN_OP_OUT;
200 case CAIRO_OPERATOR_ATOP:
201 return PIXMAN_OP_ATOP;
203 case CAIRO_OPERATOR_DEST:
204 return PIXMAN_OP_DST;
205 case CAIRO_OPERATOR_DEST_OVER:
206 return PIXMAN_OP_OVER_REVERSE;
207 case CAIRO_OPERATOR_DEST_IN:
208 return PIXMAN_OP_IN_REVERSE;
209 case CAIRO_OPERATOR_DEST_OUT:
210 return PIXMAN_OP_OUT_REVERSE;
211 case CAIRO_OPERATOR_DEST_ATOP:
212 return PIXMAN_OP_ATOP_REVERSE;
214 case CAIRO_OPERATOR_XOR:
215 return PIXMAN_OP_XOR;
216 case CAIRO_OPERATOR_ADD:
217 return PIXMAN_OP_ADD;
218 case CAIRO_OPERATOR_SATURATE:
219 return PIXMAN_OP_SATURATE;
221 case CAIRO_OPERATOR_MULTIPLY:
222 return PIXMAN_OP_MULTIPLY;
223 case CAIRO_OPERATOR_SCREEN:
224 return PIXMAN_OP_SCREEN;
225 case CAIRO_OPERATOR_OVERLAY:
226 return PIXMAN_OP_OVERLAY;
227 case CAIRO_OPERATOR_DARKEN:
228 return PIXMAN_OP_DARKEN;
229 case CAIRO_OPERATOR_LIGHTEN:
230 return PIXMAN_OP_LIGHTEN;
231 case CAIRO_OPERATOR_COLOR_DODGE:
232 return PIXMAN_OP_COLOR_DODGE;
233 case CAIRO_OPERATOR_COLOR_BURN:
234 return PIXMAN_OP_COLOR_BURN;
235 case CAIRO_OPERATOR_HARD_LIGHT:
236 return PIXMAN_OP_HARD_LIGHT;
237 case CAIRO_OPERATOR_SOFT_LIGHT:
238 return PIXMAN_OP_SOFT_LIGHT;
239 case CAIRO_OPERATOR_DIFFERENCE:
240 return PIXMAN_OP_DIFFERENCE;
241 case CAIRO_OPERATOR_EXCLUSION:
242 return PIXMAN_OP_EXCLUSION;
243 case CAIRO_OPERATOR_HSL_HUE:
244 return PIXMAN_OP_HSL_HUE;
245 case CAIRO_OPERATOR_HSL_SATURATION:
246 return PIXMAN_OP_HSL_SATURATION;
247 case CAIRO_OPERATOR_HSL_COLOR:
248 return PIXMAN_OP_HSL_COLOR;
249 case CAIRO_OPERATOR_HSL_LUMINOSITY:
250 return PIXMAN_OP_HSL_LUMINOSITY;
254 return PIXMAN_OP_OVER;
259 fill_reduces_to_source (cairo_operator_t op,
260 const cairo_color_t *color,
261 cairo_image_surface_t *dst)
263 if (op == CAIRO_OPERATOR_SOURCE || op == CAIRO_OPERATOR_CLEAR)
265 if (op == CAIRO_OPERATOR_OVER && CAIRO_COLOR_IS_OPAQUE (color))
267 if (dst->base.is_clear)
268 return op == CAIRO_OPERATOR_OVER || op == CAIRO_OPERATOR_ADD;
273 static cairo_int_status_t
274 fill_rectangles (void *_dst,
276 const cairo_color_t *color,
277 cairo_rectangle_int_t *rects,
280 cairo_image_surface_t *dst = _dst;
284 TRACE ((stderr, "%s\n", __FUNCTION__));
286 if (fill_reduces_to_source (op, color, dst) &&
287 color_to_pixel (color, dst->pixman_format, &pixel))
289 for (i = 0; i < num_rects; i++) {
290 pixman_fill ((uint32_t *) dst->data, dst->stride / sizeof (uint32_t),
291 PIXMAN_FORMAT_BPP (dst->pixman_format),
292 rects[i].x, rects[i].y,
293 rects[i].width, rects[i].height,
299 pixman_image_t *src = _pixman_image_for_color (color);
301 op = _pixman_operator (op);
302 for (i = 0; i < num_rects; i++) {
303 pixman_image_composite32 (op,
304 src, NULL, dst->pixman_image,
307 rects[i].x, rects[i].y,
308 rects[i].width, rects[i].height);
311 pixman_image_unref (src);
314 return CAIRO_STATUS_SUCCESS;
317 static cairo_int_status_t
318 fill_boxes (void *_dst,
320 const cairo_color_t *color,
321 cairo_boxes_t *boxes)
323 cairo_image_surface_t *dst = _dst;
324 struct _cairo_boxes_chunk *chunk;
328 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
330 if (fill_reduces_to_source (op, color, dst) &&
331 color_to_pixel (color, dst->pixman_format, &pixel))
333 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
334 for (i = 0; i < chunk->count; i++) {
335 int x = _cairo_fixed_integer_part (chunk->base[i].p1.x);
336 int y = _cairo_fixed_integer_part (chunk->base[i].p1.y);
337 int w = _cairo_fixed_integer_part (chunk->base[i].p2.x) - x;
338 int h = _cairo_fixed_integer_part (chunk->base[i].p2.y) - y;
339 pixman_fill ((uint32_t *) dst->data,
340 dst->stride / sizeof (uint32_t),
341 PIXMAN_FORMAT_BPP (dst->pixman_format),
348 pixman_image_t *src = _pixman_image_for_color (color);
350 op = _pixman_operator (op);
351 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
352 for (i = 0; i < chunk->count; i++) {
353 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
354 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
355 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
356 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
357 pixman_image_composite32 (op,
358 src, NULL, dst->pixman_image,
366 pixman_image_unref (src);
369 return CAIRO_STATUS_SUCCESS;
372 static cairo_int_status_t
373 composite (void *_dst,
375 cairo_surface_t *abstract_src,
376 cairo_surface_t *abstract_mask,
386 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
387 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
389 TRACE ((stderr, "%s\n", __FUNCTION__));
392 pixman_image_composite32 (_pixman_operator (op),
393 src->pixman_image, mask->pixman_image, to_pixman_image (_dst),
399 pixman_image_composite32 (_pixman_operator (op),
400 src->pixman_image, NULL, to_pixman_image (_dst),
407 return CAIRO_STATUS_SUCCESS;
410 static cairo_int_status_t
412 cairo_surface_t *abstract_src,
413 cairo_surface_t *abstract_mask,
423 cairo_image_surface_t *dst = _dst;
424 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
425 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
427 TRACE ((stderr, "%s\n", __FUNCTION__));
429 #if PIXMAN_HAS_OP_LERP
430 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
431 src->pixman_image, mask->pixman_image, dst->pixman_image,
437 /* Punch the clip out of the destination */
438 TRACE ((stderr, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
440 mask->base.unique_id, mask->pixman_image,
441 dst->base.unique_id, dst->pixman_image));
442 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
443 mask->pixman_image, NULL, dst->pixman_image,
449 /* Now add the two results together */
450 TRACE ((stderr, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
452 src->base.unique_id, src->pixman_image,
453 mask->base.unique_id, mask->pixman_image,
454 dst->base.unique_id, dst->pixman_image));
455 pixman_image_composite32 (PIXMAN_OP_ADD,
456 src->pixman_image, mask->pixman_image, dst->pixman_image,
463 return CAIRO_STATUS_SUCCESS;
466 static cairo_int_status_t
467 composite_boxes (void *_dst,
469 cairo_surface_t *abstract_src,
470 cairo_surface_t *abstract_mask,
477 cairo_boxes_t *boxes,
478 const cairo_rectangle_int_t *extents)
480 pixman_image_t *dst = to_pixman_image (_dst);
481 pixman_image_t *src = ((cairo_image_source_t *)abstract_src)->pixman_image;
482 pixman_image_t *mask = abstract_mask ? ((cairo_image_source_t *)abstract_mask)->pixman_image : NULL;
483 pixman_image_t *free_src = NULL;
484 struct _cairo_boxes_chunk *chunk;
487 /* XXX consider using a region? saves multiple prepare-composite */
488 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
490 if (((cairo_surface_t *)_dst)->is_clear &&
491 (op == CAIRO_OPERATOR_SOURCE ||
492 op == CAIRO_OPERATOR_OVER ||
493 op == CAIRO_OPERATOR_ADD)) {
496 if (op == CAIRO_OPERATOR_CLEAR) {
497 #if PIXMAN_HAS_OP_LERP
498 op = PIXMAN_OP_LERP_CLEAR;
500 free_src = src = _pixman_image_for_color (CAIRO_COLOR_WHITE);
501 op = PIXMAN_OP_OUT_REVERSE;
503 } else if (op == CAIRO_OPERATOR_SOURCE) {
504 #if PIXMAN_HAS_OP_LERP
505 op = PIXMAN_OP_LERP_SRC;
507 return CAIRO_INT_STATUS_UNSUPPORTED;
510 op = _pixman_operator (op);
513 op = _pixman_operator (op);
516 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
517 for (i = 0; i < chunk->count; i++) {
518 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
519 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
520 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
521 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
523 pixman_image_composite32 (op, src, mask, dst,
524 x1 + src_x, y1 + src_y,
525 x1 + mask_x, y1 + mask_y,
526 x1 + dst_x, y1 + dst_y,
532 pixman_image_unref (free_src);
534 return CAIRO_STATUS_SUCCESS;
537 #define CAIRO_FIXED_16_16_MIN _cairo_fixed_from_int (-32768)
538 #define CAIRO_FIXED_16_16_MAX _cairo_fixed_from_int (32767)
541 line_exceeds_16_16 (const cairo_line_t *line)
544 line->p1.x <= CAIRO_FIXED_16_16_MIN ||
545 line->p1.x >= CAIRO_FIXED_16_16_MAX ||
547 line->p2.x <= CAIRO_FIXED_16_16_MIN ||
548 line->p2.x >= CAIRO_FIXED_16_16_MAX ||
550 line->p1.y <= CAIRO_FIXED_16_16_MIN ||
551 line->p1.y >= CAIRO_FIXED_16_16_MAX ||
553 line->p2.y <= CAIRO_FIXED_16_16_MIN ||
554 line->p2.y >= CAIRO_FIXED_16_16_MAX;
558 project_line_x_onto_16_16 (const cairo_line_t *line,
560 cairo_fixed_t bottom,
561 pixman_line_fixed_t *out)
563 /* XXX use fixed-point arithmetic? */
564 cairo_point_double_t p1, p2;
567 p1.x = _cairo_fixed_to_double (line->p1.x);
568 p1.y = _cairo_fixed_to_double (line->p1.y);
570 p2.x = _cairo_fixed_to_double (line->p2.x);
571 p2.y = _cairo_fixed_to_double (line->p2.y);
573 m = (p2.x - p1.x) / (p2.y - p1.y);
574 out->p1.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (top - line->p1.y));
575 out->p2.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (bottom - line->p1.y));
579 _pixman_image_add_traps (pixman_image_t *image,
580 int dst_x, int dst_y,
581 cairo_traps_t *traps)
583 cairo_trapezoid_t *t = traps->traps;
584 int num_traps = traps->num_traps;
585 while (num_traps--) {
586 pixman_trapezoid_t trap;
588 /* top/bottom will be clamped to surface bounds */
589 trap.top = _cairo_fixed_to_16_16 (t->top);
590 trap.bottom = _cairo_fixed_to_16_16 (t->bottom);
592 /* However, all the other coordinates will have been left untouched so
593 * as not to introduce numerical error. Recompute them if they
594 * exceed the 16.16 limits.
596 if (unlikely (line_exceeds_16_16 (&t->left))) {
597 project_line_x_onto_16_16 (&t->left, t->top, t->bottom, &trap.left);
598 trap.left.p1.y = trap.top;
599 trap.left.p2.y = trap.bottom;
601 trap.left.p1.x = _cairo_fixed_to_16_16 (t->left.p1.x);
602 trap.left.p1.y = _cairo_fixed_to_16_16 (t->left.p1.y);
603 trap.left.p2.x = _cairo_fixed_to_16_16 (t->left.p2.x);
604 trap.left.p2.y = _cairo_fixed_to_16_16 (t->left.p2.y);
607 if (unlikely (line_exceeds_16_16 (&t->right))) {
608 project_line_x_onto_16_16 (&t->right, t->top, t->bottom, &trap.right);
609 trap.right.p1.y = trap.top;
610 trap.right.p2.y = trap.bottom;
612 trap.right.p1.x = _cairo_fixed_to_16_16 (t->right.p1.x);
613 trap.right.p1.y = _cairo_fixed_to_16_16 (t->right.p1.y);
614 trap.right.p2.x = _cairo_fixed_to_16_16 (t->right.p2.x);
615 trap.right.p2.y = _cairo_fixed_to_16_16 (t->right.p2.y);
618 pixman_rasterize_trapezoid (image, &trap, -dst_x, -dst_y);
623 static cairo_int_status_t
624 composite_traps (void *_dst,
626 cairo_surface_t *abstract_src,
631 const cairo_rectangle_int_t *extents,
632 cairo_antialias_t antialias,
633 cairo_traps_t *traps)
635 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
636 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
637 pixman_image_t *mask;
638 pixman_format_code_t format;
640 TRACE ((stderr, "%s\n", __FUNCTION__));
642 /* Special case adding trapezoids onto a mask surface; we want to avoid
643 * creating an intermediate temporary mask unnecessarily.
645 * We make the assumption here that the portion of the trapezoids
646 * contained within the surface is bounded by [dst_x,dst_y,width,height];
647 * the Cairo core code passes bounds based on the trapezoid extents.
649 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
650 if (dst->pixman_format == format &&
651 (abstract_src == NULL ||
652 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
654 _pixman_image_add_traps (dst->pixman_image, dst_x, dst_y, traps);
655 return CAIRO_STATUS_SUCCESS;
658 mask = pixman_image_create_bits (format,
659 extents->width, extents->height,
661 if (unlikely (mask == NULL))
662 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
664 _pixman_image_add_traps (mask, extents->x, extents->y, traps);
665 pixman_image_composite32 (_pixman_operator (op),
666 src->pixman_image, mask, dst->pixman_image,
667 extents->x + src_x, extents->y + src_y,
669 extents->x - dst_x, extents->y - dst_y,
670 extents->width, extents->height);
672 pixman_image_unref (mask);
674 return CAIRO_STATUS_SUCCESS;
678 set_point (pixman_point_fixed_t *p, cairo_point_t *c)
680 p->x = _cairo_fixed_to_16_16 (c->x);
681 p->y = _cairo_fixed_to_16_16 (c->y);
685 _pixman_image_add_tristrip (pixman_image_t *image,
686 int dst_x, int dst_y,
687 cairo_tristrip_t *strip)
689 pixman_triangle_t tri;
690 pixman_point_fixed_t *p[3] = {&tri.p1, &tri.p2, &tri.p3 };
693 set_point (p[0], &strip->points[0]);
694 set_point (p[1], &strip->points[1]);
695 set_point (p[2], &strip->points[2]);
696 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
697 for (n = 3; n < strip->num_points; n++) {
698 set_point (p[n%3], &strip->points[n]);
699 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
703 static cairo_int_status_t
704 composite_tristrip (void *_dst,
706 cairo_surface_t *abstract_src,
711 const cairo_rectangle_int_t *extents,
712 cairo_antialias_t antialias,
713 cairo_tristrip_t *strip)
715 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
716 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
717 pixman_image_t *mask;
718 pixman_format_code_t format;
720 TRACE ((stderr, "%s\n", __FUNCTION__));
722 if (strip->num_points < 3)
723 return CAIRO_STATUS_SUCCESS;
725 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
726 if (dst->pixman_format == format &&
727 (abstract_src == NULL ||
728 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
730 _pixman_image_add_tristrip (dst->pixman_image, dst_x, dst_y, strip);
731 return CAIRO_STATUS_SUCCESS;
734 mask = pixman_image_create_bits (format,
735 extents->width, extents->height,
737 if (unlikely (mask == NULL))
738 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
740 _pixman_image_add_tristrip (mask, extents->x, extents->y, strip);
741 pixman_image_composite32 (_pixman_operator (op),
742 src->pixman_image, mask, dst->pixman_image,
743 extents->x + src_x, extents->y + src_y,
745 extents->x - dst_x, extents->y - dst_y,
746 extents->width, extents->height);
748 pixman_image_unref (mask);
750 return CAIRO_STATUS_SUCCESS;
753 static cairo_int_status_t
754 check_composite_glyphs (const cairo_composite_rectangles_t *extents,
755 cairo_scaled_font_t *scaled_font,
756 cairo_glyph_t *glyphs,
759 return CAIRO_STATUS_SUCCESS;
762 #if HAS_PIXMAN_GLYPHS
763 static pixman_glyph_cache_t *global_glyph_cache;
765 static inline pixman_glyph_cache_t *
766 get_glyph_cache (void)
768 if (!global_glyph_cache)
769 global_glyph_cache = pixman_glyph_cache_create ();
771 return global_glyph_cache;
775 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
776 cairo_scaled_glyph_t *scaled_glyph)
778 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
780 if (global_glyph_cache) {
781 pixman_glyph_cache_remove (
782 global_glyph_cache, scaled_font,
783 (void *)_cairo_scaled_glyph_index (scaled_glyph));
786 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
789 static cairo_int_status_t
790 composite_glyphs (void *_dst,
792 cairo_surface_t *_src,
797 cairo_composite_glyphs_info_t *info)
799 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
800 pixman_glyph_cache_t *glyph_cache;
801 pixman_glyph_t pglyphs_stack[CAIRO_STACK_ARRAY_LENGTH (pixman_glyph_t)];
802 pixman_glyph_t *pglyphs = pglyphs_stack;
806 TRACE ((stderr, "%s\n", __FUNCTION__));
808 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
810 glyph_cache = get_glyph_cache();
811 if (unlikely (glyph_cache == NULL)) {
812 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
816 pixman_glyph_cache_freeze (glyph_cache);
818 if (info->num_glyphs > ARRAY_LENGTH (pglyphs_stack)) {
819 pglyphs = _cairo_malloc_ab (info->num_glyphs, sizeof (pixman_glyph_t));
820 if (unlikely (pglyphs == NULL)) {
821 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
827 for (i = 0; i < info->num_glyphs; i++) {
828 unsigned long index = info->glyphs[i].index;
831 glyph = pixman_glyph_cache_lookup (glyph_cache, info->font, (void *)index);
833 cairo_scaled_glyph_t *scaled_glyph;
834 cairo_image_surface_t *glyph_surface;
836 /* This call can actually end up recursing, so we have to
837 * drop the mutex around it.
839 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
840 status = _cairo_scaled_glyph_lookup (info->font, index,
841 CAIRO_SCALED_GLYPH_INFO_SURFACE,
843 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
845 if (unlikely (status))
848 glyph_surface = scaled_glyph->surface;
849 glyph = pixman_glyph_cache_insert (glyph_cache, info->font, (void *)index,
850 glyph_surface->base.device_transform.x0,
851 glyph_surface->base.device_transform.y0,
852 glyph_surface->pixman_image);
853 if (unlikely (!glyph)) {
854 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
859 pg->x = _cairo_lround (info->glyphs[i].x);
860 pg->y = _cairo_lround (info->glyphs[i].y);
865 if (info->use_mask) {
866 pixman_format_code_t mask_format;
868 mask_format = pixman_glyph_get_mask_format (glyph_cache, pg - pglyphs, pglyphs);
870 pixman_composite_glyphs (_pixman_operator (op),
871 ((cairo_image_source_t *)_src)->pixman_image,
872 to_pixman_image (_dst),
874 info->extents.x + src_x, info->extents.y + src_y,
875 info->extents.x, info->extents.y,
876 info->extents.x - dst_x, info->extents.y - dst_y,
877 info->extents.width, info->extents.height,
878 glyph_cache, pg - pglyphs, pglyphs);
880 pixman_composite_glyphs_no_mask (_pixman_operator (op),
881 ((cairo_image_source_t *)_src)->pixman_image,
882 to_pixman_image (_dst),
885 glyph_cache, pg - pglyphs, pglyphs);
889 pixman_glyph_cache_thaw (glyph_cache);
891 if (pglyphs != pglyphs_stack)
895 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
900 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
901 cairo_scaled_glyph_t *scaled_glyph)
905 static cairo_int_status_t
906 composite_one_glyph (void *_dst,
908 cairo_surface_t *_src,
913 cairo_composite_glyphs_info_t *info)
915 cairo_image_surface_t *glyph_surface;
916 cairo_scaled_glyph_t *scaled_glyph;
917 cairo_status_t status;
920 TRACE ((stderr, "%s\n", __FUNCTION__));
922 status = _cairo_scaled_glyph_lookup (info->font,
923 info->glyphs[0].index,
924 CAIRO_SCALED_GLYPH_INFO_SURFACE,
927 if (unlikely (status))
930 glyph_surface = scaled_glyph->surface;
931 if (glyph_surface->width == 0 || glyph_surface->height == 0)
932 return CAIRO_INT_STATUS_NOTHING_TO_DO;
934 /* round glyph locations to the nearest pixel */
935 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
936 x = _cairo_lround (info->glyphs[0].x -
937 glyph_surface->base.device_transform.x0);
938 y = _cairo_lround (info->glyphs[0].y -
939 glyph_surface->base.device_transform.y0);
941 pixman_image_composite32 (_pixman_operator (op),
942 ((cairo_image_source_t *)_src)->pixman_image,
943 glyph_surface->pixman_image,
944 to_pixman_image (_dst),
945 x + src_x, y + src_y,
947 x - dst_x, y - dst_y,
948 glyph_surface->width,
949 glyph_surface->height);
951 return CAIRO_INT_STATUS_SUCCESS;
954 static cairo_int_status_t
955 composite_glyphs_via_mask (void *_dst,
957 cairo_surface_t *_src,
962 cairo_composite_glyphs_info_t *info)
964 cairo_scaled_glyph_t *glyph_cache[64];
965 pixman_image_t *white = _pixman_image_for_color (CAIRO_COLOR_WHITE);
966 cairo_scaled_glyph_t *scaled_glyph;
968 pixman_image_t *mask;
969 pixman_format_code_t format;
970 cairo_status_t status;
973 TRACE ((stderr, "%s\n", __FUNCTION__));
975 if (unlikely (white == NULL))
976 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
978 /* XXX convert the glyphs to common formats a8/a8r8g8b8 to hit
979 * optimised paths through pixman. Should we increase the bit
980 * depth of the target surface, we should reconsider the appropriate
984 status = _cairo_scaled_glyph_lookup (info->font,
985 info->glyphs[0].index,
986 CAIRO_SCALED_GLYPH_INFO_SURFACE,
988 if (unlikely (status)) {
989 pixman_image_unref (white);
993 memset (glyph_cache, 0, sizeof (glyph_cache));
994 glyph_cache[info->glyphs[0].index % ARRAY_LENGTH (glyph_cache)] = scaled_glyph;
997 i = (info->extents.width + 3) & ~3;
998 if (scaled_glyph->surface->base.content & CAIRO_CONTENT_COLOR) {
999 format = PIXMAN_a8r8g8b8;
1000 i = info->extents.width * 4;
1003 if (i * info->extents.height > (int) sizeof (buf)) {
1004 mask = pixman_image_create_bits (format,
1005 info->extents.width,
1006 info->extents.height,
1009 memset (buf, 0, i * info->extents.height);
1010 mask = pixman_image_create_bits (format,
1011 info->extents.width,
1012 info->extents.height,
1013 (uint32_t *)buf, i);
1015 if (unlikely (mask == NULL)) {
1016 pixman_image_unref (white);
1017 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1020 status = CAIRO_STATUS_SUCCESS;
1021 for (i = 0; i < info->num_glyphs; i++) {
1022 unsigned long glyph_index = info->glyphs[i].index;
1023 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1024 cairo_image_surface_t *glyph_surface;
1027 scaled_glyph = glyph_cache[cache_index];
1028 if (scaled_glyph == NULL ||
1029 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1031 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1032 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1035 if (unlikely (status)) {
1036 pixman_image_unref (mask);
1037 pixman_image_unref (white);
1041 glyph_cache[cache_index] = scaled_glyph;
1044 glyph_surface = scaled_glyph->surface;
1045 if (glyph_surface->width && glyph_surface->height) {
1046 if (glyph_surface->base.content & CAIRO_CONTENT_COLOR &&
1047 format == PIXMAN_a8) {
1048 pixman_image_t *ca_mask;
1050 format = PIXMAN_a8r8g8b8;
1051 ca_mask = pixman_image_create_bits (format,
1052 info->extents.width,
1053 info->extents.height,
1055 if (unlikely (ca_mask == NULL)) {
1056 pixman_image_unref (mask);
1057 pixman_image_unref (white);
1058 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1061 pixman_image_composite32 (PIXMAN_OP_SRC,
1062 white, mask, ca_mask,
1066 info->extents.width,
1067 info->extents.height);
1068 pixman_image_unref (mask);
1072 /* round glyph locations to the nearest pixel */
1073 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1074 x = _cairo_lround (info->glyphs[i].x -
1075 glyph_surface->base.device_transform.x0);
1076 y = _cairo_lround (info->glyphs[i].y -
1077 glyph_surface->base.device_transform.y0);
1079 if (glyph_surface->pixman_format == format) {
1080 pixman_image_composite32 (PIXMAN_OP_ADD,
1081 glyph_surface->pixman_image, NULL, mask,
1084 x - info->extents.x, y - info->extents.y,
1085 glyph_surface->width,
1086 glyph_surface->height);
1088 pixman_image_composite32 (PIXMAN_OP_ADD,
1089 white, glyph_surface->pixman_image, mask,
1092 x - info->extents.x, y - info->extents.y,
1093 glyph_surface->width,
1094 glyph_surface->height);
1099 if (format == PIXMAN_a8r8g8b8)
1100 pixman_image_set_component_alpha (mask, TRUE);
1102 pixman_image_composite32 (_pixman_operator (op),
1103 ((cairo_image_source_t *)_src)->pixman_image,
1105 to_pixman_image (_dst),
1106 info->extents.x + src_x, info->extents.y + src_y,
1108 info->extents.x - dst_x, info->extents.y - dst_y,
1109 info->extents.width, info->extents.height);
1110 pixman_image_unref (mask);
1111 pixman_image_unref (white);
1113 return CAIRO_STATUS_SUCCESS;
1116 static cairo_int_status_t
1117 composite_glyphs (void *_dst,
1118 cairo_operator_t op,
1119 cairo_surface_t *_src,
1124 cairo_composite_glyphs_info_t *info)
1126 cairo_scaled_glyph_t *glyph_cache[64];
1127 pixman_image_t *dst, *src;
1128 cairo_status_t status;
1131 TRACE ((stderr, "%s\n", __FUNCTION__));
1133 if (info->num_glyphs == 1)
1134 return composite_one_glyph(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1137 return composite_glyphs_via_mask(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1139 op = _pixman_operator (op);
1140 dst = to_pixman_image (_dst);
1141 src = ((cairo_image_source_t *)_src)->pixman_image;
1143 memset (glyph_cache, 0, sizeof (glyph_cache));
1144 status = CAIRO_STATUS_SUCCESS;
1146 for (i = 0; i < info->num_glyphs; i++) {
1148 cairo_image_surface_t *glyph_surface;
1149 cairo_scaled_glyph_t *scaled_glyph;
1150 unsigned long glyph_index = info->glyphs[i].index;
1151 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1153 scaled_glyph = glyph_cache[cache_index];
1154 if (scaled_glyph == NULL ||
1155 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1157 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1158 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1161 if (unlikely (status))
1164 glyph_cache[cache_index] = scaled_glyph;
1167 glyph_surface = scaled_glyph->surface;
1168 if (glyph_surface->width && glyph_surface->height) {
1169 /* round glyph locations to the nearest pixel */
1170 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1171 x = _cairo_lround (info->glyphs[i].x -
1172 glyph_surface->base.device_transform.x0);
1173 y = _cairo_lround (info->glyphs[i].y -
1174 glyph_surface->base.device_transform.y0);
1176 pixman_image_composite32 (op, src, glyph_surface->pixman_image, dst,
1177 x + src_x, y + src_y,
1179 x - dst_x, y - dst_y,
1180 glyph_surface->width,
1181 glyph_surface->height);
1189 static cairo_int_status_t
1190 check_composite (const cairo_composite_rectangles_t *extents)
1192 return CAIRO_STATUS_SUCCESS;
1195 const cairo_compositor_t *
1196 _cairo_image_traps_compositor_get (void)
1198 static cairo_traps_compositor_t compositor;
1200 if (compositor.base.delegate == NULL) {
1201 _cairo_traps_compositor_init (&compositor,
1202 &__cairo_no_compositor);
1203 compositor.acquire = acquire;
1204 compositor.release = release;
1205 compositor.set_clip_region = set_clip_region;
1206 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1207 compositor.draw_image_boxes = draw_image_boxes;
1208 //compositor.copy_boxes = copy_boxes;
1209 compositor.fill_boxes = fill_boxes;
1210 compositor.check_composite = check_composite;
1211 compositor.composite = composite;
1212 compositor.lerp = lerp;
1213 //compositor.check_composite_boxes = check_composite_boxes;
1214 compositor.composite_boxes = composite_boxes;
1215 //compositor.check_composite_traps = check_composite_traps;
1216 compositor.composite_traps = composite_traps;
1217 //compositor.check_composite_tristrip = check_composite_traps;
1218 compositor.composite_tristrip = composite_tristrip;
1219 compositor.check_composite_glyphs = check_composite_glyphs;
1220 compositor.composite_glyphs = composite_glyphs;
1223 return &compositor.base;
/* Return the process-wide mask compositor for image surfaces, lazily
 * initialized on first call.  Unsupported operations fall through to the
 * traps compositor, which is installed here as the delegate. */
1226 const cairo_compositor_t *
1227 _cairo_image_mask_compositor_get (void)
1229 static cairo_mask_compositor_t compositor;
1231 if (compositor.base.delegate == NULL) {
1232 _cairo_mask_compositor_init (&compositor,
1233 _cairo_image_traps_compositor_get ());
1234 compositor.acquire = acquire;
1235 compositor.release = release;
1236 compositor.set_clip_region = set_clip_region;
1237 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1238 compositor.draw_image_boxes = draw_image_boxes;
1239 compositor.fill_rectangles = fill_rectangles;
1240 compositor.fill_boxes = fill_boxes;
1241 //compositor.check_composite = check_composite;
1242 compositor.composite = composite;
1243 //compositor.lerp = lerp;
1244 //compositor.check_composite_boxes = check_composite_boxes;
1245 compositor.composite_boxes = composite_boxes;
1246 compositor.check_composite_glyphs = check_composite_glyphs;
1247 compositor.composite_glyphs = composite_glyphs;
1250 return &compositor.base;
1253 #if PIXMAN_HAS_COMPOSITOR
/* Span-renderer state used when pixman provides an image compositor
 * (PIXMAN_HAS_COMPOSITOR).  Must fit inside the opaque
 * cairo_abstract_span_renderer_t slab — enforced by the assert below. */
1254 typedef struct _cairo_image_span_renderer {
1255 cairo_span_renderer_t base;
1257 pixman_image_compositor_t *compositor;
1258 pixman_image_t *src, *mask;
1260 cairo_rectangle_int_t extents;
1261 } cairo_image_span_renderer_t;
1262 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* render_rows callback for the bounded, fully-opaque case: blit each span
 * with non-zero coverage straight through the pixman compositor.  Spans
 * are half-open, so span width is spans[1].x - spans[0].x. */
1264 static cairo_status_t
1265 _cairo_image_bounded_opaque_spans (void *abstract_renderer,
1267 const cairo_half_open_span_t *spans,
1270 cairo_image_span_renderer_t *r = abstract_renderer;
1273 return CAIRO_STATUS_SUCCESS;
1276 if (spans[0].coverage)
1277 pixman_image_compositor_blt (r->compositor,
1279 spans[1].x - spans[0].x, height,
1282 } while (--num_spans > 1);
1284 return CAIRO_STATUS_SUCCESS;
/* Bounded variant with a global opacity: each span's coverage is scaled
 * by r->opacity before being handed to the pixman compositor. */
1287 static cairo_status_t
1288 _cairo_image_bounded_spans (void *abstract_renderer,
1290 const cairo_half_open_span_t *spans,
1293 cairo_image_span_renderer_t *r = abstract_renderer;
1296 return CAIRO_STATUS_SUCCESS;
1299 if (spans[0].coverage) {
1300 pixman_image_compositor_blt (r->compositor,
1302 spans[1].x - spans[0].x, height,
1303 r->opacity * spans[0].coverage);
1306 } while (--num_spans > 1);
1308 return CAIRO_STATUS_SUCCESS;
/* Unbounded variant: in addition to compositing the covered spans, it
 * must clear (zero-coverage blt) every part of the unbounded extents not
 * reached by a span — the rows above this scanline band, the gap left of
 * the first span, and the gap right of the last span.  r->extents.y
 * tracks the next row that still needs clearing. */
1311 static cairo_status_t
1312 _cairo_image_unbounded_spans (void *abstract_renderer,
1314 const cairo_half_open_span_t *spans,
1317 cairo_image_span_renderer_t *r = abstract_renderer;
1319 assert (y + height <= r->extents.height);
1320 if (y > r->extents.y) {
1321 pixman_image_compositor_blt (r->compositor,
1322 r->extents.x, r->extents.y,
1323 r->extents.width, y - r->extents.y,
1327 if (num_spans == 0) {
1328 pixman_image_compositor_blt (r->compositor,
1330 r->extents.width, height,
1333 if (spans[0].x != r->extents.x) {
1334 pixman_image_compositor_blt (r->compositor,
1336 spans[0].x - r->extents.x,
1342 assert (spans[0].x < r->extents.x + r->extents.width);
1343 pixman_image_compositor_blt (r->compositor,
1345 spans[1].x - spans[0].x, height,
1346 r->opacity * spans[0].coverage);
1348 } while (--num_spans > 1);
1350 if (spans[0].x != r->extents.x + r->extents.width) {
1351 assert (spans[0].x < r->extents.x + r->extents.width);
1352 pixman_image_compositor_blt (r->compositor,
1354 r->extents.x + r->extents.width - spans[0].x, height,
1359 r->extents.y = y + height;
1360 return CAIRO_STATUS_SUCCESS;
/* Clipped variant: like the unbounded case but skips spans flagged as
 * "inverse" (outside the clip), so no explicit clearing of gaps is done
 * here — only non-inverse spans are composited. */
1363 static cairo_status_t
1364 _cairo_image_clipped_spans (void *abstract_renderer,
1366 const cairo_half_open_span_t *spans,
1369 cairo_image_span_renderer_t *r = abstract_renderer;
1374 if (! spans[0].inverse)
1375 pixman_image_compositor_blt (r->compositor,
1377 spans[1].x - spans[0].x, height,
1378 r->opacity * spans[0].coverage);
1380 } while (--num_spans > 1);
1382 r->extents.y = y + height;
1383 return CAIRO_STATUS_SUCCESS;
/* finish callback for the unbounded renderer: clear any rows below the
 * last span band (r->extents.y holds the first row not yet touched;
 * r->extents.height was pre-adjusted to the bottom edge at init). */
1386 static cairo_status_t
1387 _cairo_image_finish_unbounded_spans (void *abstract_renderer)
1389 cairo_image_span_renderer_t *r = abstract_renderer;
1391 if (r->extents.y < r->extents.height) {
1392 pixman_image_compositor_blt (r->compositor,
1393 r->extents.x, r->extents.y,
1395 r->extents.height - r->extents.y,
1399 return CAIRO_STATUS_SUCCESS;
/* Initialize the span renderer for the PIXMAN_HAS_COMPOSITOR path:
 *  - map the cairo operator onto a pixman op (LERP variants for CLEAR and
 *    SOURCE; plain ops when the destination is known to be clear);
 *  - realize the source pattern (and a non-solid mask pattern, if any) as
 *    pixman images; a solid mask is folded into r->opacity instead;
 *  - select row callbacks for the bounded / clipped / unbounded cases and
 *    create the pixman image compositor over the unbounded extents.
 * Returns NO_MEMORY on image-creation failure and NOTHING_TO_DO when
 * pixman declines to create a compositor.
 * NOTE(review): this excerpt elides several lines (error-cleanup paths,
 * some declarations); tokens below are preserved verbatim. */
1402 static cairo_int_status_t
1403 span_renderer_init (cairo_abstract_span_renderer_t *_r,
1404 const cairo_composite_rectangles_t *composite,
1405 cairo_bool_t needs_clip)
1407 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
1408 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1409 const cairo_pattern_t *source = &composite->source_pattern.base;
1410 cairo_operator_t op = composite->op;
1414 TRACE ((stderr, "%s\n", __FUNCTION__));
1416 if (op == CAIRO_OPERATOR_CLEAR) {
1417 op = PIXMAN_OP_LERP_CLEAR;
1418 } else if (dst->base.is_clear &&
1419 (op == CAIRO_OPERATOR_SOURCE ||
1420 op == CAIRO_OPERATOR_OVER ||
1421 op == CAIRO_OPERATOR_ADD)) {
1423 } else if (op == CAIRO_OPERATOR_SOURCE) {
1424 op = PIXMAN_OP_LERP_SRC;
1426 op = _pixman_operator (op);
1429 r->compositor = NULL;
1431 r->src = _pixman_image_for_pattern (dst, source, FALSE,
1432 &composite->unbounded,
1433 &composite->source_sample_area,
1435 if (unlikely (r->src == NULL))
1436 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1439 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
1440 r->opacity = composite->mask_pattern.solid.color.alpha;
1442 r->mask = _pixman_image_for_pattern (dst,
1443 &composite->mask_pattern.base,
1445 &composite->unbounded,
1446 &composite->mask_sample_area,
1448 if (unlikely (r->mask == NULL))
1449 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1451 /* XXX Component-alpha? */
1452 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
1453 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
1455 pixman_image_unref (r->src);
1463 if (composite->is_bounded) {
1464 if (r->opacity == 1.)
1465 r->base.render_rows = _cairo_image_bounded_opaque_spans;
1467 r->base.render_rows = _cairo_image_bounded_spans;
1468 r->base.finish = NULL;
1471 r->base.render_rows = _cairo_image_clipped_spans;
1473 r->base.render_rows = _cairo_image_unbounded_spans;
1474 r->base.finish = _cairo_image_finish_unbounded_spans;
1475 r->extents = composite->unbounded;
1476 r->extents.height += r->extents.y;
1480 pixman_image_create_compositor (op, r->src, r->mask, dst->pixman_image,
1481 composite->unbounded.x + src_x,
1482 composite->unbounded.y + src_y,
1483 composite->unbounded.x + mask_x,
1484 composite->unbounded.y + mask_y,
1485 composite->unbounded.x,
1486 composite->unbounded.y,
1487 composite->unbounded.width,
1488 composite->unbounded.height);
1489 if (unlikely (r->compositor == NULL))
1490 return CAIRO_INT_STATUS_NOTHING_TO_DO;
1492 return CAIRO_STATUS_SUCCESS;
/* Tear down the span renderer: run the finish callback if rendering
 * succeeded, then release the pixman compositor and the src/mask images.
 * (Guard conditions around the unrefs are elided in this excerpt.) */
1496 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
1497 cairo_int_status_t status)
1499 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
1501 TRACE ((stderr, "%s\n", __FUNCTION__));
1503 if (status == CAIRO_INT_STATUS_SUCCESS && r->base.finish)
1507 pixman_image_compositor_destroy (r->compositor);
1510 pixman_image_unref (r->src);
1512 pixman_image_unref (r->mask);
/* Span-renderer state for the fallback (no pixman compositor) path.  The
 * trailing buf[] doubles as inline scratch for a one-line a8 mask; the
 * whole struct must fit in cairo_abstract_span_renderer_t (asserted).
 * NOTE(review): the union ("u") holding the fill/blit/composite/mask
 * sub-structs referenced elsewhere is elided from this excerpt. */
1515 typedef struct _cairo_image_span_renderer {
1516 cairo_span_renderer_t base;
1518 const cairo_composite_rectangles_t *composite;
1524 pixman_image_t *src, *mask;
1538 pixman_image_t *dst;
1543 cairo_rectangle_int_t extents;
1549 uint8_t buf[sizeof(cairo_abstract_span_renderer_t)-128];
1550 } cairo_image_span_renderer_t;
1551 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* Rasterize one scanline band of spans into the a8 mask surface: write
 * opacity-scaled coverage bytes for each span into the first row, then
 * replicate that row down the band with memcpy for height > 1. */
1553 static cairo_status_t
1554 _cairo_image_spans (void *abstract_renderer,
1556 const cairo_half_open_span_t *spans,
1559 cairo_image_span_renderer_t *r = abstract_renderer;
1560 uint8_t *mask, *row;
1564 return CAIRO_STATUS_SUCCESS;
1566 mask = r->u.mask.data + (y - r->u.mask.extents.y) * r->u.mask.stride;
1567 mask += spans[0].x - r->u.mask.extents.x;
1571 len = spans[1].x - spans[0].x;
1572 if (spans[0].coverage) {
1573 *row++ = r->opacity * spans[0].coverage;
1575 memset (row, row[-1], len);
1579 } while (--num_spans > 1);
1584 mask += r->u.mask.stride;
1585 memcpy (mask, row, len);
1588 return CAIRO_STATUS_SUCCESS;
/* Like _cairo_image_spans, but for unbounded operations: also zero the
 * mask rows skipped since the previous band, the gap before the first
 * span, and the gap after the last span, so uncovered mask area reads as
 * zero coverage.  u.mask.extents.y / u.mask.data advance as rows are
 * consumed. */
1591 static cairo_status_t
1592 _cairo_image_spans_and_zero (void *abstract_renderer,
1594 const cairo_half_open_span_t *spans,
1597 cairo_image_span_renderer_t *r = abstract_renderer;
1601 mask = r->u.mask.data;
1602 if (y > r->u.mask.extents.y) {
1603 len = (y - r->u.mask.extents.y) * r->u.mask.stride;
1604 memset (mask, 0, len);
1608 r->u.mask.extents.y = y + height;
1609 r->u.mask.data = mask + height * r->u.mask.stride;
1610 if (num_spans == 0) {
1611 memset (mask, 0, height * r->u.mask.stride);
1613 uint8_t *row = mask;
1615 if (spans[0].x != r->u.mask.extents.x) {
1616 len = spans[0].x - r->u.mask.extents.x;
1617 memset (row, 0, len);
1622 len = spans[1].x - spans[0].x;
1623 *row++ = r->opacity * spans[0].coverage;
1625 memset (row, row[-1], --len);
1629 } while (--num_spans > 1);
1631 if (spans[0].x != r->u.mask.extents.x + r->u.mask.extents.width) {
1632 len = r->u.mask.extents.x + r->u.mask.extents.width - spans[0].x;
1633 memset (row, 0, len);
1638 mask += r->u.mask.stride;
1639 memcpy (mask, row, r->u.mask.extents.width);
1643 return CAIRO_STATUS_SUCCESS;
/* finish callback: zero any mask rows that remain below the last band
 * (u.mask.extents.y is the next unwritten row; .height the bottom). */
1646 static cairo_status_t
1647 _cairo_image_finish_spans_and_zero (void *abstract_renderer)
1649 cairo_image_span_renderer_t *r = abstract_renderer;
1651 if (r->u.mask.extents.y < r->u.mask.extents.height)
1652 memset (r->u.mask.data, 0, (r->u.mask.extents.height - r->u.mask.extents.y) * r->u.mask.stride);
1654 return CAIRO_STATUS_SUCCESS;
/* Direct solid fill of 8bpp destinations (mono coverage): memset each
 * covered span with the precomputed pixel.  The h == 1 fast path avoids
 * the per-row loop used for taller bands. */
1657 static cairo_status_t
1658 _fill8_spans (void *abstract_renderer, int y, int h,
1659 const cairo_half_open_span_t *spans, unsigned num_spans)
1661 cairo_image_span_renderer_t *r = abstract_renderer;
1664 return CAIRO_STATUS_SUCCESS;
1666 if (likely(h == 1)) {
1668 if (spans[0].coverage) {
1669 int len = spans[1].x - spans[0].x;
1670 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
1672 *d = r->u.fill.pixel;
1674 memset(d, r->u.fill.pixel, len);
1677 } while (--num_spans > 1);
1680 if (spans[0].coverage) {
1683 int len = spans[1].x - spans[0].x;
1684 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1686 *d = r->u.fill.pixel;
1688 memset(d, r->u.fill.pixel, len);
1693 } while (--num_spans > 1);
1696 return CAIRO_STATUS_SUCCESS;
/* Direct solid fill of 16bpp destinations (mono coverage): store the
 * precomputed pixel word-by-word across each covered span. */
1699 static cairo_status_t
1700 _fill16_spans (void *abstract_renderer, int y, int h,
1701 const cairo_half_open_span_t *spans, unsigned num_spans)
1703 cairo_image_span_renderer_t *r = abstract_renderer;
1706 return CAIRO_STATUS_SUCCESS;
1708 if (likely(h == 1)) {
1710 if (spans[0].coverage) {
1711 int len = spans[1].x - spans[0].x;
1712 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*2);
1714 *d++ = r->u.fill.pixel;
1717 } while (--num_spans > 1);
1720 if (spans[0].coverage) {
1723 int len = spans[1].x - spans[0].x;
1724 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*2);
1726 *d++ = r->u.fill.pixel;
1731 } while (--num_spans > 1);
1734 return CAIRO_STATUS_SUCCESS;
/* Direct solid fill of 32bpp destinations (mono coverage).  Long spans
 * (> 16 px, per the visible threshold) are delegated to pixman_fill,
 * which can use platform fast paths; short spans are stored inline. */
1737 static cairo_status_t
1738 _fill32_spans (void *abstract_renderer, int y, int h,
1739 const cairo_half_open_span_t *spans, unsigned num_spans)
1741 cairo_image_span_renderer_t *r = abstract_renderer;
1744 return CAIRO_STATUS_SUCCESS;
1746 if (likely(h == 1)) {
1748 if (spans[0].coverage) {
1749 int len = spans[1].x - spans[0].x;
1751 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1752 spans[0].x, y, len, 1, r->u.fill.pixel);
1754 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
1756 *d++ = r->u.fill.pixel;
1760 } while (--num_spans > 1);
1763 if (spans[0].coverage) {
1764 if (spans[1].x - spans[0].x > 16) {
1765 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1766 spans[0].x, y, spans[1].x - spans[0].x, h,
1771 int len = spans[1].x - spans[0].x;
1772 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
1774 *d++ = r->u.fill.pixel;
1780 } while (--num_spans > 1);
1783 return CAIRO_STATUS_SUCCESS;
/* Generic solid fill for any bpp: forward every covered span directly to
 * pixman_fill.  Used when no bpp-specific fast path applies. */
1787 static cairo_status_t
1788 _fill_spans (void *abstract_renderer, int y, int h,
1789 const cairo_half_open_span_t *spans, unsigned num_spans)
1791 cairo_image_span_renderer_t *r = abstract_renderer;
1794 return CAIRO_STATUS_SUCCESS;
1797 if (spans[0].coverage) {
1798 pixman_fill ((uint32_t *) r->data, r->stride, r->bpp,
1800 spans[1].x - spans[0].x, h,
1804 } while (--num_spans > 1);
1806 return CAIRO_STATUS_SUCCESS;
/* Direct copy (blit) of covered spans from an identically-formatted
 * source image into the destination.  Small spans of 1/2/4/8 bytes are
 * copied with single typed stores; longer runs fall through to memcpy.
 * NOTE(review): the typed single-store paths cast byte pointers to wider
 * integer types — presumably alignment is guaranteed by the cpp switch
 * elided from this excerpt; confirm against the full source. */
1810 static cairo_status_t
1811 _blit_spans (void *abstract_renderer, int y, int h,
1812 const cairo_half_open_span_t *spans, unsigned num_spans)
1814 cairo_image_span_renderer_t *r = abstract_renderer;
1818 return CAIRO_STATUS_SUCCESS;
1821 if (likely (h == 1)) {
1822 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
1823 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
1825 if (spans[0].coverage) {
1826 void *s = src + spans[0].x*cpp;
1827 void *d = dst + spans[0].x*cpp;
1828 int len = (spans[1].x - spans[0].x) * cpp;
1831 *(uint8_t *)d = *(uint8_t *)s;
1834 *(uint16_t *)d = *(uint16_t *)s;
1837 *(uint32_t *)d = *(uint32_t *)s;
1841 *(uint64_t *)d = *(uint64_t *)s;
1850 } while (--num_spans > 1);
1853 if (spans[0].coverage) {
1856 void *src = r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x*cpp;
1857 void *dst = r->u.blit.data + yy*r->u.blit.stride + spans[0].x*cpp;
1858 int len = (spans[1].x - spans[0].x) * cpp;
1861 *(uint8_t *)dst = *(uint8_t *)src;
1864 *(uint16_t *)dst = *(uint16_t *)src;
1867 *(uint32_t *)dst = *(uint32_t *)src;
1871 *(uint64_t *)dst = *(uint64_t *)src;
1875 memcpy(dst, src, len);
1882 } while (--num_spans > 1);
1885 return CAIRO_STATUS_SUCCESS;
/* Bounded mono (antialias-off) compositing: every covered span is
 * composited at full coverage via pixman_image_composite32 with the
 * renderer's operator; no mask image is needed. */
1888 static cairo_status_t
1889 _mono_spans (void *abstract_renderer, int y, int h,
1890 const cairo_half_open_span_t *spans, unsigned num_spans)
1892 cairo_image_span_renderer_t *r = abstract_renderer;
1895 return CAIRO_STATUS_SUCCESS;
1898 if (spans[0].coverage) {
1899 pixman_image_composite32 (r->op,
1900 r->src, NULL, r->u.composite.dst,
1901 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1904 spans[1].x - spans[0].x, h);
1907 } while (--num_spans > 1);
1909 return CAIRO_STATUS_SUCCESS;
/* Unbounded mono compositing: covered spans use r->op, uncovered area is
 * explicitly cleared with PIXMAN_OP_CLEAR — the rows skipped since the
 * last band (tracked in u.composite.mask_y), the strip left of the first
 * span, each zero-coverage span, and the strip right of the last span. */
1912 static cairo_status_t
1913 _mono_unbounded_spans (void *abstract_renderer, int y, int h,
1914 const cairo_half_open_span_t *spans, unsigned num_spans)
1916 cairo_image_span_renderer_t *r = abstract_renderer;
1918 if (num_spans == 0) {
1919 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1920 r->src, NULL, r->u.composite.dst,
1921 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1923 r->composite->unbounded.x, y,
1924 r->composite->unbounded.width, h);
1925 r->u.composite.mask_y = y + h;
1926 return CAIRO_STATUS_SUCCESS;
1929 if (y != r->u.composite.mask_y) {
1930 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1931 r->src, NULL, r->u.composite.dst,
1932 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1934 r->composite->unbounded.x, r->u.composite.mask_y,
1935 r->composite->unbounded.width, y - r->u.composite.mask_y);
1938 if (spans[0].x != r->composite->unbounded.x) {
1939 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1940 r->src, NULL, r->u.composite.dst,
1941 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1943 r->composite->unbounded.x, y,
1944 spans[0].x - r->composite->unbounded.x, h);
1948 int op = spans[0].coverage ? r->op : PIXMAN_OP_CLEAR;
1949 pixman_image_composite32 (op,
1950 r->src, NULL, r->u.composite.dst,
1951 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1954 spans[1].x - spans[0].x, h);
1956 } while (--num_spans > 1);
1958 if (spans[0].x != r->composite->unbounded.x + r->composite->unbounded.width) {
1959 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1960 r->src, NULL, r->u.composite.dst,
1961 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1964 r->composite->unbounded.x + r->composite->unbounded.width - spans[0].x, h);
1967 r->u.composite.mask_y = y + h;
1968 return CAIRO_STATUS_SUCCESS;
/* finish callback for unbounded mono rendering: clear any remaining rows
 * between the last band (u.composite.mask_y) and the bottom of the
 * unbounded extents. */
1971 static cairo_status_t
1972 _mono_finish_unbounded_spans (void *abstract_renderer)
1974 cairo_image_span_renderer_t *r = abstract_renderer;
1976 if (r->u.composite.mask_y < r->composite->unbounded.y + r->composite->unbounded.height) {
1977 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1978 r->src, NULL, r->u.composite.dst,
1979 r->composite->unbounded.x + r->u.composite.src_x, r->u.composite.mask_y + r->u.composite.src_y,
1981 r->composite->unbounded.x, r->u.composite.mask_y,
1982 r->composite->unbounded.width,
1983 r->composite->unbounded.y + r->composite->unbounded.height - r->u.composite.mask_y);
1986 return CAIRO_STATUS_SUCCESS;
/* Choose a renderer for antialias-off (mono) rendering.  Preference
 * order visible here:
 *  1. direct solid fill (_fill{8,16,32}_spans) when the operation reduces
 *     to writing a single pixel value and the color converts to the
 *     destination's pixman format;
 *  2. direct blit (_blit_spans) for SOURCE/effective-SOURCE copies from a
 *     same-format image surface at an integer translation that stays in
 *     bounds;
 *  3. generic pixman compositing (_mono_spans / _mono_unbounded_spans).
 * Returns UNSUPPORTED when antialiasing is requested or the mask is not
 * opaque solid, so a more general path can take over. */
1989 static cairo_int_status_t
1990 mono_renderer_init (cairo_image_span_renderer_t *r,
1991 const cairo_composite_rectangles_t *composite,
1992 cairo_antialias_t antialias,
1993 cairo_bool_t needs_clip)
1995 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1997 if (antialias != CAIRO_ANTIALIAS_NONE)
1998 return CAIRO_INT_STATUS_UNSUPPORTED;
2000 if (!_cairo_pattern_is_opaque_solid (&composite->mask_pattern.base))
2001 return CAIRO_INT_STATUS_UNSUPPORTED;
2003 r->base.render_rows = NULL;
2004 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2005 const cairo_color_t *color;
2007 color = &composite->source_pattern.solid.color;
2008 if (composite->op == CAIRO_OPERATOR_CLEAR)
2009 color = CAIRO_COLOR_TRANSPARENT;
2011 if (fill_reduces_to_source (composite->op, color, dst) &&
2012 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2013 /* Use plain C for the fill operations as the span length is
2014 * typically small, too small to payback the startup overheads of
2017 switch (PIXMAN_FORMAT_BPP(dst->pixman_format)) {
2018 case 8: r->base.render_rows = _fill8_spans; break;
2019 case 16: r->base.render_rows = _fill16_spans; break;
2020 case 32: r->base.render_rows = _fill32_spans; break;
2023 r->u.fill.data = dst->data;
2024 r->u.fill.stride = dst->stride;
2026 } else if ((composite->op == CAIRO_OPERATOR_SOURCE ||
2027 (composite->op == CAIRO_OPERATOR_OVER &&
2028 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2029 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2030 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2031 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2033 cairo_image_surface_t *src =
2034 to_image_surface(composite->source_pattern.surface.surface);
2037 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2039 composite->bounded.x + tx >= 0 &&
2040 composite->bounded.y + ty >= 0 &&
2041 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2042 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2044 r->u.blit.stride = dst->stride;
2045 r->u.blit.data = dst->data;
2046 r->u.blit.src_stride = src->stride;
2047 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2048 r->base.render_rows = _blit_spans;
2052 if (r->base.render_rows == NULL) {
2053 r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
2054 &composite->unbounded,
2055 &composite->source_sample_area,
2056 &r->u.composite.src_x, &r->u.composite.src_y);
2057 if (unlikely (r->src == NULL))
2058 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2060 r->u.composite.dst = to_pixman_image (composite->surface);
2061 r->op = _pixman_operator (composite->op);
2062 if (composite->is_bounded == 0) {
2063 r->base.render_rows = _mono_unbounded_spans;
2064 r->base.finish = _mono_finish_unbounded_spans;
2065 r->u.composite.mask_y = composite->unbounded.y;
2067 r->base.render_rows = _mono_spans;
2069 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
2071 return CAIRO_INT_STATUS_SUCCESS;
/* Fixed-point 8-bit-per-channel arithmetic helpers (pixman-style):
 * operate on two packed channels at once by masking the red/blue pair
 * (RB_MASK) and shifting by G_SHIFT for the other pair.  The "+ ONE_HALF
 * then fold the high byte back in" trick computes a correctly-rounded
 * x*y/255.  lerp8x4 blends all four channels of two 32-bit pixels by
 * alpha a: src*a + dst*(255-a), per channel. */
2074 #define ONE_HALF 0x7f
2075 #define RB_MASK 0x00ff00ff
2076 #define RB_ONE_HALF 0x007f007f
2077 #define RB_MASK_PLUS_ONE 0x01000100
2079 static inline uint32_t
2080 mul8x2_8 (uint32_t a, uint8_t b)
2082 uint32_t t = (a & RB_MASK) * b + RB_ONE_HALF;
2083 return ((t + ((t >> G_SHIFT) & RB_MASK)) >> G_SHIFT) & RB_MASK;
2086 static inline uint32_t
2087 add8x2_8x2 (uint32_t a, uint32_t b)
2090 t |= RB_MASK_PLUS_ONE - ((t >> G_SHIFT) & RB_MASK);
2094 static inline uint8_t
2095 mul8_8 (uint8_t a, uint8_t b)
2097 uint16_t t = a * (uint16_t)b + ONE_HALF;
2098 return ((t >> G_SHIFT) + t) >> G_SHIFT;
2101 static inline uint32_t
2102 lerp8x4 (uint32_t src, uint8_t a, uint32_t dst)
2104 return (add8x2_8x2 (mul8x2_8 (src, a),
2105 mul8x2_8 (dst, ~a)) |
2106 add8x2_8x2 (mul8x2_8 (src >> G_SHIFT, a),
2107 mul8x2_8 (dst >> G_SHIFT, ~a)) << G_SHIFT);
/* Antialiased solid fill into an A8 destination with an opaque mask:
 * full-coverage spans memset the pixel directly; partial coverage blends
 * dst = dst*(255-a) + pixel*a using the mul8_8 fixed-point helpers. */
2110 static cairo_status_t
2111 _fill_a8_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2112 const cairo_half_open_span_t *spans, unsigned num_spans)
2114 cairo_image_span_renderer_t *r = abstract_renderer;
2117 return CAIRO_STATUS_SUCCESS;
2119 if (likely(h == 1)) {
2120 uint8_t *d = r->u.fill.data + r->u.fill.stride*y;
2122 uint8_t a = spans[0].coverage;
2124 int len = spans[1].x - spans[0].x;
2126 memset(d + spans[0].x, r->u.fill.pixel, len);
2128 uint8_t s = mul8_8(a, r->u.fill.pixel);
2129 uint8_t *dst = d + spans[0].x;
2132 uint8_t t = mul8_8(*dst, a);
2138 } while (--num_spans > 1);
2141 uint8_t a = spans[0].coverage;
2146 int len = spans[1].x - spans[0].x;
2147 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2148 memset(d, r->u.fill.pixel, len);
2152 uint8_t s = mul8_8(a, r->u.fill.pixel);
2155 int len = spans[1].x - spans[0].x;
2156 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2158 uint8_t t = mul8_8(*d, a);
2166 } while (--num_spans > 1);
2169 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into xRGB32/ARGB32 with an opaque mask:
 * full-coverage spans are written directly (pixman_fill for long runs,
 * inline stores otherwise); partial coverage uses the packed per-channel
 * lerp8x4 blend. */
2172 static cairo_status_t
2173 _fill_xrgb32_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2174 const cairo_half_open_span_t *spans, unsigned num_spans)
2176 cairo_image_span_renderer_t *r = abstract_renderer;
2179 return CAIRO_STATUS_SUCCESS;
2181 if (likely(h == 1)) {
2183 uint8_t a = spans[0].coverage;
2185 int len = spans[1].x - spans[0].x;
2186 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2189 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
2190 spans[0].x, y, len, 1, r->u.fill.pixel);
2192 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2194 *d++ = r->u.fill.pixel;
2196 } else while (len--) {
2197 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2202 } while (--num_spans > 1);
2205 uint8_t a = spans[0].coverage;
2208 if (spans[1].x - spans[0].x > 16) {
2209 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
2210 spans[0].x, y, spans[1].x - spans[0].x, h,
2215 int len = spans[1].x - spans[0].x;
2216 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2218 *d++ = r->u.fill.pixel;
2225 int len = spans[1].x - spans[0].x;
2226 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2228 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2236 } while (--num_spans > 1);
2239 return CAIRO_STATUS_SUCCESS;
/* A8 solid fill with a non-opaque mask: effective alpha is coverage
 * scaled by the mask opacity stored in r->op.  Blend is computed with
 * 16-bit intermediates: t = dst*(255-a) + pixel*a, rounded via
 * (t + (t>>8)) >> 8. */
2242 static cairo_status_t
2243 _fill_a8_lerp_spans (void *abstract_renderer, int y, int h,
2244 const cairo_half_open_span_t *spans, unsigned num_spans)
2246 cairo_image_span_renderer_t *r = abstract_renderer;
2249 return CAIRO_STATUS_SUCCESS;
2251 if (likely(h == 1)) {
2253 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2255 int len = spans[1].x - spans[0].x;
2256 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
2257 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2260 uint16_t t = *d*ia + p;
2261 *d++ = (t + (t>>8)) >> 8;
2265 } while (--num_spans > 1);
2268 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2271 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2274 int len = spans[1].x - spans[0].x;
2275 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2277 uint16_t t = *d*ia + p;
2278 *d++ = (t + (t>>8)) >> 8;
2284 } while (--num_spans > 1);
2287 return CAIRO_STATUS_SUCCESS;
/* xRGB32 solid fill with a non-opaque mask: per-span alpha is coverage
 * scaled by the mask opacity (r->op), then each pixel is blended with
 * the packed-channel lerp8x4. */
2290 static cairo_status_t
2291 _fill_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2292 const cairo_half_open_span_t *spans, unsigned num_spans)
2294 cairo_image_span_renderer_t *r = abstract_renderer;
2297 return CAIRO_STATUS_SUCCESS;
2299 if (likely(h == 1)) {
2301 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2303 int len = spans[1].x - spans[0].x;
2304 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2306 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2311 } while (--num_spans > 1);
2314 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2318 int len = spans[1].x - spans[0].x;
2319 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2321 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2328 } while (--num_spans > 1);
2331 return CAIRO_STATUS_SUCCESS;
/* xRGB32 surface-to-surface blend: for each covered span, either memcpy
 * the source row (full effective alpha — condition elided here) or lerp
 * each source pixel over the destination with lerp8x4. */
2334 static cairo_status_t
2335 _blit_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2336 const cairo_half_open_span_t *spans, unsigned num_spans)
2338 cairo_image_span_renderer_t *r = abstract_renderer;
2341 return CAIRO_STATUS_SUCCESS;
2343 if (likely(h == 1)) {
2344 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
2345 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
2347 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2349 uint32_t *s = (uint32_t*)src + spans[0].x;
2350 uint32_t *d = (uint32_t*)dst + spans[0].x;
2351 int len = spans[1].x - spans[0].x;
2356 memcpy(d, s, len*4);
2359 *d = lerp8x4 (*s, a, *d);
2365 } while (--num_spans > 1);
2368 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2372 uint32_t *s = (uint32_t *)(r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x * 4);
2373 uint32_t *d = (uint32_t *)(r->u.blit.data + yy*r->u.blit.stride + spans[0].x * 4);
2374 int len = spans[1].x - spans[0].x;
2379 memcpy(d, s, len * 4);
2382 *d = lerp8x4 (*s, a, *d);
2390 } while (--num_spans > 1);
2393 return CAIRO_STATUS_SUCCESS;
/* Generic in-place compositing of a span band: a single full-coverage
 * span is composited directly; otherwise the coverage values are written
 * into the single-row a8 scratch mask (r->mask, backed by r->buf) and
 * one pixman composite is issued over the whole band. */
2396 static cairo_status_t
2397 _inplace_spans (void *abstract_renderer,
2399 const cairo_half_open_span_t *spans,
2402 cairo_image_span_renderer_t *r = abstract_renderer;
2407 return CAIRO_STATUS_SUCCESS;
2409 if (num_spans == 2 && spans[0].coverage == 0xff) {
2410 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2411 spans[0].x + r->u.composite.src_x,
2412 y + r->u.composite.src_y,
2415 spans[1].x - spans[0].x, h);
2416 return CAIRO_STATUS_SUCCESS;
2419 mask = (uint8_t *)pixman_image_get_data (r->mask);
2422 int len = spans[1].x - spans[0].x;
2423 *mask++ = spans[0].coverage;
2425 memset (mask, spans[0].coverage, --len);
2430 } while (--num_spans > 1);
2432 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2433 x0 + r->u.composite.src_x,
2434 y + r->u.composite.src_y,
2439 return CAIRO_STATUS_SUCCESS;
/* In-place compositing for CAIRO_OPERATOR_SOURCE.  SOURCE is unbounded
 * within the mask, so partial coverage must both add the source and
 * erase the destination: with PIXMAN_OP_LERP_SRC available a single pass
 * suffices; otherwise accumulated runs are flushed as an OUT_REVERSE
 * (erase by mask) followed by ADD (deposit source*mask).  Runs of 0x00
 * and 0xff coverage trigger a flush of the pending accumulated segment
 * [x0, spans[0].x), and a final flush happens after the loop.
 * NOTE(review): the bookkeeping that resets x0/m between segments is
 * elided from this excerpt; tokens below are preserved verbatim. */
2442 static cairo_status_t
2443 _inplace_src_spans (void *abstract_renderer,
2445 const cairo_half_open_span_t *spans,
2448 cairo_image_span_renderer_t *r = abstract_renderer;
2453 return CAIRO_STATUS_SUCCESS;
2458 int len = spans[1].x - spans[0].x;
2459 if (spans[0].coverage == 0xff) {
2460 if (spans[0].x != x0) {
2461 #if PIXMAN_HAS_OP_LERP
2462 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2463 r->src, r->mask, r->u.composite.dst,
2464 x0 + r->u.composite.src_x,
2465 y + r->u.composite.src_y,
2468 spans[0].x - x0, h);
2470 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2471 r->mask, NULL, r->u.composite.dst,
2475 spans[0].x - x0, h);
2476 pixman_image_composite32 (PIXMAN_OP_ADD,
2477 r->src, r->mask, r->u.composite.dst,
2478 x0 + r->u.composite.src_x,
2479 y + r->u.composite.src_y,
2482 spans[0].x - x0, h);
2486 pixman_image_composite32 (PIXMAN_OP_SRC,
2487 r->src, NULL, r->u.composite.dst,
2488 spans[0].x + r->u.composite.src_x,
2489 y + r->u.composite.src_y,
2492 spans[1].x - spans[0].x, h);
2496 } else if (spans[0].coverage == 0x0) {
2497 if (spans[0].x != x0) {
2498 #if PIXMAN_HAS_OP_LERP
2499 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2500 r->src, r->mask, r->u.composite.dst,
2501 x0 + r->u.composite.src_x,
2502 y + r->u.composite.src_y,
2505 spans[0].x - x0, h);
2507 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2508 r->mask, NULL, r->u.composite.dst,
2512 spans[0].x - x0, h);
2513 pixman_image_composite32 (PIXMAN_OP_ADD,
2514 r->src, r->mask, r->u.composite.dst,
2515 x0 + r->u.composite.src_x,
2516 y + r->u.composite.src_y,
2519 spans[0].x - x0, h);
2526 *m++ = spans[0].coverage;
2528 memset (m, spans[0].coverage, --len);
2533 } while (--num_spans > 1);
2535 if (spans[0].x != x0) {
2536 #if PIXMAN_HAS_OP_LERP
2537 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2538 r->src, r->mask, r->u.composite.dst,
2539 x0 + r->u.composite.src_x,
2540 y + r->u.composite.src_y,
2543 spans[0].x - x0, h);
2545 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2546 r->mask, NULL, r->u.composite.dst,
2550 spans[0].x - x0, h);
2551 pixman_image_composite32 (PIXMAN_OP_ADD,
2552 r->src, r->mask, r->u.composite.dst,
2553 x0 + r->u.composite.src_x,
2554 y + r->u.composite.src_y,
2557 spans[0].x - x0, h);
2561 return CAIRO_STATUS_SUCCESS;
/* Choose an in-place (no intermediate surface) renderer for antialiased
 * rendering with a solid mask.  Preference order visible here:
 *  1. direct lerp fills (_fill_{a8,xrgb32}_lerp[_opaque]_spans) when the
 *     operation reduces to a single pixel value, picking the opaque or
 *     general variant by the mask opacity r->op;
 *  2. direct surface lerp blit (_blit_xrgb32_lerp_spans) for effective
 *     SOURCE copies from a same-format image at an in-bounds integer
 *     translation;
 *  3. pixman compositing through a single-row scratch a8 mask
 *     (_inplace_spans / _inplace_src_spans), with the row width rounded
 *     up to a multiple of 4 and bounded by sizeof(r->buf).
 * Returns UNSUPPORTED when no in-place strategy fits, NO_MEMORY on
 * pixman image-creation failure.
 * NOTE(review): at line 2671 the code reads r->composite->unbounded.y —
 * this excerpt does not show r->composite being assigned in this
 * function (the caller's span_renderer_init sets it); confirm ordering
 * against the full source. */
2564 static cairo_int_status_t
2565 inplace_renderer_init (cairo_image_span_renderer_t *r,
2566 const cairo_composite_rectangles_t *composite,
2567 cairo_antialias_t antialias,
2568 cairo_bool_t needs_clip)
2570 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2572 if (composite->mask_pattern.base.type != CAIRO_PATTERN_TYPE_SOLID)
2573 return CAIRO_INT_STATUS_UNSUPPORTED;
2575 r->base.render_rows = NULL;
2576 r->op = composite->mask_pattern.solid.color.alpha_short >> 8;
2578 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2579 const cairo_color_t *color;
2581 color = &composite->source_pattern.solid.color;
2582 if (composite->op == CAIRO_OPERATOR_CLEAR)
2583 color = CAIRO_COLOR_TRANSPARENT;
2585 if (fill_reduces_to_source (composite->op, color, dst) &&
2586 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2587 /* Use plain C for the fill operations as the span length is
2588 * typically small, too small to payback the startup overheads of
2591 if (r->op == 0xff) {
2592 switch (dst->format) {
2593 case CAIRO_FORMAT_A8:
2594 r->base.render_rows = _fill_a8_lerp_opaque_spans;
2596 case CAIRO_FORMAT_RGB24:
2597 case CAIRO_FORMAT_ARGB32:
2598 r->base.render_rows = _fill_xrgb32_lerp_opaque_spans;
2600 case CAIRO_FORMAT_A1:
2601 case CAIRO_FORMAT_RGB16_565:
2602 case CAIRO_FORMAT_RGB30:
2603 case CAIRO_FORMAT_INVALID:
2607 switch (dst->format) {
2608 case CAIRO_FORMAT_A8:
2609 r->base.render_rows = _fill_a8_lerp_spans;
2611 case CAIRO_FORMAT_RGB24:
2612 case CAIRO_FORMAT_ARGB32:
2613 r->base.render_rows = _fill_xrgb32_lerp_spans;
2615 case CAIRO_FORMAT_A1:
2616 case CAIRO_FORMAT_RGB16_565:
2617 case CAIRO_FORMAT_RGB30:
2618 case CAIRO_FORMAT_INVALID:
2622 r->u.fill.data = dst->data;
2623 r->u.fill.stride = dst->stride;
2625 } else if ((dst->format == CAIRO_FORMAT_ARGB32 || dst->format == CAIRO_FORMAT_RGB24) &&
2626 (composite->op == CAIRO_OPERATOR_SOURCE ||
2627 (composite->op == CAIRO_OPERATOR_OVER &&
2628 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2629 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2630 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2631 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2633 cairo_image_surface_t *src =
2634 to_image_surface(composite->source_pattern.surface.surface);
2637 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2639 composite->bounded.x + tx >= 0 &&
2640 composite->bounded.y + ty >= 0 &&
2641 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2642 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2644 assert(PIXMAN_FORMAT_BPP(dst->pixman_format) == 32);
2645 r->u.blit.stride = dst->stride;
2646 r->u.blit.data = dst->data;
2647 r->u.blit.src_stride = src->stride;
2648 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2649 r->base.render_rows = _blit_xrgb32_lerp_spans;
2652 if (r->base.render_rows == NULL) {
2654 const cairo_pattern_t *src = &composite->source_pattern.base;
2657 return CAIRO_INT_STATUS_UNSUPPORTED;
2659 if (composite->is_bounded == 0)
2660 return CAIRO_INT_STATUS_UNSUPPORTED;
2662 width = (composite->bounded.width + 3) & ~3;
2663 r->base.render_rows = _inplace_spans;
2664 if (dst->base.is_clear &&
2665 (composite->op == CAIRO_OPERATOR_SOURCE ||
2666 composite->op == CAIRO_OPERATOR_OVER ||
2667 composite->op == CAIRO_OPERATOR_ADD)) {
2668 r->op = PIXMAN_OP_SRC;
2669 } else if (composite->op == CAIRO_OPERATOR_SOURCE) {
2670 r->base.render_rows = _inplace_src_spans;
2671 r->u.composite.mask_y = r->composite->unbounded.y;
2672 width = (composite->unbounded.width + 3) & ~3;
2673 } else if (composite->op == CAIRO_OPERATOR_CLEAR) {
2674 r->op = PIXMAN_OP_OUT_REVERSE;
2677 r->op = _pixman_operator (composite->op);
2680 if (width > sizeof (r->buf))
2681 return CAIRO_INT_STATUS_UNSUPPORTED;
2683 r->src = _pixman_image_for_pattern (dst, src, FALSE,
2684 &composite->bounded,
2685 &composite->source_sample_area,
2686 &r->u.composite.src_x, &r->u.composite.src_y);
2687 if (unlikely (r->src == NULL))
2688 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2690 /* Create an effectively unbounded mask by repeating the single line */
2691 r->mask = pixman_image_create_bits (PIXMAN_a8,
2692 width, composite->unbounded.height,
2693 (uint32_t *)r->buf, 0);
2694 if (unlikely (r->mask == NULL)) {
2695 pixman_image_unref (r->src);
2696 return _cairo_error(CAIRO_STATUS_NO_MEMORY);
2699 r->u.composite.dst = dst->pixman_image;
2702 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
2704 return CAIRO_INT_STATUS_SUCCESS;
/* Set up the image span renderer for a composite operation.
 *
 * Tries the specialised mono and inplace renderers first; if neither
 * claims the request, falls back to the generic path which accumulates
 * coverage into an a8 mask that span_renderer_fini later composites
 * onto the destination.
 *
 * Returns CAIRO_STATUS_SUCCESS on success, CAIRO_INT_STATUS_UNSUPPORTED
 * to let a fallback compositor handle the request, or an error status
 * (e.g. CAIRO_STATUS_NO_MEMORY).
 *
 * NOTE(review): this excerpt is non-contiguous (several original lines
 * are not visible); comments below describe only the visible code. */
2707 static cairo_int_status_t
2708 span_renderer_init (cairo_abstract_span_renderer_t *_r,
2709 const cairo_composite_rectangles_t *composite,
2710 cairo_antialias_t antialias,
2711 cairo_bool_t needs_clip)
2713 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
2714 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2715 const cairo_pattern_t *source = &composite->source_pattern.base;
2716 cairo_operator_t op = composite->op;
2717 cairo_int_status_t status;
2719 TRACE ((stderr, "%s: antialias=%d, needs_clip=%d\n", __FUNCTION__,
2720 antialias, needs_clip));
2723 return CAIRO_INT_STATUS_UNSUPPORTED;
2725 r->composite = composite;
2728 r->base.finish = NULL;
/* Fast path 1: 1bpp/mono rasterisation. Any status other than
 * UNSUPPORTED (success or hard error) is final. */
2730 status = mono_renderer_init (r, composite, antialias, needs_clip);
2731 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
/* Fast path 2: write spans directly into the destination pixels. */
2734 status = inplace_renderer_init (r, composite, antialias, needs_clip);
2735 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
/* Generic path: translate the cairo operator to a pixman operator,
 * substituting source/operator pairs where pixman lacks a direct
 * equivalent. */
2740 if (op == CAIRO_OPERATOR_CLEAR) {
2741 #if PIXMAN_HAS_OP_LERP
2742 op = PIXMAN_OP_LERP_CLEAR;
/* Without LERP, CLEAR is emulated as OUT_REVERSE with an opaque
 * (white) source. */
2744 source = &_cairo_pattern_white.base;
2745 op = PIXMAN_OP_OUT_REVERSE;
2747 } else if (dst->base.is_clear &&
2748 (op == CAIRO_OPERATOR_SOURCE ||
2749 op == CAIRO_OPERATOR_OVER ||
2750 op == CAIRO_OPERATOR_ADD)) {
/* On a cleared destination SOURCE/OVER/ADD coincide; the operator
 * chosen for this case is on a line not visible in this excerpt. */
2752 } else if (op == CAIRO_OPERATOR_SOURCE) {
2753 #if PIXMAN_HAS_OP_LERP
2754 op = PIXMAN_OP_LERP_SRC;
/* Unbounded SOURCE cannot be expressed without LERP support. */
2756 return CAIRO_INT_STATUS_UNSUPPORTED;
2759 op = _pixman_operator (op);
/* Realise the source pattern as a pixman image sampled over the
 * unbounded extents; src_x/src_y receive the sampling offset. */
2763 r->src = _pixman_image_for_pattern (dst, source, FALSE,
2764 &composite->unbounded,
2765 &composite->source_sample_area,
2766 &r->u.mask.src_x, &r->u.mask.src_y);
2767 if (unlikely (r->src == NULL))
2768 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* A solid mask reduces to a constant opacity applied per span;
 * anything else needs a real pixman mask image. */
2771 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2772 r->opacity = composite->mask_pattern.solid.color.alpha;
2774 pixman_image_t *mask;
2777 mask = _pixman_image_for_pattern (dst,
2778 &composite->mask_pattern.base,
2780 &composite->unbounded,
2781 &composite->mask_sample_area,
2783 if (unlikely (mask == NULL))
2784 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2786 /* XXX Component-alpha? */
/* For an alpha-only destination with an opaque source, the mask alone
 * determines the result, so the mask can stand in for the source. */
2787 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
2788 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
2790 pixman_image_unref (r->src);
2792 r->u.mask.src_x = mask_x;
2793 r->u.mask.src_y = mask_y;
/* Otherwise a separate mask pattern is not handled on this path. */
2798 pixman_image_unref (mask);
2799 return CAIRO_INT_STATUS_UNSUPPORTED;
/* Accumulate spans into an a8 mask covering the unbounded extents;
 * the stride is rounded up to a multiple of 4 bytes. */
2803 r->u.mask.extents = composite->unbounded;
2804 r->u.mask.stride = (r->u.mask.extents.width + 3) & ~3;
2805 if (r->u.mask.extents.height * r->u.mask.stride > (int)sizeof (r->buf)) {
/* Mask too large for the embedded buffer: let pixman allocate the
 * bits itself (presumably a zero stride on the non-visible argument
 * line lets pixman pick one — confirm against the full source). */
2806 r->mask = pixman_image_create_bits (PIXMAN_a8,
2807 r->u.mask.extents.width,
2808 r->u.mask.extents.height,
2811 r->base.render_rows = _cairo_image_spans;
2812 r->base.finish = NULL;
/* Small mask: wrap the renderer's on-stack buffer; the *_and_zero
 * render/finish pair also takes care of clearing the buffer rows. */
2814 r->mask = pixman_image_create_bits (PIXMAN_a8,
2815 r->u.mask.extents.width,
2816 r->u.mask.extents.height,
2817 (uint32_t *)r->buf, r->u.mask.stride);
2819 r->base.render_rows = _cairo_image_spans_and_zero;
2820 r->base.finish = _cairo_image_finish_spans_and_zero;
2822 if (unlikely (r->mask == NULL))
2823 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2825 r->u.mask.data = (uint8_t *) pixman_image_get_data (r->mask);
2826 r->u.mask.stride = pixman_image_get_stride (r->mask);
/* Turn extents.height into the exclusive bottom coordinate (y1),
 * i.e. extents now stores y0 and y1 rather than y and height. */
2828 r->u.mask.extents.height += r->u.mask.extents.y;
2829 return CAIRO_STATUS_SUCCESS;
/* Tear down a span-rendering pass started by span_renderer_init.
 *
 * On the generic mask path, composites the accumulated a8 mask onto
 * the destination over the unbounded extents, then releases the
 * source and mask pixman images.
 *
 * NOTE(review): this excerpt is non-contiguous; guard conditions on
 * the non-visible lines (e.g. around the unref calls) are not shown. */
2833 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
2834 cairo_int_status_t status)
2836 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
2838 TRACE ((stderr, "%s\n", __FUNCTION__));
/* r->bpp == 0 appears to identify the generic mask path (the inplace
 * renderer sets bpp from the destination format) — confirm against
 * the full source. The final blit only happens on success. */
2840 if (likely (status == CAIRO_INT_STATUS_SUCCESS && r->bpp == 0)) {
2841 const cairo_composite_rectangles_t *composite = r->composite;
/* Blit the whole unbounded area in one pixman composite; src_x/src_y
 * offset the source sampling relative to the destination origin. */
2846 pixman_image_composite32 (r->op, r->src, r->mask,
2847 to_pixman_image (composite->surface),
2848 composite->unbounded.x + r->u.mask.src_x,
2849 composite->unbounded.y + r->u.mask.src_y,
2851 composite->unbounded.x,
2852 composite->unbounded.y,
2853 composite->unbounded.width,
2854 composite->unbounded.height);
/* Drop the references taken in span_renderer_init. */
2858 pixman_image_unref (r->src);
2860 pixman_image_unref (r->mask);
2864 const cairo_compositor_t *
2865 _cairo_image_spans_compositor_get (void)
2867 static cairo_spans_compositor_t spans;
2868 static cairo_compositor_t shape;
2870 if (spans.base.delegate == NULL) {
2871 _cairo_shape_mask_compositor_init (&shape,
2872 _cairo_image_traps_compositor_get());
2873 shape.glyphs = NULL;
2875 _cairo_spans_compositor_init (&spans, &shape);
2878 #if PIXMAN_HAS_OP_LERP
2879 spans.flags |= CAIRO_SPANS_COMPOSITOR_HAS_LERP;
2882 //spans.acquire = acquire;
2883 //spans.release = release;
2884 spans.fill_boxes = fill_boxes;
2885 spans.draw_image_boxes = draw_image_boxes;
2886 //spans.copy_boxes = copy_boxes;
2887 spans.pattern_to_surface = _cairo_image_source_create_for_pattern;
2888 //spans.check_composite_boxes = check_composite_boxes;
2889 spans.composite_boxes = composite_boxes;
2890 //spans.check_span_renderer = check_span_renderer;
2891 spans.renderer_init = span_renderer_init;
2892 spans.renderer_fini = span_renderer_fini;