1 /* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
2 /* cairo - a vector graphics library with display and print output
4 * Copyright © 2003 University of Southern California
5 * Copyright © 2009,2010,2011 Intel Corporation
7 * This library is free software; you can redistribute it and/or
8 * modify it either under the terms of the GNU Lesser General Public
9 * License version 2.1 as published by the Free Software Foundation
10 * (the "LGPL") or, at your option, under the terms of the Mozilla
11 * Public License Version 1.1 (the "MPL"). If you do not alter this
12 * notice, a recipient may use your version of this file under either
13 * the MPL or the LGPL.
15 * You should have received a copy of the LGPL along with this library
16 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
18 * You should have received a copy of the MPL along with this library
19 * in the file COPYING-MPL-1.1
21 * The contents of this file are subject to the Mozilla Public License
22 * Version 1.1 (the "License"); you may not use this file except in
23 * compliance with the License. You may obtain a copy of the License at
24 * http://www.mozilla.org/MPL/
26 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
27 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
28 * the specific language governing rights and limitations.
30 * The Original Code is the cairo graphics library.
32 * The Initial Developer of the Original Code is University of Southern
36 * Carl D. Worth <cworth@cworth.org>
37 * Chris Wilson <chris@chris-wilson.co.uk>
40 /* The primarily reason for keeping a traps-compositor around is
41 * for validating cairo-xlib (which currently also uses traps).
46 #include "cairo-image-surface-private.h"
48 #include "cairo-compositor-private.h"
49 #include "cairo-spans-compositor-private.h"
51 #include "cairo-region-private.h"
52 #include "cairo-traps-private.h"
53 #include "cairo-tristrip-private.h"
55 #if CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
56 #include "cairo-thread-local-private.h"
59 static pixman_image_t *
60 to_pixman_image (cairo_surface_t *s)
62 return ((cairo_image_surface_t *)s)->pixman_image;
65 static cairo_int_status_t
66 acquire (void *abstract_dst)
68 return CAIRO_STATUS_SUCCESS;
71 static cairo_int_status_t
72 release (void *abstract_dst)
74 return CAIRO_STATUS_SUCCESS;
77 static cairo_int_status_t
78 set_clip_region (void *_surface,
79 cairo_region_t *region)
81 cairo_image_surface_t *surface = _surface;
82 pixman_region32_t *rgn = region ? ®ion->rgn : NULL;
84 if (! pixman_image_set_clip_region32 (surface->pixman_image, rgn))
85 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
87 return CAIRO_STATUS_SUCCESS;
/* Blit already-rendered image data into dst over a set of pixel-aligned
 * boxes.  Same-format copies go through pixman_blt; otherwise each box
 * falls back to a PIXMAN_OP_SRC composite.
 * NOTE(review): several lines of this function (braces, blt/composite
 * coordinate arguments) are missing from this extraction — comments
 * describe only the visible code. */
90 static cairo_int_status_t
91 draw_image_boxes (void *_dst,
92 cairo_image_surface_t *image,
96 cairo_image_surface_t *dst = _dst;
97 struct _cairo_boxes_chunk *chunk;
100 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
/* Boxes are stored as a linked list of chunks. */
102 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
103 for (i = 0; i < chunk->count; i++) {
104 cairo_box_t *b = &chunk->base[i];
/* Boxes are expected to be pixel-aligned; integer parts only. */
105 int x = _cairo_fixed_integer_part (b->p1.x);
106 int y = _cairo_fixed_integer_part (b->p1.y);
107 int w = _cairo_fixed_integer_part (b->p2.x) - x;
108 int h = _cairo_fixed_integer_part (b->p2.y) - y;
/* Fast path only when formats match and pixman_blt accepts the job. */
109 if (dst->pixman_format != image->pixman_format ||
110 ! pixman_blt ((uint32_t *)image->data, (uint32_t *)dst->data,
111 image->stride / sizeof (uint32_t),
112 dst->stride / sizeof (uint32_t),
113 PIXMAN_FORMAT_BPP (image->pixman_format),
114 PIXMAN_FORMAT_BPP (dst->pixman_format),
119 pixman_image_composite32 (PIXMAN_OP_SRC,
120 image->pixman_image, NULL, dst->pixman_image,
128 return CAIRO_STATUS_SUCCESS;
131 static inline uint32_t
132 color_to_uint32 (const cairo_color_t *color)
135 (color->alpha_short >> 8 << 24) |
136 (color->red_short >> 8 << 16) |
137 (color->green_short & 0xff00) |
138 (color->blue_short >> 8);
141 static inline cairo_bool_t
142 color_to_pixel (const cairo_color_t *color,
143 pixman_format_code_t format,
148 if (!(format == PIXMAN_a8r8g8b8 ||
149 format == PIXMAN_x8r8g8b8 ||
150 format == PIXMAN_a8b8g8r8 ||
151 format == PIXMAN_x8b8g8r8 ||
152 format == PIXMAN_b8g8r8a8 ||
153 format == PIXMAN_b8g8r8x8 ||
154 format == PIXMAN_r5g6b5 ||
155 format == PIXMAN_b5g6r5 ||
156 format == PIXMAN_a8))
161 c = color_to_uint32 (color);
163 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_ABGR) {
164 c = ((c & 0xff000000) >> 0) |
165 ((c & 0x00ff0000) >> 16) |
166 ((c & 0x0000ff00) >> 0) |
167 ((c & 0x000000ff) << 16);
170 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_BGRA) {
171 c = ((c & 0xff000000) >> 24) |
172 ((c & 0x00ff0000) >> 8) |
173 ((c & 0x0000ff00) << 8) |
174 ((c & 0x000000ff) << 24);
177 if (format == PIXMAN_a8) {
179 } else if (format == PIXMAN_r5g6b5 || format == PIXMAN_b5g6r5) {
180 c = ((((c) >> 3) & 0x001f) |
181 (((c) >> 5) & 0x07e0) |
182 (((c) >> 8) & 0xf800));
190 _pixman_operator (cairo_operator_t op)
193 case CAIRO_OPERATOR_CLEAR:
194 return PIXMAN_OP_CLEAR;
196 case CAIRO_OPERATOR_SOURCE:
197 return PIXMAN_OP_SRC;
198 case CAIRO_OPERATOR_OVER:
199 return PIXMAN_OP_OVER;
200 case CAIRO_OPERATOR_IN:
202 case CAIRO_OPERATOR_OUT:
203 return PIXMAN_OP_OUT;
204 case CAIRO_OPERATOR_ATOP:
205 return PIXMAN_OP_ATOP;
207 case CAIRO_OPERATOR_DEST:
208 return PIXMAN_OP_DST;
209 case CAIRO_OPERATOR_DEST_OVER:
210 return PIXMAN_OP_OVER_REVERSE;
211 case CAIRO_OPERATOR_DEST_IN:
212 return PIXMAN_OP_IN_REVERSE;
213 case CAIRO_OPERATOR_DEST_OUT:
214 return PIXMAN_OP_OUT_REVERSE;
215 case CAIRO_OPERATOR_DEST_ATOP:
216 return PIXMAN_OP_ATOP_REVERSE;
218 case CAIRO_OPERATOR_XOR:
219 return PIXMAN_OP_XOR;
220 case CAIRO_OPERATOR_ADD:
221 return PIXMAN_OP_ADD;
222 case CAIRO_OPERATOR_SATURATE:
223 return PIXMAN_OP_SATURATE;
225 case CAIRO_OPERATOR_MULTIPLY:
226 return PIXMAN_OP_MULTIPLY;
227 case CAIRO_OPERATOR_SCREEN:
228 return PIXMAN_OP_SCREEN;
229 case CAIRO_OPERATOR_OVERLAY:
230 return PIXMAN_OP_OVERLAY;
231 case CAIRO_OPERATOR_DARKEN:
232 return PIXMAN_OP_DARKEN;
233 case CAIRO_OPERATOR_LIGHTEN:
234 return PIXMAN_OP_LIGHTEN;
235 case CAIRO_OPERATOR_COLOR_DODGE:
236 return PIXMAN_OP_COLOR_DODGE;
237 case CAIRO_OPERATOR_COLOR_BURN:
238 return PIXMAN_OP_COLOR_BURN;
239 case CAIRO_OPERATOR_HARD_LIGHT:
240 return PIXMAN_OP_HARD_LIGHT;
241 case CAIRO_OPERATOR_SOFT_LIGHT:
242 return PIXMAN_OP_SOFT_LIGHT;
243 case CAIRO_OPERATOR_DIFFERENCE:
244 return PIXMAN_OP_DIFFERENCE;
245 case CAIRO_OPERATOR_EXCLUSION:
246 return PIXMAN_OP_EXCLUSION;
247 case CAIRO_OPERATOR_HSL_HUE:
248 return PIXMAN_OP_HSL_HUE;
249 case CAIRO_OPERATOR_HSL_SATURATION:
250 return PIXMAN_OP_HSL_SATURATION;
251 case CAIRO_OPERATOR_HSL_COLOR:
252 return PIXMAN_OP_HSL_COLOR;
253 case CAIRO_OPERATOR_HSL_LUMINOSITY:
254 return PIXMAN_OP_HSL_LUMINOSITY;
258 return PIXMAN_OP_OVER;
263 fill_reduces_to_source (cairo_operator_t op,
264 const cairo_color_t *color,
265 cairo_image_surface_t *dst)
267 if (op == CAIRO_OPERATOR_SOURCE || op == CAIRO_OPERATOR_CLEAR)
269 if (op == CAIRO_OPERATOR_OVER && CAIRO_COLOR_IS_OPAQUE (color))
271 if (dst->base.is_clear)
272 return op == CAIRO_OPERATOR_OVER || op == CAIRO_OPERATOR_ADD;
/* Fill a list of integer rectangles with a solid color: a direct
 * pixman_fill when the operation reduces to SOURCE and the color can be
 * encoded as a raw pixel, otherwise a per-rectangle composite with a
 * solid source image.
 * NOTE(review): some lines (parameter lines, braces, the pixel argument
 * to pixman_fill, composite source offsets) are missing from this
 * extraction — comments describe only the visible code. */
277 static cairo_int_status_t
278 fill_rectangles (void *_dst,
280 const cairo_color_t *color,
281 cairo_rectangle_int_t *rects,
284 cairo_image_surface_t *dst = _dst;
288 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Fast path: raw pixel fill. */
290 if (fill_reduces_to_source (op, color, dst) &&
291 color_to_pixel (color, dst->pixman_format, &pixel))
293 for (i = 0; i < num_rects; i++) {
294 pixman_fill ((uint32_t *) dst->data, dst->stride / sizeof (uint32_t),
295 PIXMAN_FORMAT_BPP (dst->pixman_format),
296 rects[i].x, rects[i].y,
297 rects[i].width, rects[i].height,
/* Slow path: composite a solid-color source per rectangle. */
303 pixman_image_t *src = _pixman_image_for_color (color);
305 op = _pixman_operator (op);
306 for (i = 0; i < num_rects; i++) {
307 pixman_image_composite32 (op,
308 src, NULL, dst->pixman_image,
311 rects[i].x, rects[i].y,
312 rects[i].width, rects[i].height);
315 pixman_image_unref (src);
318 return CAIRO_STATUS_SUCCESS;
/* Like fill_rectangles, but for a cairo_boxes_t list of pixel-aligned
 * fixed-point boxes: pixman_fill fast path when the op reduces to
 * SOURCE, composite fallback otherwise.
 * NOTE(review): lines (op parameter, braces, pixel/coordinate arguments)
 * are missing from this extraction — comments describe visible code only. */
321 static cairo_int_status_t
322 fill_boxes (void *_dst,
324 const cairo_color_t *color,
325 cairo_boxes_t *boxes)
327 cairo_image_surface_t *dst = _dst;
328 struct _cairo_boxes_chunk *chunk;
332 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
/* Fast path: raw pixel fill per box. */
334 if (fill_reduces_to_source (op, color, dst) &&
335 color_to_pixel (color, dst->pixman_format, &pixel))
337 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
338 for (i = 0; i < chunk->count; i++) {
339 int x = _cairo_fixed_integer_part (chunk->base[i].p1.x);
340 int y = _cairo_fixed_integer_part (chunk->base[i].p1.y);
341 int w = _cairo_fixed_integer_part (chunk->base[i].p2.x) - x;
342 int h = _cairo_fixed_integer_part (chunk->base[i].p2.y) - y;
343 pixman_fill ((uint32_t *) dst->data,
344 dst->stride / sizeof (uint32_t),
345 PIXMAN_FORMAT_BPP (dst->pixman_format),
/* Slow path: composite a solid-color source per box. */
352 pixman_image_t *src = _pixman_image_for_color (color);
354 op = _pixman_operator (op);
355 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
356 for (i = 0; i < chunk->count; i++) {
357 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
358 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
359 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
360 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
361 pixman_image_composite32 (op,
362 src, NULL, dst->pixman_image,
370 pixman_image_unref (src);
373 return CAIRO_STATUS_SUCCESS;
/* General composite entry point: one pixman_image_composite32 call,
 * with or without a mask image.
 * NOTE(review): the op/coordinate parameter lines and the branch
 * structure between the two composite calls are missing from this
 * extraction — presumably the first call runs when a mask is present
 * and the second when it is NULL; verify against the full source. */
376 static cairo_int_status_t
377 composite (void *_dst,
379 cairo_surface_t *abstract_src,
380 cairo_surface_t *abstract_mask,
390 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
391 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
393 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Masked composite. */
396 pixman_image_composite32 (_pixman_operator (op),
397 src->pixman_image, mask->pixman_image, to_pixman_image (_dst),
/* Unmasked composite. */
403 pixman_image_composite32 (_pixman_operator (op),
404 src->pixman_image, NULL, to_pixman_image (_dst),
411 return CAIRO_STATUS_SUCCESS;
/* Linear interpolation: dst = src IN mask ADD dst OUT_REVERSE mask.
 * With PIXMAN_OP_LERP_SRC available this is a single pass; otherwise it
 * is emulated by punching the mask out of the destination and adding
 * the masked source on top.
 * NOTE(review): this function's name line is not visible in this
 * extraction; it is registered as compositor.lerp later in the file. */
414 static cairo_int_status_t
416 cairo_surface_t *abstract_src,
417 cairo_surface_t *abstract_mask,
427 cairo_image_surface_t *dst = _dst;
428 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
429 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
431 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Single-pass path when pixman provides LERP operators. */
433 #if PIXMAN_HAS_OP_LERP
434 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
435 src->pixman_image, mask->pixman_image, dst->pixman_image,
/* Two-pass emulation follows (the #else branch of the #if above). */
441 /* Punch the clip out of the destination */
442 TRACE ((stderr, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
444 mask->base.unique_id, mask->pixman_image,
445 dst->base.unique_id, dst->pixman_image));
446 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
447 mask->pixman_image, NULL, dst->pixman_image,
453 /* Now add the two results together */
454 TRACE ((stderr, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
456 src->base.unique_id, src->pixman_image,
457 mask->base.unique_id, mask->pixman_image,
458 dst->base.unique_id, dst->pixman_image));
459 pixman_image_composite32 (PIXMAN_OP_ADD,
460 src->pixman_image, mask->pixman_image, dst->pixman_image,
467 return CAIRO_STATUS_SUCCESS;
/* Composite src (optionally through mask) into dst over a list of
 * pixel-aligned boxes.  CLEAR/SOURCE are rewritten to pixman LERP
 * operators when available; CLEAR falls back to OUT_REVERSE with a
 * white source.
 * NOTE(review): parameter lines and several braces are missing from
 * this extraction — comments describe only the visible code. */
470 static cairo_int_status_t
471 composite_boxes (void *_dst,
473 cairo_surface_t *abstract_src,
474 cairo_surface_t *abstract_mask,
481 cairo_boxes_t *boxes,
482 const cairo_rectangle_int_t *extents)
484 pixman_image_t *dst = to_pixman_image (_dst);
485 pixman_image_t *src = ((cairo_image_source_t *)abstract_src)->pixman_image;
486 pixman_image_t *mask = abstract_mask ? ((cairo_image_source_t *)abstract_mask)->pixman_image : NULL;
487 pixman_image_t *free_src = NULL;
488 struct _cairo_boxes_chunk *chunk;
491 /* XXX consider using a region? saves multiple prepare-composite */
492 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
/* On an already-clear destination SOURCE/OVER/ADD degenerate. */
494 if (((cairo_surface_t *)_dst)->is_clear &&
495 (op == CAIRO_OPERATOR_SOURCE ||
496 op == CAIRO_OPERATOR_OVER ||
497 op == CAIRO_OPERATOR_ADD)) {
500 if (op == CAIRO_OPERATOR_CLEAR) {
501 #if PIXMAN_HAS_OP_LERP
502 op = PIXMAN_OP_LERP_CLEAR;
/* No LERP support: emulate CLEAR with white OUT_REVERSE dst. */
504 free_src = src = _pixman_image_for_color (CAIRO_COLOR_WHITE);
505 op = PIXMAN_OP_OUT_REVERSE;
507 } else if (op == CAIRO_OPERATOR_SOURCE) {
508 #if PIXMAN_HAS_OP_LERP
509 op = PIXMAN_OP_LERP_SRC;
/* Bounded SOURCE without LERP cannot be expressed here. */
511 return CAIRO_INT_STATUS_UNSUPPORTED;
514 op = _pixman_operator (op);
517 op = _pixman_operator (op);
520 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
521 for (i = 0; i < chunk->count; i++) {
522 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
523 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
524 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
525 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
527 pixman_image_composite32 (op, src, mask, dst,
528 x1 + src_x, y1 + src_y,
529 x1 + mask_x, y1 + mask_y,
530 x1 + dst_x, y1 + dst_y,
/* Drop the temporary white source, if one was created. */
536 pixman_image_unref (free_src);
538 return CAIRO_STATUS_SUCCESS;
/* Bounds of pixman's 16.16 fixed-point coordinate space, expressed as
 * cairo fixed-point values; coordinates outside must be reprojected. */
541 #define CAIRO_FIXED_16_16_MIN _cairo_fixed_from_int (-32768)
542 #define CAIRO_FIXED_16_16_MAX _cairo_fixed_from_int (32767)
545 line_exceeds_16_16 (const cairo_line_t *line)
548 line->p1.x <= CAIRO_FIXED_16_16_MIN ||
549 line->p1.x >= CAIRO_FIXED_16_16_MAX ||
551 line->p2.x <= CAIRO_FIXED_16_16_MIN ||
552 line->p2.x >= CAIRO_FIXED_16_16_MAX ||
554 line->p1.y <= CAIRO_FIXED_16_16_MIN ||
555 line->p1.y >= CAIRO_FIXED_16_16_MAX ||
557 line->p2.y <= CAIRO_FIXED_16_16_MIN ||
558 line->p2.y >= CAIRO_FIXED_16_16_MAX;
562 project_line_x_onto_16_16 (const cairo_line_t *line,
564 cairo_fixed_t bottom,
565 pixman_line_fixed_t *out)
567 /* XXX use fixed-point arithmetic? */
568 cairo_point_double_t p1, p2;
571 p1.x = _cairo_fixed_to_double (line->p1.x);
572 p1.y = _cairo_fixed_to_double (line->p1.y);
574 p2.x = _cairo_fixed_to_double (line->p2.x);
575 p2.y = _cairo_fixed_to_double (line->p2.y);
577 m = (p2.x - p1.x) / (p2.y - p1.y);
578 out->p1.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (top - line->p1.y));
579 out->p2.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (bottom - line->p1.y));
/* Rasterize each trapezoid of traps into image (typically an a8/a1
 * mask), offset by (-dst_x, -dst_y).  Coordinates that do not fit in
 * pixman's 16.16 range are reprojected first.
 * NOTE(review): the return-type line and some braces are missing from
 * this extraction. */
583 _pixman_image_add_traps (pixman_image_t *image,
584 int dst_x, int dst_y,
585 cairo_traps_t *traps)
587 cairo_trapezoid_t *t = traps->traps;
588 int num_traps = traps->num_traps;
589 while (num_traps--) {
590 pixman_trapezoid_t trap;
592 /* top/bottom will be clamped to surface bounds */
593 trap.top = _cairo_fixed_to_16_16 (t->top);
594 trap.bottom = _cairo_fixed_to_16_16 (t->bottom);
596 /* However, all the other coordinates will have been left untouched so
597 * as not to introduce numerical error. Recompute them if they
598 * exceed the 16.16 limits.
600 if (unlikely (line_exceeds_16_16 (&t->left))) {
601 project_line_x_onto_16_16 (&t->left, t->top, t->bottom, &trap.left);
602 trap.left.p1.y = trap.top;
603 trap.left.p2.y = trap.bottom;
/* In-range left edge: convert directly. */
605 trap.left.p1.x = _cairo_fixed_to_16_16 (t->left.p1.x);
606 trap.left.p1.y = _cairo_fixed_to_16_16 (t->left.p1.y);
607 trap.left.p2.x = _cairo_fixed_to_16_16 (t->left.p2.x);
608 trap.left.p2.y = _cairo_fixed_to_16_16 (t->left.p2.y);
611 if (unlikely (line_exceeds_16_16 (&t->right))) {
612 project_line_x_onto_16_16 (&t->right, t->top, t->bottom, &trap.right);
613 trap.right.p1.y = trap.top;
614 trap.right.p2.y = trap.bottom;
/* In-range right edge: convert directly. */
616 trap.right.p1.x = _cairo_fixed_to_16_16 (t->right.p1.x);
617 trap.right.p1.y = _cairo_fixed_to_16_16 (t->right.p1.y);
618 trap.right.p2.x = _cairo_fixed_to_16_16 (t->right.p2.x);
619 trap.right.p2.y = _cairo_fixed_to_16_16 (t->right.p2.y);
622 pixman_rasterize_trapezoid (image, &trap, -dst_x, -dst_y);
/* Composite src into dst through a trapezoid mask.  When dst already is
 * an a1/a8 mask of the right format and the op permits, the traps are
 * rasterized directly into dst; otherwise a temporary mask image is
 * created, filled and composited.
 * NOTE(review): parameter lines (op, src_x/src_y, dst_x/dst_y) and some
 * braces are missing from this extraction. */
627 static cairo_int_status_t
628 composite_traps (void *_dst,
630 cairo_surface_t *abstract_src,
635 const cairo_rectangle_int_t *extents,
636 cairo_antialias_t antialias,
637 cairo_traps_t *traps)
639 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
640 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
641 pixman_image_t *mask;
642 pixman_format_code_t format;
644 TRACE ((stderr, "%s\n", __FUNCTION__));
646 /* Special case adding trapezoids onto a mask surface; we want to avoid
647 * creating an intermediate temporary mask unnecessarily.
649 * We make the assumption here that the portion of the trapezoids
650 * contained within the surface is bounded by [dst_x,dst_y,width,height];
651 * the Cairo core code passes bounds based on the trapezoid extents.
653 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
654 if (dst->pixman_format == format &&
655 (abstract_src == NULL ||
656 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
658 _pixman_image_add_traps (dst->pixman_image, dst_x, dst_y, traps);
659 return CAIRO_STATUS_SUCCESS;
/* General path: rasterize into a temporary mask, then composite. */
662 mask = pixman_image_create_bits (format,
663 extents->width, extents->height,
665 if (unlikely (mask == NULL))
666 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
668 _pixman_image_add_traps (mask, extents->x, extents->y, traps);
669 pixman_image_composite32 (_pixman_operator (op),
670 src->pixman_image, mask, dst->pixman_image,
671 extents->x + src_x, extents->y + src_y,
673 extents->x - dst_x, extents->y - dst_y,
674 extents->width, extents->height);
676 pixman_image_unref (mask);
678 return CAIRO_STATUS_SUCCESS;
682 set_point (pixman_point_fixed_t *p, cairo_point_t *c)
684 p->x = _cairo_fixed_to_16_16 (c->x);
685 p->y = _cairo_fixed_to_16_16 (c->y);
/* Add a triangle strip to image: after the first triangle, each new
 * point replaces the oldest of the previous three (rotating through the
 * p[] aliases into tri), forming the next triangle in the strip.
 * NOTE(review): return-type line and some braces are missing from this
 * extraction; strip->num_points >= 3 is checked by the caller. */
689 _pixman_image_add_tristrip (pixman_image_t *image,
690 int dst_x, int dst_y,
691 cairo_tristrip_t *strip)
693 pixman_triangle_t tri;
694 pixman_point_fixed_t *p[3] = {&tri.p1, &tri.p2, &tri.p3 };
697 set_point (p[0], &strip->points[0]);
698 set_point (p[1], &strip->points[1]);
699 set_point (p[2], &strip->points[2]);
700 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
701 for (n = 3; n < strip->num_points; n++) {
/* Overwrite the oldest vertex (n mod 3) to form the next triangle. */
702 set_point (p[n%3], &strip->points[n]);
703 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
/* Composite src into dst through a triangle-strip mask; mirrors
 * composite_traps, including the direct-into-mask fast path.
 * NOTE(review): parameter lines (op, offsets) and some braces are
 * missing from this extraction. */
707 static cairo_int_status_t
708 composite_tristrip (void *_dst,
710 cairo_surface_t *abstract_src,
715 const cairo_rectangle_int_t *extents,
716 cairo_antialias_t antialias,
717 cairo_tristrip_t *strip)
719 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
720 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
721 pixman_image_t *mask;
722 pixman_format_code_t format;
724 TRACE ((stderr, "%s\n", __FUNCTION__));
/* A strip needs at least one full triangle. */
726 if (strip->num_points < 3)
727 return CAIRO_STATUS_SUCCESS;
729 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
730 if (dst->pixman_format == format &&
731 (abstract_src == NULL ||
732 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
734 _pixman_image_add_tristrip (dst->pixman_image, dst_x, dst_y, strip);
735 return CAIRO_STATUS_SUCCESS;
/* General path: build a temporary mask, then composite through it. */
738 mask = pixman_image_create_bits (format,
739 extents->width, extents->height,
741 if (unlikely (mask == NULL))
742 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
744 _pixman_image_add_tristrip (mask, extents->x, extents->y, strip);
745 pixman_image_composite32 (_pixman_operator (op),
746 src->pixman_image, mask, dst->pixman_image,
747 extents->x + src_x, extents->y + src_y,
749 extents->x - dst_x, extents->y - dst_y,
750 extents->width, extents->height);
752 pixman_image_unref (mask);
754 return CAIRO_STATUS_SUCCESS;
757 static cairo_int_status_t
758 check_composite_glyphs (const cairo_composite_rectangles_t *extents,
759 cairo_scaled_font_t *scaled_font,
760 cairo_glyph_t *glyphs,
763 return CAIRO_STATUS_SUCCESS;
/* Glyph cache storage: one pixman glyph cache per thread when
 * per-thread caches are enabled, otherwise a single global cache
 * (protected elsewhere by _cairo_glyph_cache_mutex).
 * NOTE(review): the #else between the two declarations is missing from
 * this extraction. */
766 #if HAS_PIXMAN_GLYPHS
767 #if CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
768 CAIRO_DEFINE_THREAD_LOCAL (pixman_glyph_cache_t *, per_thread_glyph_cache);
770 static pixman_glyph_cache_t *global_glyph_cache;
/* Return the glyph cache for this context, creating it on first use:
 * the thread-local cache when enabled, the global one otherwise.  May
 * return NULL if pixman_glyph_cache_create fails.
 * NOTE(review): the #else/#endif lines and the final return are missing
 * from this extraction. */
773 static inline pixman_glyph_cache_t *
774 get_glyph_cache (void)
776 pixman_glyph_cache_t **glyph_cache = NULL;
778 #if CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
779 glyph_cache = CAIRO_GET_THREAD_LOCAL (per_thread_glyph_cache);
781 glyph_cache = &global_glyph_cache;
/* Lazily create the cache on first use. */
784 if (! (*glyph_cache))
785 *glyph_cache = pixman_glyph_cache_create ();
/* Evict a glyph from the pixman glyph cache when its scaled glyph is
 * destroyed, so the cache never holds stale surface references.  The
 * global-cache variant takes the glyph-cache mutex around the removal.
 * NOTE(review): return-type line, #else/#endif lines and a NULL-cache
 * guard appear to be missing from this extraction. */
791 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
792 cairo_scaled_glyph_t *scaled_glyph)
794 pixman_glyph_cache_t *glyph_cache = NULL;
796 #if CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
797 glyph_cache = *CAIRO_GET_THREAD_LOCAL (per_thread_glyph_cache);
799 glyph_cache = global_glyph_cache;
800 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
/* Glyphs are keyed by (font, glyph index). */
804 pixman_glyph_cache_remove (
805 glyph_cache, scaled_font,
806 (void *)_cairo_scaled_glyph_index (scaled_glyph));
809 #if ! CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
810 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
/* Composite a run of glyphs using pixman's glyph cache (fast path when
 * HAS_PIXMAN_GLYPHS): look each glyph up in the cache, inserting misses
 * from the scaled font's glyph surfaces, then hand the whole array to
 * pixman_composite_glyphs{,_no_mask}.  The global-cache build holds
 * _cairo_glyph_cache_mutex, dropping it around re-entrant font lookups.
 * NOTE(review): several lines (op/offset parameters, goto labels,
 * braces, cleanup/free of pglyphs, final return) are missing from this
 * extraction — comments describe only the visible code. */
814 static cairo_int_status_t
815 composite_glyphs (void *_dst,
817 cairo_surface_t *_src,
822 cairo_composite_glyphs_info_t *info)
824 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
825 pixman_glyph_cache_t *glyph_cache;
826 pixman_glyph_t pglyphs_stack[CAIRO_STACK_ARRAY_LENGTH (pixman_glyph_t)];
827 pixman_glyph_t *pglyphs = pglyphs_stack;
831 TRACE ((stderr, "%s\n", __FUNCTION__));
833 #if ! CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
834 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
837 glyph_cache = get_glyph_cache();
838 if (unlikely (glyph_cache == NULL)) {
839 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Freeze the cache across the whole run so entries cannot be evicted. */
843 pixman_glyph_cache_freeze (glyph_cache);
/* Spill to the heap when the run exceeds the stack array. */
845 if (info->num_glyphs > ARRAY_LENGTH (pglyphs_stack)) {
846 pglyphs = _cairo_malloc_ab (info->num_glyphs, sizeof (pixman_glyph_t));
847 if (unlikely (pglyphs == NULL)) {
848 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
854 for (i = 0; i < info->num_glyphs; i++) {
855 unsigned long index = info->glyphs[i].index;
858 glyph = pixman_glyph_cache_lookup (glyph_cache, info->font, (void *)index);
/* Cache miss: fetch the glyph surface from the scaled font. */
860 cairo_scaled_glyph_t *scaled_glyph;
861 cairo_image_surface_t *glyph_surface;
863 #if ! CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
864 /* This call can actually end up recursing, so we have to
865 * drop the mutex around it.
867 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
869 _cairo_scaled_font_freeze_cache (info->font);
871 status = _cairo_scaled_glyph_lookup (info->font, index,
872 CAIRO_SCALED_GLYPH_INFO_SURFACE,
875 #if ! CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
876 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
879 if (unlikely (status)) {
880 _cairo_scaled_font_thaw_cache (info->font);
/* Insert the freshly looked-up glyph into the pixman cache. */
884 glyph_surface = scaled_glyph->surface;
885 glyph = pixman_glyph_cache_insert (glyph_cache, info->font, (void *)index,
886 glyph_surface->base.device_transform.x0,
887 glyph_surface->base.device_transform.y0,
888 glyph_surface->pixman_image);
890 _cairo_scaled_font_thaw_cache (info->font);
892 if (unlikely (!glyph)) {
893 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Record the device-pixel position for this glyph. */
898 pg->x = _cairo_lround (info->glyphs[i].x);
899 pg->y = _cairo_lround (info->glyphs[i].y);
904 if (info->use_mask) {
905 pixman_format_code_t mask_format;
907 mask_format = pixman_glyph_get_mask_format (glyph_cache, pg - pglyphs, pglyphs);
909 pixman_composite_glyphs (_pixman_operator (op),
910 ((cairo_image_source_t *)_src)->pixman_image,
911 to_pixman_image (_dst),
913 info->extents.x + src_x, info->extents.y + src_y,
914 info->extents.x, info->extents.y,
915 info->extents.x - dst_x, info->extents.y - dst_y,
916 info->extents.width, info->extents.height,
917 glyph_cache, pg - pglyphs, pglyphs);
/* Unbounded variant: composite each glyph directly, no mask. */
919 pixman_composite_glyphs_no_mask (_pixman_operator (op),
920 ((cairo_image_source_t *)_src)->pixman_image,
921 to_pixman_image (_dst),
924 glyph_cache, pg - pglyphs, pglyphs);
928 pixman_glyph_cache_thaw (glyph_cache);
930 if (pglyphs != pglyphs_stack)
934 #if ! CAIRO_ENABLE_PER_THREAD_GLYPH_CACHE
935 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
/* Fallback build (no pixman glyph cache): nothing to evict, empty body.
 * NOTE(review): the return-type line and body braces are missing from
 * this extraction. */
942 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
943 cairo_scaled_glyph_t *scaled_glyph)
/* Fast path for a single glyph: composite the glyph surface directly as
 * the mask, skipping any intermediate mask allocation.
 * NOTE(review): parameter lines (op, offsets), x/y declarations and
 * some braces are missing from this extraction. */
947 static cairo_int_status_t
948 composite_one_glyph (void *_dst,
950 cairo_surface_t *_src,
955 cairo_composite_glyphs_info_t *info)
957 cairo_image_surface_t *glyph_surface;
958 cairo_scaled_glyph_t *scaled_glyph;
959 cairo_status_t status;
962 TRACE ((stderr, "%s\n", __FUNCTION__));
964 status = _cairo_scaled_glyph_lookup (info->font,
965 info->glyphs[0].index,
966 CAIRO_SCALED_GLYPH_INFO_SURFACE,
969 if (unlikely (status))
/* Empty glyphs (e.g. spaces) render nothing. */
972 glyph_surface = scaled_glyph->surface;
973 if (glyph_surface->width == 0 || glyph_surface->height == 0)
974 return CAIRO_INT_STATUS_NOTHING_TO_DO;
976 /* round glyph locations to the nearest pixel */
977 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
978 x = _cairo_lround (info->glyphs[0].x -
979 glyph_surface->base.device_transform.x0);
980 y = _cairo_lround (info->glyphs[0].y -
981 glyph_surface->base.device_transform.y0);
983 pixman_image_composite32 (_pixman_operator (op),
984 ((cairo_image_source_t *)_src)->pixman_image,
985 glyph_surface->pixman_image,
986 to_pixman_image (_dst),
987 x + src_x, y + src_y,
989 x - dst_x, y - dst_y,
990 glyph_surface->width,
991 glyph_surface->height);
993 return CAIRO_INT_STATUS_SUCCESS;
/* Accumulate all glyphs into a temporary mask (a8, promoted to
 * component-alpha a8r8g8b8 if a color glyph is met), then composite the
 * source through that mask in one call.  A small modular cache of
 * scaled glyphs avoids repeated font lookups within the run.
 * NOTE(review): numerous lines (op/offset parameters, x/y and buf
 * declarations, braces, early returns) are missing from this
 * extraction — comments describe only the visible code. */
996 static cairo_int_status_t
997 composite_glyphs_via_mask (void *_dst,
999 cairo_surface_t *_src,
1004 cairo_composite_glyphs_info_t *info)
1006 cairo_scaled_glyph_t *glyph_cache[64];
1007 pixman_image_t *white = _pixman_image_for_color (CAIRO_COLOR_WHITE);
1008 cairo_scaled_glyph_t *scaled_glyph;
1010 pixman_image_t *mask;
1011 pixman_format_code_t format;
1012 cairo_status_t status;
1015 TRACE ((stderr, "%s\n", __FUNCTION__));
1017 if (unlikely (white == NULL))
1018 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1020 /* XXX convert the glyphs to common formats a8/a8r8g8b8 to hit
1021 * optimised paths through pixman. Should we increase the bit
1022 * depth of the target surface, we should reconsider the appropriate
/* Prime the cache with the first glyph to pick the mask format. */
1026 status = _cairo_scaled_glyph_lookup (info->font,
1027 info->glyphs[0].index,
1028 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1030 if (unlikely (status)) {
1031 pixman_image_unref (white);
1035 memset (glyph_cache, 0, sizeof (glyph_cache));
1036 glyph_cache[info->glyphs[0].index % ARRAY_LENGTH (glyph_cache)] = scaled_glyph;
/* Stride: a8 rows rounded up to 4 bytes; 4 bytes/px for argb. */
1039 i = (info->extents.width + 3) & ~3;
1040 if (scaled_glyph->surface->base.content & CAIRO_CONTENT_COLOR) {
1041 format = PIXMAN_a8r8g8b8;
1042 i = info->extents.width * 4;
/* Use a stack buffer for small masks, else let pixman allocate. */
1045 if (i * info->extents.height > (int) sizeof (buf)) {
1046 mask = pixman_image_create_bits (format,
1047 info->extents.width,
1048 info->extents.height,
1051 memset (buf, 0, i * info->extents.height);
1052 mask = pixman_image_create_bits (format,
1053 info->extents.width,
1054 info->extents.height,
1055 (uint32_t *)buf, i);
1057 if (unlikely (mask == NULL)) {
1058 pixman_image_unref (white);
1059 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1062 status = CAIRO_STATUS_SUCCESS;
1063 for (i = 0; i < info->num_glyphs; i++) {
1064 unsigned long glyph_index = info->glyphs[i].index;
1065 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1066 cairo_image_surface_t *glyph_surface;
/* Direct-mapped cache: re-lookup only on miss. */
1069 scaled_glyph = glyph_cache[cache_index];
1070 if (scaled_glyph == NULL ||
1071 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1073 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1074 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1077 if (unlikely (status)) {
1078 pixman_image_unref (mask);
1079 pixman_image_unref (white);
1083 glyph_cache[cache_index] = scaled_glyph;
1086 glyph_surface = scaled_glyph->surface;
1087 if (glyph_surface->width && glyph_surface->height) {
/* First color glyph: promote the a8 mask to component alpha. */
1088 if (glyph_surface->base.content & CAIRO_CONTENT_COLOR &&
1089 format == PIXMAN_a8) {
1090 pixman_image_t *ca_mask;
1092 format = PIXMAN_a8r8g8b8;
1093 ca_mask = pixman_image_create_bits (format,
1094 info->extents.width,
1095 info->extents.height,
1097 if (unlikely (ca_mask == NULL)) {
1098 pixman_image_unref (mask);
1099 pixman_image_unref (white);
1100 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Copy the accumulated a8 coverage into the new argb mask. */
1103 pixman_image_composite32 (PIXMAN_OP_SRC,
1104 white, mask, ca_mask,
1108 info->extents.width,
1109 info->extents.height);
1110 pixman_image_unref (mask);
1114 /* round glyph locations to the nearest pixel */
1115 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1116 x = _cairo_lround (info->glyphs[i].x -
1117 glyph_surface->base.device_transform.x0);
1118 y = _cairo_lround (info->glyphs[i].y -
1119 glyph_surface->base.device_transform.y0);
/* Matching format: add directly; otherwise add through white. */
1121 if (glyph_surface->pixman_format == format) {
1122 pixman_image_composite32 (PIXMAN_OP_ADD,
1123 glyph_surface->pixman_image, NULL, mask,
1126 x - info->extents.x, y - info->extents.y,
1127 glyph_surface->width,
1128 glyph_surface->height);
1130 pixman_image_composite32 (PIXMAN_OP_ADD,
1131 white, glyph_surface->pixman_image, mask,
1134 x - info->extents.x, y - info->extents.y,
1135 glyph_surface->width,
1136 glyph_surface->height);
/* Color masks composite with component alpha enabled. */
1141 if (format == PIXMAN_a8r8g8b8)
1142 pixman_image_set_component_alpha (mask, TRUE);
1144 pixman_image_composite32 (_pixman_operator (op),
1145 ((cairo_image_source_t *)_src)->pixman_image,
1147 to_pixman_image (_dst),
1148 info->extents.x + src_x, info->extents.y + src_y,
1150 info->extents.x - dst_x, info->extents.y - dst_y,
1151 info->extents.width, info->extents.height);
1152 pixman_image_unref (mask);
1153 pixman_image_unref (white);
1155 return CAIRO_STATUS_SUCCESS;
/* Fallback glyph compositor (no pixman glyph cache): dispatch to the
 * one-glyph or via-mask helpers when appropriate, otherwise composite
 * each glyph surface individually, with a small modular cache of
 * scaled-glyph lookups.
 * NOTE(review): parameter lines (src_x..dst_y), x/y and i declarations,
 * some braces and the early-out gotos/return are missing from this
 * extraction. */
1158 static cairo_int_status_t
1159 composite_glyphs (void *_dst,
1160 cairo_operator_t op,
1161 cairo_surface_t *_src,
1166 cairo_composite_glyphs_info_t *info)
1168 cairo_scaled_glyph_t *glyph_cache[64];
1169 pixman_image_t *dst, *src;
1170 cairo_status_t status;
1173 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Hold the font cache across the whole run. */
1175 _cairo_scaled_font_freeze_cache (info->font);
1177 if (info->num_glyphs == 1) {
1178 status = composite_one_glyph(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1182 if (info->use_mask) {
1183 status = composite_glyphs_via_mask(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
/* Per-glyph composite path. */
1187 op = _pixman_operator (op);
1188 dst = to_pixman_image (_dst);
1189 src = ((cairo_image_source_t *)_src)->pixman_image;
1191 memset (glyph_cache, 0, sizeof (glyph_cache));
1192 status = CAIRO_STATUS_SUCCESS;
1194 for (i = 0; i < info->num_glyphs; i++) {
1196 cairo_image_surface_t *glyph_surface;
1197 cairo_scaled_glyph_t *scaled_glyph;
1198 unsigned long glyph_index = info->glyphs[i].index;
1199 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1201 scaled_glyph = glyph_cache[cache_index];
1202 if (scaled_glyph == NULL ||
1203 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1205 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1206 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1209 if (unlikely (status))
1212 glyph_cache[cache_index] = scaled_glyph;
1215 glyph_surface = scaled_glyph->surface;
1216 if (glyph_surface->width && glyph_surface->height) {
1217 /* round glyph locations to the nearest pixel */
1218 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1219 x = _cairo_lround (info->glyphs[i].x -
1220 glyph_surface->base.device_transform.x0);
1221 y = _cairo_lround (info->glyphs[i].y -
1222 glyph_surface->base.device_transform.y0);
1224 pixman_image_composite32 (op, src, glyph_surface->pixman_image, dst,
1225 x + src_x, y + src_y,
1227 x - dst_x, y - dst_y,
1228 glyph_surface->width,
1229 glyph_surface->height);
1234 _cairo_scaled_font_thaw_cache (info->font);
1240 static cairo_int_status_t
1241 check_composite (const cairo_composite_rectangles_t *extents)
1243 return CAIRO_STATUS_SUCCESS;
/* Return the singleton traps compositor for image surfaces, lazily
 * initializing its vtable on first call (delegating to the no-op
 * compositor).  Commented-out entries are hooks this backend does not
 * implement.
 * NOTE(review): lazy init is not thread-safe as written — presumably
 * first use happens under cairo's global initialization; verify. */
1246 const cairo_compositor_t *
1247 _cairo_image_traps_compositor_get (void)
1249 static cairo_traps_compositor_t compositor;
/* delegate == NULL marks the vtable as not-yet-initialized. */
1251 if (compositor.base.delegate == NULL) {
1252 _cairo_traps_compositor_init (&compositor,
1253 &__cairo_no_compositor);
1254 compositor.acquire = acquire;
1255 compositor.release = release;
1256 compositor.set_clip_region = set_clip_region;
1257 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1258 compositor.draw_image_boxes = draw_image_boxes;
1259 //compositor.copy_boxes = copy_boxes;
1260 compositor.fill_boxes = fill_boxes;
1261 compositor.check_composite = check_composite;
1262 compositor.composite = composite;
1263 compositor.lerp = lerp;
1264 //compositor.check_composite_boxes = check_composite_boxes;
1265 compositor.composite_boxes = composite_boxes;
1266 //compositor.check_composite_traps = check_composite_traps;
1267 compositor.composite_traps = composite_traps;
1268 //compositor.check_composite_tristrip = check_composite_traps;
1269 compositor.composite_tristrip = composite_tristrip;
1270 compositor.check_composite_glyphs = check_composite_glyphs;
1271 compositor.composite_glyphs = composite_glyphs;
1274 return &compositor.base;
1277 const cairo_compositor_t *
1278 _cairo_image_mask_compositor_get (void)
1280 static cairo_mask_compositor_t compositor;
1282 if (compositor.base.delegate == NULL) {
1283 _cairo_mask_compositor_init (&compositor,
1284 _cairo_image_traps_compositor_get ());
1285 compositor.acquire = acquire;
1286 compositor.release = release;
1287 compositor.set_clip_region = set_clip_region;
1288 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1289 compositor.draw_image_boxes = draw_image_boxes;
1290 compositor.fill_rectangles = fill_rectangles;
1291 compositor.fill_boxes = fill_boxes;
1292 //compositor.check_composite = check_composite;
1293 compositor.composite = composite;
1294 //compositor.lerp = lerp;
1295 //compositor.check_composite_boxes = check_composite_boxes;
1296 compositor.composite_boxes = composite_boxes;
1297 compositor.check_composite_glyphs = check_composite_glyphs;
1298 compositor.composite_glyphs = composite_glyphs;
1301 return &compositor.base;
1304 #if PIXMAN_HAS_COMPOSITOR
1305 typedef struct _cairo_image_span_renderer {
1306 cairo_span_renderer_t base;
1308 pixman_image_compositor_t *compositor;
1309 pixman_image_t *src, *mask;
1311 cairo_rectangle_int_t extents;
1312 } cairo_image_span_renderer_t;
1313 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* ---- Span callbacks for the PIXMAN_HAS_COMPOSITOR path. ----
 * Each callback receives one row-band (y, height) of half-open spans and
 * forwards them to pixman_image_compositor_blt with a coverage value.
 * NOTE(review): this extract has lines elided (each code line still carries
 * its original line number prefix); braces, parameter lines and some blt
 * arguments are missing.  Comments document intent only — compare with
 * upstream cairo-image-compositor.c before editing. */

/* Fully-opaque bounded case: coverage is used as-is (no opacity scaling). */
1315 static cairo_status_t
1316 _cairo_image_bounded_opaque_spans (void *abstract_renderer,
1318 const cairo_half_open_span_t *spans,
1321 cairo_image_span_renderer_t *r = abstract_renderer;
1324 return CAIRO_STATUS_SUCCESS;
1327 if (spans[0].coverage)
1328 pixman_image_compositor_blt (r->compositor,
1330 spans[1].x - spans[0].x, height,
1333 } while (--num_spans > 1);
1335 return CAIRO_STATUS_SUCCESS;

/* Bounded case with a solid mask: each span's coverage is scaled by
 * r->opacity before being handed to pixman. */
1338 static cairo_status_t
1339 _cairo_image_bounded_spans (void *abstract_renderer,
1341 const cairo_half_open_span_t *spans,
1344 cairo_image_span_renderer_t *r = abstract_renderer;
1347 return CAIRO_STATUS_SUCCESS;
1350 if (spans[0].coverage) {
1351 pixman_image_compositor_blt (r->compositor,
1353 spans[1].x - spans[0].x, height,
1354 r->opacity * spans[0].coverage);
1357 } while (--num_spans > 1);
1359 return CAIRO_STATUS_SUCCESS;

/* Unbounded case: in addition to drawing the covered spans, explicitly
 * clears (blt with zero coverage, presumably — the coverage argument lines
 * are elided here) the regions of r->extents above the band, left of the
 * first span and right of the last span, then advances r->extents.y so the
 * finish callback can clear whatever remains below. */
1362 static cairo_status_t
1363 _cairo_image_unbounded_spans (void *abstract_renderer,
1365 const cairo_half_open_span_t *spans,
1368 cairo_image_span_renderer_t *r = abstract_renderer;
1370 assert (y + height <= r->extents.height);
1371 if (y > r->extents.y) {
1372 pixman_image_compositor_blt (r->compositor,
1373 r->extents.x, r->extents.y,
1374 r->extents.width, y - r->extents.y,
1378 if (num_spans == 0) {
1379 pixman_image_compositor_blt (r->compositor,
1381 r->extents.width, height,
1384 if (spans[0].x != r->extents.x) {
1385 pixman_image_compositor_blt (r->compositor,
1387 spans[0].x - r->extents.x,
1393 assert (spans[0].x < r->extents.x + r->extents.width);
1394 pixman_image_compositor_blt (r->compositor,
1396 spans[1].x - spans[0].x, height,
1397 r->opacity * spans[0].coverage);
1399 } while (--num_spans > 1);
1401 if (spans[0].x != r->extents.x + r->extents.width) {
1402 assert (spans[0].x < r->extents.x + r->extents.width);
1403 pixman_image_compositor_blt (r->compositor,
1405 r->extents.x + r->extents.width - spans[0].x, height,
1410 r->extents.y = y + height;
1411 return CAIRO_STATUS_SUCCESS;

/* Clipped case: like the bounded case, but spans flagged "inverse"
 * (outside the clip) are skipped; still tracks r->extents.y. */
1414 static cairo_status_t
1415 _cairo_image_clipped_spans (void *abstract_renderer,
1417 const cairo_half_open_span_t *spans,
1420 cairo_image_span_renderer_t *r = abstract_renderer;
1425 if (! spans[0].inverse)
1426 pixman_image_compositor_blt (r->compositor,
1428 spans[1].x - spans[0].x, height,
1429 r->opacity * spans[0].coverage);
1431 } while (--num_spans > 1);
1433 r->extents.y = y + height;
1434 return CAIRO_STATUS_SUCCESS;

/* Finish hook for the unbounded case: clears any remaining strip between
 * the last rendered band (r->extents.y) and the bottom of the extents. */
1437 static cairo_status_t
1438 _cairo_image_finish_unbounded_spans (void *abstract_renderer)
1440 cairo_image_span_renderer_t *r = abstract_renderer;
1442 if (r->extents.y < r->extents.height) {
1443 pixman_image_compositor_blt (r->compositor,
1444 r->extents.x, r->extents.y,
1446 r->extents.height - r->extents.y,
1450 return CAIRO_STATUS_SUCCESS;
/* Set up the PIXMAN_HAS_COMPOSITOR span renderer: maps the cairo operator
 * onto a pixman op (with LERP variants for CLEAR/SOURCE), builds pixman
 * images for source and mask patterns, selects the per-row callback
 * according to boundedness/opacity/clipping, and creates the pixman
 * image compositor.  NOTE(review): lines are elided in this extract
 * (src_x/src_y out-params, several branches' closing braces); compare
 * with upstream before editing. */
1453 static cairo_int_status_t
1454 span_renderer_init (cairo_abstract_span_renderer_t *_r,
1455 const cairo_composite_rectangles_t *composite,
1456 cairo_bool_t needs_clip)
1458 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
1459 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1460 const cairo_pattern_t *source = &composite->source_pattern.base;
1461 cairo_operator_t op = composite->op;
1465 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Operator reduction: CLEAR/SOURCE become pixman LERP ops; SOURCE/OVER/ADD
 * onto an already-clear destination can use a cheaper op (branch body elided). */
1467 if (op == CAIRO_OPERATOR_CLEAR) {
1468 op = PIXMAN_OP_LERP_CLEAR;
1469 } else if (dst->base.is_clear &&
1470 (op == CAIRO_OPERATOR_SOURCE ||
1471 op == CAIRO_OPERATOR_OVER ||
1472 op == CAIRO_OPERATOR_ADD)) {
1474 } else if (op == CAIRO_OPERATOR_SOURCE) {
1475 op = PIXMAN_OP_LERP_SRC;
1477 op = _pixman_operator (op);
1480 r->compositor = NULL;
1482 r->src = _pixman_image_for_pattern (dst, source, FALSE,
1483 &composite->unbounded,
1484 &composite->source_sample_area,
1486 if (unlikely (r->src == NULL))
1487 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* A solid mask pattern is folded into r->opacity; any other mask pattern
 * becomes a real pixman mask image. */
1490 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
1491 r->opacity = composite->mask_pattern.solid.color.alpha;
1493 r->mask = _pixman_image_for_pattern (dst,
1494 &composite->mask_pattern.base,
1496 &composite->unbounded,
1497 &composite->mask_sample_area,
1499 if (unlikely (r->mask == NULL))
1500 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1502 /* XXX Component-alpha? */
1503 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
1504 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
1506 pixman_image_unref (r->src);
/* Callback selection: bounded -> (opaque|scaled) bounded spans with no
 * finish hook; otherwise clipped or unbounded spans; the unbounded case
 * tracks extents and installs the finish callback to clear the tail. */
1514 if (composite->is_bounded) {
1515 if (r->opacity == 1.)
1516 r->base.render_rows = _cairo_image_bounded_opaque_spans;
1518 r->base.render_rows = _cairo_image_bounded_spans;
1519 r->base.finish = NULL;
1522 r->base.render_rows = _cairo_image_clipped_spans;
1524 r->base.render_rows = _cairo_image_unbounded_spans;
1525 r->base.finish = _cairo_image_finish_unbounded_spans;
1526 r->extents = composite->unbounded;
/* extents.height is repurposed as the bottom y coordinate here. */
1527 r->extents.height += r->extents.y;
1531 pixman_image_create_compositor (op, r->src, r->mask, dst->pixman_image,
1532 composite->unbounded.x + src_x,
1533 composite->unbounded.y + src_y,
1534 composite->unbounded.x + mask_x,
1535 composite->unbounded.y + mask_y,
1536 composite->unbounded.x,
1537 composite->unbounded.y,
1538 composite->unbounded.width,
1539 composite->unbounded.height);
1540 if (unlikely (r->compositor == NULL))
1541 return CAIRO_INT_STATUS_NOTHING_TO_DO;
1543 return CAIRO_STATUS_SUCCESS;

/* Tear down the renderer: run the finish hook on success, then release
 * the pixman compositor and the source/mask images (the surrounding
 * null-guard lines are elided in this extract). */
1547 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
1548 cairo_int_status_t status)
1550 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
1552 TRACE ((stderr, "%s\n", __FUNCTION__));
1554 if (status == CAIRO_INT_STATUS_SUCCESS && r->base.finish)
1558 pixman_image_compositor_destroy (r->compositor);
1561 pixman_image_unref (r->src);
1563 pixman_image_unref (r->mask);
/* Span renderer state for the generic (non-PIXMAN_HAS_COMPOSITOR) path.
 * NOTE(review): most members are elided in this extract (the line numbers
 * jump 1569->1575->1589); the later code references r->op, r->bpp,
 * r->opacity and a union r->u with mask/fill/blit/composite arms —
 * confirm the full declaration against upstream. */
1566 typedef struct _cairo_image_span_renderer {
1567 cairo_span_renderer_t base;
1569 const cairo_composite_rectangles_t *composite;
1575 pixman_image_t *src, *mask;
1589 pixman_image_t *dst;
1595 cairo_rectangle_int_t extents;
/* Scratch space left over inside the abstract renderer allocation. */
1602 #define SZ_BUF (sizeof (cairo_abstract_span_renderer_t) - sizeof (cairo_image_span_renderer_t))
1603 } cairo_image_span_renderer_t;
1604 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));

/* Rasterise spans into the A8 mask buffer r->u.mask: writes one row of
 * coverage bytes, then replicates it for the remaining rows of the band
 * (memcpy per extra row).  Lines elided: parameter list, loop heads. */
1606 static cairo_status_t
1607 _cairo_image_spans (void *abstract_renderer,
1609 const cairo_half_open_span_t *spans,
1612 cairo_image_span_renderer_t *r = abstract_renderer;
1613 uint8_t *mask, *row;
1617 return CAIRO_STATUS_SUCCESS;
1619 mask = r->u.mask.data + (y - r->u.mask.extents.y) * r->u.mask.stride;
1620 mask += spans[0].x - r->u.mask.extents.x;
1624 len = spans[1].x - spans[0].x;
1625 if (spans[0].coverage) {
/* First byte written explicitly, remainder filled from it. */
1626 *row++ = r->opacity * spans[0].coverage;
1628 memset (row, row[-1], len);
1632 } while (--num_spans > 1);
1637 mask += r->u.mask.stride;
1638 memcpy (mask, row, len);
1641 return CAIRO_STATUS_SUCCESS;

/* Like _cairo_image_spans, but for the unbounded case: zeroes the mask
 * rows above the band, the gaps left/right of the spans, and advances
 * r->u.mask.extents.y / r->u.mask.data so the finish callback can zero
 * the tail.  Lines elided throughout — verify against upstream. */
1644 static cairo_status_t
1645 _cairo_image_spans_and_zero (void *abstract_renderer,
1647 const cairo_half_open_span_t *spans,
1650 cairo_image_span_renderer_t *r = abstract_renderer;
1654 mask = r->u.mask.data;
1655 if (y > r->u.mask.extents.y) {
1656 len = (y - r->u.mask.extents.y) * r->u.mask.stride;
1657 memset (mask, 0, len);
1661 r->u.mask.extents.y = y + height;
1662 r->u.mask.data = mask + height * r->u.mask.stride;
1663 if (num_spans == 0) {
1664 memset (mask, 0, height * r->u.mask.stride);
1666 uint8_t *row = mask;
1668 if (spans[0].x != r->u.mask.extents.x) {
1669 len = spans[0].x - r->u.mask.extents.x;
1670 memset (row, 0, len);
1675 len = spans[1].x - spans[0].x;
1676 *row++ = r->opacity * spans[0].coverage;
1678 memset (row, row[-1], --len);
1682 } while (--num_spans > 1);
1684 if (spans[0].x != r->u.mask.extents.x + r->u.mask.extents.width) {
1685 len = r->u.mask.extents.x + r->u.mask.extents.width - spans[0].x;
1686 memset (row, 0, len);
1691 mask += r->u.mask.stride;
1692 memcpy (mask, row, r->u.mask.extents.width);
1696 return CAIRO_STATUS_SUCCESS;
1699 static cairo_status_t
1700 _cairo_image_finish_spans_and_zero (void *abstract_renderer)
1702 cairo_image_span_renderer_t *r = abstract_renderer;
1704 if (r->u.mask.extents.y < r->u.mask.extents.height)
1705 memset (r->u.mask.data, 0, (r->u.mask.extents.height - r->u.mask.extents.y) * r->u.mask.stride);
1707 return CAIRO_STATUS_SUCCESS;
/* ---- Solid-fill span writers used by mono_renderer_init when the source
 * reduces to a single pixel value (r->u.fill.pixel).  One variant per
 * destination bpp.  NOTE(review): lines are elided in this extract
 * (if/else heads, h>1 loop scaffolding); intent only below. ---- */

/* 8bpp fill: memset each covered span; single-byte spans get a direct
 * store (the elided branch at 1724/1738 presumably tests len == 1). */
1710 static cairo_status_t
1711 _fill8_spans (void *abstract_renderer, int y, int h,
1712 const cairo_half_open_span_t *spans, unsigned num_spans)
1714 cairo_image_span_renderer_t *r = abstract_renderer;
1717 return CAIRO_STATUS_SUCCESS;
1719 if (likely(h == 1)) {
1721 if (spans[0].coverage) {
1722 int len = spans[1].x - spans[0].x;
1723 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
1725 *d = r->u.fill.pixel;
1727 memset(d, r->u.fill.pixel, len);
1730 } while (--num_spans > 1);
/* h > 1: same per-span work repeated for each row yy in [y, y+h). */
1733 if (spans[0].coverage) {
1736 int len = spans[1].x - spans[0].x;
1737 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1739 *d = r->u.fill.pixel;
1741 memset(d, r->u.fill.pixel, len);
1746 } while (--num_spans > 1);
1749 return CAIRO_STATUS_SUCCESS;

/* 16bpp fill: writes uint16_t pixels one by one (no memset — the value is
 * two bytes wide). */
1752 static cairo_status_t
1753 _fill16_spans (void *abstract_renderer, int y, int h,
1754 const cairo_half_open_span_t *spans, unsigned num_spans)
1756 cairo_image_span_renderer_t *r = abstract_renderer;
1759 return CAIRO_STATUS_SUCCESS;
1761 if (likely(h == 1)) {
1763 if (spans[0].coverage) {
1764 int len = spans[1].x - spans[0].x;
1765 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*2);
1767 *d++ = r->u.fill.pixel;
1770 } while (--num_spans > 1);
1773 if (spans[0].coverage) {
1776 int len = spans[1].x - spans[0].x;
1777 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*2);
1779 *d++ = r->u.fill.pixel;
1784 } while (--num_spans > 1);
1787 return CAIRO_STATUS_SUCCESS;

/* 32bpp fill: long spans (the elided threshold, >16 in the h>1 branch)
 * go through pixman_fill; short spans are written with a plain store
 * loop since pixman's setup cost dominates for a few pixels. */
1790 static cairo_status_t
1791 _fill32_spans (void *abstract_renderer, int y, int h,
1792 const cairo_half_open_span_t *spans, unsigned num_spans)
1794 cairo_image_span_renderer_t *r = abstract_renderer;
1797 return CAIRO_STATUS_SUCCESS;
1799 if (likely(h == 1)) {
1801 if (spans[0].coverage) {
1802 int len = spans[1].x - spans[0].x;
1804 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1805 spans[0].x, y, len, 1, r->u.fill.pixel);
1807 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
1809 *d++ = r->u.fill.pixel;
1813 } while (--num_spans > 1);
1816 if (spans[0].coverage) {
1817 if (spans[1].x - spans[0].x > 16) {
1818 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1819 spans[0].x, y, spans[1].x - spans[0].x, h,
1824 int len = spans[1].x - spans[0].x;
1825 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
1827 *d++ = r->u.fill.pixel;
1833 } while (--num_spans > 1);
1836 return CAIRO_STATUS_SUCCESS;
/* Generic fill: always delegates to pixman_fill for each covered span
 * (the coordinate and pixel argument lines, 1852/1854-1855, are elided). */
1840 static cairo_status_t
1841 _fill_spans (void *abstract_renderer, int y, int h,
1842 const cairo_half_open_span_t *spans, unsigned num_spans)
1844 cairo_image_span_renderer_t *r = abstract_renderer;
1847 return CAIRO_STATUS_SUCCESS;
1850 if (spans[0].coverage) {
1851 pixman_fill ((uint32_t *) r->data, r->stride, r->bpp,
1853 spans[1].x - spans[0].x, h,
1857 } while (--num_spans > 1);
1859 return CAIRO_STATUS_SUCCESS;

/* Direct same-format copy from a source image at an integer translation:
 * per covered span, copy len bytes per row; tiny spans (1/2/4/8 bytes)
 * use single loads/stores instead of memcpy.  r->u.blit.cpp appears to be
 * the bytes-per-pixel (the declaring line is elided — confirm). */
1863 static cairo_status_t
1864 _blit_spans (void *abstract_renderer, int y, int h,
1865 const cairo_half_open_span_t *spans, unsigned num_spans)
1867 cairo_image_span_renderer_t *r = abstract_renderer;
1871 return CAIRO_STATUS_SUCCESS;
1874 if (likely (h == 1)) {
1875 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
1876 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
1878 if (spans[0].coverage) {
1879 void *s = src + spans[0].x*cpp;
1880 void *d = dst + spans[0].x*cpp;
1881 int len = (spans[1].x - spans[0].x) * cpp;
/* NOTE(review): these word-sized copies alias through casted pointers;
 * alignment is presumably guaranteed by the surface strides — confirm. */
1884 *(uint8_t *)d = *(uint8_t *)s;
1887 *(uint16_t *)d = *(uint16_t *)s;
1890 *(uint32_t *)d = *(uint32_t *)s;
1894 *(uint64_t *)d = *(uint64_t *)s;
1903 } while (--num_spans > 1);
1906 if (spans[0].coverage) {
1909 void *src = r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x*cpp;
1910 void *dst = r->u.blit.data + yy*r->u.blit.stride + spans[0].x*cpp;
1911 int len = (spans[1].x - spans[0].x) * cpp;
1914 *(uint8_t *)dst = *(uint8_t *)src;
1917 *(uint16_t *)dst = *(uint16_t *)src;
1920 *(uint32_t *)dst = *(uint32_t *)src;
1924 *(uint64_t *)dst = *(uint64_t *)src;
1928 memcpy(dst, src, len);
1935 } while (--num_spans > 1);
1938 return CAIRO_STATUS_SUCCESS;

/* Monochrome (antialias-off) composite: any nonzero coverage becomes a
 * full composite of the span rectangle via pixman_image_composite32
 * (destination coordinate lines 1955-1956 elided). */
1941 static cairo_status_t
1942 _mono_spans (void *abstract_renderer, int y, int h,
1943 const cairo_half_open_span_t *spans, unsigned num_spans)
1945 cairo_image_span_renderer_t *r = abstract_renderer;
1948 return CAIRO_STATUS_SUCCESS;
1951 if (spans[0].coverage) {
1952 pixman_image_composite32 (r->op,
1953 r->src, NULL, r->u.composite.dst,
1954 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1957 spans[1].x - spans[0].x, h);
1960 } while (--num_spans > 1);
1962 return CAIRO_STATUS_SUCCESS;
/* Monochrome unbounded composite: composites covered spans with r->op and
 * explicitly CLEARs everything else inside composite->unbounded — the
 * strip above the band (tracked via u.composite.mask_y), the gap left of
 * the first span, zero-coverage spans, and the gap right of the last span.
 * NOTE(review): lines elided throughout this extract. */
1965 static cairo_status_t
1966 _mono_unbounded_spans (void *abstract_renderer, int y, int h,
1967 const cairo_half_open_span_t *spans, unsigned num_spans)
1969 cairo_image_span_renderer_t *r = abstract_renderer;
1971 if (num_spans == 0) {
1972 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1973 r->src, NULL, r->u.composite.dst,
1974 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1976 r->composite->unbounded.x, y,
1977 r->composite->unbounded.width, h);
1978 r->u.composite.mask_y = y + h;
1979 return CAIRO_STATUS_SUCCESS;
/* Clear any rows skipped since the previous band. */
1982 if (y != r->u.composite.mask_y) {
1983 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1984 r->src, NULL, r->u.composite.dst,
1985 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1987 r->composite->unbounded.x, r->u.composite.mask_y,
1988 r->composite->unbounded.width, y - r->u.composite.mask_y);
/* Clear the region left of the first span. */
1991 if (spans[0].x != r->composite->unbounded.x) {
1992 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1993 r->src, NULL, r->u.composite.dst,
1994 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1996 r->composite->unbounded.x, y,
1997 spans[0].x - r->composite->unbounded.x, h);
/* Covered spans use r->op; zero-coverage spans are cleared. */
2001 int op = spans[0].coverage ? r->op : PIXMAN_OP_CLEAR;
2002 pixman_image_composite32 (op,
2003 r->src, NULL, r->u.composite.dst,
2004 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2007 spans[1].x - spans[0].x, h);
2009 } while (--num_spans > 1);
/* Clear the region right of the last span. */
2011 if (spans[0].x != r->composite->unbounded.x + r->composite->unbounded.width) {
2012 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2013 r->src, NULL, r->u.composite.dst,
2014 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2017 r->composite->unbounded.x + r->composite->unbounded.width - spans[0].x, h);
2020 r->u.composite.mask_y = y + h;
2021 return CAIRO_STATUS_SUCCESS;

/* Finish hook: clear whatever remains below the last band. */
2024 static cairo_status_t
2025 _mono_finish_unbounded_spans (void *abstract_renderer)
2027 cairo_image_span_renderer_t *r = abstract_renderer;
2029 if (r->u.composite.mask_y < r->composite->unbounded.y + r->composite->unbounded.height) {
2030 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2031 r->src, NULL, r->u.composite.dst,
2032 r->composite->unbounded.x + r->u.composite.src_x, r->u.composite.mask_y + r->u.composite.src_y,
2034 r->composite->unbounded.x, r->u.composite.mask_y,
2035 r->composite->unbounded.width,
2036 r->composite->unbounded.y + r->composite->unbounded.height - r->u.composite.mask_y);
2039 return CAIRO_STATUS_SUCCESS;

/* Choose the fastest monochrome (ANTIALIAS_NONE) renderer: direct fill
 * when the solid source reduces to one pixel value, direct blit for a
 * same-format SOURCE/OVER image copy at an integer translation, else a
 * generic pixman composite ((un)bounded variants).  Requires an opaque
 * solid mask.  NOTE(review): several branch heads and out-params (tx/ty)
 * are elided in this extract. */
2042 static cairo_int_status_t
2043 mono_renderer_init (cairo_image_span_renderer_t *r,
2044 const cairo_composite_rectangles_t *composite,
2045 cairo_antialias_t antialias,
2046 cairo_bool_t needs_clip)
2048 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2050 if (antialias != CAIRO_ANTIALIAS_NONE)
2051 return CAIRO_INT_STATUS_UNSUPPORTED;
2053 if (!_cairo_pattern_is_opaque_solid (&composite->mask_pattern.base))
2054 return CAIRO_INT_STATUS_UNSUPPORTED;
2056 r->base.render_rows = NULL;
2057 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2058 const cairo_color_t *color;
2060 color = &composite->source_pattern.solid.color;
2061 if (composite->op == CAIRO_OPERATOR_CLEAR)
2062 color = CAIRO_COLOR_TRANSPARENT;
2064 if (fill_reduces_to_source (composite->op, color, dst) &&
2065 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2066 /* Use plain C for the fill operations as the span length is
2067 * typically small, too small to payback the startup overheads of
2070 switch (PIXMAN_FORMAT_BPP(dst->pixman_format)) {
2071 case 8: r->base.render_rows = _fill8_spans; break;
2072 case 16: r->base.render_rows = _fill16_spans; break;
2073 case 32: r->base.render_rows = _fill32_spans; break;
2076 r->u.fill.data = dst->data;
2077 r->u.fill.stride = dst->stride;
/* Same-format image source composited with SOURCE (or OVER onto a clear
 * or alpha-less destination) can be copied directly. */
2079 } else if ((composite->op == CAIRO_OPERATOR_SOURCE ||
2080 (composite->op == CAIRO_OPERATOR_OVER &&
2081 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2082 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2083 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2084 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2086 cairo_image_surface_t *src =
2087 to_image_surface(composite->source_pattern.surface.surface);
/* Only when the pattern matrix is an integer translation (tx, ty) that
 * keeps the bounded rectangle inside the source image. */
2090 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2092 composite->bounded.x + tx >= 0 &&
2093 composite->bounded.y + ty >= 0 &&
2094 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2095 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2097 r->u.blit.stride = dst->stride;
2098 r->u.blit.data = dst->data;
2099 r->u.blit.src_stride = src->stride;
/* NOTE(review): '* 4' assumes a 4-byte format; presumably guarded by the
 * format check above — confirm for non-32bpp formats. */
2100 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2101 r->base.render_rows = _blit_spans;
/* Fallback: generic pixman composite of each span. */
2105 if (r->base.render_rows == NULL) {
2106 r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
2107 &composite->unbounded,
2108 &composite->source_sample_area,
2109 &r->u.composite.src_x, &r->u.composite.src_y);
2110 if (unlikely (r->src == NULL))
2111 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2113 r->u.composite.dst = to_pixman_image (composite->surface);
2114 r->op = _pixman_operator (composite->op);
2115 if (composite->is_bounded == 0) {
2116 r->base.render_rows = _mono_unbounded_spans;
2117 r->base.finish = _mono_finish_unbounded_spans;
2118 r->u.composite.mask_y = composite->unbounded.y;
2120 r->base.render_rows = _mono_spans;
2122 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
2124 return CAIRO_INT_STATUS_SUCCESS;
/* Fixed-point per-channel pixel arithmetic for packed 8-bit channels.
 * The RB_* masks treat a 32-bit word as two 8-bit channels held in the
 * low bytes of each 16-bit half ("red/blue lanes"); G_SHIFT (8) moves
 * between the two interleaved lane pairs of an ARGB32 pixel.
 *
 * The extracted text had dropped the `#define G_SHIFT 8` line and, in
 * add8x2_8x2, the accumulator declaration and the return — restored here. */
#define ONE_HALF 0x7f
#define RB_MASK 0x00ff00ff
#define RB_ONE_HALF 0x007f007f
#define RB_MASK_PLUS_ONE 0x01000100
#define G_SHIFT 8

/* Multiply both channels packed in 'a' by the 8-bit factor 'b' with
 * correct rounding: each channel becomes round(ch * b / 255). */
static inline uint32_t
mul8x2_8 (uint32_t a, uint8_t b)
{
    uint32_t t = (a & RB_MASK) * b + RB_ONE_HALF;
    return ((t + ((t >> G_SHIFT) & RB_MASK)) >> G_SHIFT) & RB_MASK;
}

/* Saturating add of two channel pairs: any lane that carries into bit 8
 * is clamped to 0xff via the borrow trick below. */
static inline uint32_t
add8x2_8x2 (uint32_t a, uint32_t b)
{
    uint32_t t = a + b;
    t |= RB_MASK_PLUS_ONE - ((t >> G_SHIFT) & RB_MASK);
    return t & RB_MASK;
}

/* Scalar 8-bit rounded multiply: round(a * b / 255). */
static inline uint8_t
mul8_8 (uint8_t a, uint8_t b)
{
    uint16_t t = a * (uint16_t)b + ONE_HALF;
    return ((t >> G_SHIFT) + t) >> G_SHIFT;
}

/* Per-channel linear interpolation of two ARGB32 pixels:
 * result_ch = src_ch * a / 255 + dst_ch * (255 - a) / 255.
 * Note ~a is truncated back to uint8_t by the parameter type of
 * mul8x2_8, yielding 255 - a as intended. */
static inline uint32_t
lerp8x4 (uint32_t src, uint8_t a, uint32_t dst)
{
    return (add8x2_8x2 (mul8x2_8 (src, a),
			mul8x2_8 (dst, ~a)) |
	    add8x2_8x2 (mul8x2_8 (src >> G_SHIFT, a),
			mul8x2_8 (dst >> G_SHIFT, ~a)) << G_SHIFT);
}
/* ---- Inplace lerp span writers: blend a solid pixel (or a source image)
 * into the destination using span coverage as the interpolation factor.
 * "opaque" variants assume full mask alpha; the others scale coverage by
 * r->bpp, which here holds the 8-bit mask opacity (see inplace_renderer_init
 * at the end of this file: r->bpp = alpha_short >> 8).
 * NOTE(review): lines are elided in this extract (branch heads, loop
 * scaffolding); comments document intent only. ---- */

/* A8 destination, opaque mask: full coverage memsets the pixel; partial
 * coverage blends with the 8-bit rounded lerp (t = dst*(255-a) + pix*a). */
2163 static cairo_status_t
2164 _fill_a8_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2165 const cairo_half_open_span_t *spans, unsigned num_spans)
2167 cairo_image_span_renderer_t *r = abstract_renderer;
2170 return CAIRO_STATUS_SUCCESS;
2172 if (likely(h == 1)) {
2173 uint8_t *d = r->u.fill.data + r->u.fill.stride*y;
2175 uint8_t a = spans[0].coverage;
2177 int len = spans[1].x - spans[0].x;
2179 memset(d + spans[0].x, r->u.fill.pixel, len);
2181 uint8_t s = mul8_8(a, r->u.fill.pixel);
2182 uint8_t *dst = d + spans[0].x;
2185 uint8_t t = mul8_8(*dst, a);
2191 } while (--num_spans > 1);
2194 uint8_t a = spans[0].coverage;
2199 int len = spans[1].x - spans[0].x;
2200 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2201 memset(d, r->u.fill.pixel, len);
2205 uint8_t s = mul8_8(a, r->u.fill.pixel);
2208 int len = spans[1].x - spans[0].x;
2209 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2211 uint8_t t = mul8_8(*d, a);
2219 } while (--num_spans > 1);
2222 return CAIRO_STATUS_SUCCESS;

/* xRGB32 destination, opaque mask: full coverage uses pixman_fill (long
 * spans) or a store loop; partial coverage lerps per pixel with lerp8x4. */
2225 static cairo_status_t
2226 _fill_xrgb32_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2227 const cairo_half_open_span_t *spans, unsigned num_spans)
2229 cairo_image_span_renderer_t *r = abstract_renderer;
2232 return CAIRO_STATUS_SUCCESS;
2234 if (likely(h == 1)) {
2236 uint8_t a = spans[0].coverage;
2238 int len = spans[1].x - spans[0].x;
2239 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2242 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), 32,
2243 spans[0].x, y, len, 1, r->u.fill.pixel);
2245 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2247 *d++ = r->u.fill.pixel;
2249 } else while (len--) {
2250 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2255 } while (--num_spans > 1);
2258 uint8_t a = spans[0].coverage;
2261 if (spans[1].x - spans[0].x > 16) {
2262 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), 32,
2263 spans[0].x, y, spans[1].x - spans[0].x, h,
2268 int len = spans[1].x - spans[0].x;
2269 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2271 *d++ = r->u.fill.pixel;
2278 int len = spans[1].x - spans[0].x;
2279 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2281 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2289 } while (--num_spans > 1);
2292 return CAIRO_STATUS_SUCCESS;

/* A8 destination with non-opaque mask: effective alpha is
 * coverage * opacity (mul8_8 with r->bpp); blends via the 16-bit rounded
 * accumulate t = dst*ia + pix*a. */
2295 static cairo_status_t
2296 _fill_a8_lerp_spans (void *abstract_renderer, int y, int h,
2297 const cairo_half_open_span_t *spans, unsigned num_spans)
2299 cairo_image_span_renderer_t *r = abstract_renderer;
2302 return CAIRO_STATUS_SUCCESS;
2304 if (likely(h == 1)) {
2306 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2308 int len = spans[1].x - spans[0].x;
2309 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
2310 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2313 uint16_t t = *d*ia + p;
2314 *d++ = (t + (t>>8)) >> 8;
2318 } while (--num_spans > 1);
2321 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2324 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2327 int len = spans[1].x - spans[0].x;
2328 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2330 uint16_t t = *d*ia + p;
2331 *d++ = (t + (t>>8)) >> 8;
2337 } while (--num_spans > 1);
2340 return CAIRO_STATUS_SUCCESS;

/* xRGB32 destination with non-opaque mask: per-pixel lerp8x4 with the
 * scaled alpha. */
2343 static cairo_status_t
2344 _fill_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2345 const cairo_half_open_span_t *spans, unsigned num_spans)
2347 cairo_image_span_renderer_t *r = abstract_renderer;
2350 return CAIRO_STATUS_SUCCESS;
2352 if (likely(h == 1)) {
2354 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2356 int len = spans[1].x - spans[0].x;
2357 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2359 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2364 } while (--num_spans > 1);
2367 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2371 int len = spans[1].x - spans[0].x;
2372 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2374 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2381 } while (--num_spans > 1);
2384 return CAIRO_STATUS_SUCCESS;

/* xRGB32 image-source blit with lerp: full effective alpha memcpys the
 * span; otherwise lerp each pixel from the source image into the dest. */
2387 static cairo_status_t
2388 _blit_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2389 const cairo_half_open_span_t *spans, unsigned num_spans)
2391 cairo_image_span_renderer_t *r = abstract_renderer;
2394 return CAIRO_STATUS_SUCCESS;
2396 if (likely(h == 1)) {
2397 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
2398 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
2400 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2402 uint32_t *s = (uint32_t*)src + spans[0].x;
2403 uint32_t *d = (uint32_t*)dst + spans[0].x;
2404 int len = spans[1].x - spans[0].x;
2409 memcpy(d, s, len*4);
2412 *d = lerp8x4 (*s, a, *d);
2418 } while (--num_spans > 1);
2421 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2425 uint32_t *s = (uint32_t *)(r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x * 4);
2426 uint32_t *d = (uint32_t *)(r->u.blit.data + yy*r->u.blit.stride + spans[0].x * 4);
2427 int len = spans[1].x - spans[0].x;
2432 memcpy(d, s, len * 4);
2435 *d = lerp8x4 (*s, a, *d);
2443 } while (--num_spans > 1);
2446 return CAIRO_STATUS_SUCCESS;
/* ---- Inplace compositing via an A8 scratch mask: coverage bytes are
 * accumulated into r->mask and flushed with pixman_image_composite32
 * whenever a long opaque/empty run makes a direct composite cheaper
 * (threshold r->u.composite.run_length).  NOTE(review): many lines are
 * elided in this extract (mask x/y arguments, width expressions, loop
 * scaffolding); comments document intent only — compare with upstream
 * before editing. ---- */

/* General inplace span renderer.  A band that is a single full-coverage
 * span short-circuits to a maskless composite; otherwise coverage is
 * written into the scratch mask and flushed in segments [x0, x1). */
2449 static cairo_status_t
2450 _inplace_spans (void *abstract_renderer,
2452 const cairo_half_open_span_t *spans,
2455 cairo_image_span_renderer_t *r = abstract_renderer;
2460 return CAIRO_STATUS_SUCCESS;
2462 if (num_spans == 2 && spans[0].coverage == 0xff) {
2463 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2464 spans[0].x + r->u.composite.src_x,
2465 y + r->u.composite.src_y,
2468 spans[1].x - spans[0].x, h);
2469 return CAIRO_STATUS_SUCCESS;
2472 mask = (uint8_t *)pixman_image_get_data (r->mask);
2473 x1 = x0 = spans[0].x;
2475 int len = spans[1].x - spans[0].x;
2476 *mask++ = spans[0].coverage;
/* Long fully-opaque run: flush the buffered mask, then composite the
 * opaque run directly without a mask. */
2478 if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
2480 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2481 x0 + r->u.composite.src_x,
2482 y + r->u.composite.src_y,
2487 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2488 spans[0].x + r->u.composite.src_x,
2489 y + r->u.composite.src_y,
2493 mask = (uint8_t *)pixman_image_get_data (r->mask);
/* Long zero-coverage gap: flush the accumulated segment and restart. */
2495 } else if (spans[0].coverage == 0x0 &&
2496 x1 - x0 > r->u.composite.run_length) {
2497 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2498 x0 + r->u.composite.src_x,
2499 y + r->u.composite.src_y,
2503 mask = (uint8_t *)pixman_image_get_data (r->mask);
2506 memset (mask, spans[0].coverage, --len);
2512 } while (--num_spans > 1);
/* Flush whatever remains buffered for this band. */
2515 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2516 x0 + r->u.composite.src_x,
2517 y + r->u.composite.src_y,
2523 return CAIRO_STATUS_SUCCESS;

/* As _inplace_spans but with a non-opaque solid mask: every coverage
 * value is pre-multiplied by the opacity held in r->bpp (mul8_8), so the
 * full-coverage shortcut does not apply. */
2526 static cairo_status_t
2527 _inplace_opacity_spans (void *abstract_renderer, int y, int h,
2528 const cairo_half_open_span_t *spans,
2531 cairo_image_span_renderer_t *r = abstract_renderer;
2536 return CAIRO_STATUS_SUCCESS;
2538 mask = (uint8_t *)pixman_image_get_data (r->mask);
2539 x1 = x0 = spans[0].x;
2541 int len = spans[1].x - spans[0].x;
2542 uint8_t m = mul8_8(spans[0].coverage, r->bpp);
2546 x1 - x0 > r->u.composite.run_length) {
2547 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2548 x0 + r->u.composite.src_x,
2549 y + r->u.composite.src_y,
2553 mask = (uint8_t *)pixman_image_get_data (r->mask);
2556 memset (mask, m, --len);
2562 } while (--num_spans > 1);
2565 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2566 x0 + r->u.composite.src_x,
2567 y + r->u.composite.src_y,
2573 return CAIRO_STATUS_SUCCESS;

/* SOURCE-operator inplace spans.  SOURCE is unbounded by the mask, so a
 * buffered segment must replace the destination: with LERP_SRC a single
 * composite suffices; otherwise it is emulated with OUT_REVERSE (punch
 * out dest by mask) followed by ADD (accumulate masked source). */
2576 static cairo_status_t
2577 _inplace_src_spans (void *abstract_renderer, int y, int h,
2578 const cairo_half_open_span_t *spans,
2581 cairo_image_span_renderer_t *r = abstract_renderer;
2586 return CAIRO_STATUS_SUCCESS;
2591 int len = spans[1].x - spans[0].x;
2592 if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
2593 if (spans[0].x != x0) {
2594 #if PIXMAN_HAS_OP_LERP
2595 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2596 r->src, r->mask, r->u.composite.dst,
2597 x0 + r->u.composite.src_x,
2598 y + r->u.composite.src_y,
2601 spans[0].x - x0, h);
2603 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2604 r->mask, NULL, r->u.composite.dst,
2608 spans[0].x - x0, h);
2609 pixman_image_composite32 (PIXMAN_OP_ADD,
2610 r->src, r->mask, r->u.composite.dst,
2611 x0 + r->u.composite.src_x,
2612 y + r->u.composite.src_y,
2615 spans[0].x - x0, h);
/* Opaque run: plain OP_SRC composite, no mask needed. */
2619 pixman_image_composite32 (PIXMAN_OP_SRC,
2620 r->src, NULL, r->u.composite.dst,
2621 spans[0].x + r->u.composite.src_x,
2622 y + r->u.composite.src_y,
2625 spans[1].x - spans[0].x, h);
/* Zero-coverage span: flush any pending segment the same way. */
2629 } else if (spans[0].coverage == 0x0) {
2630 if (spans[0].x != x0) {
2631 #if PIXMAN_HAS_OP_LERP
2632 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2633 r->src, r->mask, r->u.composite.dst,
2634 x0 + r->u.composite.src_x,
2635 y + r->u.composite.src_y,
2638 spans[0].x - x0, h);
2640 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2641 r->mask, NULL, r->u.composite.dst,
2645 spans[0].x - x0, h);
2646 pixman_image_composite32 (PIXMAN_OP_ADD,
2647 r->src, r->mask, r->u.composite.dst,
2648 x0 + r->u.composite.src_x,
2649 y + r->u.composite.src_y,
2652 spans[0].x - x0, h);
/* Partial coverage: buffer the value into the scratch mask. */
2659 *m++ = spans[0].coverage;
2661 memset (m, spans[0].coverage, --len);
2666 } while (--num_spans > 1);
/* Flush the trailing buffered segment. */
2668 if (spans[0].x != x0) {
2669 #if PIXMAN_HAS_OP_LERP
2670 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2671 r->src, r->mask, r->u.composite.dst,
2672 x0 + r->u.composite.src_x,
2673 y + r->u.composite.src_y,
2676 spans[0].x - x0, h);
2678 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2679 r->mask, NULL, r->u.composite.dst,
2683 spans[0].x - x0, h);
2684 pixman_image_composite32 (PIXMAN_OP_ADD,
2685 r->src, r->mask, r->u.composite.dst,
2686 x0 + r->u.composite.src_x,
2687 y + r->u.composite.src_y,
2690 spans[0].x - x0, h);
2694 return CAIRO_STATUS_SUCCESS;

/* SOURCE-operator inplace spans with a non-opaque solid mask: coverage is
 * scaled by r->bpp; segments are flushed via LERP_SRC or the
 * OUT_REVERSE + ADD pair, as above. */
2697 static cairo_status_t
2698 _inplace_src_opacity_spans (void *abstract_renderer, int y, int h,
2699 const cairo_half_open_span_t *spans,
2702 cairo_image_span_renderer_t *r = abstract_renderer;
2707 return CAIRO_STATUS_SUCCESS;
2710 mask = (uint8_t *)pixman_image_get_data (r->mask);
2712 int len = spans[1].x - spans[0].x;
2713 uint8_t m = mul8_8(spans[0].coverage, r->bpp);
2715 if (spans[0].x != x0) {
2716 #if PIXMAN_HAS_OP_LERP
2717 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2718 r->src, r->mask, r->u.composite.dst,
2719 x0 + r->u.composite.src_x,
2720 y + r->u.composite.src_y,
2723 spans[0].x - x0, h);
2725 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2726 r->mask, NULL, r->u.composite.dst,
2730 spans[0].x - x0, h);
2731 pixman_image_composite32 (PIXMAN_OP_ADD,
2732 r->src, r->mask, r->u.composite.dst,
2733 x0 + r->u.composite.src_x,
2734 y + r->u.composite.src_y,
2737 spans[0].x - x0, h);
2741 mask = (uint8_t *)pixman_image_get_data (r->mask);
2746 memset (mask, m, --len);
2751 } while (--num_spans > 1);
2753 if (spans[0].x != x0) {
2754 #if PIXMAN_HAS_OP_LERP
2755 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2756 r->src, r->mask, r->u.composite.dst,
2757 x0 + r->u.composite.src_x,
2758 y + r->u.composite.src_y,
2761 spans[0].x - x0, h);
2763 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2764 r->mask, NULL, r->u.composite.dst,
2768 spans[0].x - x0, h);
2769 pixman_image_composite32 (PIXMAN_OP_ADD,
2770 r->src, r->mask, r->u.composite.dst,
2771 x0 + r->u.composite.src_x,
2772 y + r->u.composite.src_y,
2775 spans[0].x - x0, h);
2779 return CAIRO_STATUS_SUCCESS;
/* pixman destroy callback: frees the separately malloc'ed pixel buffer
 * (passed as closure data) when the mask image is destroyed — see the
 * pixman_image_set_destroy_function registration in
 * inplace_renderer_init. */
2782 static void free_pixels (pixman_image_t *image, void *data)
/* Try to set up an in-place span renderer that writes directly to the
 * destination image, avoiding an intermediate mask surface.  Three
 * fast paths, tried in order:
 *   1. solid source colour reducible to a raw pixel -> plain-C fill
 *      loops per destination format;
 *   2. same-format image source at an integer translation -> direct
 *      xrgb32 blit;
 *   3. anything else (bounded only) -> per-row pixman compositing
 *      using a single-line a8 mask repeated vertically.
 * Returns CAIRO_INT_STATUS_UNSUPPORTED when no in-place path applies
 * (non-solid mask pattern, unbounded composite, ...). */
2787 static cairo_int_status_t
2788 inplace_renderer_init (cairo_image_span_renderer_t *r,
2789 const cairo_composite_rectangles_t *composite,
2790 cairo_antialias_t antialias,
2791 cairo_bool_t needs_clip)
2793 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
/* Only a solid mask pattern can be folded into the span coverage. */
2796 if (composite->mask_pattern.base.type != CAIRO_PATTERN_TYPE_SOLID)
2797 return CAIRO_INT_STATUS_UNSUPPORTED;
2799 r->base.render_rows = NULL;
/* Reuse the bpp field to carry the solid mask's 8-bit opacity
 * (0xff == fully opaque); the span callbacks branch on it. */
2800 r->bpp = composite->mask_pattern.solid.color.alpha_short >> 8;
/* Path 1: solid colour source handled with C fill loops. */
2802 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2803 const cairo_color_t *color;
2805 color = &composite->source_pattern.solid.color;
2806 if (composite->op == CAIRO_OPERATOR_CLEAR)
2807 color = CAIRO_COLOR_TRANSPARENT;
2809 if (fill_reduces_to_source (composite->op, color, dst) &&
2810 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2811 /* Use plain C for the fill operations as the span length is
2812 * typically small, too small to payback the startup overheads of
2815 if (r->bpp == 0xff) {
2816 switch (dst->format) {
2817 case CAIRO_FORMAT_A8:
2818 r->base.render_rows = _fill_a8_lerp_opaque_spans;
2820 case CAIRO_FORMAT_RGB24:
2821 case CAIRO_FORMAT_ARGB32:
2822 r->base.render_rows = _fill_xrgb32_lerp_opaque_spans;
2824 case CAIRO_FORMAT_A1:
2825 case CAIRO_FORMAT_RGB16_565:
2826 case CAIRO_FORMAT_RGB30:
2827 case CAIRO_FORMAT_INVALID:
2831 switch (dst->format) {
2832 case CAIRO_FORMAT_A8:
2833 r->base.render_rows = _fill_a8_lerp_spans;
2835 case CAIRO_FORMAT_RGB24:
2836 case CAIRO_FORMAT_ARGB32:
2837 r->base.render_rows = _fill_xrgb32_lerp_spans;
2839 case CAIRO_FORMAT_A1:
2840 case CAIRO_FORMAT_RGB16_565:
2841 case CAIRO_FORMAT_RGB30:
2842 case CAIRO_FORMAT_INVALID:
2846 r->u.fill.data = dst->data;
2847 r->u.fill.stride = dst->stride;
/* Path 2: same-format image source, SOURCE (or OVER onto a clear or
 * alpha-less destination, which reduces to SOURCE), whose transform
 * is an integer translation fully inside the source bounds. */
2849 } else if ((dst->format == CAIRO_FORMAT_ARGB32 || dst->format == CAIRO_FORMAT_RGB24) &&
2850 (composite->op == CAIRO_OPERATOR_SOURCE ||
2851 (composite->op == CAIRO_OPERATOR_OVER &&
2852 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2853 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2854 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2855 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2857 cairo_image_surface_t *src =
2858 to_image_surface(composite->source_pattern.surface.surface);
2861 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2863 composite->bounded.x + tx >= 0 &&
2864 composite->bounded.y + ty >= 0 &&
2865 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2866 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2868 assert(PIXMAN_FORMAT_BPP(dst->pixman_format) == 32);
2869 r->u.blit.stride = dst->stride;
2870 r->u.blit.data = dst->data;
2871 r->u.blit.src_stride = src->stride;
/* tx * 4: 32bpp destination asserted above, 4 bytes per pixel. */
2872 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2873 r->base.render_rows = _blit_xrgb32_lerp_spans;
/* Path 3: generic per-row pixman compositing with a line mask. */
2876 if (r->base.render_rows == NULL) {
2877 const cairo_pattern_t *src = &composite->source_pattern.base;
2880 if (composite->is_bounded == 0)
2881 return CAIRO_INT_STATUS_UNSUPPORTED;
2883 r->base.render_rows = r->bpp == 0xff ? _inplace_spans : _inplace_opacity_spans;
/* Round the mask line up to a multiple of 4 bytes. */
2884 width = (composite->bounded.width + 3) & ~3;
/* Batch more spans per composite for gradients, whose per-call
 * setup cost is higher than for simple patterns. */
2886 r->u.composite.run_length = 8;
2887 if (src->type == CAIRO_PATTERN_TYPE_LINEAR ||
2888 src->type == CAIRO_PATTERN_TYPE_RADIAL)
2889 r->u.composite.run_length = 256;
/* Onto a clear destination, SOURCE/OVER/ADD all reduce to SRC. */
2890 if (dst->base.is_clear &&
2891 (composite->op == CAIRO_OPERATOR_SOURCE ||
2892 composite->op == CAIRO_OPERATOR_OVER ||
2893 composite->op == CAIRO_OPERATOR_ADD)) {
2894 r->op = PIXMAN_OP_SRC;
2895 } else if (composite->op == CAIRO_OPERATOR_SOURCE) {
/* True SOURCE needs the lerp-style span callbacks and an
 * unbounded-width mask line. */
2896 r->base.render_rows = r->bpp == 0xff ? _inplace_src_spans : _inplace_src_opacity_spans;
2897 r->u.composite.mask_y = r->composite->unbounded.y;
2898 width = (composite->unbounded.width + 3) & ~3;
2899 } else if (composite->op == CAIRO_OPERATOR_CLEAR) {
2900 r->op = PIXMAN_OP_OUT_REVERSE;
2903 r->op = _pixman_operator (composite->op);
2906 r->src = _pixman_image_for_pattern (dst, src, FALSE,
2907 &composite->bounded,
2908 &composite->source_sample_area,
2909 &r->u.composite.src_x, &r->u.composite.src_y);
2910 if (unlikely (r->src == NULL))
2911 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2913 /* Create an effectively unbounded mask by repeating the single line */
/* Fall back to the heap when the line does not fit the inline buffer. */
2915 if (width > SZ_BUF) {
2916 buf = malloc (width);
2917 if (unlikely (buf == NULL)) {
2918 pixman_image_unref (r->src);
2919 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Stride 0 makes every row alias the same single line of pixels. */
2922 r->mask = pixman_image_create_bits (PIXMAN_a8,
2923 width, composite->unbounded.height,
2924 (uint32_t *)buf, 0);
2925 if (unlikely (r->mask == NULL)) {
2926 pixman_image_unref (r->src);
2929 return _cairo_error(CAIRO_STATUS_NO_MEMORY);
/* Hand buffer ownership to the image; free_pixels releases it. */
2933 pixman_image_set_destroy_function (r->mask, free_pixels, buf);
2935 r->u.composite.dst = dst->pixman_image;
2938 return CAIRO_INT_STATUS_SUCCESS;
/* Generic span-renderer entry point for the image spans compositor.
 * First tries the specialized mono and in-place renderers; if both
 * report UNSUPPORTED, falls back to accumulating coverage into an a8
 * mask (inline buffer r->_buf when it fits, otherwise a pixman-owned
 * allocation) that span_renderer_fini later composites in one call.
 * Operators without a direct pixman equivalent are rewritten up front
 * (CLEAR -> LERP_CLEAR or white + OUT_REVERSE; SOURCE -> LERP_SRC or
 * unsupported without PIXMAN_HAS_OP_LERP). */
2941 static cairo_int_status_t
2942 span_renderer_init (cairo_abstract_span_renderer_t *_r,
2943 const cairo_composite_rectangles_t *composite,
2944 cairo_antialias_t antialias,
2945 cairo_bool_t needs_clip)
2947 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
2948 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2949 const cairo_pattern_t *source = &composite->source_pattern.base;
2950 cairo_operator_t op = composite->op;
2951 cairo_int_status_t status;
2953 TRACE ((stderr, "%s: antialias=%d, needs_clip=%d\n", __FUNCTION__,
2954 antialias, needs_clip));
2957 return CAIRO_INT_STATUS_UNSUPPORTED;
2959 r->composite = composite;
2962 r->base.finish = NULL;
/* Specialized renderers first; any definite answer (success or hard
 * error) is propagated, only UNSUPPORTED falls through. */
2964 status = mono_renderer_init (r, composite, antialias, needs_clip);
2965 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
2968 status = inplace_renderer_init (r, composite, antialias, needs_clip);
2969 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
/* Map cairo operators pixman cannot express directly. */
2974 if (op == CAIRO_OPERATOR_CLEAR) {
2975 #if PIXMAN_HAS_OP_LERP
2976 op = PIXMAN_OP_LERP_CLEAR;
/* No lerp: clearing == compositing white with OUT_REVERSE. */
2978 source = &_cairo_pattern_white.base;
2979 op = PIXMAN_OP_OUT_REVERSE;
2981 } else if (dst->base.is_clear &&
2982 (op == CAIRO_OPERATOR_SOURCE ||
2983 op == CAIRO_OPERATOR_OVER ||
2984 op == CAIRO_OPERATOR_ADD)) {
2986 } else if (op == CAIRO_OPERATOR_SOURCE) {
2987 #if PIXMAN_HAS_OP_LERP
2988 op = PIXMAN_OP_LERP_SRC;
2990 return CAIRO_INT_STATUS_UNSUPPORTED;
2993 op = _pixman_operator (op);
2997 r->src = _pixman_image_for_pattern (dst, source, FALSE,
2998 &composite->unbounded,
2999 &composite->source_sample_area,
3000 &r->u.mask.src_x, &r->u.mask.src_y);
3001 if (unlikely (r->src == NULL))
3002 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Solid mask: fold it into a scalar opacity applied per span. */
3005 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
3006 r->opacity = composite->mask_pattern.solid.color.alpha;
3008 pixman_image_t *mask;
3011 mask = _pixman_image_for_pattern (dst,
3012 &composite->mask_pattern.base,
3014 &composite->unbounded,
3015 &composite->mask_sample_area,
3017 if (unlikely (mask == NULL))
3018 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3020 /* XXX Component-alpha? */
/* Alpha-only destination with an opaque source: the mask itself can
 * serve as the source image — drop the original. */
3021 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
3022 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
3024 pixman_image_unref (r->src);
3026 r->u.mask.src_x = mask_x;
3027 r->u.mask.src_y = mask_y;
3032 pixman_image_unref (mask);
3033 return CAIRO_INT_STATUS_UNSUPPORTED;
3037 r->u.mask.extents = composite->unbounded;
/* 4-byte-aligned row stride for the a8 coverage mask. */
3038 r->u.mask.stride = (r->u.mask.extents.width + 3) & ~3;
/* Too big for the inline buffer: let pixman allocate (and zero) it. */
3039 if (r->u.mask.extents.height * r->u.mask.stride > (int)sizeof (r->_buf)) {
3040 r->mask = pixman_image_create_bits (PIXMAN_a8,
3041 r->u.mask.extents.width,
3042 r->u.mask.extents.height,
3045 r->base.render_rows = _cairo_image_spans;
3046 r->base.finish = NULL;
/* Inline buffer path: rows must be explicitly zeroed, hence the
 * _and_zero render/finish pair. */
3048 r->mask = pixman_image_create_bits (PIXMAN_a8,
3049 r->u.mask.extents.width,
3050 r->u.mask.extents.height,
3051 (uint32_t *)r->_buf, r->u.mask.stride);
3053 r->base.render_rows = _cairo_image_spans_and_zero;
3054 r->base.finish = _cairo_image_finish_spans_and_zero;
3056 if (unlikely (r->mask == NULL))
3057 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3059 r->u.mask.data = (uint8_t *) pixman_image_get_data (r->mask);
3060 r->u.mask.stride = pixman_image_get_stride (r->mask);
/* Store the bottom edge (y + height) — presumably compared against
 * row y during rendering; confirm against the span callbacks. */
3062 r->u.mask.extents.height += r->u.mask.extents.y;
3063 return CAIRO_STATUS_SUCCESS;
/* Tear down the span renderer created by span_renderer_init.  On
 * success of the generic mask path (r->bpp == 0 — the mono/in-place
 * renderers set bpp elsewhere), composite the accumulated a8 coverage
 * mask over the unbounded extents in a single pixman call, then drop
 * the source and mask references. */
3067 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
3068 cairo_int_status_t status)
3070 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
3072 TRACE ((stderr, "%s\n", __FUNCTION__));
3074 if (likely (status == CAIRO_INT_STATUS_SUCCESS && r->bpp == 0)) {
3075 const cairo_composite_rectangles_t *composite = r->composite;
/* One-shot composite: src offset by the pattern's sample origin,
 * mask and dst aligned on the unbounded rectangle. */
3080 pixman_image_composite32 (r->op, r->src, r->mask,
3081 to_pixman_image (composite->surface),
3082 composite->unbounded.x + r->u.mask.src_x,
3083 composite->unbounded.y + r->u.mask.src_y,
3085 composite->unbounded.x,
3086 composite->unbounded.y,
3087 composite->unbounded.width,
3088 composite->unbounded.height);
3092 pixman_image_unref (r->src);
3094 pixman_image_unref (r->mask);
3098 const cairo_compositor_t *
3099 _cairo_image_spans_compositor_get (void)
3101 static cairo_spans_compositor_t spans;
3102 static cairo_compositor_t shape;
3104 if (spans.base.delegate == NULL) {
3105 _cairo_shape_mask_compositor_init (&shape,
3106 _cairo_image_traps_compositor_get());
3107 shape.glyphs = NULL;
3109 _cairo_spans_compositor_init (&spans, &shape);
3112 #if PIXMAN_HAS_OP_LERP
3113 spans.flags |= CAIRO_SPANS_COMPOSITOR_HAS_LERP;
3116 //spans.acquire = acquire;
3117 //spans.release = release;
3118 spans.fill_boxes = fill_boxes;
3119 spans.draw_image_boxes = draw_image_boxes;
3120 //spans.copy_boxes = copy_boxes;
3121 spans.pattern_to_surface = _cairo_image_source_create_for_pattern;
3122 //spans.check_composite_boxes = check_composite_boxes;
3123 spans.composite_boxes = composite_boxes;
3124 //spans.check_span_renderer = check_span_renderer;
3125 spans.renderer_init = span_renderer_init;
3126 spans.renderer_fini = span_renderer_fini;