1 /* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
2 /* cairo - a vector graphics library with display and print output
4 * Copyright © 2003 University of Southern California
5 * Copyright © 2009,2010,2011 Intel Corporation
7 * This library is free software; you can redistribute it and/or
8 * modify it either under the terms of the GNU Lesser General Public
9 * License version 2.1 as published by the Free Software Foundation
10 * (the "LGPL") or, at your option, under the terms of the Mozilla
11 * Public License Version 1.1 (the "MPL"). If you do not alter this
12 * notice, a recipient may use your version of this file under either
13 * the MPL or the LGPL.
15 * You should have received a copy of the LGPL along with this library
16 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
18 * You should have received a copy of the MPL along with this library
19 * in the file COPYING-MPL-1.1
21 * The contents of this file are subject to the Mozilla Public License
22 * Version 1.1 (the "License"); you may not use this file except in
23 * compliance with the License. You may obtain a copy of the License at
24 * http://www.mozilla.org/MPL/
26 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
27 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
28 * the specific language governing rights and limitations.
30 * The Original Code is the cairo graphics library.
32 * The Initial Developer of the Original Code is University of Southern
36 * Carl D. Worth <cworth@cworth.org>
37 * Chris Wilson <chris@chris-wilson.co.uk>
40 /* The primarily reason for keeping a traps-compositor around is
41 * for validating cairo-xlib (which currently also uses traps).
46 #include "cairo-image-surface-private.h"
48 #include "cairo-compositor-private.h"
49 #include "cairo-spans-compositor-private.h"
51 #include "cairo-region-private.h"
52 #include "cairo-traps-private.h"
53 #include "cairo-tristrip-private.h"
55 #if CAIRO_HAS_TG_SURFACE
56 #include "cairo-thread-local-private.h"
59 static pixman_image_t *
60 to_pixman_image (cairo_surface_t *s)
62 return ((cairo_image_surface_t *)s)->pixman_image;
65 static cairo_int_status_t
66 acquire (void *abstract_dst)
68 return CAIRO_STATUS_SUCCESS;
71 static cairo_int_status_t
72 release (void *abstract_dst)
74 return CAIRO_STATUS_SUCCESS;
77 static cairo_int_status_t
78 set_clip_region (void *_surface,
79 cairo_region_t *region)
81 cairo_image_surface_t *surface = _surface;
82 pixman_region32_t *rgn = region ? ®ion->rgn : NULL;
84 if (! pixman_image_set_clip_region32 (surface->pixman_image, rgn))
85 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
87 return CAIRO_STATUS_SUCCESS;
90 static cairo_int_status_t
91 draw_image_boxes (void *_dst,
92 cairo_image_surface_t *image,
96 cairo_image_surface_t *dst = _dst;
97 struct _cairo_boxes_chunk *chunk;
100 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
102 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
103 for (i = 0; i < chunk->count; i++) {
104 cairo_box_t *b = &chunk->base[i];
105 int x = _cairo_fixed_integer_part (b->p1.x);
106 int y = _cairo_fixed_integer_part (b->p1.y);
107 int w = _cairo_fixed_integer_part (b->p2.x) - x;
108 int h = _cairo_fixed_integer_part (b->p2.y) - y;
109 if (dst->pixman_format != image->pixman_format ||
110 ! pixman_blt ((uint32_t *)image->data, (uint32_t *)dst->data,
111 image->stride / sizeof (uint32_t),
112 dst->stride / sizeof (uint32_t),
113 PIXMAN_FORMAT_BPP (image->pixman_format),
114 PIXMAN_FORMAT_BPP (dst->pixman_format),
119 pixman_image_composite32 (PIXMAN_OP_SRC,
120 image->pixman_image, NULL, dst->pixman_image,
128 return CAIRO_STATUS_SUCCESS;
131 static inline uint32_t
132 color_to_uint32 (const cairo_color_t *color)
135 (color->alpha_short >> 8 << 24) |
136 (color->red_short >> 8 << 16) |
137 (color->green_short & 0xff00) |
138 (color->blue_short >> 8);
141 static inline cairo_bool_t
142 color_to_pixel (const cairo_color_t *color,
143 pixman_format_code_t format,
148 if (!(format == PIXMAN_a8r8g8b8 ||
149 format == PIXMAN_x8r8g8b8 ||
150 format == PIXMAN_a8b8g8r8 ||
151 format == PIXMAN_x8b8g8r8 ||
152 format == PIXMAN_b8g8r8a8 ||
153 format == PIXMAN_b8g8r8x8 ||
154 format == PIXMAN_r5g6b5 ||
155 format == PIXMAN_b5g6r5 ||
156 format == PIXMAN_a8))
161 c = color_to_uint32 (color);
163 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_ABGR) {
164 c = ((c & 0xff000000) >> 0) |
165 ((c & 0x00ff0000) >> 16) |
166 ((c & 0x0000ff00) >> 0) |
167 ((c & 0x000000ff) << 16);
170 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_BGRA) {
171 c = ((c & 0xff000000) >> 24) |
172 ((c & 0x00ff0000) >> 8) |
173 ((c & 0x0000ff00) << 8) |
174 ((c & 0x000000ff) << 24);
177 if (format == PIXMAN_a8) {
179 } else if (format == PIXMAN_r5g6b5 || format == PIXMAN_b5g6r5) {
180 c = ((((c) >> 3) & 0x001f) |
181 (((c) >> 5) & 0x07e0) |
182 (((c) >> 8) & 0xf800));
190 _pixman_operator (cairo_operator_t op)
193 case CAIRO_OPERATOR_CLEAR:
194 return PIXMAN_OP_CLEAR;
196 case CAIRO_OPERATOR_SOURCE:
197 return PIXMAN_OP_SRC;
198 case CAIRO_OPERATOR_OVER:
199 return PIXMAN_OP_OVER;
200 case CAIRO_OPERATOR_IN:
202 case CAIRO_OPERATOR_OUT:
203 return PIXMAN_OP_OUT;
204 case CAIRO_OPERATOR_ATOP:
205 return PIXMAN_OP_ATOP;
207 case CAIRO_OPERATOR_DEST:
208 return PIXMAN_OP_DST;
209 case CAIRO_OPERATOR_DEST_OVER:
210 return PIXMAN_OP_OVER_REVERSE;
211 case CAIRO_OPERATOR_DEST_IN:
212 return PIXMAN_OP_IN_REVERSE;
213 case CAIRO_OPERATOR_DEST_OUT:
214 return PIXMAN_OP_OUT_REVERSE;
215 case CAIRO_OPERATOR_DEST_ATOP:
216 return PIXMAN_OP_ATOP_REVERSE;
218 case CAIRO_OPERATOR_XOR:
219 return PIXMAN_OP_XOR;
220 case CAIRO_OPERATOR_ADD:
221 return PIXMAN_OP_ADD;
222 case CAIRO_OPERATOR_SATURATE:
223 return PIXMAN_OP_SATURATE;
225 case CAIRO_OPERATOR_MULTIPLY:
226 return PIXMAN_OP_MULTIPLY;
227 case CAIRO_OPERATOR_SCREEN:
228 return PIXMAN_OP_SCREEN;
229 case CAIRO_OPERATOR_OVERLAY:
230 return PIXMAN_OP_OVERLAY;
231 case CAIRO_OPERATOR_DARKEN:
232 return PIXMAN_OP_DARKEN;
233 case CAIRO_OPERATOR_LIGHTEN:
234 return PIXMAN_OP_LIGHTEN;
235 case CAIRO_OPERATOR_COLOR_DODGE:
236 return PIXMAN_OP_COLOR_DODGE;
237 case CAIRO_OPERATOR_COLOR_BURN:
238 return PIXMAN_OP_COLOR_BURN;
239 case CAIRO_OPERATOR_HARD_LIGHT:
240 return PIXMAN_OP_HARD_LIGHT;
241 case CAIRO_OPERATOR_SOFT_LIGHT:
242 return PIXMAN_OP_SOFT_LIGHT;
243 case CAIRO_OPERATOR_DIFFERENCE:
244 return PIXMAN_OP_DIFFERENCE;
245 case CAIRO_OPERATOR_EXCLUSION:
246 return PIXMAN_OP_EXCLUSION;
247 case CAIRO_OPERATOR_HSL_HUE:
248 return PIXMAN_OP_HSL_HUE;
249 case CAIRO_OPERATOR_HSL_SATURATION:
250 return PIXMAN_OP_HSL_SATURATION;
251 case CAIRO_OPERATOR_HSL_COLOR:
252 return PIXMAN_OP_HSL_COLOR;
253 case CAIRO_OPERATOR_HSL_LUMINOSITY:
254 return PIXMAN_OP_HSL_LUMINOSITY;
258 return PIXMAN_OP_OVER;
263 fill_reduces_to_source (cairo_operator_t op,
264 const cairo_color_t *color,
265 cairo_image_surface_t *dst)
267 if (op == CAIRO_OPERATOR_SOURCE || op == CAIRO_OPERATOR_CLEAR)
269 if (op == CAIRO_OPERATOR_OVER && CAIRO_COLOR_IS_OPAQUE (color))
271 if (dst->base.is_clear)
272 return op == CAIRO_OPERATOR_OVER || op == CAIRO_OPERATOR_ADD;
277 static cairo_int_status_t
278 fill_rectangles (void *_dst,
280 const cairo_color_t *color,
281 cairo_rectangle_int_t *rects,
284 cairo_image_surface_t *dst = _dst;
288 TRACE ((stderr, "%s\n", __FUNCTION__));
290 if (fill_reduces_to_source (op, color, dst) &&
291 color_to_pixel (color, dst->pixman_format, &pixel))
293 for (i = 0; i < num_rects; i++) {
294 pixman_fill ((uint32_t *) dst->data, dst->stride / sizeof (uint32_t),
295 PIXMAN_FORMAT_BPP (dst->pixman_format),
296 rects[i].x, rects[i].y,
297 rects[i].width, rects[i].height,
303 pixman_image_t *src = _pixman_image_for_color (color);
305 return CAIRO_STATUS_NULL_POINTER;
307 op = _pixman_operator (op);
308 for (i = 0; i < num_rects; i++) {
309 pixman_image_composite32 (op,
310 src, NULL, dst->pixman_image,
313 rects[i].x, rects[i].y,
314 rects[i].width, rects[i].height);
317 pixman_image_unref (src);
320 return CAIRO_STATUS_SUCCESS;
323 static cairo_int_status_t
324 fill_boxes (void *_dst,
326 const cairo_color_t *color,
327 cairo_boxes_t *boxes)
329 cairo_image_surface_t *dst = _dst;
330 struct _cairo_boxes_chunk *chunk;
334 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
336 if (fill_reduces_to_source (op, color, dst) &&
337 color_to_pixel (color, dst->pixman_format, &pixel))
339 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
340 for (i = 0; i < chunk->count; i++) {
341 int x = _cairo_fixed_integer_part (chunk->base[i].p1.x);
342 int y = _cairo_fixed_integer_part (chunk->base[i].p1.y);
343 int w = _cairo_fixed_integer_part (chunk->base[i].p2.x) - x;
344 int h = _cairo_fixed_integer_part (chunk->base[i].p2.y) - y;
345 pixman_fill ((uint32_t *) dst->data,
346 dst->stride / sizeof (uint32_t),
347 PIXMAN_FORMAT_BPP (dst->pixman_format),
354 pixman_image_t *src = _pixman_image_for_color (color);
356 return CAIRO_STATUS_NULL_POINTER;
358 op = _pixman_operator (op);
359 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
360 for (i = 0; i < chunk->count; i++) {
361 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
362 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
363 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
364 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
365 pixman_image_composite32 (op,
366 src, NULL, dst->pixman_image,
374 pixman_image_unref (src);
377 return CAIRO_STATUS_SUCCESS;
380 static cairo_int_status_t
381 composite (void *_dst,
383 cairo_surface_t *abstract_src,
384 cairo_surface_t *abstract_mask,
394 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
395 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
397 TRACE ((stderr, "%s\n", __FUNCTION__));
400 pixman_image_composite32 (_pixman_operator (op),
401 src->pixman_image, mask->pixman_image, to_pixman_image (_dst),
407 pixman_image_composite32 (_pixman_operator (op),
408 src->pixman_image, NULL, to_pixman_image (_dst),
415 return CAIRO_STATUS_SUCCESS;
static cairo_int_status_t
/* NOTE(review): the function-name line and most of the parameter list
 * are missing from this paste.  By structure this is the lerp()
 * combiner: implement SOURCE through a mask as a single LERP_SRC when
 * pixman supports it, otherwise as OUT_REVERSE (punch the mask out of
 * the destination) followed by ADD (blend the masked source back in).
 * Compare against upstream cairo-image-compositor.c before relying on
 * this text. */
      cairo_surface_t *abstract_src,
      cairo_surface_t *abstract_mask,
    cairo_image_surface_t *dst = _dst;
    cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
    cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
    TRACE ((stderr, "%s\n", __FUNCTION__));
/* Single-pass path when pixman provides the LERP operators. */
#if PIXMAN_HAS_OP_LERP
    pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
                              src->pixman_image, mask->pixman_image, dst->pixman_image,
    /* Punch the clip out of the destination */
    TRACE ((stderr, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
            mask->base.unique_id, mask->pixman_image,
            dst->base.unique_id, dst->pixman_image));
    pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
                              mask->pixman_image, NULL, dst->pixman_image,
    /* Now add the two results together */
    TRACE ((stderr, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
            src->base.unique_id, src->pixman_image,
            mask->base.unique_id, mask->pixman_image,
            dst->base.unique_id, dst->pixman_image));
    pixman_image_composite32 (PIXMAN_OP_ADD,
                              src->pixman_image, mask->pixman_image, dst->pixman_image,
    return CAIRO_STATUS_SUCCESS;
static cairo_int_status_t
/* Fork-specific lerp variant used for colour glyphs: unlike lerp(),
 * the OUT_REVERSE pass here uses the source as the mask component of
 * the punch-out composite.
 * NOTE(review): several coordinate/extent argument lines are missing
 * from this paste — compare with the original file before editing. */
lerp_color_glyph (void *_dst,
                  cairo_surface_t *abstract_src,
                  cairo_surface_t *abstract_mask,
    cairo_image_surface_t *dst = _dst;
    cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
    cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
    TRACE ((stderr, "%s\n", __FUNCTION__));
    /* Punch the clip out of the destination */
    TRACE ((stderr, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
            mask->base.unique_id, mask->pixman_image,
            dst->base.unique_id, dst->pixman_image));
    pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
                              mask->pixman_image, src->pixman_image, dst->pixman_image,
    /* Now add the two results together */
    TRACE ((stderr, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
            src->base.unique_id, src->pixman_image,
            mask->base.unique_id, mask->pixman_image,
            dst->base.unique_id, dst->pixman_image));
    pixman_image_composite32 (PIXMAN_OP_ADD,
                              src->pixman_image, mask->pixman_image, dst->pixman_image,
    return CAIRO_STATUS_SUCCESS;
static cairo_int_status_t
/* Composite source (and optional mask) over a list of pixel-aligned
 * boxes.  CLEAR and SOURCE with a mask need LERP-style handling:
 * with PIXMAN_HAS_OP_LERP they map directly to LERP_CLEAR/LERP_SRC;
 * otherwise CLEAR is emulated with a white source and OUT_REVERSE,
 * and masked SOURCE is reported unsupported.
 * NOTE(review): the #else/#endif lines of the preprocessor branches,
 * several signature lines (op, offsets) and the composite offset
 * arguments are missing from this paste — do not restructure without
 * the original file. */
composite_boxes (void *_dst,
                 cairo_surface_t *abstract_src,
                 cairo_surface_t *abstract_mask,
                 cairo_boxes_t *boxes,
                 const cairo_rectangle_int_t *extents)
    pixman_image_t *dst = to_pixman_image (_dst);
    pixman_image_t *src = ((cairo_image_source_t *)abstract_src)->pixman_image;
    pixman_image_t *mask = abstract_mask ? ((cairo_image_source_t *)abstract_mask)->pixman_image : NULL;
    /* Temporary solid image that must be released before returning. */
    pixman_image_t *free_src = NULL;
    struct _cairo_boxes_chunk *chunk;
    /* XXX consider using a region? saves multiple prepare-composite */
    TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
    /* On an already-clear destination these operators degenerate to a
     * plain bounded composite. */
    if (((cairo_surface_t *)_dst)->is_clear &&
        (op == CAIRO_OPERATOR_SOURCE ||
         op == CAIRO_OPERATOR_OVER ||
         op == CAIRO_OPERATOR_ADD)) {
    if (op == CAIRO_OPERATOR_CLEAR) {
#if PIXMAN_HAS_OP_LERP
        op = PIXMAN_OP_LERP_CLEAR;
        free_src = src = _pixman_image_for_color (CAIRO_COLOR_WHITE);
        /* NOTE(review): raw status returned, not _cairo_error-wrapped. */
            return CAIRO_STATUS_NULL_POINTER;
        op = PIXMAN_OP_OUT_REVERSE;
    } else if (op == CAIRO_OPERATOR_SOURCE) {
#if PIXMAN_HAS_OP_LERP
        op = PIXMAN_OP_LERP_SRC;
        return CAIRO_INT_STATUS_UNSUPPORTED;
        op = _pixman_operator (op);
        op = _pixman_operator (op);
    for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
        for (i = 0; i < chunk->count; i++) {
            int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
            int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
            int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
            int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
            pixman_image_composite32 (op, src, mask, dst,
                                      x1 + src_x, y1 + src_y,
                                      x1 + mask_x, y1 + mask_y,
                                      x1 + dst_x, y1 + dst_y,
    if (free_src)
        pixman_image_unref (free_src);
    return CAIRO_STATUS_SUCCESS;
594 #define CAIRO_FIXED_16_16_MIN _cairo_fixed_from_int (-32768)
595 #define CAIRO_FIXED_16_16_MAX _cairo_fixed_from_int (32767)
598 line_exceeds_16_16 (const cairo_line_t *line)
601 line->p1.x <= CAIRO_FIXED_16_16_MIN ||
602 line->p1.x >= CAIRO_FIXED_16_16_MAX ||
604 line->p2.x <= CAIRO_FIXED_16_16_MIN ||
605 line->p2.x >= CAIRO_FIXED_16_16_MAX ||
607 line->p1.y <= CAIRO_FIXED_16_16_MIN ||
608 line->p1.y >= CAIRO_FIXED_16_16_MAX ||
610 line->p2.y <= CAIRO_FIXED_16_16_MIN ||
611 line->p2.y >= CAIRO_FIXED_16_16_MAX;
615 project_line_x_onto_16_16 (const cairo_line_t *line,
617 cairo_fixed_t bottom,
618 pixman_line_fixed_t *out)
620 /* XXX use fixed-point arithmetic? */
621 cairo_point_double_t p1, p2;
624 p1.x = _cairo_fixed_to_double (line->p1.x);
625 p1.y = _cairo_fixed_to_double (line->p1.y);
627 p2.x = _cairo_fixed_to_double (line->p2.x);
628 p2.y = _cairo_fixed_to_double (line->p2.y);
630 m = (p2.x - p1.x) / (p2.y - p1.y);
631 out->p1.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (top - line->p1.y));
632 out->p2.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (bottom - line->p1.y));
636 _pixman_image_add_traps (pixman_image_t *image,
637 int dst_x, int dst_y,
638 cairo_traps_t *traps)
640 cairo_trapezoid_t *t = traps->traps;
641 int num_traps = traps->num_traps;
642 while (num_traps--) {
643 pixman_trapezoid_t trap;
645 /* top/bottom will be clamped to surface bounds */
646 trap.top = _cairo_fixed_to_16_16 (t->top);
647 trap.bottom = _cairo_fixed_to_16_16 (t->bottom);
649 /* However, all the other coordinates will have been left untouched so
650 * as not to introduce numerical error. Recompute them if they
651 * exceed the 16.16 limits.
653 if (unlikely (line_exceeds_16_16 (&t->left))) {
654 project_line_x_onto_16_16 (&t->left, t->top, t->bottom, &trap.left);
655 trap.left.p1.y = trap.top;
656 trap.left.p2.y = trap.bottom;
658 trap.left.p1.x = _cairo_fixed_to_16_16 (t->left.p1.x);
659 trap.left.p1.y = _cairo_fixed_to_16_16 (t->left.p1.y);
660 trap.left.p2.x = _cairo_fixed_to_16_16 (t->left.p2.x);
661 trap.left.p2.y = _cairo_fixed_to_16_16 (t->left.p2.y);
664 if (unlikely (line_exceeds_16_16 (&t->right))) {
665 project_line_x_onto_16_16 (&t->right, t->top, t->bottom, &trap.right);
666 trap.right.p1.y = trap.top;
667 trap.right.p2.y = trap.bottom;
669 trap.right.p1.x = _cairo_fixed_to_16_16 (t->right.p1.x);
670 trap.right.p1.y = _cairo_fixed_to_16_16 (t->right.p1.y);
671 trap.right.p2.x = _cairo_fixed_to_16_16 (t->right.p2.x);
672 trap.right.p2.y = _cairo_fixed_to_16_16 (t->right.p2.y);
675 pixman_rasterize_trapezoid (image, &trap, -dst_x, -dst_y);
680 static cairo_int_status_t
681 composite_traps (void *_dst,
683 cairo_surface_t *abstract_src,
688 const cairo_rectangle_int_t *extents,
689 cairo_antialias_t antialias,
690 cairo_traps_t *traps)
692 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
693 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
694 pixman_image_t *mask;
695 pixman_format_code_t format;
697 TRACE ((stderr, "%s\n", __FUNCTION__));
699 /* Special case adding trapezoids onto a mask surface; we want to avoid
700 * creating an intermediate temporary mask unnecessarily.
702 * We make the assumption here that the portion of the trapezoids
703 * contained within the surface is bounded by [dst_x,dst_y,width,height];
704 * the Cairo core code passes bounds based on the trapezoid extents.
706 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
707 if (dst->pixman_format == format &&
708 (abstract_src == NULL ||
709 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
711 _pixman_image_add_traps (dst->pixman_image, dst_x, dst_y, traps);
712 return CAIRO_STATUS_SUCCESS;
715 mask = pixman_image_create_bits (format,
716 extents->width, extents->height,
718 if (unlikely (mask == NULL))
719 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
721 _pixman_image_add_traps (mask, extents->x, extents->y, traps);
723 pixman_image_composite32 (_pixman_operator (op),
724 src->pixman_image, mask, dst->pixman_image,
725 extents->x + src_x, extents->y + src_y,
727 extents->x - dst_x, extents->y - dst_y,
728 extents->width, extents->height);
729 pixman_image_unref (mask);
731 return CAIRO_STATUS_SUCCESS;
735 set_point (pixman_point_fixed_t *p, cairo_point_t *c)
737 p->x = _cairo_fixed_to_16_16 (c->x);
738 p->y = _cairo_fixed_to_16_16 (c->y);
742 _pixman_image_add_tristrip (pixman_image_t *image,
743 int dst_x, int dst_y,
744 cairo_tristrip_t *strip)
746 pixman_triangle_t tri;
747 pixman_point_fixed_t *p[3] = {&tri.p1, &tri.p2, &tri.p3 };
750 set_point (p[0], &strip->points[0]);
751 set_point (p[1], &strip->points[1]);
752 set_point (p[2], &strip->points[2]);
753 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
754 for (n = 3; n < strip->num_points; n++) {
755 set_point (p[n%3], &strip->points[n]);
756 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
760 static cairo_int_status_t
761 composite_tristrip (void *_dst,
763 cairo_surface_t *abstract_src,
768 const cairo_rectangle_int_t *extents,
769 cairo_antialias_t antialias,
770 cairo_tristrip_t *strip)
772 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
773 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
774 pixman_image_t *mask;
775 pixman_format_code_t format;
777 TRACE ((stderr, "%s\n", __FUNCTION__));
779 if (strip->num_points < 3)
780 return CAIRO_STATUS_SUCCESS;
782 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
783 if (dst->pixman_format == format &&
784 (abstract_src == NULL ||
785 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
787 _pixman_image_add_tristrip (dst->pixman_image, dst_x, dst_y, strip);
788 return CAIRO_STATUS_SUCCESS;
791 mask = pixman_image_create_bits (format,
792 extents->width, extents->height,
794 if (unlikely (mask == NULL))
795 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
797 _pixman_image_add_tristrip (mask, extents->x, extents->y, strip);
798 pixman_image_composite32 (_pixman_operator (op),
799 src->pixman_image, mask, dst->pixman_image,
800 extents->x + src_x, extents->y + src_y,
802 extents->x - dst_x, extents->y - dst_y,
803 extents->width, extents->height);
805 pixman_image_unref (mask);
807 return CAIRO_STATUS_SUCCESS;
810 static cairo_int_status_t
811 check_composite_glyphs (const cairo_composite_rectangles_t *extents,
812 cairo_scaled_font_t *scaled_font,
813 cairo_glyph_t *glyphs,
816 return CAIRO_STATUS_SUCCESS;
/* Optional pixman glyph-cache fast path — currently disabled ("#if 0"). */
#if 0 && HAS_PIXMAN_GLYPHS
#if CAIRO_HAS_TG_SURFACE
/* Thread-safe (TG) builds keep one glyph cache per thread... */
CAIRO_DEFINE_THREAD_LOCAL (pixman_glyph_cache_t *, per_thread_glyph_cache);
/* ...otherwise a single mutex-protected global cache is used.
 * NOTE(review): the #else between these declarations is missing from
 * this paste. */
static pixman_glyph_cache_t *global_glyph_cache;
static inline pixman_glyph_cache_t *
/* Return the glyph cache for this context, creating it lazily:
 * per-thread storage under CAIRO_HAS_TG_SURFACE, else the global one.
 * NOTE(review): the #else/#endif of the branch and the trailing
 * "return *glyph_cache;" appear to be missing from this paste. */
get_glyph_cache (void)
    pixman_glyph_cache_t **glyph_cache = NULL;
#if CAIRO_HAS_TG_SURFACE
    glyph_cache = CAIRO_GET_THREAD_LOCAL (per_thread_glyph_cache);
    glyph_cache = &global_glyph_cache;
    /* Lazily create the cache on first use. */
    if (! (*glyph_cache))
        *glyph_cache = pixman_glyph_cache_create ();
/* Evict a glyph from the pixman glyph cache when its scaled glyph is
 * destroyed.  The global-cache configuration guards the removal with
 * _cairo_glyph_cache_mutex; the TG per-thread configuration does not.
 * NOTE(review): the function's return-type line, the #else lines and
 * the NULL-cache guard appear to be missing from this paste. */
_cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
                                cairo_scaled_glyph_t *scaled_glyph)
    pixman_glyph_cache_t *glyph_cache = NULL;
#if CAIRO_HAS_TG_SURFACE
    glyph_cache = *CAIRO_GET_THREAD_LOCAL (per_thread_glyph_cache);
    glyph_cache = global_glyph_cache;
    CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
    /* The glyph index doubles as the cache key. */
    pixman_glyph_cache_remove (
        glyph_cache, scaled_font,
        (void *)_cairo_scaled_glyph_index (scaled_glyph));
#if ! CAIRO_HAS_TG_SURFACE
    CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
static cairo_int_status_t
/* Glyph compositing via pixman's glyph cache (the disabled "#if 0"
 * fast path).  Builds a pixman_glyph_t array (stack buffer for small
 * runs, heap otherwise), populating the cache on misses, then hands
 * the whole run to pixman_composite_glyphs[_no_mask]().
 * NOTE(review): many lines — goto labels, closing braces, loop
 * variable declarations and some composite arguments — are missing
 * from this paste; treat the text below as a partial record only. */
composite_glyphs (void *_dst,
                  cairo_surface_t *_src,
                  cairo_composite_glyphs_info_t *info)
    cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
    pixman_glyph_cache_t *glyph_cache;
    pixman_glyph_t pglyphs_stack[CAIRO_STACK_ARRAY_LENGTH (pixman_glyph_t)];
    pixman_glyph_t *pglyphs = pglyphs_stack;
    TRACE ((stderr, "%s\n", __FUNCTION__));
/* The global cache is mutex-protected; the TG build uses per-thread
 * caches and needs no lock here. */
#if ! CAIRO_HAS_TG_SURFACE
    CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
    glyph_cache = get_glyph_cache();
    if (unlikely (glyph_cache == NULL)) {
        status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
    pixman_glyph_cache_freeze (glyph_cache);
    /* Spill to the heap when the run exceeds the stack buffer. */
    if (info->num_glyphs > ARRAY_LENGTH (pglyphs_stack)) {
        pglyphs = _cairo_malloc_ab (info->num_glyphs, sizeof (pixman_glyph_t));
        if (unlikely (pglyphs == NULL)) {
            status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
    for (i = 0; i < info->num_glyphs; i++) {
        unsigned long index = info->glyphs[i].index;
        glyph = pixman_glyph_cache_lookup (glyph_cache, info->font, (void *)index);
        /* Cache miss: render the glyph surface and insert it. */
            cairo_scaled_glyph_t *scaled_glyph;
            cairo_image_surface_t *glyph_surface;
#if ! CAIRO_HAS_TG_SURFACE
            /* This call can actually end up recursing, so we have to
             * drop the mutex around it.
            CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
            _cairo_scaled_font_freeze_cache (info->font);
            CAIRO_MUTEX_LOCK (_cairo_tg_scaled_glyph_mutex);
            status = _cairo_scaled_glyph_lookup (info->font, index,
                                                 CAIRO_SCALED_GLYPH_INFO_SURFACE,
#if ! CAIRO_HAS_TG_SURFACE
            CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
            if (unlikely (status)) {
#if CAIRO_HAS_TG_SURFACE
                CAIRO_MUTEX_UNLOCK (_cairo_tg_scaled_glyph_mutex);
                _cairo_scaled_font_thaw_cache (info->font);
            glyph_surface = scaled_glyph->surface;
            glyph = pixman_glyph_cache_insert (glyph_cache, info->font, (void *)index,
                                               glyph_surface->base.device_transform.x0,
                                               glyph_surface->base.device_transform.y0,
                                               glyph_surface->pixman_image);
#if CAIRO_HAS_TG_SURFACE
            CAIRO_MUTEX_UNLOCK (_cairo_tg_scaled_glyph_mutex);
            _cairo_scaled_font_thaw_cache (info->font);
            if (unlikely (!glyph)) {
                status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
        /* Record the rounded device-space position for this glyph. */
        pg->x = _cairo_lround (info->glyphs[i].x);
        pg->y = _cairo_lround (info->glyphs[i].y);
    if (info->use_mask) {
        pixman_format_code_t mask_format;
        mask_format = pixman_glyph_get_mask_format (glyph_cache, pg - pglyphs, pglyphs);
        pixman_composite_glyphs (_pixman_operator (op),
                                 ((cairo_image_source_t *)_src)->pixman_image,
                                 to_pixman_image (_dst),
                                 info->extents.x + src_x, info->extents.y + src_y,
                                 info->extents.x, info->extents.y,
                                 info->extents.x - dst_x, info->extents.y - dst_y,
                                 info->extents.width, info->extents.height,
                                 glyph_cache, pg - pglyphs, pglyphs);
        pixman_composite_glyphs_no_mask (_pixman_operator (op),
                                         ((cairo_image_source_t *)_src)->pixman_image,
                                         to_pixman_image (_dst),
                                         glyph_cache, pg - pglyphs, pglyphs);
    pixman_glyph_cache_thaw (glyph_cache);
    /* Only free the heap spill buffer, never the stack one. */
    if (pglyphs != pglyphs_stack)
#if ! CAIRO_HAS_TG_SURFACE
    CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
/* No-op variant used when the pixman glyph cache is compiled out:
 * there is no cache entry to evict.
 * NOTE(review): the return-type line and empty body braces are missing
 * from this paste. */
_cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
                                cairo_scaled_glyph_t *scaled_glyph)
static cairo_int_status_t
/* Fast path for a single glyph: composite the glyph surface directly,
 * as a mask for the source for ordinary (alpha) glyphs, or as the
 * source itself for ARGB32 colour glyphs.
 * NOTE(review): several signature lines (offsets), the x/y
 * declarations, braces and some composite arguments are missing from
 * this paste. */
composite_one_glyph (void *_dst,
                     cairo_operator_t op,
                     cairo_surface_t *_src,
                     cairo_composite_glyphs_info_t *info)
    cairo_image_surface_t *dst_surface = (cairo_image_surface_t *)_dst;
    cairo_image_surface_t *glyph_surface;
    cairo_scaled_glyph_t *scaled_glyph;
    cairo_status_t status;
    TRACE ((stderr, "%s\n", __FUNCTION__));
    status = _cairo_scaled_glyph_lookup (info->font,
                                         info->glyphs[0].index,
                                         CAIRO_SCALED_GLYPH_INFO_SURFACE,
    if (unlikely (status))
    glyph_surface = scaled_glyph->surface;
    /* Degenerate empty glyph — nothing to draw. */
    if (glyph_surface->width == 0 || glyph_surface->height == 0)
        return CAIRO_INT_STATUS_NOTHING_TO_DO;
    /* A colour glyph cannot be rendered onto a non-ARGB32 target. */
    if (glyph_surface->format == CAIRO_FORMAT_ARGB32 &&
        dst_surface->format != CAIRO_FORMAT_ARGB32) {
        /* FIXME: color glyph */
        return CAIRO_STATUS_SURFACE_TYPE_MISMATCH;
    /* round glyph locations to the nearest pixel */
    /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
    x = _cairo_lround (info->glyphs[0].x -
                       glyph_surface->base.device_transform.x0);
    y = _cairo_lround (info->glyphs[0].y -
                       glyph_surface->base.device_transform.y0);
    if (glyph_surface->format != CAIRO_FORMAT_ARGB32 ||
        pixman_image_get_component_alpha (glyph_surface->pixman_image))
        /* Alpha (or component-alpha) glyph: glyph acts as the mask. */
        pixman_image_composite32 (_pixman_operator (op),
                                  ((cairo_image_source_t *)_src)->pixman_image,
                                  glyph_surface->pixman_image,
                                  to_pixman_image (_dst),
                                  x + src_x, y + src_y,
                                  x - dst_x, y - dst_y,
                                  glyph_surface->width,
                                  glyph_surface->height);
    else /* color glyph */
        /* Colour glyph: glyph surface is composited as the source. */
        pixman_image_composite32 (_pixman_operator (op),
                                  glyph_surface->pixman_image,
                                  to_pixman_image (_dst),
                                  x + src_x, y + src_y,
                                  x - dst_x, y - dst_y,
                                  glyph_surface->width,
                                  glyph_surface->height);
    return CAIRO_INT_STATUS_SUCCESS;
1077 static cairo_int_status_t
1078 composite_glyphs_via_mask (void *_dst,
1079 cairo_operator_t op,
1080 cairo_surface_t *_src,
1085 cairo_composite_glyphs_info_t *info)
1087 cairo_scaled_glyph_t *glyph_cache[64];
1088 pixman_image_t *white = _pixman_image_for_color (CAIRO_COLOR_WHITE);
1089 cairo_scaled_glyph_t *scaled_glyph;
1091 pixman_image_t *mask;
1092 pixman_format_code_t format;
1093 cairo_status_t status;
1095 cairo_bool_t component_alpha = FALSE;
1097 TRACE ((stderr, "%s\n", __FUNCTION__));
1099 if (unlikely (white == NULL))
1100 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1102 /* XXX convert the glyphs to common formats a8/a8r8g8b8 to hit
1103 * optimised paths through pixman. Should we increase the bit
1104 * depth of the target surface, we should reconsider the appropriate
1108 status = _cairo_scaled_glyph_lookup (info->font,
1109 info->glyphs[0].index,
1110 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1112 if (unlikely (status)) {
1113 pixman_image_unref (white);
1117 memset (glyph_cache, 0, sizeof (glyph_cache));
1118 glyph_cache[info->glyphs[0].index % ARRAY_LENGTH (glyph_cache)] = scaled_glyph;
1121 i = (info->extents.width + 3) & ~3;
1122 if (scaled_glyph->surface->base.content & CAIRO_CONTENT_COLOR) {
1123 format = PIXMAN_a8r8g8b8;
1124 i = info->extents.width * 4;
1127 if (i * info->extents.height > (int) sizeof (buf)) {
1128 mask = pixman_image_create_bits (format,
1129 info->extents.width,
1130 info->extents.height,
1133 memset (buf, 0, i * info->extents.height);
1134 mask = pixman_image_create_bits (format,
1135 info->extents.width,
1136 info->extents.height,
1137 (uint32_t *)buf, i);
1139 if (unlikely (mask == NULL)) {
1140 pixman_image_unref (white);
1141 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1144 status = CAIRO_STATUS_SUCCESS;
1145 for (i = 0; i < info->num_glyphs; i++) {
1146 unsigned long glyph_index = info->glyphs[i].index;
1147 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1148 cairo_image_surface_t *glyph_surface;
1151 scaled_glyph = glyph_cache[cache_index];
1152 if (scaled_glyph == NULL ||
1153 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1155 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1156 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1159 if (unlikely (status)) {
1160 pixman_image_unref (mask);
1161 pixman_image_unref (white);
1165 glyph_cache[cache_index] = scaled_glyph;
1168 glyph_surface = scaled_glyph->surface;
1169 if (! component_alpha)
1170 component_alpha = pixman_image_get_component_alpha (glyph_surface->pixman_image);
1171 if (glyph_surface->width && glyph_surface->height) {
1172 if (glyph_surface->base.content & CAIRO_CONTENT_COLOR &&
1173 format == PIXMAN_a8) {
1174 pixman_image_t *ca_mask;
1176 format = PIXMAN_a8r8g8b8;
1177 ca_mask = pixman_image_create_bits (format,
1178 info->extents.width,
1179 info->extents.height,
1181 if (unlikely (ca_mask == NULL)) {
1182 pixman_image_unref (mask);
1183 pixman_image_unref (white);
1184 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1187 pixman_image_composite32 (PIXMAN_OP_SRC,
1188 white, mask, ca_mask,
1192 info->extents.width,
1193 info->extents.height);
1194 pixman_image_unref (mask);
1198 /* round glyph locations to the nearest pixel */
1199 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1200 x = _cairo_lround (info->glyphs[i].x -
1201 glyph_surface->base.device_transform.x0);
1202 y = _cairo_lround (info->glyphs[i].y -
1203 glyph_surface->base.device_transform.y0);
1205 if (glyph_surface->pixman_format == format) {
1206 pixman_image_composite32 (PIXMAN_OP_ADD,
1207 glyph_surface->pixman_image, NULL, mask,
1210 x - info->extents.x, y - info->extents.y,
1211 glyph_surface->width,
1212 glyph_surface->height);
1214 pixman_image_composite32 (PIXMAN_OP_ADD,
1215 white, glyph_surface->pixman_image, mask,
1218 x - info->extents.x, y - info->extents.y,
1219 glyph_surface->width,
1220 glyph_surface->height);
1225 if (format == PIXMAN_a8r8g8b8 && component_alpha)
1226 pixman_image_set_component_alpha (mask, TRUE);
1228 if (format != PIXMAN_a8r8g8b8 || component_alpha)
1229 pixman_image_composite32 (_pixman_operator (op),
1230 ((cairo_image_source_t *)_src)->pixman_image,
1232 to_pixman_image (_dst),
1233 info->extents.x + src_x, info->extents.y + src_y,
1235 info->extents.x - dst_x, info->extents.y - dst_y,
1236 info->extents.width, info->extents.height);
1237 else /* color glyph */
1238 pixman_image_composite32 (_pixman_operator (op), mask, NULL,
1239 to_pixman_image (_dst),
1241 info->extents.x + src_x, info->extents.y + src_y,
1242 info->extents.x - dst_x, info->extents.y - dst_y,
1243 info->extents.width, info->extents.height);
1244 pixman_image_unref (mask);
1245 pixman_image_unref (white);
1247 return CAIRO_STATUS_SUCCESS;
1250 static cairo_int_status_t
1251 composite_glyphs (void *_dst,
1252 cairo_operator_t op,
1253 cairo_surface_t *_src,
1258 cairo_composite_glyphs_info_t *info)
1260 cairo_scaled_glyph_t *glyph_cache[64];
1261 pixman_image_t *dst, *src;
1262 cairo_status_t status;
1264 cairo_image_surface_t *dst_surface = (cairo_image_surface_t *)_dst;
1266 TRACE ((stderr, "%s\n", __FUNCTION__));
1268 #if CAIRO_HAS_TG_SURFACE
1269 _cairo_scaled_font_freeze_cache (info->font);
1270 CAIRO_MUTEX_LOCK (_cairo_tg_scaled_glyph_mutex);
1273 if (0 && info->num_glyphs == 1) {
1274 status = composite_one_glyph(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1278 if (0 && info->use_mask) {
1279 status = composite_glyphs_via_mask(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1283 op = _pixman_operator (op);
1284 dst = to_pixman_image (_dst);
1285 src = ((cairo_image_source_t *)_src)->pixman_image;
1287 memset (glyph_cache, 0, sizeof (glyph_cache));
1288 status = CAIRO_STATUS_SUCCESS;
1290 for (i = 0; i < info->num_glyphs; i++) {
1292 cairo_image_surface_t *glyph_surface;
1293 cairo_scaled_glyph_t *scaled_glyph;
1294 unsigned long glyph_index = info->glyphs[i].index;
1295 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1297 scaled_glyph = glyph_cache[cache_index];
1298 if (scaled_glyph == NULL ||
1299 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1301 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1302 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1305 if (unlikely (status))
1308 glyph_cache[cache_index] = scaled_glyph;
1311 glyph_surface = scaled_glyph->surface;
1312 if (glyph_surface->format == CAIRO_FORMAT_ARGB32 &&
1313 dst_surface->format != CAIRO_FORMAT_ARGB32) {
1314 /* FIXME: color glyph */
1315 return CAIRO_STATUS_SURFACE_TYPE_MISMATCH;
1318 if (glyph_surface->width && glyph_surface->height) {
1319 /* round glyph locations to the nearest pixel */
1320 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1321 x = _cairo_lround (info->glyphs[i].x -
1322 glyph_surface->base.device_transform.x0);
1323 y = _cairo_lround (info->glyphs[i].y -
1324 glyph_surface->base.device_transform.y0);
1326 if (glyph_surface->format != CAIRO_FORMAT_ARGB32 ||
1327 pixman_image_get_component_alpha (glyph_surface->pixman_image))
1328 pixman_image_composite32 (op, src, glyph_surface->pixman_image, dst,
1329 x + src_x, y + src_y,
1331 x - dst_x, y - dst_y,
1332 glyph_surface->width,
1333 glyph_surface->height);
1334 else /* Color glyph. */
1335 pixman_image_composite32 (op, glyph_surface->pixman_image, NULL, dst,
1337 x + src_x, y + src_y,
1338 x - dst_x, y - dst_y,
1339 glyph_surface->width,
1340 glyph_surface->height);
1345 #if CAIRO_HAS_TG_SURFACE
1346 _cairo_scaled_font_thaw_cache (info->font);
1347 CAIRO_MUTEX_UNLOCK (_cairo_tg_scaled_glyph_mutex);
1354 static cairo_int_status_t
1355 check_composite (const cairo_composite_rectangles_t *extents)
1357 return CAIRO_STATUS_SUCCESS;
1360 const cairo_compositor_t *
1361 _cairo_image_traps_compositor_get (void)
1363 static cairo_traps_compositor_t compositor;
1365 if (compositor.base.delegate == NULL) {
1366 _cairo_traps_compositor_init (&compositor,
1367 &__cairo_no_compositor);
1368 compositor.acquire = acquire;
1369 compositor.release = release;
1370 compositor.set_clip_region = set_clip_region;
1371 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1372 compositor.draw_image_boxes = draw_image_boxes;
1373 //compositor.copy_boxes = copy_boxes;
1374 compositor.fill_boxes = fill_boxes;
1375 compositor.check_composite = check_composite;
1376 compositor.composite = composite;
1377 compositor.lerp = lerp;
1378 compositor.lerp_color_glyph = lerp_color_glyph;
1379 //compositor.check_composite_boxes = check_composite_boxes;
1380 compositor.composite_boxes = composite_boxes;
1381 //compositor.check_composite_traps = check_composite_traps;
1382 compositor.composite_traps = composite_traps;
1383 //compositor.check_composite_tristrip = check_composite_traps;
1384 compositor.composite_tristrip = composite_tristrip;
1385 compositor.check_composite_glyphs = check_composite_glyphs;
1386 compositor.composite_glyphs = composite_glyphs;
1389 return &compositor.base;
1392 const cairo_compositor_t *
1393 _cairo_image_mask_compositor_get (void)
1395 static cairo_mask_compositor_t compositor;
1397 if (compositor.base.delegate == NULL) {
1398 _cairo_mask_compositor_init (&compositor,
1399 _cairo_image_traps_compositor_get ());
1400 compositor.acquire = acquire;
1401 compositor.release = release;
1402 compositor.set_clip_region = set_clip_region;
1403 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1404 compositor.draw_image_boxes = draw_image_boxes;
1405 compositor.fill_rectangles = fill_rectangles;
1406 compositor.fill_boxes = fill_boxes;
1407 //compositor.check_composite = check_composite;
1408 compositor.composite = composite;
1409 //compositor.lerp = lerp;
1410 //compositor.check_composite_boxes = check_composite_boxes;
1411 compositor.composite_boxes = composite_boxes;
1412 compositor.check_composite_glyphs = check_composite_glyphs;
1413 compositor.composite_glyphs = composite_glyphs;
1416 return &compositor.base;
1419 #if PIXMAN_HAS_COMPOSITOR
/* Span renderer state used when pixman provides pixman_image_compositor_t
 * (PIXMAN_HAS_COMPOSITOR build).
 * NOTE(review): gapped listing — at least one field is missing from view
 * (the render functions below read r->opacity); do not reconstruct. */
1420 typedef struct _cairo_image_span_renderer {
1421 cairo_span_renderer_t base;
1423 pixman_image_compositor_t *compositor;
1424 pixman_image_t *src, *mask;
1426 cairo_rectangle_int_t extents;
1427 } cairo_image_span_renderer_t;
/* Must fit inside the opaque storage the spans compositor reserves. */
1428 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* render_rows callback for a bounded, fully-opaque operation: blt each
 * covered span at its raw coverage via the pixman compositor.
 * NOTE(review): gapped listing — parameter and loop-head lines missing. */
1430 static cairo_status_t
1431 _cairo_image_bounded_opaque_spans (void *abstract_renderer,
1433 const cairo_half_open_span_t *spans,
1436 cairo_image_span_renderer_t *r = abstract_renderer;
1439 return CAIRO_STATUS_SUCCESS;
1442 if (spans[0].coverage)
1443 pixman_image_compositor_blt (r->compositor,
1445 spans[1].x - spans[0].x, height,
1448 } while (--num_spans > 1);
1450 return CAIRO_STATUS_SUCCESS;
/* render_rows callback for a bounded operation with a solid mask:
 * coverage is scaled by r->opacity before the blt.
 * NOTE(review): gapped listing — parameter and loop-head lines missing. */
1453 static cairo_status_t
1454 _cairo_image_bounded_spans (void *abstract_renderer,
1456 const cairo_half_open_span_t *spans,
1459 cairo_image_span_renderer_t *r = abstract_renderer;
1462 return CAIRO_STATUS_SUCCESS;
1465 if (spans[0].coverage) {
1466 pixman_image_compositor_blt (r->compositor,
1468 spans[1].x - spans[0].x, height,
1469 r->opacity * spans[0].coverage);
1472 } while (--num_spans > 1);
1474 return CAIRO_STATUS_SUCCESS;
/* render_rows callback for an unbounded operation: rows and row segments
 * not covered by any span must still be composited (with zero coverage),
 * so this clears the gap above the current row, the gap left of the first
 * span, between-spans gaps, and the gap right of the last span.
 * r->extents.y tracks the next unprocessed row.
 * NOTE(review): gapped listing — several argument lines of the blt calls
 * (coverage values, x/y positions) are missing from view. */
1477 static cairo_status_t
1478 _cairo_image_unbounded_spans (void *abstract_renderer,
1480 const cairo_half_open_span_t *spans,
1483 cairo_image_span_renderer_t *r = abstract_renderer;
1485 assert (y + height <= r->extents.height);
/* Flush the uncovered band between the last processed row and this one. */
1486 if (y > r->extents.y) {
1487 pixman_image_compositor_blt (r->compositor,
1488 r->extents.x, r->extents.y,
1489 r->extents.width, y - r->extents.y,
1493 if (num_spans == 0) {
1494 pixman_image_compositor_blt (r->compositor,
1496 r->extents.width, height,
1499 if (spans[0].x != r->extents.x) {
1500 pixman_image_compositor_blt (r->compositor,
1502 spans[0].x - r->extents.x,
1508 assert (spans[0].x < r->extents.x + r->extents.width);
1509 pixman_image_compositor_blt (r->compositor,
1511 spans[1].x - spans[0].x, height,
1512 r->opacity * spans[0].coverage);
1514 } while (--num_spans > 1);
/* Clear the remainder of the row to the right of the last span. */
1516 if (spans[0].x != r->extents.x + r->extents.width) {
1517 assert (spans[0].x < r->extents.x + r->extents.width);
1518 pixman_image_compositor_blt (r->compositor,
1520 r->extents.x + r->extents.width - spans[0].x, height,
1525 r->extents.y = y + height;
1526 return CAIRO_STATUS_SUCCESS;
/* render_rows callback for a clipped unbounded operation: spans marked
 * `inverse` lie outside the clip and are skipped; everything else is
 * blt'ed at opacity-scaled coverage.  r->extents.y tracks progress.
 * NOTE(review): gapped listing — loop-head and assertion lines missing. */
1529 static cairo_status_t
1530 _cairo_image_clipped_spans (void *abstract_renderer,
1532 const cairo_half_open_span_t *spans,
1535 cairo_image_span_renderer_t *r = abstract_renderer;
1540 if (! spans[0].inverse)
1541 pixman_image_compositor_blt (r->compositor,
1543 spans[1].x - spans[0].x, height,
1544 r->opacity * spans[0].coverage);
1546 } while (--num_spans > 1);
1548 r->extents.y = y + height;
1549 return CAIRO_STATUS_SUCCESS;
/* finish callback for the unbounded path: composite the final uncovered
 * band below the last rendered row (extents.height here is the bottom
 * coordinate, set up by span_renderer_init as y + height). */
1552 static cairo_status_t
1553 _cairo_image_finish_unbounded_spans (void *abstract_renderer)
1555 cairo_image_span_renderer_t *r = abstract_renderer;
1557 if (r->extents.y < r->extents.height) {
1558 pixman_image_compositor_blt (r->compositor,
1559 r->extents.x, r->extents.y,
1561 r->extents.height - r->extents.y,
1565 return CAIRO_STATUS_SUCCESS;
/* Set up a pixman_image_compositor_t-based span renderer: translate the
 * cairo operator to a pixman (possibly LERP) op, realize source and mask
 * patterns as pixman images, pick the row callback (opaque/bounded/
 * clipped/unbounded) and create the pixman compositor over the unbounded
 * extents.  Cleanup of src/mask on failure is presumably handled by
 * span_renderer_fini — TODO confirm the caller always runs fini.
 * NOTE(review): gapped listing — declarations (src_x/src_y/mask_x/mask_y
 * etc.), several branch lines and the compositor assignment line are
 * missing from view. */
1568 static cairo_int_status_t
1569 span_renderer_init (cairo_abstract_span_renderer_t *_r,
1570 const cairo_composite_rectangles_t *composite,
1571 cairo_bool_t needs_clip)
1573 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
1574 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1575 const cairo_pattern_t *source = &composite->source_pattern.base;
1576 cairo_operator_t op = composite->op;
1580 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Map the cairo operator onto pixman, using LERP variants where the
 * span coverage must interpolate between dest and source. */
1582 if (op == CAIRO_OPERATOR_CLEAR) {
1583 op = PIXMAN_OP_LERP_CLEAR;
1584 } else if (dst->base.is_clear &&
1585 (op == CAIRO_OPERATOR_SOURCE ||
1586 op == CAIRO_OPERATOR_OVER ||
1587 op == CAIRO_OPERATOR_ADD)) {
1589 } else if (op == CAIRO_OPERATOR_SOURCE) {
1590 op = PIXMAN_OP_LERP_SRC;
1592 op = _pixman_operator (op);
1595 r->compositor = NULL;
1597 r->src = _pixman_image_for_pattern (dst, source, FALSE,
1598 &composite->unbounded,
1599 &composite->source_sample_area,
1601 if (unlikely (r->src == NULL))
1602 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Solid masks reduce to a per-span opacity multiplier; anything else
 * becomes a real pixman mask image. */
1605 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
1606 r->opacity = composite->mask_pattern.solid.color.alpha;
1608 r->mask = _pixman_image_for_pattern (dst,
1609 &composite->mask_pattern.base,
1611 &composite->unbounded,
1612 &composite->mask_sample_area,
1614 if (unlikely (r->mask == NULL))
1615 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1617 /* XXX Component-alpha? */
1618 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
1619 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
1621 pixman_image_unref (r->src);
1629 if (composite->is_bounded) {
1630 if (r->opacity == 1.)
1631 r->base.render_rows = _cairo_image_bounded_opaque_spans;
1633 r->base.render_rows = _cairo_image_bounded_spans;
1634 r->base.finish = NULL;
1637 r->base.render_rows = _cairo_image_clipped_spans;
1639 r->base.render_rows = _cairo_image_unbounded_spans;
1640 r->base.finish = _cairo_image_finish_unbounded_spans;
/* Store extents with .height as the bottom edge so the row callbacks
 * can compare y coordinates directly. */
1641 r->extents = composite->unbounded;
1642 r->extents.height += r->extents.y;
1646 pixman_image_create_compositor (op, r->src, r->mask, dst->pixman_image,
1647 composite->unbounded.x + src_x,
1648 composite->unbounded.y + src_y,
1649 composite->unbounded.x + mask_x,
1650 composite->unbounded.y + mask_y,
1651 composite->unbounded.x,
1652 composite->unbounded.y,
1653 composite->unbounded.width,
1654 composite->unbounded.height);
1655 if (unlikely (r->compositor == NULL))
1656 return CAIRO_INT_STATUS_NOTHING_TO_DO;
1658 return CAIRO_STATUS_SUCCESS;
/* Tear down the span renderer: run the finish hook on success, then
 * destroy the pixman compositor and drop the src/mask references.
 * NOTE(review): gapped listing — return type line, the finish() call and
 * the NULL guards around the destroy/unref calls are missing from view. */
1662 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
1663 cairo_int_status_t status)
1665 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
1667 TRACE ((stderr, "%s\n", __FUNCTION__));
1669 if (status == CAIRO_INT_STATUS_SUCCESS && r->base.finish)
1673 pixman_image_compositor_destroy (r->compositor);
1676 pixman_image_unref (r->src);
1678 pixman_image_unref (r->mask);
/* Fallback span renderer state (non-PIXMAN_HAS_COMPOSITOR branch — this
 * is presumably the #else arm of the guard above; the #else line falls in
 * a listing gap).  NOTE(review): many fields are missing from view (the
 * renderers below use r->op, r->opacity, r->bpp and a union r->u with
 * .fill/.blit/.composite/.mask arms); do not reconstruct. */
1681 typedef struct _cairo_image_span_renderer {
1682 cairo_span_renderer_t base;
1684 const cairo_composite_rectangles_t *composite;
1690 pixman_image_t *src, *mask;
1704 pixman_image_t *dst;
1710 cairo_rectangle_int_t extents;
/* Scratch space carved out of the abstract renderer's reserved storage. */
1717 #define SZ_BUF (sizeof (cairo_abstract_span_renderer_t) - sizeof (cairo_image_span_renderer_t))
1718 } cairo_image_span_renderer_t;
1719 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* render_rows callback that rasterizes spans into an A8 mask buffer:
 * the first row is written span-by-span, then (for height > 1) the
 * completed row is memcpy'd to the remaining rows.
 * NOTE(review): gapped listing — loop heads and the h>1 replication loop
 * header are missing from view. */
1721 static cairo_status_t
1722 _cairo_image_spans (void *abstract_renderer,
1724 const cairo_half_open_span_t *spans,
1727 cairo_image_span_renderer_t *r = abstract_renderer;
1728 uint8_t *mask, *row;
1732 return CAIRO_STATUS_SUCCESS;
1734 mask = r->u.mask.data + (y - r->u.mask.extents.y) * r->u.mask.stride;
1735 mask += spans[0].x - r->u.mask.extents.x;
1739 len = spans[1].x - spans[0].x;
1740 if (spans[0].coverage) {
/* Write one coverage byte, then flood the rest of the span from it. */
1741 *row++ = r->opacity * spans[0].coverage;
1743 memset (row, row[-1], len);
1747 } while (--num_spans > 1);
/* Replicate the finished row down the remaining (height-1) rows. */
1752 mask += r->u.mask.stride;
1753 memcpy (mask, row, len);
1756 return CAIRO_STATUS_SUCCESS;
/* render_rows callback for the unbounded mask path: like
 * _cairo_image_spans but also zero-fills every part of the mask not
 * covered by a span — the band above this row, the gap left of the first
 * span, and the gap right of the last.  u.mask.extents.y / u.mask.data
 * advance to track the next unwritten row.
 * NOTE(review): gapped listing — declarations and loop heads missing. */
1759 static cairo_status_t
1760 _cairo_image_spans_and_zero (void *abstract_renderer,
1762 const cairo_half_open_span_t *spans,
1765 cairo_image_span_renderer_t *r = abstract_renderer;
1769 mask = r->u.mask.data;
/* Zero the skipped band between the previous row and this one. */
1770 if (y > r->u.mask.extents.y) {
1771 len = (y - r->u.mask.extents.y) * r->u.mask.stride;
1772 memset (mask, 0, len);
1776 r->u.mask.extents.y = y + height;
1777 r->u.mask.data = mask + height * r->u.mask.stride;
1778 if (num_spans == 0) {
1779 memset (mask, 0, height * r->u.mask.stride);
1781 uint8_t *row = mask;
1783 if (spans[0].x != r->u.mask.extents.x) {
1784 len = spans[0].x - r->u.mask.extents.x;
1785 memset (row, 0, len);
1790 len = spans[1].x - spans[0].x;
1791 *row++ = r->opacity * spans[0].coverage;
1793 memset (row, row[-1], --len);
1797 } while (--num_spans > 1);
1799 if (spans[0].x != r->u.mask.extents.x + r->u.mask.extents.width) {
1800 len = r->u.mask.extents.x + r->u.mask.extents.width - spans[0].x;
1801 memset (row, 0, len);
/* Replicate the finished row down the remaining rows. */
1806 mask += r->u.mask.stride;
1807 memcpy (mask, row, r->u.mask.extents.width);
1811 return CAIRO_STATUS_SUCCESS;
/* finish callback for the zeroing mask path: clear any rows left below
 * the last span row (extents.height is used as the bottom coordinate). */
1814 static cairo_status_t
1815 _cairo_image_finish_spans_and_zero (void *abstract_renderer)
1817 cairo_image_span_renderer_t *r = abstract_renderer;
1819 if (r->u.mask.extents.y < r->u.mask.extents.height)
1820 memset (r->u.mask.data, 0, (r->u.mask.extents.height - r->u.mask.extents.y) * r->u.mask.stride);
1822 return CAIRO_STATUS_SUCCESS;
/* Mono (coverage 0 or full) solid fill into an 8bpp destination:
 * memset each covered span with the precomputed pixel; h==1 is the fast
 * path, otherwise an inner loop walks the h rows.
 * NOTE(review): gapped listing — loop heads and single-byte else arms
 * partially missing from view. */
1826 static cairo_status_t
1827 _fill8_spans (void *abstract_renderer, int y, int h,
1828 const cairo_half_open_span_t *spans, unsigned num_spans)
1829 cairo_image_span_renderer_t *r = abstract_renderer;
1832 return CAIRO_STATUS_SUCCESS;
1834 if (likely(h == 1)) {
1836 if (spans[0].coverage) {
1837 int len = spans[1].x - spans[0].x;
1838 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
1840 *d = r->u.fill.pixel;
1842 memset(d, r->u.fill.pixel, len);
1845 } while (--num_spans > 1);
1848 if (spans[0].coverage) {
1851 int len = spans[1].x - spans[0].x;
1852 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1854 *d = r->u.fill.pixel;
1856 memset(d, r->u.fill.pixel, len);
1861 } while (--num_spans > 1);
1864 return CAIRO_STATUS_SUCCESS;
/* Mono solid fill into a 16bpp destination: store the pixel value
 * element-by-element across each covered span (no memset — two-byte
 * pixels).  Same h==1 fast path / h>1 row loop shape as _fill8_spans. */
1867 static cairo_status_t
1868 _fill16_spans (void *abstract_renderer, int y, int h,
1869 const cairo_half_open_span_t *spans, unsigned num_spans)
1871 cairo_image_span_renderer_t *r = abstract_renderer;
1874 return CAIRO_STATUS_SUCCESS;
1876 if (likely(h == 1)) {
1878 if (spans[0].coverage) {
1879 int len = spans[1].x - spans[0].x;
1880 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*2);
1882 *d++ = r->u.fill.pixel;
1885 } while (--num_spans > 1);
1888 if (spans[0].coverage) {
1891 int len = spans[1].x - spans[0].x;
1892 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*2);
1894 *d++ = r->u.fill.pixel;
1899 } while (--num_spans > 1);
1902 return CAIRO_STATUS_SUCCESS;
/* Mono solid fill into a 32bpp destination: long spans are handed to
 * pixman_fill (which can use accelerated paths), short spans are written
 * in a plain store loop where call overhead would dominate.
 * NOTE(review): gapped listing — the length-threshold condition on the
 * h==1 path falls in a gap (the h>1 path uses "> 16"). */
1905 static cairo_status_t
1906 _fill32_spans (void *abstract_renderer, int y, int h,
1907 const cairo_half_open_span_t *spans, unsigned num_spans)
1909 cairo_image_span_renderer_t *r = abstract_renderer;
1912 return CAIRO_STATUS_SUCCESS;
1914 if (likely(h == 1)) {
1916 if (spans[0].coverage) {
1917 int len = spans[1].x - spans[0].x;
1919 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1920 spans[0].x, y, len, 1, r->u.fill.pixel);
1922 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
1924 *d++ = r->u.fill.pixel;
1928 } while (--num_spans > 1);
1931 if (spans[0].coverage) {
1932 if (spans[1].x - spans[0].x > 16) {
1933 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1934 spans[0].x, y, spans[1].x - spans[0].x, h,
1939 int len = spans[1].x - spans[0].x;
1940 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
1942 *d++ = r->u.fill.pixel;
1948 } while (--num_spans > 1);
1951 return CAIRO_STATUS_SUCCESS;
/* Generic mono solid fill: delegate every covered span to pixman_fill at
 * the renderer's bpp.  NOTE(review): reads r->data/r->stride, which lie
 * in a gap of the struct listing above — verify field names against the
 * full header. */
1955 static cairo_status_t
1956 _fill_spans (void *abstract_renderer, int y, int h,
1957 const cairo_half_open_span_t *spans, unsigned num_spans)
1959 cairo_image_span_renderer_t *r = abstract_renderer;
1962 return CAIRO_STATUS_SUCCESS;
1965 if (spans[0].coverage) {
1966 pixman_fill ((uint32_t *) r->data, r->stride, r->bpp,
1968 spans[1].x - spans[0].x, h,
1972 } while (--num_spans > 1);
1974 return CAIRO_STATUS_SUCCESS;
/* Mono blit from an integer-translated image source: copy each covered
 * span byte-for-byte from the pre-offset source rows.  Single-pixel
 * spans use a direct sized store (cpp = 1/2/4/8 bytes per pixel), longer
 * spans fall through to memcpy.
 * NOTE(review): gapped listing — the cpp computation, switch heads and
 * the h==1 memcpy arm fall in gaps. */
1978 static cairo_status_t
1979 _blit_spans (void *abstract_renderer, int y, int h,
1980 const cairo_half_open_span_t *spans, unsigned num_spans)
1982 cairo_image_span_renderer_t *r = abstract_renderer;
1986 return CAIRO_STATUS_SUCCESS;
1989 if (likely (h == 1)) {
1990 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
1991 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
1993 if (spans[0].coverage) {
1994 void *s = src + spans[0].x*cpp;
1995 void *d = dst + spans[0].x*cpp;
1996 int len = (spans[1].x - spans[0].x) * cpp;
1999 *(uint8_t *)d = *(uint8_t *)s;
2002 *(uint16_t *)d = *(uint16_t *)s;
2005 *(uint32_t *)d = *(uint32_t *)s;
2009 *(uint64_t *)d = *(uint64_t *)s;
2018 } while (--num_spans > 1);
2021 if (spans[0].coverage) {
2024 void *src = r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x*cpp;
2025 void *dst = r->u.blit.data + yy*r->u.blit.stride + spans[0].x*cpp;
2026 int len = (spans[1].x - spans[0].x) * cpp;
2029 *(uint8_t *)dst = *(uint8_t *)src;
2032 *(uint16_t *)dst = *(uint16_t *)src;
2035 *(uint32_t *)dst = *(uint32_t *)src;
2039 *(uint64_t *)dst = *(uint64_t *)src;
2043 memcpy(dst, src, len);
2050 } while (--num_spans > 1);
2053 return CAIRO_STATUS_SUCCESS;
/* Mono bounded composite: for each covered span, composite the source
 * through pixman with no mask (coverage is all-or-nothing). */
2056 static cairo_status_t
2057 _mono_spans (void *abstract_renderer, int y, int h,
2058 const cairo_half_open_span_t *spans, unsigned num_spans)
2060 cairo_image_span_renderer_t *r = abstract_renderer;
2063 return CAIRO_STATUS_SUCCESS;
2066 if (spans[0].coverage) {
2067 pixman_image_composite32 (r->op,
2068 r->src, NULL, r->u.composite.dst,
2069 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2072 spans[1].x - spans[0].x, h);
2075 } while (--num_spans > 1);
2077 return CAIRO_STATUS_SUCCESS;
/* Mono unbounded composite: uncovered regions must be CLEARed — the band
 * above this row (tracked in u.composite.mask_y), the gap left of the
 * first span, each zero-coverage span, and the gap right of the last
 * span.  Covered spans composite with r->op as in _mono_spans.
 * NOTE(review): gapped listing — several argument lines of the
 * composite32 calls fall in gaps. */
2080 static cairo_status_t
2081 _mono_unbounded_spans (void *abstract_renderer, int y, int h,
2082 const cairo_half_open_span_t *spans, unsigned num_spans)
2084 cairo_image_span_renderer_t *r = abstract_renderer;
2086 if (num_spans == 0) {
2087 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2088 r->src, NULL, r->u.composite.dst,
2089 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2091 r->composite->unbounded.x, y,
2092 r->composite->unbounded.width, h);
2093 r->u.composite.mask_y = y + h;
2094 return CAIRO_STATUS_SUCCESS;
/* Clear any rows skipped since the previous callback. */
2097 if (y != r->u.composite.mask_y) {
2098 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2099 r->src, NULL, r->u.composite.dst,
2100 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2102 r->composite->unbounded.x, r->u.composite.mask_y,
2103 r->composite->unbounded.width, y - r->u.composite.mask_y);
2106 if (spans[0].x != r->composite->unbounded.x) {
2107 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2108 r->src, NULL, r->u.composite.dst,
2109 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2111 r->composite->unbounded.x, y,
2112 spans[0].x - r->composite->unbounded.x, h);
2116 int op = spans[0].coverage ? r->op : PIXMAN_OP_CLEAR;
2117 pixman_image_composite32 (op,
2118 r->src, NULL, r->u.composite.dst,
2119 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2122 spans[1].x - spans[0].x, h);
2124 } while (--num_spans > 1);
2126 if (spans[0].x != r->composite->unbounded.x + r->composite->unbounded.width) {
2127 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2128 r->src, NULL, r->u.composite.dst,
2129 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2132 r->composite->unbounded.x + r->composite->unbounded.width - spans[0].x, h);
2135 r->u.composite.mask_y = y + h;
2136 return CAIRO_STATUS_SUCCESS;
/* finish callback for the mono unbounded path: CLEAR the final band
 * between the last rendered row and the bottom of the unbounded rect. */
2139 static cairo_status_t
2140 _mono_finish_unbounded_spans (void *abstract_renderer)
2142 cairo_image_span_renderer_t *r = abstract_renderer;
2144 if (r->u.composite.mask_y < r->composite->unbounded.y + r->composite->unbounded.height) {
2145 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2146 r->src, NULL, r->u.composite.dst,
2147 r->composite->unbounded.x + r->u.composite.src_x, r->u.composite.mask_y + r->u.composite.src_y,
2149 r->composite->unbounded.x, r->u.composite.mask_y,
2150 r->composite->unbounded.width,
2151 r->composite->unbounded.y + r->composite->unbounded.height - r->u.composite.mask_y);
2154 return CAIRO_STATUS_SUCCESS;
/* Initialize the span renderer for CAIRO_ANTIALIAS_NONE with an opaque
 * solid mask, picking the cheapest row callback in order of preference:
 * raw per-bpp fill (_fill8/16/32_spans) when the op reduces to writing a
 * constant pixel; direct row blit (_blit_spans) for an integer-translated
 * same-format image source; otherwise a pixman composite per span
 * (_mono_spans / _mono_unbounded_spans).
 * NOTE(review): gapped listing — the tx/ty declarations, the blit bpp
 * setup (line before "r->u.blit.stride") and some closing branches are
 * missing from view. */
2157 static cairo_int_status_t
2158 mono_renderer_init (cairo_image_span_renderer_t *r,
2159 const cairo_composite_rectangles_t *composite,
2160 cairo_antialias_t antialias,
2161 cairo_bool_t needs_clip)
2163 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2165 if (antialias != CAIRO_ANTIALIAS_NONE)
2166 return CAIRO_INT_STATUS_UNSUPPORTED;
/* Gaussian-filtered surface patterns need the general path. */
2168 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2169 composite->source_pattern.base.filter == CAIRO_FILTER_GAUSSIAN)
2170 return CAIRO_INT_STATUS_UNSUPPORTED;
2172 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2173 composite->mask_pattern.base.filter == CAIRO_FILTER_GAUSSIAN)
2174 return CAIRO_INT_STATUS_UNSUPPORTED;
2176 if (!_cairo_pattern_is_opaque_solid (&composite->mask_pattern.base))
2177 return CAIRO_INT_STATUS_UNSUPPORTED;
2179 r->base.render_rows = NULL;
2180 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2181 const cairo_color_t *color;
2183 color = &composite->source_pattern.solid.color;
2184 if (composite->op == CAIRO_OPERATOR_CLEAR)
2185 color = CAIRO_COLOR_TRANSPARENT;
2187 if (fill_reduces_to_source (composite->op, color, dst) &&
2188 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2189 /* Use plain C for the fill operations as the span length is
2190 * typically small, too small to payback the startup overheads of
2193 switch (PIXMAN_FORMAT_BPP(dst->pixman_format)) {
2194 case 8: r->base.render_rows = _fill8_spans; break;
2195 case 16: r->base.render_rows = _fill16_spans; break;
2196 case 32: r->base.render_rows = _fill32_spans; break;
2199 r->u.fill.data = dst->data;
2200 r->u.fill.stride = dst->stride;
2202 } else if ((composite->op == CAIRO_OPERATOR_SOURCE ||
2203 (composite->op == CAIRO_OPERATOR_OVER &&
2204 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2205 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2206 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2207 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2209 cairo_image_surface_t *src =
2210 to_image_surface(composite->source_pattern.surface.surface);
2213 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2215 composite->bounded.x + tx >= 0 &&
2216 composite->bounded.y + ty >= 0 &&
2217 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2218 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2220 r->u.blit.stride = dst->stride;
2221 r->u.blit.data = dst->data;
2222 r->u.blit.src_stride = src->stride;
/* NOTE(review): "tx * 4" assumes a 4-byte pixel; presumably a bpp
 * check/assignment sits on the missing line 2219 — verify against
 * the unabridged source. */
2223 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2224 r->base.render_rows = _blit_spans;
2228 if (r->base.render_rows == NULL) {
2229 r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
2230 &composite->unbounded,
2231 &composite->source_sample_area,
2232 &r->u.composite.src_x, &r->u.composite.src_y);
2233 if (unlikely (r->src == NULL))
2234 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2236 r->u.composite.dst = to_pixman_image (composite->surface);
2237 r->op = _pixman_operator (composite->op);
2238 if (composite->is_bounded == 0) {
2239 r->base.render_rows = _mono_unbounded_spans;
2240 r->base.finish = _mono_finish_unbounded_spans;
2241 r->u.composite.mask_y = composite->unbounded.y;
2243 r->base.render_rows = _mono_spans;
2245 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
2247 return CAIRO_INT_STATUS_SUCCESS;
/* Constants for packed per-channel 8-bit arithmetic: RB_* operate on the
 * red and blue channels held together in one 32-bit word (pixman-style
 * approximate division by 255 with rounding). */
2250 #define ONE_HALF 0x7f
2251 #define RB_MASK 0x00ff00ff
2252 #define RB_ONE_HALF 0x007f007f
2253 #define RB_MASK_PLUS_ONE 0x01000100
/* Multiply two packed 8-bit channels (red/blue lanes of `a`) by `b`,
 * with rounding — the standard (t + (t >> 8)) >> 8 approximation of
 * dividing by 255. */
2255 static inline uint32_t
2256 mul8x2_8 (uint32_t a, uint8_t b)
2258 uint32_t t = (a & RB_MASK) * b + RB_ONE_HALF;
2259 return ((t + ((t >> G_SHIFT) & RB_MASK)) >> G_SHIFT) & RB_MASK;
/* Saturating add of two packed 8-bit channel pairs.
 * NOTE(review): gapped listing — the initializer of `t` (presumably
 * `t = a + b`) and the return expression are missing from view. */
2262 static inline uint32_t
2263 add8x2_8x2 (uint32_t a, uint32_t b)
2266 t |= RB_MASK_PLUS_ONE - ((t >> G_SHIFT) & RB_MASK);
/* Multiply two 8-bit values with rounding: (a*b + 127 + carry) >> 8,
 * the usual approximation of a*b/255. */
2270 static inline uint8_t
2271 mul8_8 (uint8_t a, uint8_t b)
2273 uint16_t t = a * (uint16_t)b + ONE_HALF;
2274 return ((t >> G_SHIFT) + t) >> G_SHIFT;
/* Linear interpolation of all four 8-bit channels of two 32-bit pixels:
 * src*a + dst*(255-a), computed on the red/blue pair and the
 * alpha/green pair separately via mul8x2_8/add8x2_8x2. */
2277 static inline uint32_t
2278 lerp8x4 (uint32_t src, uint8_t a, uint32_t dst)
2280 return (add8x2_8x2 (mul8x2_8 (src, a),
2281 mul8x2_8 (dst, ~a)) |
2282 add8x2_8x2 (mul8x2_8 (src >> G_SHIFT, a),
2283 mul8x2_8 (dst >> G_SHIFT, ~a)) << G_SHIFT);
/* Antialiased solid fill into an A8 destination, opaque source: full
 * coverage memsets the pixel; partial coverage blends
 * dst*(255-a) + pixel*a per byte.  h==1 fast path, else per-row loop.
 * NOTE(review): gapped listing — loop heads and partial-coverage
 * bookkeeping lines fall in gaps. */
2286 static cairo_status_t
2287 _fill_a8_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2288 const cairo_half_open_span_t *spans, unsigned num_spans)
2290 cairo_image_span_renderer_t *r = abstract_renderer;
2293 return CAIRO_STATUS_SUCCESS;
2295 if (likely(h == 1)) {
2296 uint8_t *d = r->u.fill.data + r->u.fill.stride*y;
2298 uint8_t a = spans[0].coverage;
2300 int len = spans[1].x - spans[0].x;
2302 memset(d + spans[0].x, r->u.fill.pixel, len);
2304 uint8_t s = mul8_8(a, r->u.fill.pixel);
2305 uint8_t *dst = d + spans[0].x;
2308 uint8_t t = mul8_8(*dst, a);
2314 } while (--num_spans > 1);
2317 uint8_t a = spans[0].coverage;
2322 int len = spans[1].x - spans[0].x;
2323 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2324 memset(d, r->u.fill.pixel, len);
2328 uint8_t s = mul8_8(a, r->u.fill.pixel);
2331 int len = spans[1].x - spans[0].x;
2332 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2334 uint8_t t = mul8_8(*d, a);
2342 } while (--num_spans > 1);
2345 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into an x/argb32 destination, opaque source:
 * full coverage uses pixman_fill for long spans or a store loop for
 * short ones; partial coverage lerps each pixel with lerp8x4.
 * NOTE(review): gapped listing — the length-threshold condition on the
 * h==1 path falls in a gap (the h>1 path uses "> 16"). */
2348 static cairo_status_t
2349 _fill_xrgb32_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2350 const cairo_half_open_span_t *spans, unsigned num_spans)
2352 cairo_image_span_renderer_t *r = abstract_renderer;
2355 return CAIRO_STATUS_SUCCESS;
2357 if (likely(h == 1)) {
2359 uint8_t a = spans[0].coverage;
2361 int len = spans[1].x - spans[0].x;
2362 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2365 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), 32,
2366 spans[0].x, y, len, 1, r->u.fill.pixel);
2368 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2370 *d++ = r->u.fill.pixel;
2372 } else while (len--) {
2373 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2378 } while (--num_spans > 1);
2381 uint8_t a = spans[0].coverage;
2384 if (spans[1].x - spans[0].x > 16) {
2385 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), 32,
2386 spans[0].x, y, spans[1].x - spans[0].x, h,
2391 int len = spans[1].x - spans[0].x;
2392 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2394 *d++ = r->u.fill.pixel;
2401 int len = spans[1].x - spans[0].x;
2402 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2404 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2412 } while (--num_spans > 1);
2415 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into A8 with a translucent source: effective
 * alpha is coverage * r->bpp (bpp is reused here as the 8-bit source
 * alpha on this path); each byte gets d*(255-a) + pixel*a with
 * fixed-point rounding. */
2418 static cairo_status_t
2419 _fill_a8_lerp_spans (void *abstract_renderer, int y, int h,
2420 const cairo_half_open_span_t *spans, unsigned num_spans)
2422 cairo_image_span_renderer_t *r = abstract_renderer;
2425 return CAIRO_STATUS_SUCCESS;
2427 if (likely(h == 1)) {
2429 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2431 int len = spans[1].x - spans[0].x;
2432 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
2433 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2436 uint16_t t = *d*ia + p;
2437 *d++ = (t + (t>>8)) >> 8;
2441 } while (--num_spans > 1);
2444 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2447 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2450 int len = spans[1].x - spans[0].x;
2451 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2453 uint16_t t = *d*ia + p;
2454 *d++ = (t + (t>>8)) >> 8;
2460 } while (--num_spans > 1);
2463 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into x/argb32 with a translucent source:
 * per-pixel lerp8x4 at alpha = coverage * r->bpp (bpp doubling as the
 * source alpha on this path, as in _fill_a8_lerp_spans). */
2466 static cairo_status_t
2467 _fill_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2468 const cairo_half_open_span_t *spans, unsigned num_spans)
2470 cairo_image_span_renderer_t *r = abstract_renderer;
2473 return CAIRO_STATUS_SUCCESS;
2475 if (likely(h == 1)) {
2477 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2479 int len = spans[1].x - spans[0].x;
2480 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2482 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2487 } while (--num_spans > 1);
2490 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2494 int len = spans[1].x - spans[0].x;
2495 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2497 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2504 } while (--num_spans > 1);
2507 return CAIRO_STATUS_SUCCESS;
/* Antialiased blit from an integer-translated xrgb32 source: full
 * effective coverage memcpys the span, partial coverage lerps each
 * destination pixel toward the source pixel.
 * NOTE(review): gapped listing — the full-coverage test lines fall in
 * gaps on both the h==1 and h>1 paths. */
2510 static cairo_status_t
2511 _blit_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2512 const cairo_half_open_span_t *spans, unsigned num_spans)
2514 cairo_image_span_renderer_t *r = abstract_renderer;
2517 return CAIRO_STATUS_SUCCESS;
2519 if (likely(h == 1)) {
2520 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
2521 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
2523 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2525 uint32_t *s = (uint32_t*)src + spans[0].x;
2526 uint32_t *d = (uint32_t*)dst + spans[0].x;
2527 int len = spans[1].x - spans[0].x;
2532 memcpy(d, s, len*4);
2535 *d = lerp8x4 (*s, a, *d);
2541 } while (--num_spans > 1);
2544 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2548 uint32_t *s = (uint32_t *)(r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x * 4);
2549 uint32_t *d = (uint32_t *)(r->u.blit.data + yy*r->u.blit.stride + spans[0].x * 4);
2550 int len = spans[1].x - spans[0].x;
2555 memcpy(d, s, len * 4);
2558 *d = lerp8x4 (*s, a, *d);
2566 } while (--num_spans > 1);
2569 return CAIRO_STATUS_SUCCESS;
/* General in-place span compositing: accumulate coverage bytes into the
 * scratch A8 mask (r->mask), flushing a composite whenever a long
 * fully-opaque run can go mask-free, or when a long zero run makes it
 * worth emitting the buffered segment.  A single full-coverage span
 * short-circuits to a direct composite.  x0..x1 delimit the buffered
 * segment; the tail is flushed after the loop.
 * NOTE(review): gapped listing — declarations, several composite32
 * argument lines and the buffer-advance bookkeeping fall in gaps. */
2572 static cairo_status_t
2573 _inplace_spans (void *abstract_renderer,
2575 const cairo_half_open_span_t *spans,
2578 cairo_image_span_renderer_t *r = abstract_renderer;
2583 return CAIRO_STATUS_SUCCESS;
2585 if (num_spans == 2 && spans[0].coverage == 0xff) {
2586 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2587 spans[0].x + r->u.composite.src_x,
2588 y + r->u.composite.src_y,
2591 spans[1].x - spans[0].x, h);
2592 return CAIRO_STATUS_SUCCESS;
2595 mask = (uint8_t *)pixman_image_get_data (r->mask);
2596 x1 = x0 = spans[0].x;
2598 int len = spans[1].x - spans[0].x;
2600 return CAIRO_STATUS_NULL_POINTER;
2601 *mask++ = spans[0].coverage;
2603 if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
/* Flush the buffered partial-coverage segment, then composite the
 * opaque run without a mask. */
2605 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2606 x0 + r->u.composite.src_x,
2607 y + r->u.composite.src_y,
2612 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2613 spans[0].x + r->u.composite.src_x,
2614 y + r->u.composite.src_y,
2618 mask = (uint8_t *)pixman_image_get_data (r->mask);
2620 } else if (spans[0].coverage == 0x0 &&
2621 x1 - x0 > r->u.composite.run_length) {
2622 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2623 x0 + r->u.composite.src_x,
2624 y + r->u.composite.src_y,
2628 mask = (uint8_t *)pixman_image_get_data (r->mask);
2631 memset (mask, spans[0].coverage, --len);
2637 } while (--num_spans > 1);
/* Flush whatever remains buffered in the mask. */
2640 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2641 x0 + r->u.composite.src_x,
2642 y + r->u.composite.src_y,
2648 return CAIRO_STATUS_SUCCESS;
/* In-place span compositing with a global opacity: like _inplace_spans
 * but every coverage byte is pre-multiplied by r->bpp (the 8-bit opacity
 * on this path) via mul8_8, and there is no mask-free opaque fast path.
 * NOTE(review): gapped listing — the buffered-byte store and segment
 * bookkeeping lines fall in gaps. */
2651 static cairo_status_t
2652 _inplace_opacity_spans (void *abstract_renderer, int y, int h,
2653 const cairo_half_open_span_t *spans,
2656 cairo_image_span_renderer_t *r = abstract_renderer;
2661 return CAIRO_STATUS_SUCCESS;
2663 mask = (uint8_t *)pixman_image_get_data (r->mask);
2664 x1 = x0 = spans[0].x;
2666 int len = spans[1].x - spans[0].x;
2667 uint8_t m = mul8_8(spans[0].coverage, r->bpp);
2669 return CAIRO_STATUS_NULL_POINTER;
2673 x1 - x0 > r->u.composite.run_length) {
2674 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2675 x0 + r->u.composite.src_x,
2676 y + r->u.composite.src_y,
2680 mask = (uint8_t *)pixman_image_get_data (r->mask);
2683 memset (mask, m, --len);
2689 } while (--num_spans > 1);
/* Flush the remaining buffered segment. */
2692 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2693 x0 + r->u.composite.src_x,
2694 y + r->u.composite.src_y,
2700 return CAIRO_STATUS_SUCCESS;
/* _inplace_src_spans(): row callback for the in-place SOURCE operator.
 * SOURCE is not bounded by the mask, so each flush must replace the
 * destination under the accumulated mask: with PIXMAN_OP_LERP_SRC where
 * pixman provides it, otherwise emulated in two passes —
 * PIXMAN_OP_OUT_REVERSE (clear dst where the mask is set) followed by
 * PIXMAN_OP_ADD (add src*mask back in).
 * NOTE(review): partial extraction; gaps in the leading original line
 * numbers mark elided lines (declarations, #else/#endif, some composite
 * arguments, closing braces).  Restore from upstream before compiling.
 */
2703 static cairo_status_t
2704 _inplace_src_spans (void *abstract_renderer, int y, int h,
2705 const cairo_half_open_span_t *spans,
2708 cairo_image_span_renderer_t *r = abstract_renderer;
2713 return CAIRO_STATUS_SUCCESS;
2718 int len = spans[1].x - spans[0].x;
/* Long fully-opaque run: first flush the pending mask region [x0, spans[0].x)
 * (LERP_SRC, or the OUT_REVERSE+ADD pair), then write the opaque run with a
 * plain unmasked PIXMAN_OP_SRC. */
2719 if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
2720 if (spans[0].x != x0) {
2721 #if PIXMAN_HAS_OP_LERP
2722 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2723 r->src, r->mask, r->u.composite.dst,
2724 x0 + r->u.composite.src_x,
2725 y + r->u.composite.src_y,
2728 spans[0].x - x0, h);
/* Fallback: knock out dst under the mask, then add src*mask. */
2730 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2731 r->mask, NULL, r->u.composite.dst,
2735 spans[0].x - x0, h);
2736 pixman_image_composite32 (PIXMAN_OP_ADD,
2737 r->src, r->mask, r->u.composite.dst,
2738 x0 + r->u.composite.src_x,
2739 y + r->u.composite.src_y,
2742 spans[0].x - x0, h);
2746 pixman_image_composite32 (PIXMAN_OP_SRC,
2747 r->src, NULL, r->u.composite.dst,
2748 spans[0].x + r->u.composite.src_x,
2749 y + r->u.composite.src_y,
2752 spans[1].x - spans[0].x, h);
/* Zero-coverage span: flush whatever mask is pending, then skip the hole
 * (SOURCE leaves unmasked-but-flushed areas untouched only via the
 * lerp/two-pass replacement above). */
2756 } else if (spans[0].coverage == 0x0) {
2757 if (spans[0].x != x0) {
2758 #if PIXMAN_HAS_OP_LERP
2759 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2760 r->src, r->mask, r->u.composite.dst,
2761 x0 + r->u.composite.src_x,
2762 y + r->u.composite.src_y,
2765 spans[0].x - x0, h);
2767 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2768 r->mask, NULL, r->u.composite.dst,
2772 spans[0].x - x0, h);
2773 pixman_image_composite32 (PIXMAN_OP_ADD,
2774 r->src, r->mask, r->u.composite.dst,
2775 x0 + r->u.composite.src_x,
2776 y + r->u.composite.src_y,
2779 spans[0].x - x0, h);
/* Ordinary span: store coverage into the mask scanline (one byte, then
 * memset for the remainder — hence --len). */
2786 *m++ = spans[0].coverage;
2788 memset (m, spans[0].coverage, --len);
2793 } while (--num_spans > 1);
/* Final flush of any mask region still pending after the loop. */
2795 if (spans[0].x != x0) {
2796 #if PIXMAN_HAS_OP_LERP
2797 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2798 r->src, r->mask, r->u.composite.dst,
2799 x0 + r->u.composite.src_x,
2800 y + r->u.composite.src_y,
2803 spans[0].x - x0, h);
2805 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2806 r->mask, NULL, r->u.composite.dst,
2810 spans[0].x - x0, h);
2811 pixman_image_composite32 (PIXMAN_OP_ADD,
2812 r->src, r->mask, r->u.composite.dst,
2813 x0 + r->u.composite.src_x,
2814 y + r->u.composite.src_y,
2817 spans[0].x - x0, h);
2821 return CAIRO_STATUS_SUCCESS;
/* _inplace_src_opacity_spans(): SOURCE-operator variant with a constant
 * mask opacity folded in — each span's coverage is scaled (mul8_8) by
 * r->bpp (the solid mask alpha, 0..255) before entering the a8 mask line.
 * Flushes replace dst under the mask via PIXMAN_OP_LERP_SRC, or the
 * PIXMAN_OP_OUT_REVERSE + PIXMAN_OP_ADD two-pass emulation when pixman
 * lacks LERP.
 * NOTE(review): partial extraction; gaps in the leading original line
 * numbers mark elided lines.  Restore from upstream before compiling.
 */
2824 static cairo_status_t
2825 _inplace_src_opacity_spans (void *abstract_renderer, int y, int h,
2826 const cairo_half_open_span_t *spans,
2829 cairo_image_span_renderer_t *r = abstract_renderer;
2834 return CAIRO_STATUS_SUCCESS;
2837 mask = (uint8_t *)pixman_image_get_data (r->mask);
2839 int len = spans[1].x - spans[0].x;
/* Fold the constant opacity into this span's coverage. */
2840 uint8_t m = mul8_8(spans[0].coverage, r->bpp);
/* Flush branch (enclosing condition partially elided — presumably the
 * scaled coverage is 0): replace dst under the pending mask [x0, spans[0].x). */
2842 if (spans[0].x != x0) {
2843 #if PIXMAN_HAS_OP_LERP
2844 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2845 r->src, r->mask, r->u.composite.dst,
2846 x0 + r->u.composite.src_x,
2847 y + r->u.composite.src_y,
2850 spans[0].x - x0, h);
/* Fallback: knock out dst under the mask, then add src*mask. */
2852 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2853 r->mask, NULL, r->u.composite.dst,
2857 spans[0].x - x0, h);
2858 pixman_image_composite32 (PIXMAN_OP_ADD,
2859 r->src, r->mask, r->u.composite.dst,
2860 x0 + r->u.composite.src_x,
2861 y + r->u.composite.src_y,
2864 spans[0].x - x0, h);
/* Restart mask accumulation after the flush. */
2868 mask = (uint8_t *)pixman_image_get_data (r->mask);
/* Guard (condition elided): refuse to write through a bad mask pointer. */
2872 return CAIRO_STATUS_NULL_POINTER;
/* Replicate the scaled coverage across the rest of the span. */
2875 memset (mask, m, --len);
2880 } while (--num_spans > 1);
/* Final flush of any mask region still pending after the loop. */
2882 if (spans[0].x != x0) {
2883 #if PIXMAN_HAS_OP_LERP
2884 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2885 r->src, r->mask, r->u.composite.dst,
2886 x0 + r->u.composite.src_x,
2887 y + r->u.composite.src_y,
2890 spans[0].x - x0, h);
2892 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2893 r->mask, NULL, r->u.composite.dst,
2897 spans[0].x - x0, h);
2898 pixman_image_composite32 (PIXMAN_OP_ADD,
2899 r->src, r->mask, r->u.composite.dst,
2900 x0 + r->u.composite.src_x,
2901 y + r->u.composite.src_y,
2904 spans[0].x - x0, h);
2908 return CAIRO_STATUS_SUCCESS;
/* Destroy-function hook for pixman images whose pixel buffer was malloc'd
 * by inplace_renderer_init() (installed via pixman_image_set_destroy_function
 * with the buffer as `data`).  Body elided in this extraction — presumably
 * just free(data); confirm against upstream. */
2911 static void free_pixels (pixman_image_t *image, void *data)
/* inplace_renderer_init(): try to configure `r` to render spans directly
 * into the destination image, choosing the cheapest applicable strategy:
 *   1. plain-C solid-fill span callbacks per destination format,
 *   2. a direct same-format blit for an integer-translated image source,
 *   3. generic _inplace_* callbacks driving pixman with a one-scanline
 *      a8 mask repeated vertically.
 * Returns CAIRO_INT_STATUS_UNSUPPORTED when no in-place strategy applies.
 * NOTE(review): partial extraction — gaps in the leading original line
 * numbers mark elided lines (declarations, `break`s, else-branches,
 * closing braces).  Restore from upstream before compiling.
 */
2916 static cairo_int_status_t
2917 inplace_renderer_init (cairo_image_span_renderer_t *r,
2918 const cairo_composite_rectangles_t *composite,
2919 cairo_antialias_t antialias,
2920 cairo_bool_t needs_clip)
2922 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
/* Only a solid mask pattern (a constant opacity) is handled in place. */
2925 if (composite->mask_pattern.base.type != CAIRO_PATTERN_TYPE_SOLID)
2926 return CAIRO_INT_STATUS_UNSUPPORTED;
/* Gaussian-filtered patterns need resampling this path cannot do. */
2928 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2929 composite->source_pattern.base.filter == CAIRO_FILTER_GAUSSIAN)
2930 return CAIRO_INT_STATUS_UNSUPPORTED;
2932 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2933 composite->mask_pattern.base.filter == CAIRO_FILTER_GAUSSIAN)
2934 return CAIRO_INT_STATUS_UNSUPPORTED;
2936 r->base.render_rows = NULL;
/* Despite the name, r->bpp here stores the solid mask's opacity as an
 * 8-bit value (alpha_short >> 8), 0xff meaning fully opaque. */
2937 r->bpp = composite->mask_pattern.solid.color.alpha_short >> 8;
/* Strategy 1: solid source reducible to a raw pixel fill. */
2939 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2940 const cairo_color_t *color;
2942 color = &composite->source_pattern.solid.color;
2943 if (composite->op == CAIRO_OPERATOR_CLEAR)
2944 color = CAIRO_COLOR_TRANSPARENT;
2946 if (fill_reduces_to_source (composite->op, color, dst) &&
2947 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2948 /* Use plain C for the fill operations as the span length is
2949 * typically small, too small to payback the startup overheads of
/* Opaque mask: lerp-opaque fillers per destination format. */
2952 if (r->bpp == 0xff) {
2953 switch (dst->format) {
2954 case CAIRO_FORMAT_A8:
2955 r->base.render_rows = _fill_a8_lerp_opaque_spans;
2957 case CAIRO_FORMAT_RGB24:
2958 case CAIRO_FORMAT_ARGB32:
2959 r->base.render_rows = _fill_xrgb32_lerp_opaque_spans;
/* Unsupported formats fall through with render_rows left NULL. */
2961 case CAIRO_FORMAT_A1:
2962 case CAIRO_FORMAT_RGB16_565:
2963 case CAIRO_FORMAT_RGB30:
2964 case CAIRO_FORMAT_INVALID:
/* Translucent mask: lerp fillers that blend by coverage*opacity. */
2968 switch (dst->format) {
2969 case CAIRO_FORMAT_A8:
2970 r->base.render_rows = _fill_a8_lerp_spans;
2972 case CAIRO_FORMAT_RGB24:
2973 case CAIRO_FORMAT_ARGB32:
2974 r->base.render_rows = _fill_xrgb32_lerp_spans;
2976 case CAIRO_FORMAT_A1:
2977 case CAIRO_FORMAT_RGB16_565:
2978 case CAIRO_FORMAT_RGB30:
2979 case CAIRO_FORMAT_INVALID:
2983 r->u.fill.data = dst->data;
2984 r->u.fill.stride = dst->stride;
/* Strategy 2: direct blit — xrgb32 dst, effectively-SOURCE operator, and
 * a same-format image source. */
2986 } else if ((dst->format == CAIRO_FORMAT_ARGB32 || dst->format == CAIRO_FORMAT_RGB24) &&
2987 (composite->op == CAIRO_OPERATOR_SOURCE ||
2988 (composite->op == CAIRO_OPERATOR_OVER &&
2989 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2990 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2991 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2992 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2994 cairo_image_surface_t *src =
2995 to_image_surface(composite->source_pattern.surface.surface);
/* Only when the pattern matrix is an integer translation (tx, ty) and the
 * bounded extents lie wholly inside the source image. */
2998 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
3000 composite->bounded.x + tx >= 0 &&
3001 composite->bounded.y + ty >= 0 &&
3002 composite->bounded.x + composite->bounded.width + tx <= src->width &&
3003 composite->bounded.y + composite->bounded.height + ty <= src->height) {
3005 assert(PIXMAN_FORMAT_BPP(dst->pixman_format) == 32);
3006 r->u.blit.stride = dst->stride;
3007 r->u.blit.data = dst->data;
3008 r->u.blit.src_stride = src->stride;
/* tx * 4: 32bpp formats only, asserted above. */
3009 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
3010 r->base.render_rows = _blit_xrgb32_lerp_spans;
/* Strategy 3: generic pixman-driven in-place compositing. */
3013 if (r->base.render_rows == NULL) {
3014 const cairo_pattern_t *src = &composite->source_pattern.base;
3017 if (composite->is_bounded == 0)
3018 return CAIRO_INT_STATUS_UNSUPPORTED;
3020 r->base.render_rows = r->bpp == 0xff ? _inplace_spans : _inplace_opacity_spans;
/* Mask scanline width rounded up to a multiple of 4 bytes. */
3021 width = (composite->bounded.width + 3) & ~3;
/* run_length: minimum opaque-run length worth flushing through pixman;
 * gradients amortize much more per-composite setup, so raise it. */
3023 r->u.composite.run_length = 8;
3024 if (src->type == CAIRO_PATTERN_TYPE_LINEAR ||
3025 src->type == CAIRO_PATTERN_TYPE_RADIAL)
3026 r->u.composite.run_length = 256;
/* On a clear destination SOURCE/OVER/ADD all degenerate to SRC. */
3027 if (dst->base.is_clear &&
3028 (composite->op == CAIRO_OPERATOR_SOURCE ||
3029 composite->op == CAIRO_OPERATOR_OVER ||
3030 composite->op == CAIRO_OPERATOR_ADD)) {
3031 r->op = PIXMAN_OP_SRC;
/* Unbounded SOURCE needs the dedicated _inplace_src_* callbacks and a
 * mask sized to the unbounded extents. */
3032 } else if (composite->op == CAIRO_OPERATOR_SOURCE) {
3033 r->base.render_rows = r->bpp == 0xff ? _inplace_src_spans : _inplace_src_opacity_spans;
3034 r->u.composite.mask_y = r->composite->unbounded.y;
3035 width = (composite->unbounded.width + 3) & ~3;
3036 } else if (composite->op == CAIRO_OPERATOR_CLEAR) {
3037 r->op = PIXMAN_OP_OUT_REVERSE;
3040 r->op = _pixman_operator (composite->op);
3043 r->src = _pixman_image_for_pattern (dst, src, FALSE,
3044 &composite->bounded,
3045 &composite->source_sample_area,
3046 &r->u.composite.src_x, &r->u.composite.src_y);
3047 if (unlikely (r->src == NULL))
3048 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3050 /* Create an effectively unbounded mask by repeating the single line */
/* Heap-allocate the scanline when it exceeds the embedded buffer
 * (the else branch, using r->_buf presumably, is elided). */
3052 if (width > SZ_BUF) {
3053 buf = malloc (width);
3054 if (unlikely (buf == NULL)) {
3055 pixman_image_unref (r->src);
3056 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Stride 0 makes every row of the a8 image alias the same scanline,
 * giving the "unbounded" repeated-line mask noted above. */
3059 r->mask = pixman_image_create_bits (PIXMAN_a8,
3060 width, composite->unbounded.height,
3061 (uint32_t *)buf, 0);
3062 if (unlikely (r->mask == NULL)) {
3063 pixman_image_unref (r->src);
3066 return _cairo_error(CAIRO_STATUS_NO_MEMORY);
/* Ensure the malloc'd scanline is freed with the pixman image. */
3070 pixman_image_set_destroy_function (r->mask, free_pixels, buf);
3072 r->u.composite.dst = dst->pixman_image;
3075 return CAIRO_INT_STATUS_SUCCESS;
/* span_renderer_init(): entry point that configures the image span
 * renderer for a composite operation.  Tries the specialized mono and
 * in-place renderers first; otherwise falls back to the generic path that
 * accumulates coverage into an a8 mask image and composites it once in
 * span_renderer_fini().
 * NOTE(review): partial extraction — gaps in the leading original line
 * numbers mark elided lines (early-out conditions, #else/#endif,
 * assignments, closing braces).  Restore from upstream before compiling.
 */
3078 static cairo_int_status_t
3079 span_renderer_init (cairo_abstract_span_renderer_t *_r,
3080 const cairo_composite_rectangles_t *composite,
3081 cairo_antialias_t antialias,
3082 cairo_bool_t needs_clip)
3084 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
3085 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
3086 const cairo_pattern_t *source = &composite->source_pattern.base;
3087 cairo_operator_t op = composite->op;
3088 cairo_int_status_t status;
3090 TRACE ((stderr, "%s: antialias=%d, needs_clip=%d\n", __FUNCTION__,
3091 antialias, needs_clip));
/* Early rejection (condition elided — presumably needs_clip). */
3094 return CAIRO_INT_STATUS_UNSUPPORTED;
3096 r->composite = composite;
3099 r->base.finish = NULL;
/* Prefer the specialized renderers; anything but UNSUPPORTED is final. */
3101 status = mono_renderer_init (r, composite, antialias, needs_clip);
3102 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
3105 status = inplace_renderer_init (r, composite, antialias, needs_clip);
3106 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
/* Generic path: canonicalize the operator for pixman. */
3111 if (op == CAIRO_OPERATOR_CLEAR) {
3112 #if PIXMAN_HAS_OP_LERP
3113 op = PIXMAN_OP_LERP_CLEAR;
/* Without LERP, emulate CLEAR as white OUT_REVERSE dst. */
3115 source = &_cairo_pattern_white.base;
3116 op = PIXMAN_OP_OUT_REVERSE;
/* Clear destination: SOURCE/OVER/ADD all reduce (assignment elided,
 * presumably to PIXMAN_OP_SRC — confirm upstream). */
3118 } else if (dst->base.is_clear &&
3119 (op == CAIRO_OPERATOR_SOURCE ||
3120 op == CAIRO_OPERATOR_OVER ||
3121 op == CAIRO_OPERATOR_ADD)) {
/* SOURCE with an opaque pattern behaves as OVER; otherwise only LERP_SRC
 * can express unbounded SOURCE, and without it we punt. */
3123 } else if (op == CAIRO_OPERATOR_SOURCE) {
3124 if (_cairo_pattern_is_opaque (&composite->source_pattern.base,
3125 &composite->source_sample_area))
3127 op = PIXMAN_OP_OVER;
3131 #if PIXMAN_HAS_OP_LERP
3132 op = PIXMAN_OP_LERP_SRC;
3134 return CAIRO_INT_STATUS_UNSUPPORTED;
3138 op = _pixman_operator (op);
3142 r->src = _pixman_image_for_pattern (dst, source, FALSE,
3143 &composite->unbounded,
3144 &composite->source_sample_area,
3145 &r->u.mask.src_x, &r->u.mask.src_y);
3146 if (unlikely (r->src == NULL))
3147 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* Solid mask: just record the opacity; otherwise realize the mask
 * pattern as a pixman image. */
3150 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
3151 r->opacity = composite->mask_pattern.solid.color.alpha;
3153 pixman_image_t *mask;
3156 mask = _pixman_image_for_pattern (dst,
3157 &composite->mask_pattern.base,
3159 &composite->unbounded,
3160 &composite->mask_sample_area,
3162 if (unlikely (mask == NULL))
3163 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3165 /* XXX Component-alpha? */
/* Alpha-only destination with an opaque source: the mask alone carries
 * all the information, so substitute it for the source. */
3166 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
3167 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
3169 pixman_image_unref (r->src);
3171 r->u.mask.src_x = mask_x;
3172 r->u.mask.src_y = mask_y;
/* Otherwise a separate (non-solid) mask is unsupported here
 * (enclosing condition elided). */
3177 pixman_image_unref (mask);
3178 return CAIRO_INT_STATUS_UNSUPPORTED;
/* Coverage accumulator: an a8 image over the unbounded extents, stride
 * rounded up to a multiple of 4 bytes. */
3182 r->u.mask.extents = composite->unbounded;
3183 r->u.mask.stride = (r->u.mask.extents.width + 3) & ~3;
/* Too big for the embedded r->_buf: let pixman allocate the bits and
 * skip the zeroing finisher (pixman's buffer starts zeroed). */
3184 if (r->u.mask.extents.height * r->u.mask.stride > (int)sizeof (r->_buf)) {
3185 r->mask = pixman_image_create_bits (PIXMAN_a8,
3186 r->u.mask.extents.width,
3187 r->u.mask.extents.height,
3190 r->base.render_rows = _cairo_image_spans;
3191 r->base.finish = NULL;
/* Small enough: use the embedded buffer and the zero-on-finish variant. */
3193 r->mask = pixman_image_create_bits (PIXMAN_a8,
3194 r->u.mask.extents.width,
3195 r->u.mask.extents.height,
3196 (uint32_t *)r->_buf, r->u.mask.stride);
3198 r->base.render_rows = _cairo_image_spans_and_zero;
3199 r->base.finish = _cairo_image_finish_spans_and_zero;
3201 if (unlikely (r->mask == NULL))
3202 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3204 r->u.mask.data = (uint8_t *) pixman_image_get_data (r->mask);
3205 r->u.mask.stride = pixman_image_get_stride (r->mask);
/* Store the mask's bottom edge for row clamping during rendering. */
3207 r->u.mask.extents.height += r->u.mask.extents.y;
3208 return CAIRO_STATUS_SUCCESS;
/* span_renderer_fini(): tear down the span renderer.  On success (generic
 * path only: r->bpp == 0, i.e. not one of the in-place specializations),
 * composite the accumulated a8 coverage mask over the unbounded extents
 * in a single pixman call, then release the pixman source and mask.
 * NOTE(review): partial extraction — the two similar `if` headers below
 * belong to nested conditionals whose interior lines (e.g. the
 * r->base.finish invocation, presumably) are elided; restore from
 * upstream before compiling.  (Return type line also elided.)
 */
3212 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
3213 cairo_int_status_t status)
3215 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
3217 TRACE ((stderr, "%s\n", __FUNCTION__));
3219 if (likely (status == CAIRO_INT_STATUS_SUCCESS)) {
3224 if (likely (status == CAIRO_INT_STATUS_SUCCESS && r->bpp == 0)) {
3225 const cairo_composite_rectangles_t *composite = r->composite;
/* One final composite of src through the accumulated mask into dst. */
3227 pixman_image_composite32 (r->op, r->src, r->mask,
3228 to_pixman_image (composite->surface),
3229 composite->unbounded.x + r->u.mask.src_x,
3230 composite->unbounded.y + r->u.mask.src_y,
3232 composite->unbounded.x,
3233 composite->unbounded.y,
3234 composite->unbounded.width,
3235 composite->unbounded.height);
/* Release the pixman images (guards, if any, are elided). */
3239 pixman_image_unref (r->src);
3241 pixman_image_unref (r->mask);
3245 const cairo_compositor_t *
3246 _cairo_image_spans_compositor_get (void)
3248 static cairo_spans_compositor_t spans;
3249 static cairo_compositor_t shape;
3251 if (spans.base.delegate == NULL) {
3252 _cairo_shape_mask_compositor_init (&shape,
3253 _cairo_image_traps_compositor_get());
3254 shape.glyphs = NULL;
3256 _cairo_spans_compositor_init (&spans, &shape);
3259 #if PIXMAN_HAS_OP_LERP
3260 spans.flags |= CAIRO_SPANS_COMPOSITOR_HAS_LERP;
3263 //spans.acquire = acquire;
3264 //spans.release = release;
3265 spans.fill_boxes = fill_boxes;
3266 spans.draw_image_boxes = draw_image_boxes;
3267 //spans.copy_boxes = copy_boxes;
3268 spans.pattern_to_surface = _cairo_image_source_create_for_pattern;
3269 //spans.check_composite_boxes = check_composite_boxes;
3270 spans.composite_boxes = composite_boxes;
3271 //spans.check_span_renderer = check_span_renderer;
3272 spans.renderer_init = span_renderer_init;
3273 spans.renderer_fini = span_renderer_fini;