1 /* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
2 /* cairo - a vector graphics library with display and print output
4 * Copyright © 2003 University of Southern California
5 * Copyright © 2009,2010,2011 Intel Corporation
7 * This library is free software; you can redistribute it and/or
8 * modify it either under the terms of the GNU Lesser General Public
9 * License version 2.1 as published by the Free Software Foundation
10 * (the "LGPL") or, at your option, under the terms of the Mozilla
11 * Public License Version 1.1 (the "MPL"). If you do not alter this
12 * notice, a recipient may use your version of this file under either
13 * the MPL or the LGPL.
15 * You should have received a copy of the LGPL along with this library
16 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
18 * You should have received a copy of the MPL along with this library
19 * in the file COPYING-MPL-1.1
21 * The contents of this file are subject to the Mozilla Public License
22 * Version 1.1 (the "License"); you may not use this file except in
23 * compliance with the License. You may obtain a copy of the License at
24 * http://www.mozilla.org/MPL/
26 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
27 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
28 * the specific language governing rights and limitations.
30 * The Original Code is the cairo graphics library.
32 * The Initial Developer of the Original Code is University of Southern
36 * Carl D. Worth <cworth@cworth.org>
37 * Chris Wilson <chris@chris-wilson.co.uk>
40 /* The primarily reason for keeping a traps-compositor around is
41 * for validating cairo-xlib (which currently also uses traps).
46 #include "cairo-image-surface-private.h"
48 #include "cairo-compositor-private.h"
49 #include "cairo-spans-compositor-private.h"
51 #include "cairo-region-private.h"
52 #include "cairo-traps-private.h"
53 #include "cairo-tristrip-private.h"
55 static pixman_image_t *
56 to_pixman_image (cairo_surface_t *s)
58 return ((cairo_image_surface_t *)s)->pixman_image;
61 static cairo_int_status_t
62 acquire (void *abstract_dst)
64 return CAIRO_STATUS_SUCCESS;
67 static cairo_int_status_t
68 release (void *abstract_dst)
70 return CAIRO_STATUS_SUCCESS;
73 static cairo_int_status_t
74 set_clip_region (void *_surface,
75 cairo_region_t *region)
77 cairo_image_surface_t *surface = _surface;
78 pixman_region32_t *rgn = region ? ®ion->rgn : NULL;
80 if (! pixman_image_set_clip_region32 (surface->pixman_image, rgn))
81 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
83 return CAIRO_STATUS_SUCCESS;
86 static cairo_int_status_t
87 draw_image_boxes (void *_dst,
88 cairo_image_surface_t *image,
92 cairo_image_surface_t *dst = _dst;
93 struct _cairo_boxes_chunk *chunk;
96 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
98 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
99 for (i = 0; i < chunk->count; i++) {
100 cairo_box_t *b = &chunk->base[i];
101 int x = _cairo_fixed_integer_part (b->p1.x);
102 int y = _cairo_fixed_integer_part (b->p1.y);
103 int w = _cairo_fixed_integer_part (b->p2.x) - x;
104 int h = _cairo_fixed_integer_part (b->p2.y) - y;
105 if (dst->pixman_format != image->pixman_format ||
106 ! pixman_blt ((uint32_t *)image->data, (uint32_t *)dst->data,
107 image->stride / sizeof (uint32_t),
108 dst->stride / sizeof (uint32_t),
109 PIXMAN_FORMAT_BPP (image->pixman_format),
110 PIXMAN_FORMAT_BPP (dst->pixman_format),
115 pixman_image_composite32 (PIXMAN_OP_SRC,
116 image->pixman_image, NULL, dst->pixman_image,
124 return CAIRO_STATUS_SUCCESS;
127 static inline uint32_t
128 color_to_uint32 (const cairo_color_t *color)
131 (color->alpha_short >> 8 << 24) |
132 (color->red_short >> 8 << 16) |
133 (color->green_short & 0xff00) |
134 (color->blue_short >> 8);
137 static inline cairo_bool_t
138 color_to_pixel (const cairo_color_t *color,
139 pixman_format_code_t format,
144 if (!(format == PIXMAN_a8r8g8b8 ||
145 format == PIXMAN_x8r8g8b8 ||
146 format == PIXMAN_a8b8g8r8 ||
147 format == PIXMAN_x8b8g8r8 ||
148 format == PIXMAN_b8g8r8a8 ||
149 format == PIXMAN_b8g8r8x8 ||
150 format == PIXMAN_r5g6b5 ||
151 format == PIXMAN_b5g6r5 ||
152 format == PIXMAN_a8))
157 c = color_to_uint32 (color);
159 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_ABGR) {
160 c = ((c & 0xff000000) >> 0) |
161 ((c & 0x00ff0000) >> 16) |
162 ((c & 0x0000ff00) >> 0) |
163 ((c & 0x000000ff) << 16);
166 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_BGRA) {
167 c = ((c & 0xff000000) >> 24) |
168 ((c & 0x00ff0000) >> 8) |
169 ((c & 0x0000ff00) << 8) |
170 ((c & 0x000000ff) << 24);
173 if (format == PIXMAN_a8) {
175 } else if (format == PIXMAN_r5g6b5 || format == PIXMAN_b5g6r5) {
176 c = ((((c) >> 3) & 0x001f) |
177 (((c) >> 5) & 0x07e0) |
178 (((c) >> 8) & 0xf800));
186 _pixman_operator (cairo_operator_t op)
189 case CAIRO_OPERATOR_CLEAR:
190 return PIXMAN_OP_CLEAR;
192 case CAIRO_OPERATOR_SOURCE:
193 return PIXMAN_OP_SRC;
194 case CAIRO_OPERATOR_OVER:
195 return PIXMAN_OP_OVER;
196 case CAIRO_OPERATOR_IN:
198 case CAIRO_OPERATOR_OUT:
199 return PIXMAN_OP_OUT;
200 case CAIRO_OPERATOR_ATOP:
201 return PIXMAN_OP_ATOP;
203 case CAIRO_OPERATOR_DEST:
204 return PIXMAN_OP_DST;
205 case CAIRO_OPERATOR_DEST_OVER:
206 return PIXMAN_OP_OVER_REVERSE;
207 case CAIRO_OPERATOR_DEST_IN:
208 return PIXMAN_OP_IN_REVERSE;
209 case CAIRO_OPERATOR_DEST_OUT:
210 return PIXMAN_OP_OUT_REVERSE;
211 case CAIRO_OPERATOR_DEST_ATOP:
212 return PIXMAN_OP_ATOP_REVERSE;
214 case CAIRO_OPERATOR_XOR:
215 return PIXMAN_OP_XOR;
216 case CAIRO_OPERATOR_ADD:
217 return PIXMAN_OP_ADD;
218 case CAIRO_OPERATOR_SATURATE:
219 return PIXMAN_OP_SATURATE;
221 case CAIRO_OPERATOR_MULTIPLY:
222 return PIXMAN_OP_MULTIPLY;
223 case CAIRO_OPERATOR_SCREEN:
224 return PIXMAN_OP_SCREEN;
225 case CAIRO_OPERATOR_OVERLAY:
226 return PIXMAN_OP_OVERLAY;
227 case CAIRO_OPERATOR_DARKEN:
228 return PIXMAN_OP_DARKEN;
229 case CAIRO_OPERATOR_LIGHTEN:
230 return PIXMAN_OP_LIGHTEN;
231 case CAIRO_OPERATOR_COLOR_DODGE:
232 return PIXMAN_OP_COLOR_DODGE;
233 case CAIRO_OPERATOR_COLOR_BURN:
234 return PIXMAN_OP_COLOR_BURN;
235 case CAIRO_OPERATOR_HARD_LIGHT:
236 return PIXMAN_OP_HARD_LIGHT;
237 case CAIRO_OPERATOR_SOFT_LIGHT:
238 return PIXMAN_OP_SOFT_LIGHT;
239 case CAIRO_OPERATOR_DIFFERENCE:
240 return PIXMAN_OP_DIFFERENCE;
241 case CAIRO_OPERATOR_EXCLUSION:
242 return PIXMAN_OP_EXCLUSION;
243 case CAIRO_OPERATOR_HSL_HUE:
244 return PIXMAN_OP_HSL_HUE;
245 case CAIRO_OPERATOR_HSL_SATURATION:
246 return PIXMAN_OP_HSL_SATURATION;
247 case CAIRO_OPERATOR_HSL_COLOR:
248 return PIXMAN_OP_HSL_COLOR;
249 case CAIRO_OPERATOR_HSL_LUMINOSITY:
250 return PIXMAN_OP_HSL_LUMINOSITY;
254 return PIXMAN_OP_OVER;
259 fill_reduces_to_source (cairo_operator_t op,
260 const cairo_color_t *color,
261 cairo_image_surface_t *dst)
263 if (op == CAIRO_OPERATOR_SOURCE || op == CAIRO_OPERATOR_CLEAR)
265 if (op == CAIRO_OPERATOR_OVER && CAIRO_COLOR_IS_OPAQUE (color))
267 if (dst->base.is_clear)
268 return op == CAIRO_OPERATOR_OVER || op == CAIRO_OPERATOR_ADD;
273 static cairo_int_status_t
274 fill_rectangles (void *_dst,
276 const cairo_color_t *color,
277 cairo_rectangle_int_t *rects,
280 cairo_image_surface_t *dst = _dst;
284 TRACE ((stderr, "%s\n", __FUNCTION__));
286 if (fill_reduces_to_source (op, color, dst) &&
287 color_to_pixel (color, dst->pixman_format, &pixel))
289 for (i = 0; i < num_rects; i++) {
290 pixman_fill ((uint32_t *) dst->data, dst->stride / sizeof (uint32_t),
291 PIXMAN_FORMAT_BPP (dst->pixman_format),
292 rects[i].x, rects[i].y,
293 rects[i].width, rects[i].height,
299 pixman_image_t *src = _pixman_image_for_color (color);
301 op = _pixman_operator (op);
302 for (i = 0; i < num_rects; i++) {
303 pixman_image_composite32 (op,
304 src, NULL, dst->pixman_image,
307 rects[i].x, rects[i].y,
308 rects[i].width, rects[i].height);
311 pixman_image_unref (src);
314 return CAIRO_STATUS_SUCCESS;
317 static cairo_int_status_t
318 fill_boxes (void *_dst,
320 const cairo_color_t *color,
321 cairo_boxes_t *boxes)
323 cairo_image_surface_t *dst = _dst;
324 struct _cairo_boxes_chunk *chunk;
328 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
330 if (fill_reduces_to_source (op, color, dst) &&
331 color_to_pixel (color, dst->pixman_format, &pixel))
333 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
334 for (i = 0; i < chunk->count; i++) {
335 int x = _cairo_fixed_integer_part (chunk->base[i].p1.x);
336 int y = _cairo_fixed_integer_part (chunk->base[i].p1.y);
337 int w = _cairo_fixed_integer_part (chunk->base[i].p2.x) - x;
338 int h = _cairo_fixed_integer_part (chunk->base[i].p2.y) - y;
339 pixman_fill ((uint32_t *) dst->data,
340 dst->stride / sizeof (uint32_t),
341 PIXMAN_FORMAT_BPP (dst->pixman_format),
348 pixman_image_t *src = _pixman_image_for_color (color);
350 op = _pixman_operator (op);
351 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
352 for (i = 0; i < chunk->count; i++) {
353 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
354 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
355 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
356 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
357 pixman_image_composite32 (op,
358 src, NULL, dst->pixman_image,
366 pixman_image_unref (src);
369 return CAIRO_STATUS_SUCCESS;
372 static cairo_int_status_t
373 composite (void *_dst,
375 cairo_surface_t *abstract_src,
376 cairo_surface_t *abstract_mask,
386 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
387 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
389 TRACE ((stderr, "%s\n", __FUNCTION__));
392 pixman_image_composite32 (_pixman_operator (op),
393 src->pixman_image, mask->pixman_image, to_pixman_image (_dst),
399 pixman_image_composite32 (_pixman_operator (op),
400 src->pixman_image, NULL, to_pixman_image (_dst),
407 return CAIRO_STATUS_SUCCESS;
410 static cairo_int_status_t
412 cairo_surface_t *abstract_src,
413 cairo_surface_t *abstract_mask,
423 cairo_image_surface_t *dst = _dst;
424 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
425 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
427 TRACE ((stderr, "%s\n", __FUNCTION__));
429 #if PIXMAN_HAS_OP_LERP
430 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
431 src->pixman_image, mask->pixman_image, dst->pixman_image,
437 /* Punch the clip out of the destination */
438 TRACE ((stderr, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
440 mask->base.unique_id, mask->pixman_image,
441 dst->base.unique_id, dst->pixman_image));
442 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
443 mask->pixman_image, NULL, dst->pixman_image,
449 /* Now add the two results together */
450 TRACE ((stderr, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
452 src->base.unique_id, src->pixman_image,
453 mask->base.unique_id, mask->pixman_image,
454 dst->base.unique_id, dst->pixman_image));
455 pixman_image_composite32 (PIXMAN_OP_ADD,
456 src->pixman_image, mask->pixman_image, dst->pixman_image,
463 return CAIRO_STATUS_SUCCESS;
466 static cairo_int_status_t
467 composite_boxes (void *_dst,
469 cairo_surface_t *abstract_src,
470 cairo_surface_t *abstract_mask,
477 cairo_boxes_t *boxes,
478 const cairo_rectangle_int_t *extents)
480 pixman_image_t *dst = to_pixman_image (_dst);
481 pixman_image_t *src = ((cairo_image_source_t *)abstract_src)->pixman_image;
482 pixman_image_t *mask = abstract_mask ? ((cairo_image_source_t *)abstract_mask)->pixman_image : NULL;
483 pixman_image_t *free_src = NULL;
484 struct _cairo_boxes_chunk *chunk;
487 /* XXX consider using a region? saves multiple prepare-composite */
488 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
490 if (((cairo_surface_t *)_dst)->is_clear &&
491 (op == CAIRO_OPERATOR_SOURCE ||
492 op == CAIRO_OPERATOR_OVER ||
493 op == CAIRO_OPERATOR_ADD)) {
496 if (op == CAIRO_OPERATOR_CLEAR) {
497 #if PIXMAN_HAS_OP_LERP
498 op = PIXMAN_OP_LERP_CLEAR;
500 free_src = src = _pixman_image_for_color (CAIRO_COLOR_WHITE);
501 op = PIXMAN_OP_OUT_REVERSE;
503 } else if (op == CAIRO_OPERATOR_SOURCE) {
504 #if PIXMAN_HAS_OP_LERP
505 op = PIXMAN_OP_LERP_SRC;
507 return CAIRO_INT_STATUS_UNSUPPORTED;
510 op = _pixman_operator (op);
513 op = _pixman_operator (op);
516 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
517 for (i = 0; i < chunk->count; i++) {
518 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
519 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
520 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
521 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
523 pixman_image_composite32 (op, src, mask, dst,
524 x1 + src_x, y1 + src_y,
525 x1 + mask_x, y1 + mask_y,
526 x1 + dst_x, y1 + dst_y,
532 pixman_image_unref (free_src);
534 return CAIRO_STATUS_SUCCESS;
537 #define CAIRO_FIXED_16_16_MIN _cairo_fixed_from_int (-32768)
538 #define CAIRO_FIXED_16_16_MAX _cairo_fixed_from_int (32767)
541 line_exceeds_16_16 (const cairo_line_t *line)
544 line->p1.x <= CAIRO_FIXED_16_16_MIN ||
545 line->p1.x >= CAIRO_FIXED_16_16_MAX ||
547 line->p2.x <= CAIRO_FIXED_16_16_MIN ||
548 line->p2.x >= CAIRO_FIXED_16_16_MAX ||
550 line->p1.y <= CAIRO_FIXED_16_16_MIN ||
551 line->p1.y >= CAIRO_FIXED_16_16_MAX ||
553 line->p2.y <= CAIRO_FIXED_16_16_MIN ||
554 line->p2.y >= CAIRO_FIXED_16_16_MAX;
558 project_line_x_onto_16_16 (const cairo_line_t *line,
560 cairo_fixed_t bottom,
561 pixman_line_fixed_t *out)
563 /* XXX use fixed-point arithmetic? */
564 cairo_point_double_t p1, p2;
567 p1.x = _cairo_fixed_to_double (line->p1.x);
568 p1.y = _cairo_fixed_to_double (line->p1.y);
570 p2.x = _cairo_fixed_to_double (line->p2.x);
571 p2.y = _cairo_fixed_to_double (line->p2.y);
573 m = (p2.x - p1.x) / (p2.y - p1.y);
574 out->p1.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (top - line->p1.y));
575 out->p2.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (bottom - line->p1.y));
579 _pixman_image_add_traps (pixman_image_t *image,
580 int dst_x, int dst_y,
581 cairo_traps_t *traps)
583 cairo_trapezoid_t *t = traps->traps;
584 int num_traps = traps->num_traps;
585 while (num_traps--) {
586 pixman_trapezoid_t trap;
588 /* top/bottom will be clamped to surface bounds */
589 trap.top = _cairo_fixed_to_16_16 (t->top);
590 trap.bottom = _cairo_fixed_to_16_16 (t->bottom);
592 /* However, all the other coordinates will have been left untouched so
593 * as not to introduce numerical error. Recompute them if they
594 * exceed the 16.16 limits.
596 if (unlikely (line_exceeds_16_16 (&t->left))) {
597 project_line_x_onto_16_16 (&t->left, t->top, t->bottom, &trap.left);
598 trap.left.p1.y = trap.top;
599 trap.left.p2.y = trap.bottom;
601 trap.left.p1.x = _cairo_fixed_to_16_16 (t->left.p1.x);
602 trap.left.p1.y = _cairo_fixed_to_16_16 (t->left.p1.y);
603 trap.left.p2.x = _cairo_fixed_to_16_16 (t->left.p2.x);
604 trap.left.p2.y = _cairo_fixed_to_16_16 (t->left.p2.y);
607 if (unlikely (line_exceeds_16_16 (&t->right))) {
608 project_line_x_onto_16_16 (&t->right, t->top, t->bottom, &trap.right);
609 trap.right.p1.y = trap.top;
610 trap.right.p2.y = trap.bottom;
612 trap.right.p1.x = _cairo_fixed_to_16_16 (t->right.p1.x);
613 trap.right.p1.y = _cairo_fixed_to_16_16 (t->right.p1.y);
614 trap.right.p2.x = _cairo_fixed_to_16_16 (t->right.p2.x);
615 trap.right.p2.y = _cairo_fixed_to_16_16 (t->right.p2.y);
618 pixman_rasterize_trapezoid (image, &trap, -dst_x, -dst_y);
623 static cairo_int_status_t
624 composite_traps (void *_dst,
626 cairo_surface_t *abstract_src,
631 const cairo_rectangle_int_t *extents,
632 cairo_antialias_t antialias,
633 cairo_traps_t *traps)
635 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
636 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
637 pixman_image_t *mask;
638 pixman_format_code_t format;
640 TRACE ((stderr, "%s\n", __FUNCTION__));
642 /* Special case adding trapezoids onto a mask surface; we want to avoid
643 * creating an intermediate temporary mask unnecessarily.
645 * We make the assumption here that the portion of the trapezoids
646 * contained within the surface is bounded by [dst_x,dst_y,width,height];
647 * the Cairo core code passes bounds based on the trapezoid extents.
649 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
650 if (dst->pixman_format == format &&
651 (abstract_src == NULL ||
652 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
654 _pixman_image_add_traps (dst->pixman_image, dst_x, dst_y, traps);
655 return CAIRO_STATUS_SUCCESS;
658 mask = pixman_image_create_bits (format,
659 extents->width, extents->height,
661 if (unlikely (mask == NULL))
662 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
664 _pixman_image_add_traps (mask, extents->x, extents->y, traps);
665 pixman_image_composite32 (_pixman_operator (op),
666 src->pixman_image, mask, dst->pixman_image,
667 extents->x + src_x, extents->y + src_y,
669 extents->x - dst_x, extents->y - dst_y,
670 extents->width, extents->height);
672 pixman_image_unref (mask);
674 return CAIRO_STATUS_SUCCESS;
678 set_point (pixman_point_fixed_t *p, cairo_point_t *c)
680 p->x = _cairo_fixed_to_16_16 (c->x);
681 p->y = _cairo_fixed_to_16_16 (c->y);
685 _pixman_image_add_tristrip (pixman_image_t *image,
686 int dst_x, int dst_y,
687 cairo_tristrip_t *strip)
689 pixman_triangle_t tri;
690 pixman_point_fixed_t *p[3] = {&tri.p1, &tri.p2, &tri.p3 };
693 set_point (p[0], &strip->points[0]);
694 set_point (p[1], &strip->points[1]);
695 set_point (p[2], &strip->points[2]);
696 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
697 for (n = 3; n < strip->num_points; n++) {
698 set_point (p[n%3], &strip->points[n]);
699 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
703 static cairo_int_status_t
704 composite_tristrip (void *_dst,
706 cairo_surface_t *abstract_src,
711 const cairo_rectangle_int_t *extents,
712 cairo_antialias_t antialias,
713 cairo_tristrip_t *strip)
715 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
716 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
717 pixman_image_t *mask;
718 pixman_format_code_t format;
720 TRACE ((stderr, "%s\n", __FUNCTION__));
722 if (strip->num_points < 3)
723 return CAIRO_STATUS_SUCCESS;
725 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
726 if (dst->pixman_format == format &&
727 (abstract_src == NULL ||
728 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
730 _pixman_image_add_tristrip (dst->pixman_image, dst_x, dst_y, strip);
731 return CAIRO_STATUS_SUCCESS;
734 mask = pixman_image_create_bits (format,
735 extents->width, extents->height,
737 if (unlikely (mask == NULL))
738 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
740 _pixman_image_add_tristrip (mask, extents->x, extents->y, strip);
741 pixman_image_composite32 (_pixman_operator (op),
742 src->pixman_image, mask, dst->pixman_image,
743 extents->x + src_x, extents->y + src_y,
745 extents->x - dst_x, extents->y - dst_y,
746 extents->width, extents->height);
748 pixman_image_unref (mask);
750 return CAIRO_STATUS_SUCCESS;
753 static cairo_int_status_t
754 check_composite_glyphs (const cairo_composite_rectangles_t *extents,
755 cairo_scaled_font_t *scaled_font,
756 cairo_glyph_t *glyphs,
759 return CAIRO_STATUS_SUCCESS;
762 static cairo_int_status_t
763 composite_one_glyph (void *_dst,
765 cairo_surface_t *_src,
770 cairo_composite_glyphs_info_t *info)
772 cairo_image_surface_t *glyph_surface;
773 cairo_scaled_glyph_t *scaled_glyph;
774 cairo_status_t status;
777 TRACE ((stderr, "%s\n", __FUNCTION__));
779 status = _cairo_scaled_glyph_lookup (info->font,
780 info->glyphs[0].index,
781 CAIRO_SCALED_GLYPH_INFO_SURFACE,
784 if (unlikely (status))
787 glyph_surface = scaled_glyph->surface;
788 if (glyph_surface->width == 0 || glyph_surface->height == 0)
789 return CAIRO_INT_STATUS_NOTHING_TO_DO;
791 /* round glyph locations to the nearest pixel */
792 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
793 x = _cairo_lround (info->glyphs[0].x -
794 glyph_surface->base.device_transform.x0);
795 y = _cairo_lround (info->glyphs[0].y -
796 glyph_surface->base.device_transform.y0);
798 pixman_image_composite32 (_pixman_operator (op),
799 ((cairo_image_source_t *)_src)->pixman_image,
800 glyph_surface->pixman_image,
801 to_pixman_image (_dst),
802 x + src_x, y + src_y,
804 x - dst_x, y - dst_y,
805 glyph_surface->width,
806 glyph_surface->height);
808 return CAIRO_INT_STATUS_SUCCESS;
811 static cairo_int_status_t
812 composite_glyphs_via_mask (void *_dst,
814 cairo_surface_t *_src,
819 cairo_composite_glyphs_info_t *info)
821 cairo_scaled_glyph_t *glyph_cache[64];
822 cairo_bool_t component_alpha = FALSE;
824 pixman_image_t *mask;
825 cairo_status_t status;
828 TRACE ((stderr, "%s\n", __FUNCTION__));
830 /* XXX convert the glyphs to common formats a8/a8r8g8b8 to hit
831 * optimised paths through pixman. Should we increase the bit
832 * depth of the target surface, we should reconsider the appropriate
835 i = (info->extents.width + 3) & ~3;
836 if (i * info->extents.height > (int) sizeof (buf)) {
837 mask = pixman_image_create_bits (PIXMAN_a8,
839 info->extents.height,
842 memset (buf, 0, i * info->extents.height);
843 mask = pixman_image_create_bits (PIXMAN_a8,
845 info->extents.height,
848 if (unlikely (mask == NULL))
849 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
851 memset (glyph_cache, 0, sizeof (glyph_cache));
852 status = CAIRO_STATUS_SUCCESS;
854 for (i = 0; i < info->num_glyphs; i++) {
855 cairo_image_surface_t *glyph_surface;
856 cairo_scaled_glyph_t *scaled_glyph;
857 unsigned long glyph_index = info->glyphs[i].index;
858 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
861 scaled_glyph = glyph_cache[cache_index];
862 if (scaled_glyph == NULL ||
863 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
865 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
866 CAIRO_SCALED_GLYPH_INFO_SURFACE,
869 if (unlikely (status)) {
870 pixman_image_unref (mask);
874 glyph_cache[cache_index] = scaled_glyph;
877 glyph_surface = scaled_glyph->surface;
878 if (glyph_surface->width && glyph_surface->height) {
879 if (glyph_surface->base.content & CAIRO_CONTENT_COLOR &&
881 pixman_image_t *ca_mask;
883 ca_mask = pixman_image_create_bits (PIXMAN_a8r8g8b8,
885 info->extents.height,
887 if (unlikely (ca_mask == NULL)) {
888 pixman_image_unref (mask);
889 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
892 pixman_image_composite32 (PIXMAN_OP_SRC,
898 info->extents.height);
899 pixman_image_unref (mask);
901 component_alpha = TRUE;
904 /* round glyph locations to the nearest pixel */
905 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
906 x = _cairo_lround (info->glyphs[i].x -
907 glyph_surface->base.device_transform.x0);
908 y = _cairo_lround (info->glyphs[i].y -
909 glyph_surface->base.device_transform.y0);
911 pixman_image_composite32 (PIXMAN_OP_ADD,
912 glyph_surface->pixman_image, NULL, mask,
915 x - info->extents.x, y - info->extents.y,
916 glyph_surface->width,
917 glyph_surface->height);
922 pixman_image_set_component_alpha (mask, TRUE);
924 pixman_image_composite32 (_pixman_operator (op),
925 ((cairo_image_source_t *)_src)->pixman_image,
927 to_pixman_image (_dst),
928 info->extents.x + src_x, info->extents.y + src_y,
930 info->extents.x - dst_x, info->extents.y - dst_y,
931 info->extents.width, info->extents.height);
932 pixman_image_unref (mask);
934 return CAIRO_STATUS_SUCCESS;
937 static cairo_int_status_t
938 composite_glyphs (void *_dst,
940 cairo_surface_t *_src,
945 cairo_composite_glyphs_info_t *info)
947 cairo_scaled_glyph_t *glyph_cache[64];
948 pixman_image_t *dst, *src;
949 cairo_status_t status;
952 TRACE ((stderr, "%s\n", __FUNCTION__));
954 if (info->num_glyphs == 1)
955 return composite_one_glyph(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
958 return composite_glyphs_via_mask(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
960 op = _pixman_operator (op);
961 dst = to_pixman_image (_dst);
962 src = ((cairo_image_source_t *)_src)->pixman_image;
964 memset (glyph_cache, 0, sizeof (glyph_cache));
965 status = CAIRO_STATUS_SUCCESS;
967 for (i = 0; i < info->num_glyphs; i++) {
969 cairo_image_surface_t *glyph_surface;
970 cairo_scaled_glyph_t *scaled_glyph;
971 unsigned long glyph_index = info->glyphs[i].index;
972 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
974 scaled_glyph = glyph_cache[cache_index];
975 if (scaled_glyph == NULL ||
976 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
978 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
979 CAIRO_SCALED_GLYPH_INFO_SURFACE,
982 if (unlikely (status))
985 glyph_cache[cache_index] = scaled_glyph;
988 glyph_surface = scaled_glyph->surface;
989 if (glyph_surface->width && glyph_surface->height) {
990 /* round glyph locations to the nearest pixel */
991 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
992 x = _cairo_lround (info->glyphs[i].x -
993 glyph_surface->base.device_transform.x0);
994 y = _cairo_lround (info->glyphs[i].y -
995 glyph_surface->base.device_transform.y0);
997 pixman_image_composite32 (op, src, glyph_surface->pixman_image, dst,
998 x + src_x, y + src_y,
1000 x - dst_x, y - dst_y,
1001 glyph_surface->width,
1002 glyph_surface->height);
1009 static cairo_int_status_t
1010 check_composite (const cairo_composite_rectangles_t *extents)
1012 return CAIRO_STATUS_SUCCESS;
1015 const cairo_compositor_t *
1016 _cairo_image_traps_compositor_get (void)
1018 static cairo_traps_compositor_t compositor;
1020 if (compositor.base.delegate == NULL) {
1021 _cairo_traps_compositor_init (&compositor,
1022 &__cairo_no_compositor);
1023 compositor.acquire = acquire;
1024 compositor.release = release;
1025 compositor.set_clip_region = set_clip_region;
1026 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1027 compositor.draw_image_boxes = draw_image_boxes;
1028 //compositor.copy_boxes = copy_boxes;
1029 compositor.fill_boxes = fill_boxes;
1030 compositor.check_composite = check_composite;
1031 compositor.composite = composite;
1032 compositor.lerp = lerp;
1033 //compositor.check_composite_boxes = check_composite_boxes;
1034 compositor.composite_boxes = composite_boxes;
1035 //compositor.check_composite_traps = check_composite_traps;
1036 compositor.composite_traps = composite_traps;
1037 //compositor.check_composite_tristrip = check_composite_traps;
1038 compositor.composite_tristrip = composite_tristrip;
1039 compositor.check_composite_glyphs = check_composite_glyphs;
1040 compositor.composite_glyphs = composite_glyphs;
1043 return &compositor.base;
1046 const cairo_compositor_t *
1047 _cairo_image_mask_compositor_get (void)
1049 static cairo_mask_compositor_t compositor;
1051 if (compositor.base.delegate == NULL) {
1052 _cairo_mask_compositor_init (&compositor,
1053 _cairo_image_traps_compositor_get ());
1054 compositor.acquire = acquire;
1055 compositor.release = release;
1056 compositor.set_clip_region = set_clip_region;
1057 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1058 compositor.draw_image_boxes = draw_image_boxes;
1059 compositor.fill_rectangles = fill_rectangles;
1060 compositor.fill_boxes = fill_boxes;
1061 //compositor.check_composite = check_composite;
1062 compositor.composite = composite;
1063 //compositor.lerp = lerp;
1064 //compositor.check_composite_boxes = check_composite_boxes;
1065 compositor.composite_boxes = composite_boxes;
1066 compositor.check_composite_glyphs = check_composite_glyphs;
1067 compositor.composite_glyphs = composite_glyphs;
1070 return &compositor.base;
1073 #if PIXMAN_HAS_COMPOSITOR
1074 typedef struct _cairo_image_span_renderer {
1075 cairo_span_renderer_t base;
1077 pixman_image_compositor_t *compositor;
1078 pixman_image_t *src, *mask;
1080 cairo_rectangle_int_t extents;
1081 } cairo_image_span_renderer_t;
1082 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
1084 static cairo_status_t
1085 _cairo_image_bounded_opaque_spans (void *abstract_renderer,
1087 const cairo_half_open_span_t *spans,
1090 cairo_image_span_renderer_t *r = abstract_renderer;
1093 return CAIRO_STATUS_SUCCESS;
1096 if (spans[0].coverage)
1097 pixman_image_compositor_blt (r->compositor,
1099 spans[1].x - spans[0].x, height,
1102 } while (--num_spans > 1);
1104 return CAIRO_STATUS_SUCCESS;
1107 static cairo_status_t
1108 _cairo_image_bounded_spans (void *abstract_renderer,
1110 const cairo_half_open_span_t *spans,
1113 cairo_image_span_renderer_t *r = abstract_renderer;
1116 return CAIRO_STATUS_SUCCESS;
1119 if (spans[0].coverage) {
1120 pixman_image_compositor_blt (r->compositor,
1122 spans[1].x - spans[0].x, height,
1123 r->opacity * spans[0].coverage);
1126 } while (--num_spans > 1);
1128 return CAIRO_STATUS_SUCCESS;
1131 static cairo_status_t
1132 _cairo_image_unbounded_spans (void *abstract_renderer,
1134 const cairo_half_open_span_t *spans,
1137 cairo_image_span_renderer_t *r = abstract_renderer;
1139 assert (y + height <= r->extents.height);
1140 if (y > r->extents.y) {
1141 pixman_image_compositor_blt (r->compositor,
1142 r->extents.x, r->extents.y,
1143 r->extents.width, y - r->extents.y,
1147 if (num_spans == 0) {
1148 pixman_image_compositor_blt (r->compositor,
1150 r->extents.width, height,
1153 if (spans[0].x != r->extents.x) {
1154 pixman_image_compositor_blt (r->compositor,
1156 spans[0].x - r->extents.x,
1162 assert (spans[0].x < r->extents.x + r->extents.width);
1163 pixman_image_compositor_blt (r->compositor,
1165 spans[1].x - spans[0].x, height,
1166 r->opacity * spans[0].coverage);
1168 } while (--num_spans > 1);
1170 if (spans[0].x != r->extents.x + r->extents.width) {
1171 assert (spans[0].x < r->extents.x + r->extents.width);
1172 pixman_image_compositor_blt (r->compositor,
1174 r->extents.x + r->extents.width - spans[0].x, height,
1179 r->extents.y = y + height;
1180 return CAIRO_STATUS_SUCCESS;
1183 static cairo_status_t
1184 _cairo_image_clipped_spans (void *abstract_renderer,
1186 const cairo_half_open_span_t *spans,
1189 cairo_image_span_renderer_t *r = abstract_renderer;
1194 if (! spans[0].inverse)
1195 pixman_image_compositor_blt (r->compositor,
1197 spans[1].x - spans[0].x, height,
1198 r->opacity * spans[0].coverage);
1200 } while (--num_spans > 1);
1202 r->extents.y = y + height;
1203 return CAIRO_STATUS_SUCCESS;
1206 static cairo_status_t
1207 _cairo_image_finish_unbounded_spans (void *abstract_renderer)
1209 cairo_image_span_renderer_t *r = abstract_renderer;
1211 if (r->extents.y < r->extents.height) {
1212 pixman_image_compositor_blt (r->compositor,
1213 r->extents.x, r->extents.y,
1215 r->extents.height - r->extents.y,
1219 return CAIRO_STATUS_SUCCESS;
/* Initialize a span renderer backed by a pixman_image_compositor_t.
 * Maps the cairo operator onto a pixman op (with LERP variants for
 * CLEAR/SOURCE), builds pixman images for the source and (non-solid)
 * mask patterns, selects bounded/clipped/unbounded row callbacks, and
 * finally creates the pixman compositor over the unbounded extents.
 * Returns CAIRO_STATUS_NO_MEMORY on image-creation failure, or
 * CAIRO_INT_STATUS_NOTHING_TO_DO if pixman declines the compositor.
 * NOTE(review): several lines (declarations of src_x/src_y/mask_x/
 * mask_y, some branch bodies) are elided from this excerpt. */
1222 static cairo_int_status_t
1223 span_renderer_init (cairo_abstract_span_renderer_t *_r,
1224 const cairo_composite_rectangles_t *composite,
1225 cairo_bool_t needs_clip)
1227 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
1228 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1229 const cairo_pattern_t *source = &composite->source_pattern.base;
1230 cairo_operator_t op = composite->op;
1234 TRACE ((stderr, "%s\n", __FUNCTION__));
/* Reduce the cairo operator to the cheapest equivalent pixman op:
 * CLEAR -> LERP_CLEAR; on an already-clear destination SOURCE/OVER/ADD
 * all degenerate (elided branch body); SOURCE -> LERP_SRC. */
1236 if (op == CAIRO_OPERATOR_CLEAR) {
1237 op = PIXMAN_OP_LERP_CLEAR;
1238 } else if (dst->base.is_clear &&
1239 (op == CAIRO_OPERATOR_SOURCE ||
1240 op == CAIRO_OPERATOR_OVER ||
1241 op == CAIRO_OPERATOR_ADD)) {
1243 } else if (op == CAIRO_OPERATOR_SOURCE) {
1244 op = PIXMAN_OP_LERP_SRC;
1246 op = _pixman_operator (op);
1249 r->compositor = NULL;
1251 r->src = _pixman_image_for_pattern (dst, source, FALSE,
1252 &composite->unbounded,
1253 &composite->source_sample_area,
1255 if (unlikely (r->src == NULL))
1256 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* A solid mask folds into a scalar opacity; anything else needs a
 * real pixman mask image. */
1259 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
1260 r->opacity = composite->mask_pattern.solid.color.alpha;
1262 r->mask = _pixman_image_for_pattern (dst,
1263 &composite->mask_pattern.base,
1265 &composite->unbounded,
1266 &composite->mask_sample_area,
1268 if (unlikely (r->mask == NULL))
1269 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1271 /* XXX Component-alpha? */
1272 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
1273 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
1275 pixman_image_unref (r->src);
/* Choose row callbacks: bounded composites need no finish pass;
 * unbounded ones must clear uncovered rows in a finish callback. */
1283 if (composite->is_bounded) {
1284 if (r->opacity == 1.)
1285 r->base.render_rows = _cairo_image_bounded_opaque_spans;
1287 r->base.render_rows = _cairo_image_bounded_spans;
1288 r->base.finish = NULL;
1291 r->base.render_rows = _cairo_image_clipped_spans;
1293 r->base.render_rows = _cairo_image_unbounded_spans;
1294 r->base.finish = _cairo_image_finish_unbounded_spans;
1295 r->extents = composite->unbounded;
/* store bottom edge (y + height) in .height for the finish pass */
1296 r->extents.height += r->extents.y;
1300 pixman_image_create_compositor (op, r->src, r->mask, dst->pixman_image,
1301 composite->unbounded.x + src_x,
1302 composite->unbounded.y + src_y,
1303 composite->unbounded.x + mask_x,
1304 composite->unbounded.y + mask_y,
1305 composite->unbounded.x,
1306 composite->unbounded.y,
1307 composite->unbounded.width,
1308 composite->unbounded.height);
1309 if (unlikely (r->compositor == NULL))
1310 return CAIRO_INT_STATUS_NOTHING_TO_DO;
1312 return CAIRO_STATUS_SUCCESS;
/* Tear down the compositor-based span renderer: run the finish callback
 * on success, then release the pixman compositor, source and mask
 * images.  NOTE(review): the return-type line and the NULL-guards around
 * the unrefs appear elided in this excerpt. */
1316 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
1317 cairo_int_status_t status)
1319 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
1321 TRACE ((stderr, "%s\n", __FUNCTION__));
1323 if (status == CAIRO_INT_STATUS_SUCCESS && r->base.finish)
1327 pixman_image_compositor_destroy (r->compositor);
1330 pixman_image_unref (r->src);
1332 pixman_image_unref (r->mask);
/* Per-composite span-renderer state (the non-PIXMAN_HAS_COMPOSITOR
 * variant).  Embeds the generic cairo_span_renderer_t base, the pixman
 * source/mask/dst images, the active extents, and an inline scratch
 * buffer used for small on-stack A8 masks.  The union members (u.fill,
 * u.blit, u.mask, u.composite -- elided here) overlay per-strategy
 * state.  Must fit inside cairo_abstract_span_renderer_t, which the
 * compile-time assert below enforces. */
1335 typedef struct _cairo_image_span_renderer {
1336 cairo_span_renderer_t base;
1338 const cairo_composite_rectangles_t *composite;
1344 pixman_image_t *src, *mask;
1358 pixman_image_t *dst;
1363 cairo_rectangle_int_t extents;
/* scratch mask storage; 128 bytes reserved for the preceding members */
1369 uint8_t buf[sizeof(cairo_abstract_span_renderer_t)-128];
1370 } cairo_image_span_renderer_t;
1371 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* Row callback that rasterizes spans into the A8 mask buffer
 * (r->u.mask): write one coverage byte (scaled by r->opacity) then
 * memset the rest of the span to the same value; duplicate the first
 * row into the remaining rows of a multi-row band with memcpy.
 * NOTE(review): loop headers and several declarations are elided. */
1373 static cairo_status_t
1374 _cairo_image_spans (void *abstract_renderer,
1376 const cairo_half_open_span_t *spans,
1379 cairo_image_span_renderer_t *r = abstract_renderer;
1380 uint8_t *mask, *row;
1384 return CAIRO_STATUS_SUCCESS;
/* position into the mask buffer for row y, offset to the first span */
1386 mask = r->u.mask.data + (y - r->u.mask.extents.y) * r->u.mask.stride;
1387 mask += spans[0].x - r->u.mask.extents.x;
1391 len = spans[1].x - spans[0].x;
1392 if (spans[0].coverage) {
1393 *row++ = r->opacity * spans[0].coverage;
/* row[-1] is the byte just written: flood the rest of the span */
1395 memset (row, row[-1], len);
1399 } while (--num_spans > 1);
/* replicate the first rendered row down the rest of the band */
1404 mask += r->u.mask.stride;
1405 memcpy (mask, row, len);
1408 return CAIRO_STATUS_SUCCESS;
/* Like _cairo_image_spans, but also zero-fills every part of the mask
 * the spans do not cover: rows skipped since the last call, the gap
 * before the first span, and the gap after the last span.  Used for
 * unbounded operators where uncovered mask area must be explicit zero.
 * Advances r->u.mask.data/extents.y past the rendered band.
 * NOTE(review): loop headers and some braces are elided here. */
1411 static cairo_status_t
1412 _cairo_image_spans_and_zero (void *abstract_renderer,
1414 const cairo_half_open_span_t *spans,
1417 cairo_image_span_renderer_t *r = abstract_renderer;
/* zero any rows between the previous band and this one */
1421 mask = r->u.mask.data;
1422 if (y > r->u.mask.extents.y) {
1423 len = (y - r->u.mask.extents.y) * r->u.mask.stride;
1424 memset (mask, 0, len);
1428 r->u.mask.extents.y = y + height;
1429 r->u.mask.data = mask + height * r->u.mask.stride;
1430 if (num_spans == 0) {
1431 memset (mask, 0, height * r->u.mask.stride);
1433 uint8_t *row = mask;
/* zero the gap left of the first span */
1435 if (spans[0].x != r->u.mask.extents.x) {
1436 len = spans[0].x - r->u.mask.extents.x;
1437 memset (row, 0, len);
1442 len = spans[1].x - spans[0].x;
1443 *row++ = r->opacity * spans[0].coverage;
1445 memset (row, row[-1], --len);
1449 } while (--num_spans > 1);
/* zero the gap right of the last span */
1451 if (spans[0].x != r->u.mask.extents.x + r->u.mask.extents.width) {
1452 len = r->u.mask.extents.x + r->u.mask.extents.width - spans[0].x;
1453 memset (row, 0, len);
/* replicate the completed row into the rest of the band */
1458 mask += r->u.mask.stride;
1459 memcpy (mask, row, r->u.mask.extents.width);
1463 return CAIRO_STATUS_SUCCESS;
/* Finish pass for _cairo_image_spans_and_zero: zero any mask rows below
 * the last rendered band (extents.height holds the bottom edge). */
1466 static cairo_status_t
1467 _cairo_image_finish_spans_and_zero (void *abstract_renderer)
1469 cairo_image_span_renderer_t *r = abstract_renderer;
1471 if (r->u.mask.extents.y < r->u.mask.extents.height)
1472 memset (r->u.mask.data, 0, (r->u.mask.extents.height - r->u.mask.extents.y) * r->u.mask.stride);
1474 return CAIRO_STATUS_SUCCESS;
/* Solid-fill row callback for 8bpp destinations (mono/no-AA path):
 * any span with non-zero coverage is memset to the precomputed pixel.
 * Single-row bands take the fast path; multi-row bands repeat the fill
 * per row (inner yy loop, header elided in this excerpt). */
1477 static cairo_status_t
1478 _fill8_spans (void *abstract_renderer, int y, int h,
1479 const cairo_half_open_span_t *spans, unsigned num_spans)
1481 cairo_image_span_renderer_t *r = abstract_renderer;
1484 return CAIRO_STATUS_SUCCESS;
1486 if (likely(h == 1)) {
1488 if (spans[0].coverage) {
1489 int len = spans[1].x - spans[0].x;
1490 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
/* single-pixel spans skip the memset call overhead */
1492 *d = r->u.fill.pixel;
1494 memset(d, r->u.fill.pixel, len);
1497 } while (--num_spans > 1);
1500 if (spans[0].coverage) {
1503 int len = spans[1].x - spans[0].x;
1504 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1506 *d = r->u.fill.pixel;
1508 memset(d, r->u.fill.pixel, len);
1513 } while (--num_spans > 1);
1516 return CAIRO_STATUS_SUCCESS;
/* Solid-fill row callback for 16bpp destinations: stores the 16-bit
 * pixel value across each covered span (store loop bodies elided).
 * Same single-row fast path / multi-row structure as _fill8_spans. */
1519 static cairo_status_t
1520 _fill16_spans (void *abstract_renderer, int y, int h,
1521 const cairo_half_open_span_t *spans, unsigned num_spans)
1523 cairo_image_span_renderer_t *r = abstract_renderer;
1526 return CAIRO_STATUS_SUCCESS;
1528 if (likely(h == 1)) {
1530 if (spans[0].coverage) {
1531 int len = spans[1].x - spans[0].x;
/* spans[0].x*2: byte offset for 2-byte pixels */
1532 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*2);
1534 *d++ = r->u.fill.pixel;
1537 } while (--num_spans > 1);
1540 if (spans[0].coverage) {
1543 int len = spans[1].x - spans[0].x;
1544 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*2);
1546 *d++ = r->u.fill.pixel;
1551 } while (--num_spans > 1);
1554 return CAIRO_STATUS_SUCCESS;
/* Solid-fill row callback for 32bpp destinations.  Long spans
 * (threshold appears to be 16 pixels, see the h>1 branch) are delegated
 * to pixman_fill; short spans are written with a plain store loop to
 * avoid pixman call overhead.  NOTE(review): the single-row branch's
 * length threshold line is elided in this excerpt. */
1557 static cairo_status_t
1558 _fill32_spans (void *abstract_renderer, int y, int h,
1559 const cairo_half_open_span_t *spans, unsigned num_spans)
1561 cairo_image_span_renderer_t *r = abstract_renderer;
1564 return CAIRO_STATUS_SUCCESS;
1566 if (likely(h == 1)) {
1568 if (spans[0].coverage) {
1569 int len = spans[1].x - spans[0].x;
1571 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1572 spans[0].x, y, len, 1, r->u.fill.pixel);
1574 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
1576 *d++ = r->u.fill.pixel;
1580 } while (--num_spans > 1);
1583 if (spans[0].coverage) {
/* wide spans amortize pixman_fill's setup cost */
1584 if (spans[1].x - spans[0].x > 16) {
1585 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1586 spans[0].x, y, spans[1].x - spans[0].x, h,
1591 int len = spans[1].x - spans[0].x;
1592 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
1594 *d++ = r->u.fill.pixel;
1600 } while (--num_spans > 1);
1603 return CAIRO_STATUS_SUCCESS;
/* Generic solid-fill row callback: unconditionally uses pixman_fill for
 * every covered span at any bpp (r->bpp).  Fallback when no
 * depth-specialized variant applies. */
1607 static cairo_status_t
1608 _fill_spans (void *abstract_renderer, int y, int h,
1609 const cairo_half_open_span_t *spans, unsigned num_spans)
1611 cairo_image_span_renderer_t *r = abstract_renderer;
1614 return CAIRO_STATUS_SUCCESS;
1617 if (spans[0].coverage) {
1618 pixman_fill ((uint32_t *) r->data, r->stride, r->bpp,
1620 spans[1].x - spans[0].x, h,
1624 } while (--num_spans > 1);
1626 return CAIRO_STATUS_SUCCESS;
/* Direct copy (blit) row callback for the mono SOURCE/OVER-onto-opaque
 * fast path: copies covered spans from a same-format source surface to
 * the destination.  Spans of exactly 1/2/4/8 bytes use a single aligned
 * load/store; longer spans fall back to memcpy (switch cases partially
 * elided).  cpp = bytes per pixel (declaration elided). */
1630 static cairo_status_t
1631 _blit_spans (void *abstract_renderer, int y, int h,
1632 const cairo_half_open_span_t *spans, unsigned num_spans)
1634 cairo_image_span_renderer_t *r = abstract_renderer;
1638 return CAIRO_STATUS_SUCCESS;
1641 if (likely (h == 1)) {
1642 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
1643 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
1645 if (spans[0].coverage) {
1646 void *s = src + spans[0].x*cpp;
1647 void *d = dst + spans[0].x*cpp;
1648 int len = (spans[1].x - spans[0].x) * cpp;
1651 *(uint8_t *)d = *(uint8_t *)s;
1654 *(uint16_t *)d = *(uint16_t *)s;
1657 *(uint32_t *)d = *(uint32_t *)s;
1661 *(uint64_t *)d = *(uint64_t *)s;
1670 } while (--num_spans > 1);
1673 if (spans[0].coverage) {
1676 void *src = r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x*cpp;
1677 void *dst = r->u.blit.data + yy*r->u.blit.stride + spans[0].x*cpp;
1678 int len = (spans[1].x - spans[0].x) * cpp;
1681 *(uint8_t *)dst = *(uint8_t *)src;
1684 *(uint16_t *)dst = *(uint16_t *)src;
1687 *(uint32_t *)dst = *(uint32_t *)src;
1691 *(uint64_t *)dst = *(uint64_t *)src;
1695 memcpy(dst, src, len);
1702 } while (--num_spans > 1);
1705 return CAIRO_STATUS_SUCCESS;
/* Bounded mono (no antialiasing) row callback: composite each covered
 * span directly with pixman_image_composite32 using the renderer's op,
 * no mask image needed since coverage is all-or-nothing. */
1708 static cairo_status_t
1709 _mono_spans (void *abstract_renderer, int y, int h,
1710 const cairo_half_open_span_t *spans, unsigned num_spans)
1712 cairo_image_span_renderer_t *r = abstract_renderer;
1715 return CAIRO_STATUS_SUCCESS;
1718 if (spans[0].coverage) {
1719 pixman_image_composite32 (r->op,
1720 r->src, NULL, r->u.composite.dst,
1721 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1724 spans[1].x - spans[0].x, h);
1727 } while (--num_spans > 1);
1729 return CAIRO_STATUS_SUCCESS;
/* Unbounded mono row callback: like _mono_spans but explicitly CLEARs
 * every uncovered region -- rows skipped since the previous band
 * (tracked in u.composite.mask_y), the area left of the first span,
 * zero-coverage spans, and the area right of the last span.
 * NOTE(review): some argument lines of the composite calls are elided
 * in this excerpt. */
1732 static cairo_status_t
1733 _mono_unbounded_spans (void *abstract_renderer, int y, int h,
1734 const cairo_half_open_span_t *spans, unsigned num_spans)
1736 cairo_image_span_renderer_t *r = abstract_renderer;
/* empty band: clear the whole unbounded width for these rows */
1738 if (num_spans == 0) {
1739 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1740 r->src, NULL, r->u.composite.dst,
1741 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1743 r->composite->unbounded.x, y,
1744 r->composite->unbounded.width, h);
1745 r->u.composite.mask_y = y + h;
1746 return CAIRO_STATUS_SUCCESS;
/* clear any rows skipped between the previous band and this one */
1749 if (y != r->u.composite.mask_y) {
1750 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1751 r->src, NULL, r->u.composite.dst,
1752 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1754 r->composite->unbounded.x, r->u.composite.mask_y,
1755 r->composite->unbounded.width, y - r->u.composite.mask_y);
/* clear the strip left of the first span */
1758 if (spans[0].x != r->composite->unbounded.x) {
1759 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1760 r->src, NULL, r->u.composite.dst,
1761 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1763 r->composite->unbounded.x, y,
1764 spans[0].x - r->composite->unbounded.x, h);
/* covered spans use r->op; zero-coverage spans are cleared */
1768 int op = spans[0].coverage ? r->op : PIXMAN_OP_CLEAR;
1769 pixman_image_composite32 (op,
1770 r->src, NULL, r->u.composite.dst,
1771 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1774 spans[1].x - spans[0].x, h);
1776 } while (--num_spans > 1);
/* clear the strip right of the last span */
1778 if (spans[0].x != r->composite->unbounded.x + r->composite->unbounded.width) {
1779 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1780 r->src, NULL, r->u.composite.dst,
1781 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1784 r->composite->unbounded.x + r->composite->unbounded.width - spans[0].x, h);
1787 r->u.composite.mask_y = y + h;
1788 return CAIRO_STATUS_SUCCESS;
/* Finish pass for _mono_unbounded_spans: clear any rows between the
 * last rendered band (u.composite.mask_y) and the bottom of the
 * unbounded extents. */
1791 static cairo_status_t
1792 _mono_finish_unbounded_spans (void *abstract_renderer)
1794 cairo_image_span_renderer_t *r = abstract_renderer;
1796 if (r->u.composite.mask_y < r->composite->unbounded.y + r->composite->unbounded.height) {
1797 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1798 r->src, NULL, r->u.composite.dst,
1799 r->composite->unbounded.x + r->u.composite.src_x, r->u.composite.mask_y + r->u.composite.src_y,
1801 r->composite->unbounded.x, r->u.composite.mask_y,
1802 r->composite->unbounded.width,
1803 r->composite->unbounded.y + r->composite->unbounded.height - r->u.composite.mask_y);
1806 return CAIRO_STATUS_SUCCESS;
/* Try to set up a renderer for ANTIALIAS_NONE composites.  Fast paths,
 * in order of preference:
 *   1. plain C fills (_fill8/16/32_spans) when the solid source reduces
 *      to a single destination pixel value;
 *   2. direct blits (_blit_spans) for SOURCE (or OVER onto clear/alpha-
 *      less dst) from a same-format image surface at an integer
 *      translation fully containing the bounded extents;
 *   3. per-span pixman composites (_mono_spans / _mono_unbounded_spans).
 * Returns UNSUPPORTED when a different renderer should be tried.
 * NOTE(review): tx/ty declarations and some branch bodies are elided. */
1809 static cairo_int_status_t
1810 mono_renderer_init (cairo_image_span_renderer_t *r,
1811 const cairo_composite_rectangles_t *composite,
1812 cairo_antialias_t antialias,
1813 cairo_bool_t needs_clip)
1815 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1817 if (antialias != CAIRO_ANTIALIAS_NONE)
1818 return CAIRO_INT_STATUS_UNSUPPORTED;
1820 if (!_cairo_pattern_is_opaque_solid (&composite->mask_pattern.base))
1821 return CAIRO_INT_STATUS_UNSUPPORTED;
1823 r->base.render_rows = NULL;
1824 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
1825 const cairo_color_t *color;
1827 color = &composite->source_pattern.solid.color;
/* CLEAR is equivalent to filling with transparent */
1828 if (composite->op == CAIRO_OPERATOR_CLEAR)
1829 color = CAIRO_COLOR_TRANSPARENT;
1831 if (fill_reduces_to_source (composite->op, color, dst) &&
1832 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
1833 /* Use plain C for the fill operations as the span length is
1834 * typically small, too small to payback the startup overheads of
1837 switch (PIXMAN_FORMAT_BPP(dst->pixman_format)) {
1838 case 8: r->base.render_rows = _fill8_spans; break;
1839 case 16: r->base.render_rows = _fill16_spans; break;
1840 case 32: r->base.render_rows = _fill32_spans; break;
1843 r->u.fill.data = dst->data;
1844 r->u.fill.stride = dst->stride;
1846 } else if ((composite->op == CAIRO_OPERATOR_SOURCE ||
1847 (composite->op == CAIRO_OPERATOR_OVER &&
1848 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
1849 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
1850 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
1851 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
1853 cairo_image_surface_t *src =
1854 to_image_surface(composite->source_pattern.surface.surface);
/* only safe when the integer-translated bounded extents lie
 * entirely within the source surface */
1857 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
1859 composite->bounded.x + tx >= 0 &&
1860 composite->bounded.y + ty >= 0 &&
1861 composite->bounded.x + composite->bounded.width + tx <= src->width &&
1862 composite->bounded.y + composite->bounded.height + ty <= src->height) {
1864 r->u.blit.stride = dst->stride;
1865 r->u.blit.data = dst->data;
1866 r->u.blit.src_stride = src->stride;
/* tx * 4: presumably assumes 4-byte pixels here -- TODO confirm
 * against the full file (cf. the assert in inplace_renderer_init) */
1867 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
1868 r->base.render_rows = _blit_spans;
/* no fast path matched: fall back to per-span pixman composites */
1872 if (r->base.render_rows == NULL) {
1873 r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
1874 &composite->unbounded,
1875 &composite->source_sample_area,
1876 &r->u.composite.src_x, &r->u.composite.src_y);
1877 if (unlikely (r->src == NULL))
1878 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1880 r->u.composite.dst = to_pixman_image (composite->surface);
1881 r->op = _pixman_operator (composite->op);
1882 if (composite->is_bounded == 0) {
1883 r->base.render_rows = _mono_unbounded_spans;
1884 r->base.finish = _mono_finish_unbounded_spans;
1885 r->u.composite.mask_y = composite->unbounded.y;
1887 r->base.render_rows = _mono_spans;
/* non-zero bpp marks this renderer as "already composited" for fini */
1889 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
1891 return CAIRO_INT_STATUS_SUCCESS;
/* Constants for fixed-point 8-bit-per-channel arithmetic on 32-bit
 * pixels treated as two packed channel pairs (the red/blue pair in the
 * low bytes of each 16-bit half).  ONE_HALF is the rounding bias for a
 * single 8-bit multiply; the RB_* masks select/bias both channels of a
 * packed pair at once. */
1894 #define ONE_HALF 0x7f
1895 #define RB_MASK 0x00ff00ff
1896 #define RB_ONE_HALF 0x007f007f
1897 #define RB_MASK_PLUS_ONE 0x01000100
/* Multiply two packed 8-bit channels (the RB_MASK lanes of 'a') by the
 * 8-bit scalar 'b', with correct rounding (t + (t >> 8) folds x/255
 * into shifts).  G_SHIFT (8) is defined elsewhere in the file. */
1899 static inline uint32_t
1900 mul8x2_8 (uint32_t a, uint8_t b)
1902 uint32_t t = (a & RB_MASK) * b + RB_ONE_HALF;
1903 return ((t + ((t >> G_SHIFT) & RB_MASK)) >> G_SHIFT) & RB_MASK;
/* Saturating add of two packed 8-bit channel pairs: the RB_MASK_PLUS_ONE
 * trick builds a per-lane 0xff saturation mask from the carry bits.
 * NOTE(review): the initial sum and final return lines are elided in
 * this excerpt. */
1906 static inline uint32_t
1907 add8x2_8x2 (uint32_t a, uint32_t b)
1910 t |= RB_MASK_PLUS_ONE - ((t >> G_SHIFT) & RB_MASK);
/* Rounded 8-bit multiply: (a * b) / 255 computed as
 * t = a*b + 0x7f; ((t >> 8) + t) >> 8. */
1914 static inline uint8_t
1915 mul8_8 (uint8_t a, uint8_t b)
1917 uint16_t t = a * (uint16_t)b + ONE_HALF;
1918 return ((t >> G_SHIFT) + t) >> G_SHIFT;
/* Linear interpolation of two 32-bit pixels by an 8-bit alpha:
 * result = src*a + dst*(255-a), computed per packed channel pair
 * (low pair in r1, high pair in r2).  'ia' (255 - a) is declared on an
 * elided line. */
1921 static inline uint32_t
1922 lerp8x4 (uint32_t src, uint8_t a, uint32_t dst)
1927 r1 = add8x2_8x2 (mul8x2_8 (src, a),
1928 mul8x2_8 (dst, ia));
1929 r2 = add8x2_8x2 (mul8x2_8 (src >> G_SHIFT, a),
1930 mul8x2_8 (dst >> G_SHIFT, ia));
1932 return r1 | (r2 << G_SHIFT);
/* Antialiased solid fill into an A8 destination with an opaque mask:
 * full-coverage spans are memset to the pixel; partial-coverage spans
 * lerp the destination toward the source (s = a*pixel; per-byte
 * t = dst*(255-a) + s, with the elided lines completing the blend).
 * Single-row fast path plus a per-row multi-row path. */
1935 static cairo_status_t
1936 _fill_a8_lerp_opaque_spans (void *abstract_renderer, int y, int h,
1937 const cairo_half_open_span_t *spans, unsigned num_spans)
1939 cairo_image_span_renderer_t *r = abstract_renderer;
1942 return CAIRO_STATUS_SUCCESS;
1944 if (likely(h == 1)) {
1945 uint8_t *d = r->u.fill.data + r->u.fill.stride*y;
1947 uint8_t a = spans[0].coverage;
1949 int len = spans[1].x - spans[0].x;
/* full coverage: plain fill */
1951 memset(d + spans[0].x, r->u.fill.pixel, len);
/* partial coverage: blend source scaled by coverage into dst */
1953 uint8_t s = mul8_8(a, r->u.fill.pixel);
1954 uint8_t *dst = d + spans[0].x;
1957 uint8_t t = mul8_8(*dst, a);
1963 } while (--num_spans > 1);
1966 uint8_t a = spans[0].coverage;
1971 int len = spans[1].x - spans[0].x;
1972 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1973 memset(d, r->u.fill.pixel, len);
1977 uint8_t s = mul8_8(a, r->u.fill.pixel);
1980 int len = spans[1].x - spans[0].x;
1981 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1983 uint8_t t = mul8_8(*d, a);
1991 } while (--num_spans > 1);
1994 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into xRGB32/ARGB32 with an opaque mask:
 * full-coverage spans use pixman_fill (long) or a store loop (short);
 * partial-coverage spans lerp each pixel with lerp8x4.  Mirrors
 * _fill32_spans' >16-pixel pixman_fill threshold in the h>1 path. */
1997 static cairo_status_t
1998 _fill_xrgb32_lerp_opaque_spans (void *abstract_renderer, int y, int h,
1999 const cairo_half_open_span_t *spans, unsigned num_spans)
2001 cairo_image_span_renderer_t *r = abstract_renderer;
2004 return CAIRO_STATUS_SUCCESS;
2006 if (likely(h == 1)) {
2008 uint8_t a = spans[0].coverage;
2010 int len = spans[1].x - spans[0].x;
2011 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2014 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
2015 spans[0].x, y, len, 1, r->u.fill.pixel);
2017 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2019 *d++ = r->u.fill.pixel;
/* partial coverage: lerp source into each destination pixel */
2021 } else while (len--) {
2022 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2027 } while (--num_spans > 1);
2030 uint8_t a = spans[0].coverage;
2033 if (spans[1].x - spans[0].x > 16) {
2034 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
2035 spans[0].x, y, spans[1].x - spans[0].x, h,
2040 int len = spans[1].x - spans[0].x;
2041 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2043 *d++ = r->u.fill.pixel;
2050 int len = spans[1].x - spans[0].x;
2051 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2053 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2061 } while (--num_spans > 1);
2064 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into A8 with a translucent mask: the span
 * coverage is pre-multiplied by the mask opacity stored in r->op
 * (a = coverage * op / 255), then each destination byte is blended:
 * t = dst*(255-a) + a*pixel, rounded via (t + (t>>8)) >> 8. */
2067 static cairo_status_t
2068 _fill_a8_lerp_spans (void *abstract_renderer, int y, int h,
2069 const cairo_half_open_span_t *spans, unsigned num_spans)
2071 cairo_image_span_renderer_t *r = abstract_renderer;
2074 return CAIRO_STATUS_SUCCESS;
2076 if (likely(h == 1)) {
/* r->op holds the 8-bit mask opacity in this fill path */
2078 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2080 int len = spans[1].x - spans[0].x;
2081 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
/* p = a*pixel + rounding bias, hoisted out of the pixel loop */
2082 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2085 uint16_t t = *d*ia + p;
2086 *d++ = (t + (t>>8)) >> 8;
2090 } while (--num_spans > 1);
2093 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2096 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2099 int len = spans[1].x - spans[0].x;
2100 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2102 uint16_t t = *d*ia + p;
2103 *d++ = (t + (t>>8)) >> 8;
2109 } while (--num_spans > 1);
2112 return CAIRO_STATUS_SUCCESS;
/* Antialiased solid fill into xRGB32/ARGB32 with a translucent mask:
 * coverage is pre-multiplied by the mask opacity (r->op), then each
 * covered pixel is lerped toward the source with lerp8x4. */
2115 static cairo_status_t
2116 _fill_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2117 const cairo_half_open_span_t *spans, unsigned num_spans)
2119 cairo_image_span_renderer_t *r = abstract_renderer;
2122 return CAIRO_STATUS_SUCCESS;
2124 if (likely(h == 1)) {
2126 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2128 int len = spans[1].x - spans[0].x;
2129 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2131 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2136 } while (--num_spans > 1);
2139 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2143 int len = spans[1].x - spans[0].x;
2144 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2146 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2153 } while (--num_spans > 1);
2156 return CAIRO_STATUS_SUCCESS;
/* Antialiased blit from a same-format xRGB32 source: full effective
 * coverage (after scaling by the mask opacity in r->op) copies the span
 * with memcpy; partial coverage lerps each source pixel into the
 * destination with lerp8x4.  The 0xff check for the memcpy path sits on
 * an elided line. */
2159 static cairo_status_t
2160 _blit_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2161 const cairo_half_open_span_t *spans, unsigned num_spans)
2163 cairo_image_span_renderer_t *r = abstract_renderer;
2166 return CAIRO_STATUS_SUCCESS;
2168 if (likely(h == 1)) {
2169 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
2170 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
2172 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2174 uint32_t *s = (uint32_t*)src + spans[0].x;
2175 uint32_t *d = (uint32_t*)dst + spans[0].x;
2176 int len = spans[1].x - spans[0].x;
2181 memcpy(d, s, len*4);
2184 *d = lerp8x4 (*s, a, *d);
2190 } while (--num_spans > 1);
2193 uint8_t a = mul8_8 (spans[0].coverage, r->op);
2197 uint32_t *s = (uint32_t *)(r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x * 4);
2198 uint32_t *d = (uint32_t *)(r->u.blit.data + yy*r->u.blit.stride + spans[0].x * 4);
2199 int len = spans[1].x - spans[0].x;
2204 memcpy(d, s, len * 4);
2207 *d = lerp8x4 (*s, a, *d);
2215 } while (--num_spans > 1);
2218 return CAIRO_STATUS_SUCCESS;
/* General in-place row callback: a single fully-covered span composites
 * directly without a mask; otherwise coverage bytes are written into
 * the one-line repeating A8 mask (r->mask backed by r->buf) and the
 * whole row range [x0, x1) is composited once through that mask.
 * NOTE(review): x0/x1 bookkeeping and some argument lines are elided. */
2221 static cairo_status_t
2222 _inplace_spans (void *abstract_renderer,
2224 const cairo_half_open_span_t *spans,
2227 cairo_image_span_renderer_t *r = abstract_renderer;
2232 return CAIRO_STATUS_SUCCESS;
/* fast path: one solid span needs no mask at all */
2234 if (num_spans == 2 && spans[0].coverage == 0xff) {
2235 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2236 spans[0].x + r->u.composite.src_x,
2237 y + r->u.composite.src_y,
2240 spans[1].x - spans[0].x, h);
2241 return CAIRO_STATUS_SUCCESS;
/* rasterize coverage into the shared one-line mask buffer */
2244 mask = (uint8_t *)pixman_image_get_data (r->mask);
2247 int len = spans[1].x - spans[0].x;
2248 *mask++ = spans[0].coverage;
2250 memset (mask, spans[0].coverage, --len);
2255 } while (--num_spans > 1);
/* one composite for the whole covered range through the mask */
2257 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2258 x0 + r->u.composite.src_x,
2259 y + r->u.composite.src_y,
2264 return CAIRO_STATUS_SUCCESS;
/* In-place SOURCE-operator row callback.  SOURCE must replace covered
 * pixels and leave uncovered pixels untouched only where coverage is 0,
 * so runs of accumulated partial coverage are flushed with either a
 * single LERP_SRC (when pixman provides it) or the two-pass
 * OUT_REVERSE + ADD emulation.  Fully-covered spans flush the pending
 * run and then composite with plain PIXMAN_OP_SRC; zero-coverage spans
 * flush the run; partial spans append coverage bytes to the mask (m).
 * A final flush handles any trailing run.
 * NOTE(review): x0/m bookkeeping lines and several composite arguments
 * are elided from this excerpt. */
2267 static cairo_status_t
2268 _inplace_src_spans (void *abstract_renderer,
2270 const cairo_half_open_span_t *spans,
2273 cairo_image_span_renderer_t *r = abstract_renderer;
2278 return CAIRO_STATUS_SUCCESS;
2283 int len = spans[1].x - spans[0].x;
2284 if (spans[0].coverage == 0xff) {
/* flush the pending partial-coverage run before the solid span */
2285 if (spans[0].x != x0) {
2286 #if PIXMAN_HAS_OP_LERP
2287 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2288 r->src, r->mask, r->u.composite.dst,
2289 x0 + r->u.composite.src_x,
2290 y + r->u.composite.src_y,
2293 spans[0].x - x0, h);
/* no LERP: dst = dst OUT mask, then dst += src IN mask */
2295 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2296 r->mask, NULL, r->u.composite.dst,
2300 spans[0].x - x0, h);
2301 pixman_image_composite32 (PIXMAN_OP_ADD,
2302 r->src, r->mask, r->u.composite.dst,
2303 x0 + r->u.composite.src_x,
2304 y + r->u.composite.src_y,
2307 spans[0].x - x0, h);
/* solid span: direct SRC composite, no mask needed */
2311 pixman_image_composite32 (PIXMAN_OP_SRC,
2312 r->src, NULL, r->u.composite.dst,
2313 spans[0].x + r->u.composite.src_x,
2314 y + r->u.composite.src_y,
2317 spans[1].x - spans[0].x, h);
2321 } else if (spans[0].coverage == 0x0) {
2322 if (spans[0].x != x0) {
2323 #if PIXMAN_HAS_OP_LERP
2324 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2325 r->src, r->mask, r->u.composite.dst,
2326 x0 + r->u.composite.src_x,
2327 y + r->u.composite.src_y,
2330 spans[0].x - x0, h);
2332 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2333 r->mask, NULL, r->u.composite.dst,
2337 spans[0].x - x0, h);
2338 pixman_image_composite32 (PIXMAN_OP_ADD,
2339 r->src, r->mask, r->u.composite.dst,
2340 x0 + r->u.composite.src_x,
2341 y + r->u.composite.src_y,
2344 spans[0].x - x0, h);
/* partial coverage: append to the mask run */
2351 *m++ = spans[0].coverage;
2353 memset (m, spans[0].coverage, --len);
2358 } while (--num_spans > 1);
/* flush any partial-coverage run left at the end of the row */
2360 if (spans[0].x != x0) {
2361 #if PIXMAN_HAS_OP_LERP
2362 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2363 r->src, r->mask, r->u.composite.dst,
2364 x0 + r->u.composite.src_x,
2365 y + r->u.composite.src_y,
2368 spans[0].x - x0, h);
2370 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2371 r->mask, NULL, r->u.composite.dst,
2375 spans[0].x - x0, h);
2376 pixman_image_composite32 (PIXMAN_OP_ADD,
2377 r->src, r->mask, r->u.composite.dst,
2378 x0 + r->u.composite.src_x,
2379 y + r->u.composite.src_y,
2382 spans[0].x - x0, h);
2386 return CAIRO_STATUS_SUCCESS;
/* Try to set up an in-place (no intermediate mask surface) antialiased
 * renderer.  Requires a solid mask pattern; its 8-bit opacity is kept
 * in r->op for the lerp fill/blit paths.  Fast paths, in order:
 *   1. plain C lerp fills for solid sources on A8/RGB24/ARGB32
 *      (opaque-mask vs translucent-mask variants);
 *   2. lerp blits from a same-format xRGB32 image source at an integer
 *      translation covering the bounded extents;
 *   3. generic _inplace_spans/_inplace_src_spans through a one-line
 *      repeating A8 mask allocated on r->buf.
 * Returns UNSUPPORTED if none applies (e.g. unbounded general case).
 * NOTE(review): break statements, tx/ty declarations and some bodies
 * are elided from this excerpt. */
2389 static cairo_int_status_t
2390 inplace_renderer_init (cairo_image_span_renderer_t *r,
2391 const cairo_composite_rectangles_t *composite,
2392 cairo_antialias_t antialias,
2393 cairo_bool_t needs_clip)
2395 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2397 if (composite->mask_pattern.base.type != CAIRO_PATTERN_TYPE_SOLID)
2398 return CAIRO_INT_STATUS_UNSUPPORTED;
2400 r->base.render_rows = NULL;
/* r->op temporarily holds the mask opacity (0..255) for fill paths */
2401 r->op = composite->mask_pattern.solid.color.alpha_short >> 8;
2403 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2404 const cairo_color_t *color;
2406 color = &composite->source_pattern.solid.color;
2407 if (composite->op == CAIRO_OPERATOR_CLEAR)
2408 color = CAIRO_COLOR_TRANSPARENT;
2410 if (fill_reduces_to_source (composite->op, color, dst) &&
2411 color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
2412 /* Use plain C for the fill operations as the span length is
2413 * typically small, too small to payback the startup overheads of
/* opaque mask -> *_opaque variants, otherwise the lerp variants */
2416 if (r->op == 0xff) {
2417 switch (dst->format) {
2418 case CAIRO_FORMAT_A8:
2419 r->base.render_rows = _fill_a8_lerp_opaque_spans;
2421 case CAIRO_FORMAT_RGB24:
2422 case CAIRO_FORMAT_ARGB32:
2423 r->base.render_rows = _fill_xrgb32_lerp_opaque_spans;
2425 case CAIRO_FORMAT_A1:
2426 case CAIRO_FORMAT_RGB16_565:
2427 case CAIRO_FORMAT_RGB30:
2428 case CAIRO_FORMAT_INVALID:
2432 switch (dst->format) {
2433 case CAIRO_FORMAT_A8:
2434 r->base.render_rows = _fill_a8_lerp_spans;
2436 case CAIRO_FORMAT_RGB24:
2437 case CAIRO_FORMAT_ARGB32:
2438 r->base.render_rows = _fill_xrgb32_lerp_spans;
2440 case CAIRO_FORMAT_A1:
2441 case CAIRO_FORMAT_RGB16_565:
2442 case CAIRO_FORMAT_RGB30:
2443 case CAIRO_FORMAT_INVALID:
2447 r->u.fill.data = dst->data;
2448 r->u.fill.stride = dst->stride;
2450 } else if ((dst->format == CAIRO_FORMAT_ARGB32 || dst->format == CAIRO_FORMAT_RGB24) &&
2451 (composite->op == CAIRO_OPERATOR_SOURCE ||
2452 (composite->op == CAIRO_OPERATOR_OVER &&
2453 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2454 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2455 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2456 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2458 cairo_image_surface_t *src =
2459 to_image_surface(composite->source_pattern.surface.surface);
2462 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2464 composite->bounded.x + tx >= 0 &&
2465 composite->bounded.y + ty >= 0 &&
2466 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2467 composite->bounded.y + composite->bounded.height + ty <= src->height) {
/* blit path hard-codes 4-byte pixels; guarded by this assert */
2469 assert(PIXMAN_FORMAT_BPP(dst->pixman_format) == 32);
2470 r->u.blit.stride = dst->stride;
2471 r->u.blit.data = dst->data;
2472 r->u.blit.src_stride = src->stride;
2473 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2474 r->base.render_rows = _blit_xrgb32_lerp_spans;
/* fall back to the generic one-line-mask composite path */
2477 if (r->base.render_rows == NULL) {
2479 const cairo_pattern_t *src = &composite->source_pattern.base;
2482 return CAIRO_INT_STATUS_UNSUPPORTED;
2484 if (composite->is_bounded == 0)
2485 return CAIRO_INT_STATUS_UNSUPPORTED;
/* mask width rounded up to a 4-byte multiple for pixman */
2487 width = (composite->bounded.width + 3) & ~3;
2488 r->base.render_rows = _inplace_spans;
2489 if (dst->base.is_clear &&
2490 (composite->op == CAIRO_OPERATOR_SOURCE ||
2491 composite->op == CAIRO_OPERATOR_OVER ||
2492 composite->op == CAIRO_OPERATOR_ADD)) {
2493 r->op = PIXMAN_OP_SRC;
2494 } else if (composite->op == CAIRO_OPERATOR_SOURCE) {
2495 r->base.render_rows = _inplace_src_spans;
2496 r->u.composite.mask_y = r->composite->unbounded.y;
2497 width = (composite->unbounded.width + 3) & ~3;
2498 } else if (composite->op == CAIRO_OPERATOR_CLEAR) {
2499 r->op = PIXMAN_OP_OUT_REVERSE;
2502 r->op = _pixman_operator (composite->op);
/* the one-line mask must fit in the renderer's inline buffer */
2505 if (width > sizeof (r->buf))
2506 return CAIRO_INT_STATUS_UNSUPPORTED;
2508 r->src = _pixman_image_for_pattern (dst, src, FALSE,
2509 &composite->bounded,
2510 &composite->source_sample_area,
2511 &r->u.composite.src_x, &r->u.composite.src_y);
2512 if (unlikely (r->src == NULL))
2513 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2515 /* Create an effectively unbounded mask by repeating the single line */
2516 r->mask = pixman_image_create_bits (PIXMAN_a8,
2517 width, composite->unbounded.height,
/* stride 0: every row aliases the same r->buf scanline */
2518 (uint32_t *)r->buf, 0);
2519 if (unlikely (r->mask == NULL)) {
2520 pixman_image_unref (r->src);
2521 return _cairo_error(CAIRO_STATUS_NO_MEMORY);
2524 r->u.composite.dst = dst->pixman_image;
2527 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
2529 return CAIRO_INT_STATUS_SUCCESS;
/* Initialize the general span renderer (non-PIXMAN_HAS_COMPOSITOR
 * build): first try the mono and in-place fast paths; otherwise build a
 * pixman source image and an A8 mask surface covering the unbounded
 * extents, rasterize coverage into that mask row-by-row
 * (_cairo_image_spans[_and_zero]) and composite once in
 * span_renderer_fini.  Small masks live in the inline r->buf; larger
 * ones are allocated by pixman.
 * NOTE(review): declarations (status use, mask_x/mask_y) and several
 * branch bodies are elided from this excerpt. */
2532 static cairo_int_status_t
2533 span_renderer_init (cairo_abstract_span_renderer_t *_r,
2534 const cairo_composite_rectangles_t *composite,
2535 cairo_antialias_t antialias,
2536 cairo_bool_t needs_clip)
2538 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
2539 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2540 const cairo_pattern_t *source = &composite->source_pattern.base;
2541 cairo_operator_t op = composite->op;
2542 cairo_int_status_t status;
2544 TRACE ((stderr, "%s\n", __FUNCTION__));
2547 return CAIRO_INT_STATUS_UNSUPPORTED;
2549 r->composite = composite;
2552 r->base.finish = NULL;
/* fast paths first; anything but UNSUPPORTED is final */
2554 status = mono_renderer_init (r, composite, antialias, needs_clip);
2555 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
2558 status = inplace_renderer_init (r, composite, antialias, needs_clip);
2559 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
/* operator reduction; without LERP ops CLEAR is emulated with a
 * white source and OUT_REVERSE, and SOURCE is unsupported */
2564 if (op == CAIRO_OPERATOR_CLEAR) {
2565 #if PIXMAN_HAS_OP_LERP
2566 op = PIXMAN_OP_LERP_CLEAR;
2568 source = &_cairo_pattern_white.base;
2569 op = PIXMAN_OP_OUT_REVERSE;
2571 } else if (dst->base.is_clear &&
2572 (op == CAIRO_OPERATOR_SOURCE ||
2573 op == CAIRO_OPERATOR_OVER ||
2574 op == CAIRO_OPERATOR_ADD)) {
2576 } else if (op == CAIRO_OPERATOR_SOURCE) {
2577 #if PIXMAN_HAS_OP_LERP
2578 op = PIXMAN_OP_LERP_SRC;
2580 return CAIRO_INT_STATUS_UNSUPPORTED;
2583 op = _pixman_operator (op);
2587 r->src = _pixman_image_for_pattern (dst, source, FALSE,
2588 &composite->unbounded,
2589 &composite->source_sample_area,
2590 &r->u.mask.src_x, &r->u.mask.src_y);
2591 if (unlikely (r->src == NULL))
2592 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2595 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2596 r->opacity = composite->mask_pattern.solid.color.alpha;
2598 pixman_image_t *mask;
2601 mask = _pixman_image_for_pattern (dst,
2602 &composite->mask_pattern.base,
2604 &composite->unbounded,
2605 &composite->mask_sample_area,
2607 if (unlikely (mask == NULL))
2608 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2610 /* XXX Component-alpha? */
2611 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
2612 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
2614 pixman_image_unref (r->src);
2616 r->u.mask.src_x = mask_x;
2617 r->u.mask.src_y = mask_y;
/* non-solid masks are not handled by this renderer */
2622 pixman_image_unref (mask);
2623 return CAIRO_INT_STATUS_UNSUPPORTED;
2627 r->u.mask.extents = composite->unbounded;
/* stride rounded up to a 4-byte multiple for pixman */
2628 r->u.mask.stride = (r->u.mask.extents.width + 3) & ~3;
2629 if (r->u.mask.extents.height * r->u.mask.stride > (int)sizeof (r->buf)) {
/* too big for the inline buffer: let pixman allocate the mask;
 * no need to pre-zero rows, so use the plain spans callback */
2630 r->mask = pixman_image_create_bits (PIXMAN_a8,
2631 r->u.mask.extents.width,
2632 r->u.mask.extents.height,
2635 r->base.render_rows = _cairo_image_spans;
2636 r->base.finish = NULL;
/* inline buffer (uninitialized): rows must be explicitly zeroed */
2638 r->mask = pixman_image_create_bits (PIXMAN_a8,
2639 r->u.mask.extents.width,
2640 r->u.mask.extents.height,
2641 (uint32_t *)r->buf, r->u.mask.stride);
2643 r->base.render_rows = _cairo_image_spans_and_zero;
2644 r->base.finish = _cairo_image_finish_spans_and_zero;
2646 if (unlikely (r->mask == NULL))
2647 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2649 r->u.mask.data = (uint8_t *) pixman_image_get_data (r->mask);
2650 r->u.mask.stride = pixman_image_get_stride (r->mask);
/* store the bottom edge in .height for the row/finish callbacks */
2652 r->u.mask.extents.height += r->u.mask.extents.y;
2653 return CAIRO_STATUS_SUCCESS;
/* Finalize the general span renderer: when rasterization succeeded and
 * this was the mask-based path (r->bpp == 0 -- the fast paths set a
 * non-zero bpp), perform the single deferred composite of src through
 * the accumulated A8 mask over the unbounded extents, then release the
 * pixman images.  NOTE(review): the return-type line, the finish-
 * callback invocation and the NULL-guards are elided here. */
2657 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
2658 cairo_int_status_t status)
2660 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
2662 TRACE ((stderr, "%s\n", __FUNCTION__));
2664 if (likely (status == CAIRO_INT_STATUS_SUCCESS && r->bpp == 0)) {
2665 const cairo_composite_rectangles_t *composite = r->composite;
2670 pixman_image_composite32 (r->op, r->src, r->mask,
2671 to_pixman_image (composite->surface),
2672 composite->unbounded.x + r->u.mask.src_x,
2673 composite->unbounded.y + r->u.mask.src_y,
2675 composite->unbounded.x,
2676 composite->unbounded.y,
2677 composite->unbounded.width,
2678 composite->unbounded.height);
2682 pixman_image_unref (r->src);
2684 pixman_image_unref (r->mask);
2688 const cairo_compositor_t *
2689 _cairo_image_spans_compositor_get (void)
2691 static cairo_spans_compositor_t spans;
2692 static cairo_compositor_t shape;
2694 if (spans.base.delegate == NULL) {
2695 _cairo_shape_mask_compositor_init (&shape,
2696 _cairo_image_traps_compositor_get());
2697 shape.glyphs = NULL;
2699 _cairo_spans_compositor_init (&spans, &shape);
2702 #if PIXMAN_HAS_OP_LERP
2703 spans.flags |= CAIRO_SPANS_COMPOSITOR_HAS_LERP;
2706 //spans.acquire = acquire;
2707 //spans.release = release;
2708 spans.fill_boxes = fill_boxes;
2709 spans.draw_image_boxes = draw_image_boxes;
2710 //spans.copy_boxes = copy_boxes;
2711 spans.pattern_to_surface = _cairo_image_source_create_for_pattern;
2712 //spans.check_composite_boxes = check_composite_boxes;
2713 spans.composite_boxes = composite_boxes;
2714 //spans.check_span_renderer = check_span_renderer;
2715 spans.renderer_init = span_renderer_init;
2716 spans.renderer_fini = span_renderer_fini;