2 * Copyright © 2000 SuSE, Inc.
3 * Copyright © 1999 Keith Packard
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that
8 * copyright notice and this permission notice appear in supporting
9 * documentation, and that the name of SuSE not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. SuSE makes no representations about the
12 * suitability of this software for any purpose. It is provided "as is"
13 * without express or implied warranty.
15 * SuSE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL SuSE
17 * BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
18 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
19 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
20 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
22 * Author: Keith Packard, SuSE, Inc.
31 #include "pixman-private.h"
34 * Computing composite region
36 #define BOUND(v) (int16_t) ((v) < INT16_MIN ? INT16_MIN : (v) > INT16_MAX ? INT16_MAX : (v))
38 static inline pixman_bool_t
39 clip_general_image (pixman_region32_t * region,
40 pixman_region32_t * clip,
44 if (pixman_region32_n_rects(region) == 1 &&
45 pixman_region32_n_rects(clip) == 1)
47 pixman_box32_t * rbox = pixman_region32_rectangles(region, NULL);
48 pixman_box32_t * cbox = pixman_region32_rectangles(clip, NULL);
51 if (rbox->x1 < (v = cbox->x1 + dx))
53 if (rbox->x2 > (v = cbox->x2 + dx))
55 if (rbox->y1 < (v = cbox->y1 + dy))
57 if (rbox->y2 > (v = cbox->y2 + dy))
59 if (rbox->x1 >= rbox->x2 ||
62 pixman_region32_init (region);
65 else if (!pixman_region32_not_empty (clip))
72 pixman_region32_translate (region, -dx, -dy);
73 if (!pixman_region32_intersect (region, region, clip))
76 pixman_region32_translate(region, dx, dy);
78 return pixman_region32_not_empty(region);
82 static inline pixman_bool_t
83 clip_source_image (pixman_region32_t * region,
84 pixman_image_t * picture,
88 /* Source clips are ignored, unless they are explicitly turned on
89 * and the clip in question was set by an X client. (Because if
90 * the clip was not set by a client, then it is a hierarchy
91 * clip and those should always be ignored for sources).
93 if (!picture->common.clip_sources || !picture->common.client_clip)
96 return clip_general_image (region,
97 &picture->common.clip_region,
102 * returns FALSE if the final region is empty. Indistinguishable from
103 * an allocation failure, but rendering ignores those anyways.
106 pixman_compute_composite_region32 (pixman_region32_t * region,
107 pixman_image_t * src_image,
108 pixman_image_t * mask_image,
109 pixman_image_t * dst_image,
121 region->extents.x1 = dest_x;
123 region->extents.x2 = BOUND(v);
124 region->extents.y1 = dest_y;
126 region->extents.y2 = BOUND(v);
128 region->extents.x1 = MAX (region->extents.x1, 0);
129 region->extents.y1 = MAX (region->extents.y1, 0);
131 /* Some X servers rely on an old bug, where pixman would just believe the
132 * set clip_region and not clip against the destination geometry. So,
133 * since only X servers set "source clip", we don't clip against
134 * destination geometry when that is set.
136 if (!dst_image->common.clip_sources)
138 region->extents.x2 = MIN (region->extents.x2, dst_image->bits.width);
139 region->extents.y2 = MIN (region->extents.y2, dst_image->bits.height);
144 /* Check for empty operation */
145 if (region->extents.x1 >= region->extents.x2 ||
146 region->extents.y1 >= region->extents.y2)
148 pixman_region32_init (region);
152 if (dst_image->common.have_clip_region)
154 if (!clip_general_image (region, &dst_image->common.clip_region, 0, 0))
156 pixman_region32_fini (region);
161 if (dst_image->common.alpha_map && dst_image->common.alpha_map->common.have_clip_region)
163 if (!clip_general_image (region, &dst_image->common.alpha_map->common.clip_region,
164 -dst_image->common.alpha_origin_x,
165 -dst_image->common.alpha_origin_y))
167 pixman_region32_fini (region);
172 /* clip against src */
173 if (src_image->common.have_clip_region)
175 if (!clip_source_image (region, src_image, dest_x - src_x, dest_y - src_y))
177 pixman_region32_fini (region);
181 if (src_image->common.alpha_map && src_image->common.alpha_map->common.have_clip_region)
183 if (!clip_source_image (region, (pixman_image_t *)src_image->common.alpha_map,
184 dest_x - (src_x - src_image->common.alpha_origin_x),
185 dest_y - (src_y - src_image->common.alpha_origin_y)))
187 pixman_region32_fini (region);
191 /* clip against mask */
192 if (mask_image && mask_image->common.have_clip_region)
194 if (!clip_source_image (region, mask_image, dest_x - mask_x, dest_y - mask_y))
196 pixman_region32_fini (region);
199 if (mask_image->common.alpha_map && mask_image->common.alpha_map->common.have_clip_region)
201 if (!clip_source_image (region, (pixman_image_t *)mask_image->common.alpha_map,
202 dest_x - (mask_x - mask_image->common.alpha_origin_x),
203 dest_y - (mask_y - mask_image->common.alpha_origin_y)))
205 pixman_region32_fini (region);
214 PIXMAN_EXPORT pixman_bool_t
215 pixman_compute_composite_region (pixman_region16_t * region,
216 pixman_image_t * src_image,
217 pixman_image_t * mask_image,
218 pixman_image_t * dst_image,
228 pixman_region32_t r32;
229 pixman_bool_t retval;
231 pixman_region32_init (&r32);
233 retval = pixman_compute_composite_region32 (&r32, src_image, mask_image, dst_image,
234 src_x, src_y, mask_x, mask_y, dest_x, dest_y,
239 if (!pixman_region16_copy_from_region32 (region, &r32))
243 pixman_region32_fini (&r32);
248 pixman_multiply_overflows_int (unsigned int a,
251 return a >= INT32_MAX / b;
255 pixman_addition_overflows_int (unsigned int a,
258 return a > INT32_MAX - b;
/* malloc (a * b), returning NULL if the product would overflow.
 *
 * Fix: guard b == 0 — the old `a >= INT32_MAX / b` divided by zero;
 * a zero-sized request cannot overflow, so it goes straight to malloc.
 * Note malloc (0) may itself legitimately return NULL.
 */
void *
pixman_malloc_ab (unsigned int a,
                  unsigned int b)
{
    if (b != 0 && a >= INT32_MAX / b)
        return NULL;

    return malloc (a * b);
}
/* malloc (a * b * c), returning NULL if either partial product would
 * overflow.
 *
 * Fix: guard b == 0 and c == 0 — the old divisions `INT32_MAX / b` and
 * `INT32_MAX / c` were divide-by-zero for zero factors; a zero factor
 * cannot overflow.
 */
void *
pixman_malloc_abc (unsigned int a,
                   unsigned int b,
                   unsigned int c)
{
    if (b != 0 && a >= INT32_MAX / b)
        return NULL;
    else if (c != 0 && a * b >= INT32_MAX / c)
        return NULL;
    else
        return malloc (a * b * c);
}
/*
 * Helper routine to expand a color component from 0 < n <= 8 bits to 16 bits
 * by replicating the bit pattern (e.g. 5-bit 10110 -> 1011010110101101).
 *
 * Fix: bail out early for nbits == 0, which would otherwise loop forever
 * (result |= result >> 0 never advances nbits).
 */
static inline uint64_t
expand16 (const uint8_t val, int nbits)
{
    uint16_t result;

    if (nbits == 0)
        return 0;

    /* Start out with the high bit of val in the high bit of result. */
    result = (uint16_t)val << (16 - nbits);

    /* Copy the bits in result, doubling the number of bits each time, until we
     * fill all 16 bits.
     */
    while (nbits < 16)
    {
        result |= result >> nbits;
        nbits *= 2;
    }

    return result;
}
308 * This function expands images from ARGB8 format to ARGB16. To preserve
309 * precision, it needs to know the original source format. For example, if the
310 * source was PIXMAN_x1r5g5b5 and the red component contained bits 12345, then
311 * the expanded value is 12345123. To correctly expand this to 16 bits, it
312 * should be 1234512345123451 and not 1234512312345123.
315 pixman_expand(uint64_t *dst, const uint32_t *src,
316 pixman_format_code_t format, int width)
319 * Determine the sizes of each component and the masks and shifts required
320 * to extract them from the source pixel.
322 const int a_size = PIXMAN_FORMAT_A(format),
323 r_size = PIXMAN_FORMAT_R(format),
324 g_size = PIXMAN_FORMAT_G(format),
325 b_size = PIXMAN_FORMAT_B(format);
326 const int a_shift = 32 - a_size,
327 r_shift = 24 - r_size,
328 g_shift = 16 - g_size,
329 b_shift = 8 - b_size;
330 const uint8_t a_mask = ~(~0 << a_size),
331 r_mask = ~(~0 << r_size),
332 g_mask = ~(~0 << g_size),
333 b_mask = ~(~0 << b_size);
336 /* Start at the end so that we can do the expansion in place when src == dst */
337 for (i = width - 1; i >= 0; i--)
339 const uint32_t pixel = src[i];
340 // Extract the components.
341 const uint8_t a = (pixel >> a_shift) & a_mask,
342 r = (pixel >> r_shift) & r_mask,
343 g = (pixel >> g_shift) & g_mask,
344 b = (pixel >> b_shift) & b_mask;
345 const uint64_t a16 = a_size ? expand16(a, a_size) : 0xffff,
346 r16 = expand16(r, r_size),
347 g16 = expand16(g, g_size),
348 b16 = expand16(b, b_size);
350 dst[i] = a16 << 48 | r16 << 32 | g16 << 16 | b16;
/*
 * Contracting is easier than expanding. We just need to truncate the
 * components: keep the top 8 bits of each 16-bit channel.
 */
void
pixman_contract (uint32_t *      dst,
                 const uint64_t *src,
                 int             width)
{
    int i;

    /* Start at the beginning so that we can do the contraction in place when
     * src == dst
     */
    for (i = 0; i < width; i++)
    {
        const uint8_t a = src[i] >> 56,
                      r = src[i] >> 40,
                      g = src[i] >> 24,
                      b = src[i] >> 8;

        /* Fix: cast each component to uint32_t before shifting — `a << 24`
         * promotes the uint8_t to (signed) int, and shifting a value
         * >= 0x80 into the sign bit is undefined behavior.
         */
        dst[i] = (uint32_t)a << 24 | (uint32_t)r << 16 |
                 (uint32_t)g << 8  | (uint32_t)b;
    }
}
376 walk_region_internal (pixman_implementation_t *imp,
378 pixman_image_t * src_image,
379 pixman_image_t * mask_image,
380 pixman_image_t * dst_image,
389 pixman_bool_t src_repeat,
390 pixman_bool_t mask_repeat,
391 pixman_region32_t *region,
392 pixman_composite_func_t composite_rect)
395 const pixman_box32_t *pbox;
396 int w, h, w_this, h_this;
397 int x_msk, y_msk, x_src, y_src, x_dst, y_dst;
399 pbox = pixman_region32_rectangles (region, &n);
402 h = pbox->y2 - pbox->y1;
403 y_src = pbox->y1 - dest_y + src_y;
404 y_msk = pbox->y1 - dest_y + mask_y;
409 w = pbox->x2 - pbox->x1;
410 x_src = pbox->x1 - dest_x + src_x;
411 x_msk = pbox->x1 - dest_x + mask_x;
416 y_msk = MOD (y_msk, mask_image->bits.height);
417 if (h_this > mask_image->bits.height - y_msk)
418 h_this = mask_image->bits.height - y_msk;
422 y_src = MOD (y_src, src_image->bits.height);
423 if (h_this > src_image->bits.height - y_src)
424 h_this = src_image->bits.height - y_src;
431 x_msk = MOD (x_msk, mask_image->bits.width);
432 if (w_this > mask_image->bits.width - x_msk)
433 w_this = mask_image->bits.width - x_msk;
437 x_src = MOD (x_src, src_image->bits.width);
438 if (w_this > src_image->bits.width - x_src)
439 w_this = src_image->bits.width - x_src;
441 (*composite_rect) (imp,
442 op, src_image, mask_image, dst_image,
443 x_src, y_src, x_msk, y_msk, x_dst, y_dst,
460 _pixman_walk_composite_region (pixman_implementation_t *imp,
462 pixman_image_t * src_image,
463 pixman_image_t * mask_image,
464 pixman_image_t * dst_image,
473 pixman_composite_func_t composite_rect)
475 pixman_region32_t region;
477 pixman_region32_init (®ion);
479 if (pixman_compute_composite_region32 (
480 ®ion, src_image, mask_image, dst_image, src_x, src_y, mask_x, mask_y, dest_x, dest_y, width, height))
482 walk_region_internal (imp, op,
483 src_image, mask_image, dst_image,
484 src_x, src_y, mask_x, mask_y, dest_x, dest_y,
485 width, height, FALSE, FALSE,
490 pixman_region32_fini (®ion);
495 mask_is_solid (pixman_image_t *mask)
497 if (mask->type == SOLID)
500 if (mask->type == BITS &&
501 mask->common.repeat == PIXMAN_REPEAT_NORMAL &&
502 mask->bits.width == 1 &&
503 mask->bits.height == 1)
511 static const pixman_fast_path_t *
512 get_fast_path (const pixman_fast_path_t *fast_paths,
514 pixman_image_t *src_image,
515 pixman_image_t *mask_image,
516 pixman_image_t *dst_image,
517 pixman_bool_t is_pixbuf)
519 const pixman_fast_path_t *info;
521 for (info = fast_paths; info->op != PIXMAN_OP_NONE; info++)
523 pixman_bool_t valid_src = FALSE;
524 pixman_bool_t valid_mask = FALSE;
529 if ((info->src_format == PIXMAN_solid && _pixman_image_is_solid (src_image)) ||
530 (src_image->type == BITS && info->src_format == src_image->bits.format))
538 if ((info->mask_format == PIXMAN_null && !mask_image) ||
539 (mask_image && mask_image->type == BITS && info->mask_format == mask_image->bits.format))
543 if (info->flags & NEED_SOLID_MASK)
545 if (!mask_image || !mask_is_solid (mask_image))
549 if (info->flags & NEED_COMPONENT_ALPHA)
551 if (!mask_image || !mask_image->common.component_alpha)
559 if (info->dest_format != dst_image->bits.format)
562 if ((info->flags & NEED_PIXBUF) && !is_pixbuf)
571 static inline pixman_bool_t
572 image_covers (pixman_image_t *image, pixman_box32_t *extents, int x, int y)
574 if (image->common.type == BITS && image->common.repeat == PIXMAN_REPEAT_NONE)
576 if (x > extents->x1 || y > extents->y1 ||
577 x + image->bits.width < extents->x2 ||
578 y + image->bits.height < extents->y2)
588 _pixman_run_fast_path (const pixman_fast_path_t *paths,
589 pixman_implementation_t *imp,
592 pixman_image_t *mask,
593 pixman_image_t *dest,
603 pixman_composite_func_t func = NULL;
604 pixman_bool_t src_repeat = src->common.repeat == PIXMAN_REPEAT_NORMAL;
605 pixman_bool_t mask_repeat = mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
606 pixman_bool_t result;
608 if ((src->type == BITS || _pixman_image_is_solid (src)) &&
609 (!mask || mask->type == BITS)
610 && !src->common.transform && !(mask && mask->common.transform)
611 && !(mask && mask->common.alpha_map) && !src->common.alpha_map && !dest->common.alpha_map
612 && (src->common.filter != PIXMAN_FILTER_CONVOLUTION)
613 && (src->common.repeat != PIXMAN_REPEAT_PAD)
614 && (src->common.repeat != PIXMAN_REPEAT_REFLECT)
615 && (!mask || (mask->common.filter != PIXMAN_FILTER_CONVOLUTION &&
616 mask->common.repeat != PIXMAN_REPEAT_PAD &&
617 mask->common.repeat != PIXMAN_REPEAT_REFLECT))
618 && !src->common.read_func && !src->common.write_func
619 && !(mask && mask->common.read_func)
620 && !(mask && mask->common.write_func)
621 && !dest->common.read_func
622 && !dest->common.write_func)
624 const pixman_fast_path_t *info;
625 pixman_bool_t pixbuf;
628 src && src->type == BITS &&
629 mask && mask->type == BITS &&
630 src->bits.bits == mask->bits.bits &&
633 !mask->common.component_alpha &&
636 info = get_fast_path (paths, op, src, mask, dest, pixbuf);
642 if (info->src_format == PIXMAN_solid)
645 if (info->mask_format == PIXMAN_solid || info->flags & NEED_SOLID_MASK)
649 src->bits.width == 1 &&
650 src->bits.height == 1) ||
652 mask->bits.width == 1 &&
653 mask->bits.height == 1))
655 /* If src or mask are repeating 1x1 images and src_repeat or
656 * mask_repeat are still TRUE, it means the fast path we
657 * selected does not actually handle repeating images.
659 * So rather than call the "fast path" with a zillion
660 * 1x1 requests, we just fall back to the general code (which
661 * does do something sensible with 1x1 repeating images).
672 pixman_region32_t region;
673 pixman_region32_init (®ion);
675 if (pixman_compute_composite_region32 (
676 ®ion, src, mask, dest, src_x, src_y, mask_x, mask_y, dest_x, dest_y, width, height))
678 pixman_box32_t *extents = pixman_region32_extents (®ion);
680 if (image_covers (src, extents, dest_x - src_x, dest_y - src_y) &&
681 (!mask || image_covers (mask, extents, dest_x - mask_x, dest_y - mask_y)))
683 walk_region_internal (imp, op,
685 src_x, src_y, mask_x, mask_y,
688 src_repeat, mask_repeat,
696 pixman_region32_fini (®ion);
702 #define N_TMP_BOXES (16)
705 pixman_region16_copy_from_region32 (pixman_region16_t *dst,
706 pixman_region32_t *src)
709 pixman_box32_t *boxes32;
710 pixman_box16_t *boxes16;
711 pixman_bool_t retval;
713 boxes32 = pixman_region32_rectangles (src, &n_boxes);
715 boxes16 = pixman_malloc_ab (n_boxes, sizeof (pixman_box16_t));
720 for (i = 0; i < n_boxes; ++i)
722 boxes16[i].x1 = boxes32[i].x1;
723 boxes16[i].y1 = boxes32[i].y1;
724 boxes16[i].x2 = boxes32[i].x2;
725 boxes16[i].y2 = boxes32[i].y2;
728 pixman_region_fini (dst);
729 retval = pixman_region_init_rects (dst, boxes16, n_boxes);
735 pixman_region32_copy_from_region16 (pixman_region32_t *dst,
736 pixman_region16_t *src)
739 pixman_box16_t *boxes16;
740 pixman_box32_t *boxes32;
741 pixman_box32_t tmp_boxes[N_TMP_BOXES];
742 pixman_bool_t retval;
744 boxes16 = pixman_region_rectangles (src, &n_boxes);
746 if (n_boxes > N_TMP_BOXES)
747 boxes32 = pixman_malloc_ab (n_boxes, sizeof (pixman_box32_t));
754 for (i = 0; i < n_boxes; ++i)
756 boxes32[i].x1 = boxes16[i].x1;
757 boxes32[i].y1 = boxes16[i].y1;
758 boxes32[i].x2 = boxes16[i].x2;
759 boxes32[i].y2 = boxes16[i].y2;
762 pixman_region32_fini (dst);
763 retval = pixman_region32_init_rects (dst, boxes32, n_boxes);
765 if (boxes32 != tmp_boxes)