2 * Copyright © 2000 SuSE, Inc.
3 * Copyright © 1999 Keith Packard
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that
8 * copyright notice and this permission notice appear in supporting
9 * documentation, and that the name of SuSE not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. SuSE makes no representations about the
12 * suitability of this software for any purpose. It is provided "as is"
13 * without express or implied warranty.
15 * SuSE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL SuSE
17 * BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
18 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
19 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
20 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
22 * Author: Keith Packard, SuSE, Inc.
31 #include "pixman-private.h"
34 * Computing composite region
/*
 * Clip 'region' in place against 'clip' translated by (dx, dy).
 * Returns TRUE iff the clipped region is non-empty.
 * NOTE(review): interior lines of this function are missing from this
 * chunk (braces, assignments of 'v', early returns).
 */
36 static inline pixman_bool_t
37 clip_general_image (pixman_region32_t * region,
38 pixman_region32_t * clip,
/* Fast case: both regions are a single rectangle, so the region's box
 * can be clamped directly against the translated clip box. */
42 if (pixman_region32_n_rects (region) == 1 &&
43 pixman_region32_n_rects (clip) == 1)
45 pixman_box32_t * rbox = pixman_region32_rectangles (region, NULL);
46 pixman_box32_t * cbox = pixman_region32_rectangles (clip, NULL);
/* Clamp each edge of the region box to the translated clip box. */
49 if (rbox->x1 < (v = cbox->x1 + dx))
51 if (rbox->x2 > (v = cbox->x2 + dx))
53 if (rbox->y1 < (v = cbox->y1 + dy))
55 if (rbox->y2 > (v = cbox->y2 + dy))
/* Degenerate box after clamping: reset the region to empty. */
57 if (rbox->x1 >= rbox->x2 || rbox->y1 >= rbox->y2)
59 pixman_region32_init (region);
/* An empty clip region clips everything away. */
63 else if (!pixman_region32_not_empty (clip))
/* General case: move region into clip space, intersect, move back. */
70 pixman_region32_translate (region, -dx, -dy);
72 if (!pixman_region32_intersect (region, region, clip))
76 pixman_region32_translate (region, dx, dy);
79 return pixman_region32_not_empty (region);
/*
 * Clip 'region' against the clip region of a source (or mask) image.
 * Delegates the actual clipping to clip_general_image.
 */
82 static inline pixman_bool_t
83 clip_source_image (pixman_region32_t * region,
84 pixman_image_t * image,
88 /* Source clips are ignored, unless they are explicitly turned on
89 * and the clip in question was set by an X client. (Because if
90 * the clip was not set by a client, then it is a hierarchy
91 * clip and those should always be ignored for sources).
93 if (!image->common.clip_sources || !image->common.client_clip)
96 return clip_general_image (region,
97 &image->common.clip_region,
102 * returns FALSE if the final region is empty. Indistinguishable from
103 * an allocation failure, but rendering ignores those anyways.
/*
 * Compute the effective composite region for an operation: start from
 * the destination rectangle, clamp to the destination bits, then clip
 * against the destination's clip region, the destination alpha map,
 * the source (and its alpha map), and the mask (and its alpha map).
 * Returns FALSE when the final region is empty; on the FALSE paths the
 * region is finalized with pixman_region32_fini.
 */
106 pixman_compute_composite_region32 (pixman_region32_t * region,
107 pixman_image_t * src_image,
108 pixman_image_t * mask_image,
109 pixman_image_t * dst_image,
/* Seed the region with the requested destination rectangle. */
119 region->extents.x1 = dest_x;
120 region->extents.x2 = dest_x + width;
121 region->extents.y1 = dest_y;
122 region->extents.y2 = dest_y + height;
/* Clamp to the destination image bounds. */
124 region->extents.x1 = MAX (region->extents.x1, 0);
125 region->extents.y1 = MAX (region->extents.y1, 0);
126 region->extents.x2 = MIN (region->extents.x2, dst_image->bits.width);
127 region->extents.y2 = MIN (region->extents.y2, dst_image->bits.height);
131 /* Check for empty operation */
132 if (region->extents.x1 >= region->extents.x2 ||
133 region->extents.y1 >= region->extents.y2)
135 pixman_region32_init (region);
/* Clip against the destination's own clip region (no translation). */
139 if (dst_image->common.have_clip_region)
141 if (!clip_general_image (region, &dst_image->common.clip_region, 0, 0))
143 pixman_region32_fini (region);
/* Clip against the destination alpha map's clip, shifted by its origin. */
148 if (dst_image->common.alpha_map && dst_image->common.alpha_map->common.have_clip_region)
150 if (!clip_general_image (region, &dst_image->common.alpha_map->common.clip_region,
151 -dst_image->common.alpha_origin_x,
152 -dst_image->common.alpha_origin_y))
154 pixman_region32_fini (region);
159 /* clip against src */
160 if (src_image->common.have_clip_region)
162 if (!clip_source_image (region, src_image, dest_x - src_x, dest_y - src_y))
164 pixman_region32_fini (region);
/* Clip against the source alpha map, offset by its origin. */
168 if (src_image->common.alpha_map && src_image->common.alpha_map->common.have_clip_region)
170 if (!clip_source_image (region, (pixman_image_t *)src_image->common.alpha_map,
171 dest_x - (src_x - src_image->common.alpha_origin_x),
172 dest_y - (src_y - src_image->common.alpha_origin_y)))
174 pixman_region32_fini (region);
178 /* clip against mask */
179 if (mask_image && mask_image->common.have_clip_region)
181 if (!clip_source_image (region, mask_image, dest_x - mask_x, dest_y - mask_y))
183 pixman_region32_fini (region);
/* And against the mask's alpha map, offset by its origin. */
186 if (mask_image->common.alpha_map && mask_image->common.alpha_map->common.have_clip_region)
188 if (!clip_source_image (region, (pixman_image_t *)mask_image->common.alpha_map,
189 dest_x - (mask_x - mask_image->common.alpha_origin_x),
190 dest_y - (mask_y - mask_image->common.alpha_origin_y)))
192 pixman_region32_fini (region);
/*
 * 16-bit public wrapper: compute the composite region with the 32-bit
 * implementation, then copy the result into the caller's region16.
 */
201 PIXMAN_EXPORT pixman_bool_t
202 pixman_compute_composite_region (pixman_region16_t * region,
203 pixman_image_t * src_image,
204 pixman_image_t * mask_image,
205 pixman_image_t * dst_image,
215 pixman_region32_t r32;
216 pixman_bool_t retval;
218 pixman_region32_init (&r32);
220 retval = pixman_compute_composite_region32 (
221 &r32, src_image, mask_image, dst_image,
222 src_x, src_y, mask_x, mask_y, dest_x, dest_y,
/* Converting 32-bit boxes to 16-bit can itself fail (allocation). */
227 if (!pixman_region16_copy_from_region32 (region, &r32))
231 pixman_region32_fini (&r32);
/*
 * Report whether a * b may exceed INT32_MAX.  Conservative (uses >=),
 * and assumes b != 0 (division by zero otherwise).
 */
236 pixman_multiply_overflows_int (unsigned int a,
239 return a >= INT32_MAX / b;
/* Report whether a + b would exceed INT32_MAX. */
243 pixman_addition_overflows_int (unsigned int a,
246 return a > INT32_MAX - b;
/*
 * Overflow-checked malloc of a * b bytes.  The overflow branch's body
 * (presumably 'return NULL;') is outside this chunk — TODO confirm.
 */
250 pixman_malloc_ab (unsigned int a,
253 if (a >= INT32_MAX / b)
256 return malloc (a * b);
/*
 * Overflow-checked malloc of a * b * c bytes: checks a*b first, then
 * (a*b)*c, before performing the multiplication for malloc.
 */
260 pixman_malloc_abc (unsigned int a,
264 if (a >= INT32_MAX / b)
266 else if (a * b >= INT32_MAX / c)
269 return malloc (a * b * c);
273 * Helper routine to expand a color component from 0 < n <= 8 bits to 16
274 * bits by replication.
/*
 * Expand an nbits-wide component (0 < nbits <= 8) to 16 bits by bit
 * replication, e.g. 5-bit 'abcde' -> 'abcdeabcdeabcdea'.
 */
276 static inline uint64_t
277 expand16 (const uint8_t val, int nbits)
279 /* Start out with the high bit of val in the high bit of result. */
280 uint16_t result = (uint16_t)val << (16 - nbits);
285 /* Copy the bits in result, doubling the number of bits each time, until
286 * we fill all 16 bits.
290 result |= result >> nbits;
298 * This function expands images from ARGB8 format to ARGB16. To preserve
299 * precision, it needs to know the original source format. For example, if the
300 * source was PIXMAN_x1r5g5b5 and the red component contained bits 12345, then
301 * the expanded value is 12345123. To correctly expand this to 16 bits, it
302 * should be 1234512345123451 and not 1234512312345123.
/*
 * Expand 'width' pixels from a 32-bit ARGB-style format to 16 bits per
 * channel (a16r16g16b16 packed in a uint64_t).  May run in place when
 * dst aliases src, hence the back-to-front loop.
 */
305 pixman_expand (uint64_t * dst,
306 const uint32_t * src,
307 pixman_format_code_t format,
311 * Determine the sizes of each component and the masks and shifts
312 * required to extract them from the source pixel.
314 const int a_size = PIXMAN_FORMAT_A (format),
315 r_size = PIXMAN_FORMAT_R (format),
316 g_size = PIXMAN_FORMAT_G (format),
317 b_size = PIXMAN_FORMAT_B (format);
/* NOTE(review): when a_size == 0, a_shift is 32 and 'pixel >> a_shift'
 * below shifts a 32-bit value by its full width, which is undefined
 * behavior in C — confirm against upstream pixman's later fix. */
318 const int a_shift = 32 - a_size,
319 r_shift = 24 - r_size,
320 g_shift = 16 - g_size,
321 b_shift = 8 - b_size;
/* Component extraction masks: n low bits set for an n-bit component. */
322 const uint8_t a_mask = ~(~0 << a_size),
323 r_mask = ~(~0 << r_size),
324 g_mask = ~(~0 << g_size),
325 b_mask = ~(~0 << b_size);
328 /* Start at the end so that we can do the expansion in place
331 for (i = width - 1; i >= 0; i--)
333 const uint32_t pixel = src[i];
334 const uint8_t a = (pixel >> a_shift) & a_mask,
335 r = (pixel >> r_shift) & r_mask,
336 g = (pixel >> g_shift) & g_mask,
337 b = (pixel >> b_shift) & b_mask;
/* Formats without alpha are treated as fully opaque (0xffff). */
338 const uint64_t a16 = a_size ? expand16 (a, a_size) : 0xffff,
339 r16 = expand16 (r, r_size),
340 g16 = expand16 (g, g_size),
341 b16 = expand16 (b, b_size);
343 dst[i] = a16 << 48 | r16 << 32 | g16 << 16 | b16;
348 * Contracting is easier than expanding. We just need to truncate the
/*
 * Contract 'width' pixels from 16 bits per channel (uint64_t) back to
 * 8 bits per channel (uint32_t ARGB) by keeping each component's high
 * byte.  Safe to run in place (forward loop, dst no wider than src).
 */
352 pixman_contract (uint32_t * dst,
358 /* Start at the beginning so that we can do the contraction in
359 * place when src == dst
361 for (i = 0; i < width; i++)
363 const uint8_t a = src[i] >> 56,
368 dst[i] = a << 24 | r << 16 | g << 8 | b;
/*
 * Walk every box of 'region', invoking composite_rect on sub-rectangles.
 * When src_repeat/mask_repeat are set, each box is further split so the
 * source/mask coordinates stay inside the image (tiled via MOD).
 * NOTE(review): the loop structure and h_this/w_this initialization
 * lines are outside this chunk.
 */
373 walk_region_internal (pixman_implementation_t *imp,
375 pixman_image_t * src_image,
376 pixman_image_t * mask_image,
377 pixman_image_t * dst_image,
386 pixman_bool_t src_repeat,
387 pixman_bool_t mask_repeat,
388 pixman_region32_t * region,
389 pixman_composite_func_t composite_rect)
392 const pixman_box32_t *pbox;
393 int w, h, w_this, h_this;
394 int x_msk, y_msk, x_src, y_src, x_dst, y_dst;
396 pbox = pixman_region32_rectangles (region, &n);
/* Per-box setup: height and source/mask y-offsets for this box. */
399 h = pbox->y2 - pbox->y1;
400 y_src = pbox->y1 - dest_y + src_y;
401 y_msk = pbox->y1 - dest_y + mask_y;
/* Per-box setup: width and source/mask x-offsets for this box. */
407 w = pbox->x2 - pbox->x1;
408 x_src = pbox->x1 - dest_x + src_x;
409 x_msk = pbox->x1 - dest_x + mask_x;
/* Repeating mask: wrap y into the mask and clamp the strip height. */
414 y_msk = MOD (y_msk, mask_image->bits.height);
415 if (h_this > mask_image->bits.height - y_msk)
416 h_this = mask_image->bits.height - y_msk;
/* Repeating source: wrap y into the source and clamp the strip height. */
421 y_src = MOD (y_src, src_image->bits.height);
422 if (h_this > src_image->bits.height - y_src)
423 h_this = src_image->bits.height - y_src;
/* Repeating mask: wrap x and clamp the strip width. */
432 x_msk = MOD (x_msk, mask_image->bits.width);
433 if (w_this > mask_image->bits.width - x_msk)
434 w_this = mask_image->bits.width - x_msk;
/* Repeating source: wrap x and clamp the strip width. */
439 x_src = MOD (x_src, src_image->bits.width);
440 if (w_this > src_image->bits.width - x_src)
441 w_this = src_image->bits.width - x_src;
/* Composite this sub-rectangle. */
444 (*composite_rect) (imp, op,
445 src_image, mask_image, dst_image,
446 x_src, y_src, x_msk, y_msk, x_dst, y_dst,
/*
 * Compute the composite region for the operation and, when non-empty,
 * walk it with walk_region_internal.  Repeat handling is disabled here
 * (src_repeat and mask_repeat passed as FALSE).
 *
 * Fix: "&region" had been mangled to "(registered-sign)ion" by HTML
 * entity decoding of the "&reg" prefix; the address-of expressions are
 * restored below.
 */
466 _pixman_walk_composite_region (pixman_implementation_t *imp,
468 pixman_image_t * src_image,
469 pixman_image_t * mask_image,
470 pixman_image_t * dst_image,
479 pixman_composite_func_t composite_rect)
481 pixman_region32_t region;
483 pixman_region32_init (&region);
485 if (pixman_compute_composite_region32 (
486 &region, src_image, mask_image, dst_image,
487 src_x, src_y, mask_x, mask_y, dest_x, dest_y,
490 walk_region_internal (imp, op,
491 src_image, mask_image, dst_image,
492 src_x, src_y, mask_x, mask_y, dest_x, dest_y,
493 width, height, FALSE, FALSE,
497 pixman_region32_fini (&region);
/*
 * Decide whether a source/mask image is eligible for the fast-path
 * table: no transform, no alpha map, no convolution filter, no
 * PAD/REFLECT repeat, and for bits images no read/write accessors or
 * wide (more than 32 bpp) formats.
 */
502 source_is_fastpathable (pixman_image_t *image)
504 if (image->common.transform ||
505 image->common.alpha_map ||
506 image->common.filter == PIXMAN_FILTER_CONVOLUTION ||
507 image->common.repeat == PIXMAN_REPEAT_PAD ||
508 image->common.repeat == PIXMAN_REPEAT_REFLECT)
/* Bits images additionally must be plain-memory, narrow formats. */
513 if (image->type == BITS &&
514 (image->bits.read_func ||
515 image->bits.write_func ||
516 PIXMAN_FORMAT_IS_WIDE (image->bits.format)))
/*
 * Look up a fast path matching (op, src_format, mask_format,
 * dest_format) in 'fast_paths'.  Formats are first canonicalized:
 * solid images become PIXMAN_solid, component-alpha masks become the
 * *_ca variants, and src/mask pairs sharing the same bits become
 * pixbuf/rpixbuf.  Returns NULL when no entry matches (the early
 * return lines are outside this chunk).
 */
524 static const pixman_fast_path_t *
525 get_fast_path (const pixman_fast_path_t *fast_paths,
527 pixman_image_t * src,
528 pixman_image_t * mask,
529 pixman_image_t * dest,
535 pixman_format_code_t src_format, mask_format, dest_format;
536 const pixman_fast_path_t *info;
/* Source and (optional) mask must both be fast-path eligible. */
539 if (!source_is_fastpathable (src))
542 if (mask && !source_is_fastpathable (mask))
/* Destination restrictions: no alpha map or access hooks. */
547 if (dest->common.alpha_map ||
548 dest->bits.read_func ||
549 dest->bits.write_func)
/* Canonicalize the source format. */
556 if (_pixman_image_is_solid (src))
558 src_format = PIXMAN_solid;
560 else if (src->type == BITS)
562 src_format = src->bits.format;
/* Canonicalize the mask format (PIXMAN_null when there is no mask). */
572 mask_format = PIXMAN_null;
574 else if (mask->common.component_alpha)
576 if (mask->type == BITS)
578 /* These are the *only* component_alpha formats
579 * we support for fast paths
581 if (mask->bits.format == PIXMAN_a8r8g8b8)
582 mask_format = PIXMAN_a8r8g8b8_ca;
583 else if (mask->bits.format == PIXMAN_a8b8g8r8)
584 mask_format = PIXMAN_a8b8g8r8_ca;
593 else if (_pixman_image_is_solid (mask))
595 mask_format = PIXMAN_solid;
597 else if (mask->common.type == BITS)
599 mask_format = mask->bits.format;
606 dest_format = dest->bits.format;
608 /* Check for pixbufs */
609 if ((mask_format == PIXMAN_a8r8g8b8 || mask_format == PIXMAN_a8b8g8r8) &&
610 (src->type == BITS && src->bits.bits == mask->bits.bits) &&
611 (src->common.repeat == mask->common.repeat) &&
612 (src_x == mask_x && src_y == mask_y))
614 if (src_format == PIXMAN_x8b8g8r8)
615 src_format = mask_format = PIXMAN_pixbuf;
616 else if (src_format == PIXMAN_x8r8g8b8)
617 src_format = mask_format = PIXMAN_rpixbuf;
/* Linear scan of the table, terminated by a PIXMAN_OP_NONE sentinel. */
622 for (info = fast_paths; info->op != PIXMAN_OP_NONE; ++info)
624 if (info->op == op &&
625 info->src_format == src_format &&
626 info->mask_format == mask_format &&
627 info->dest_format == dest_format)
/*
 * Check whether 'image', placed at offset (x, y), fully covers
 * 'extents'.  Only non-repeating bits images can fail the check;
 * other image types are treated as covering (return lines are outside
 * this chunk).
 */
636 static force_inline pixman_bool_t
637 image_covers (pixman_image_t *image,
638 pixman_box32_t *extents,
642 if (image->common.type == BITS &&
643 image->common.repeat == PIXMAN_REPEAT_NONE)
/* Any edge of the extents falling outside the image means no cover. */
645 if (x > extents->x1 || y > extents->y1 ||
646 x + image->bits.width < extents->x2 ||
647 y + image->bits.height < extents->y2)
/*
 * Check that both the source and the mask cover 'extents' when their
 * per-image offsets are applied (mask handling guards are outside
 * this chunk).
 */
656 static force_inline pixman_bool_t
657 sources_cover (pixman_image_t *src,
658 pixman_image_t *mask,
659 pixman_box32_t *extents,
667 if (!image_covers (src, extents, dest_x - src_x, dest_y - src_y))
673 if (!image_covers (mask, extents, dest_x - mask_x, dest_y - mask_y))
/*
 * Try to composite using a fast path from 'paths'.  Looks the path up,
 * rejects repeating 1x1 images the selected path cannot handle, then
 * computes the composite region and walks it when the sources cover the
 * region's extents.
 *
 * Fix: "&region" had been mangled to "(registered-sign)ion" by HTML
 * entity decoding of the "&reg" prefix; the address-of expressions are
 * restored below.
 */
680 _pixman_run_fast_path (const pixman_fast_path_t *paths,
681 pixman_implementation_t * imp,
683 pixman_image_t * src,
684 pixman_image_t * mask,
685 pixman_image_t * dest,
695 pixman_composite_func_t func = NULL;
696 pixman_bool_t src_repeat =
697 src->common.repeat == PIXMAN_REPEAT_NORMAL;
698 pixman_bool_t mask_repeat =
699 mask && mask->common.repeat == PIXMAN_REPEAT_NORMAL;
700 pixman_bool_t result;
701 const pixman_fast_path_t *info;
703 if ((info = get_fast_path (paths, op, src, mask, dest, src_x, src_y, mask_x, mask_y)))
/* Solid entries consume the repeat flag themselves. */
707 if (info->src_format == PIXMAN_solid)
710 if (info->mask_format == PIXMAN_solid)
/* Still-repeating 1x1 bits images: reject this path. */
714 src->bits.width == 1 &&
715 src->bits.height == 1) ||
717 mask->bits.width == 1 &&
718 mask->bits.height == 1))
720 /* If src or mask are repeating 1x1 images and src_repeat or
721 * mask_repeat are still TRUE, it means the fast path we
722 * selected does not actually handle repeating images.
724 * So rather than calling the "fast path" with a zillion
725 * 1x1 requests, we just fall back to the general code (which
726 * does do something sensible with 1x1 repeating images).
736 pixman_region32_t region;
737 pixman_region32_init (&region);
739 if (pixman_compute_composite_region32 (
740 &region, src, mask, dest,
741 src_x, src_y, mask_x, mask_y, dest_x, dest_y, width, height))
743 pixman_box32_t *extents = pixman_region32_extents (&region);
/* Only run the fast path when src and mask cover the extents. */
747 src_x, src_y, mask_x, mask_y, dest_x, dest_y))
749 walk_region_internal (imp, op,
751 src_x, src_y, mask_x, mask_y,
754 src_repeat, mask_repeat,
761 pixman_region32_fini (&region);
/* Threshold for using a stack-allocated scratch box array. */
768 #define N_TMP_BOXES (16)
/*
 * Copy a 32-bit region into a 16-bit region by converting each box.
 * NOTE(review): the per-field assignments truncate int32 coordinates
 * to int16 — callers are expected to stay within 16-bit bounds.
 */
771 pixman_region16_copy_from_region32 (pixman_region16_t *dst,
772 pixman_region32_t *src)
775 pixman_box32_t *boxes32;
776 pixman_box16_t *boxes16;
777 pixman_bool_t retval;
779 boxes32 = pixman_region32_rectangles (src, &n_boxes);
/* Allocate the 16-bit box array with overflow-checked malloc. */
781 boxes16 = pixman_malloc_ab (n_boxes, sizeof (pixman_box16_t));
786 for (i = 0; i < n_boxes; ++i)
788 boxes16[i].x1 = boxes32[i].x1;
789 boxes16[i].y1 = boxes32[i].y1;
790 boxes16[i].x2 = boxes32[i].x2;
791 boxes16[i].y2 = boxes32[i].y2;
/* Rebuild dst from the converted boxes. */
794 pixman_region_fini (dst);
795 retval = pixman_region_init_rects (dst, boxes16, n_boxes);
801 pixman_region32_copy_from_region16 (pixman_region32_t *dst,
802 pixman_region16_t *src)
805 pixman_box16_t *boxes16;
806 pixman_box32_t *boxes32;
807 pixman_box32_t tmp_boxes[N_TMP_BOXES];
808 pixman_bool_t retval;
810 boxes16 = pixman_region_rectangles (src, &n_boxes);
812 if (n_boxes > N_TMP_BOXES)
813 boxes32 = pixman_malloc_ab (n_boxes, sizeof (pixman_box32_t));
820 for (i = 0; i < n_boxes; ++i)
822 boxes32[i].x1 = boxes16[i].x1;
823 boxes32[i].y1 = boxes16[i].y1;
824 boxes32[i].x2 = boxes16[i].x2;
825 boxes32[i].y2 = boxes16[i].y2;
828 pixman_region32_fini (dst);
829 retval = pixman_region32_init_rects (dst, boxes32, n_boxes);
831 if (boxes32 != tmp_boxes)