/* Extracted from luatex.git (beta-0.89.2):
 * source/libs/cairo/cairo-src/src/cairo-image-compositor.c
 * blob 48072f81bd0cb96033d54cb8e0e40002f877b446
 */
/* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
/* cairo - a vector graphics library with display and print output
 *
 * Copyright © 2003 University of Southern California
 * Copyright © 2009,2010,2011 Intel Corporation
 *
 * This library is free software; you can redistribute it and/or
 * modify it either under the terms of the GNU Lesser General Public
 * License version 2.1 as published by the Free Software Foundation
 * (the "LGPL") or, at your option, under the terms of the Mozilla
 * Public License Version 1.1 (the "MPL"). If you do not alter this
 * notice, a recipient may use your version of this file under either
 * the MPL or the LGPL.
 *
 * You should have received a copy of the LGPL along with this library
 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
 * You should have received a copy of the MPL along with this library
 * in the file COPYING-MPL-1.1
 *
 * The contents of this file are subject to the Mozilla Public License
 * Version 1.1 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
 * the specific language governing rights and limitations.
 *
 * The Original Code is the cairo graphics library.
 *
 * The Initial Developer of the Original Code is University of Southern
 * California.
 *
 * Contributor(s):
 *	Carl D. Worth <cworth@cworth.org>
 *	Chris Wilson <chris@chris-wilson.co.uk>
 */
/* The primary reason for keeping a traps-compositor around is
 * for validating cairo-xlib (which currently also uses traps).
 */

#include "cairoint.h"

#include "cairo-image-surface-private.h"

#include "cairo-compositor-private.h"
#include "cairo-spans-compositor-private.h"

#include "cairo-region-private.h"
#include "cairo-traps-private.h"
#include "cairo-tristrip-private.h"

#include "cairo-pixman-private.h"
57 static pixman_image_t *
58 to_pixman_image (cairo_surface_t *s)
60 return ((cairo_image_surface_t *)s)->pixman_image;
63 static cairo_int_status_t
64 acquire (void *abstract_dst)
66 return CAIRO_STATUS_SUCCESS;
69 static cairo_int_status_t
70 release (void *abstract_dst)
72 return CAIRO_STATUS_SUCCESS;
75 static cairo_int_status_t
76 set_clip_region (void *_surface,
77 cairo_region_t *region)
79 cairo_image_surface_t *surface = _surface;
80 pixman_region32_t *rgn = region ? &region->rgn : NULL;
82 if (! pixman_image_set_clip_region32 (surface->pixman_image, rgn))
83 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
85 return CAIRO_STATUS_SUCCESS;
88 static cairo_int_status_t
89 draw_image_boxes (void *_dst,
90 cairo_image_surface_t *image,
91 cairo_boxes_t *boxes,
92 int dx, int dy)
94 cairo_image_surface_t *dst = _dst;
95 struct _cairo_boxes_chunk *chunk;
96 int i;
98 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
100 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
101 for (i = 0; i < chunk->count; i++) {
102 cairo_box_t *b = &chunk->base[i];
103 int x = _cairo_fixed_integer_part (b->p1.x);
104 int y = _cairo_fixed_integer_part (b->p1.y);
105 int w = _cairo_fixed_integer_part (b->p2.x) - x;
106 int h = _cairo_fixed_integer_part (b->p2.y) - y;
107 if (dst->pixman_format != image->pixman_format ||
108 ! pixman_blt ((uint32_t *)image->data, (uint32_t *)dst->data,
109 image->stride / sizeof (uint32_t),
110 dst->stride / sizeof (uint32_t),
111 PIXMAN_FORMAT_BPP (image->pixman_format),
112 PIXMAN_FORMAT_BPP (dst->pixman_format),
113 x + dx, y + dy,
114 x, y,
115 w, h))
117 pixman_image_composite32 (PIXMAN_OP_SRC,
118 image->pixman_image, NULL, dst->pixman_image,
119 x + dx, y + dy,
120 0, 0,
121 x, y,
122 w, h);
126 return CAIRO_STATUS_SUCCESS;
129 static inline uint32_t
130 color_to_uint32 (const cairo_color_t *color)
132 return
133 (color->alpha_short >> 8 << 24) |
134 (color->red_short >> 8 << 16) |
135 (color->green_short & 0xff00) |
136 (color->blue_short >> 8);
139 static inline cairo_bool_t
140 color_to_pixel (const cairo_color_t *color,
141 pixman_format_code_t format,
142 uint32_t *pixel)
144 uint32_t c;
146 if (!(format == PIXMAN_a8r8g8b8 ||
147 format == PIXMAN_x8r8g8b8 ||
148 format == PIXMAN_a8b8g8r8 ||
149 format == PIXMAN_x8b8g8r8 ||
150 format == PIXMAN_b8g8r8a8 ||
151 format == PIXMAN_b8g8r8x8 ||
152 format == PIXMAN_r5g6b5 ||
153 format == PIXMAN_b5g6r5 ||
154 format == PIXMAN_a8))
156 return FALSE;
159 c = color_to_uint32 (color);
161 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_ABGR) {
162 c = ((c & 0xff000000) >> 0) |
163 ((c & 0x00ff0000) >> 16) |
164 ((c & 0x0000ff00) >> 0) |
165 ((c & 0x000000ff) << 16);
168 if (PIXMAN_FORMAT_TYPE (format) == PIXMAN_TYPE_BGRA) {
169 c = ((c & 0xff000000) >> 24) |
170 ((c & 0x00ff0000) >> 8) |
171 ((c & 0x0000ff00) << 8) |
172 ((c & 0x000000ff) << 24);
175 if (format == PIXMAN_a8) {
176 c = c >> 24;
177 } else if (format == PIXMAN_r5g6b5 || format == PIXMAN_b5g6r5) {
178 c = ((((c) >> 3) & 0x001f) |
179 (((c) >> 5) & 0x07e0) |
180 (((c) >> 8) & 0xf800));
183 *pixel = c;
184 return TRUE;
187 static pixman_op_t
188 _pixman_operator (cairo_operator_t op)
190 switch ((int) op) {
191 case CAIRO_OPERATOR_CLEAR:
192 return PIXMAN_OP_CLEAR;
194 case CAIRO_OPERATOR_SOURCE:
195 return PIXMAN_OP_SRC;
196 case CAIRO_OPERATOR_OVER:
197 return PIXMAN_OP_OVER;
198 case CAIRO_OPERATOR_IN:
199 return PIXMAN_OP_IN;
200 case CAIRO_OPERATOR_OUT:
201 return PIXMAN_OP_OUT;
202 case CAIRO_OPERATOR_ATOP:
203 return PIXMAN_OP_ATOP;
205 case CAIRO_OPERATOR_DEST:
206 return PIXMAN_OP_DST;
207 case CAIRO_OPERATOR_DEST_OVER:
208 return PIXMAN_OP_OVER_REVERSE;
209 case CAIRO_OPERATOR_DEST_IN:
210 return PIXMAN_OP_IN_REVERSE;
211 case CAIRO_OPERATOR_DEST_OUT:
212 return PIXMAN_OP_OUT_REVERSE;
213 case CAIRO_OPERATOR_DEST_ATOP:
214 return PIXMAN_OP_ATOP_REVERSE;
216 case CAIRO_OPERATOR_XOR:
217 return PIXMAN_OP_XOR;
218 case CAIRO_OPERATOR_ADD:
219 return PIXMAN_OP_ADD;
220 case CAIRO_OPERATOR_SATURATE:
221 return PIXMAN_OP_SATURATE;
223 case CAIRO_OPERATOR_MULTIPLY:
224 return PIXMAN_OP_MULTIPLY;
225 case CAIRO_OPERATOR_SCREEN:
226 return PIXMAN_OP_SCREEN;
227 case CAIRO_OPERATOR_OVERLAY:
228 return PIXMAN_OP_OVERLAY;
229 case CAIRO_OPERATOR_DARKEN:
230 return PIXMAN_OP_DARKEN;
231 case CAIRO_OPERATOR_LIGHTEN:
232 return PIXMAN_OP_LIGHTEN;
233 case CAIRO_OPERATOR_COLOR_DODGE:
234 return PIXMAN_OP_COLOR_DODGE;
235 case CAIRO_OPERATOR_COLOR_BURN:
236 return PIXMAN_OP_COLOR_BURN;
237 case CAIRO_OPERATOR_HARD_LIGHT:
238 return PIXMAN_OP_HARD_LIGHT;
239 case CAIRO_OPERATOR_SOFT_LIGHT:
240 return PIXMAN_OP_SOFT_LIGHT;
241 case CAIRO_OPERATOR_DIFFERENCE:
242 return PIXMAN_OP_DIFFERENCE;
243 case CAIRO_OPERATOR_EXCLUSION:
244 return PIXMAN_OP_EXCLUSION;
245 case CAIRO_OPERATOR_HSL_HUE:
246 return PIXMAN_OP_HSL_HUE;
247 case CAIRO_OPERATOR_HSL_SATURATION:
248 return PIXMAN_OP_HSL_SATURATION;
249 case CAIRO_OPERATOR_HSL_COLOR:
250 return PIXMAN_OP_HSL_COLOR;
251 case CAIRO_OPERATOR_HSL_LUMINOSITY:
252 return PIXMAN_OP_HSL_LUMINOSITY;
254 default:
255 ASSERT_NOT_REACHED;
256 return PIXMAN_OP_OVER;
260 static cairo_bool_t
261 __fill_reduces_to_source (cairo_operator_t op,
262 const cairo_color_t *color,
263 const cairo_image_surface_t *dst)
265 if (op == CAIRO_OPERATOR_SOURCE || op == CAIRO_OPERATOR_CLEAR)
266 return TRUE;
267 if (op == CAIRO_OPERATOR_OVER && CAIRO_COLOR_IS_OPAQUE (color))
268 return TRUE;
269 if (dst->base.is_clear)
270 return op == CAIRO_OPERATOR_OVER || op == CAIRO_OPERATOR_ADD;
272 return FALSE;
275 static cairo_bool_t
276 fill_reduces_to_source (cairo_operator_t op,
277 const cairo_color_t *color,
278 const cairo_image_surface_t *dst,
279 uint32_t *pixel)
281 if (__fill_reduces_to_source (op, color, dst)) {
282 return color_to_pixel (color, dst->pixman_format, pixel);
285 return FALSE;
288 static cairo_int_status_t
289 fill_rectangles (void *_dst,
290 cairo_operator_t op,
291 const cairo_color_t *color,
292 cairo_rectangle_int_t *rects,
293 int num_rects)
295 cairo_image_surface_t *dst = _dst;
296 uint32_t pixel;
297 int i;
299 TRACE ((stderr, "%s\n", __FUNCTION__));
301 if (fill_reduces_to_source (op, color, dst, &pixel)) {
302 for (i = 0; i < num_rects; i++) {
303 pixman_fill ((uint32_t *) dst->data, dst->stride / sizeof (uint32_t),
304 PIXMAN_FORMAT_BPP (dst->pixman_format),
305 rects[i].x, rects[i].y,
306 rects[i].width, rects[i].height,
307 pixel);
309 } else {
310 pixman_image_t *src = _pixman_image_for_color (color);
311 if (unlikely (src == NULL))
312 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
314 op = _pixman_operator (op);
315 for (i = 0; i < num_rects; i++) {
316 pixman_image_composite32 (op,
317 src, NULL, dst->pixman_image,
318 0, 0,
319 0, 0,
320 rects[i].x, rects[i].y,
321 rects[i].width, rects[i].height);
324 pixman_image_unref (src);
327 return CAIRO_STATUS_SUCCESS;
330 static cairo_int_status_t
331 fill_boxes (void *_dst,
332 cairo_operator_t op,
333 const cairo_color_t *color,
334 cairo_boxes_t *boxes)
336 cairo_image_surface_t *dst = _dst;
337 struct _cairo_boxes_chunk *chunk;
338 uint32_t pixel;
339 int i;
341 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
343 if (fill_reduces_to_source (op, color, dst, &pixel)) {
344 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
345 for (i = 0; i < chunk->count; i++) {
346 int x = _cairo_fixed_integer_part (chunk->base[i].p1.x);
347 int y = _cairo_fixed_integer_part (chunk->base[i].p1.y);
348 int w = _cairo_fixed_integer_part (chunk->base[i].p2.x) - x;
349 int h = _cairo_fixed_integer_part (chunk->base[i].p2.y) - y;
350 pixman_fill ((uint32_t *) dst->data,
351 dst->stride / sizeof (uint32_t),
352 PIXMAN_FORMAT_BPP (dst->pixman_format),
353 x, y, w, h, pixel);
357 else
359 pixman_image_t *src = _pixman_image_for_color (color);
360 if (unlikely (src == NULL))
361 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
363 op = _pixman_operator (op);
364 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
365 for (i = 0; i < chunk->count; i++) {
366 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
367 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
368 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
369 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
370 pixman_image_composite32 (op,
371 src, NULL, dst->pixman_image,
372 0, 0,
373 0, 0,
374 x1, y1,
375 x2-x1, y2-y1);
379 pixman_image_unref (src);
382 return CAIRO_STATUS_SUCCESS;
385 static cairo_int_status_t
386 composite (void *_dst,
387 cairo_operator_t op,
388 cairo_surface_t *abstract_src,
389 cairo_surface_t *abstract_mask,
390 int src_x,
391 int src_y,
392 int mask_x,
393 int mask_y,
394 int dst_x,
395 int dst_y,
396 unsigned int width,
397 unsigned int height)
399 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
400 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
402 TRACE ((stderr, "%s\n", __FUNCTION__));
404 if (mask) {
405 pixman_image_composite32 (_pixman_operator (op),
406 src->pixman_image, mask->pixman_image, to_pixman_image (_dst),
407 src_x, src_y,
408 mask_x, mask_y,
409 dst_x, dst_y,
410 width, height);
411 } else {
412 pixman_image_composite32 (_pixman_operator (op),
413 src->pixman_image, NULL, to_pixman_image (_dst),
414 src_x, src_y,
415 0, 0,
416 dst_x, dst_y,
417 width, height);
420 return CAIRO_STATUS_SUCCESS;
423 static cairo_int_status_t
424 lerp (void *_dst,
425 cairo_surface_t *abstract_src,
426 cairo_surface_t *abstract_mask,
427 int src_x,
428 int src_y,
429 int mask_x,
430 int mask_y,
431 int dst_x,
432 int dst_y,
433 unsigned int width,
434 unsigned int height)
436 cairo_image_surface_t *dst = _dst;
437 cairo_image_source_t *src = (cairo_image_source_t *)abstract_src;
438 cairo_image_source_t *mask = (cairo_image_source_t *)abstract_mask;
440 TRACE ((stderr, "%s\n", __FUNCTION__));
442 #if PIXMAN_HAS_OP_LERP
443 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
444 src->pixman_image, mask->pixman_image, dst->pixman_image,
445 src_x, src_y,
446 mask_x, mask_y,
447 dst_x, dst_y,
448 width, height);
449 #else
450 /* Punch the clip out of the destination */
451 TRACE ((stderr, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
452 __FUNCTION__,
453 mask->base.unique_id, mask->pixman_image,
454 dst->base.unique_id, dst->pixman_image));
455 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
456 mask->pixman_image, NULL, dst->pixman_image,
457 mask_x, mask_y,
458 0, 0,
459 dst_x, dst_y,
460 width, height);
462 /* Now add the two results together */
463 TRACE ((stderr, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
464 __FUNCTION__,
465 src->base.unique_id, src->pixman_image,
466 mask->base.unique_id, mask->pixman_image,
467 dst->base.unique_id, dst->pixman_image));
468 pixman_image_composite32 (PIXMAN_OP_ADD,
469 src->pixman_image, mask->pixman_image, dst->pixman_image,
470 src_x, src_y,
471 mask_x, mask_y,
472 dst_x, dst_y,
473 width, height);
474 #endif
476 return CAIRO_STATUS_SUCCESS;
479 static cairo_int_status_t
480 composite_boxes (void *_dst,
481 cairo_operator_t op,
482 cairo_surface_t *abstract_src,
483 cairo_surface_t *abstract_mask,
484 int src_x,
485 int src_y,
486 int mask_x,
487 int mask_y,
488 int dst_x,
489 int dst_y,
490 cairo_boxes_t *boxes,
491 const cairo_rectangle_int_t *extents)
493 pixman_image_t *dst = to_pixman_image (_dst);
494 pixman_image_t *src = ((cairo_image_source_t *)abstract_src)->pixman_image;
495 pixman_image_t *mask = abstract_mask ? ((cairo_image_source_t *)abstract_mask)->pixman_image : NULL;
496 pixman_image_t *free_src = NULL;
497 struct _cairo_boxes_chunk *chunk;
498 int i;
500 /* XXX consider using a region? saves multiple prepare-composite */
501 TRACE ((stderr, "%s x %d\n", __FUNCTION__, boxes->num_boxes));
503 if (((cairo_surface_t *)_dst)->is_clear &&
504 (op == CAIRO_OPERATOR_SOURCE ||
505 op == CAIRO_OPERATOR_OVER ||
506 op == CAIRO_OPERATOR_ADD)) {
507 op = PIXMAN_OP_SRC;
508 } else if (mask) {
509 if (op == CAIRO_OPERATOR_CLEAR) {
510 #if PIXMAN_HAS_OP_LERP
511 op = PIXMAN_OP_LERP_CLEAR;
512 #else
513 free_src = src = _pixman_image_for_color (CAIRO_COLOR_WHITE);
514 if (unlikely (src == NULL))
515 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
516 op = PIXMAN_OP_OUT_REVERSE;
517 #endif
518 } else if (op == CAIRO_OPERATOR_SOURCE) {
519 #if PIXMAN_HAS_OP_LERP
520 op = PIXMAN_OP_LERP_SRC;
521 #else
522 return CAIRO_INT_STATUS_UNSUPPORTED;
523 #endif
524 } else {
525 op = _pixman_operator (op);
527 } else {
528 op = _pixman_operator (op);
531 for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
532 for (i = 0; i < chunk->count; i++) {
533 int x1 = _cairo_fixed_integer_part (chunk->base[i].p1.x);
534 int y1 = _cairo_fixed_integer_part (chunk->base[i].p1.y);
535 int x2 = _cairo_fixed_integer_part (chunk->base[i].p2.x);
536 int y2 = _cairo_fixed_integer_part (chunk->base[i].p2.y);
538 pixman_image_composite32 (op, src, mask, dst,
539 x1 + src_x, y1 + src_y,
540 x1 + mask_x, y1 + mask_y,
541 x1 + dst_x, y1 + dst_y,
542 x2 - x1, y2 - y1);
546 if (free_src)
547 pixman_image_unref (free_src);
549 return CAIRO_STATUS_SUCCESS;
552 #define CAIRO_FIXED_16_16_MIN _cairo_fixed_from_int (-32768)
553 #define CAIRO_FIXED_16_16_MAX _cairo_fixed_from_int (32767)
555 static cairo_bool_t
556 line_exceeds_16_16 (const cairo_line_t *line)
558 return
559 line->p1.x <= CAIRO_FIXED_16_16_MIN ||
560 line->p1.x >= CAIRO_FIXED_16_16_MAX ||
562 line->p2.x <= CAIRO_FIXED_16_16_MIN ||
563 line->p2.x >= CAIRO_FIXED_16_16_MAX ||
565 line->p1.y <= CAIRO_FIXED_16_16_MIN ||
566 line->p1.y >= CAIRO_FIXED_16_16_MAX ||
568 line->p2.y <= CAIRO_FIXED_16_16_MIN ||
569 line->p2.y >= CAIRO_FIXED_16_16_MAX;
572 static void
573 project_line_x_onto_16_16 (const cairo_line_t *line,
574 cairo_fixed_t top,
575 cairo_fixed_t bottom,
576 pixman_line_fixed_t *out)
578 /* XXX use fixed-point arithmetic? */
579 cairo_point_double_t p1, p2;
580 double m;
582 p1.x = _cairo_fixed_to_double (line->p1.x);
583 p1.y = _cairo_fixed_to_double (line->p1.y);
585 p2.x = _cairo_fixed_to_double (line->p2.x);
586 p2.y = _cairo_fixed_to_double (line->p2.y);
588 m = (p2.x - p1.x) / (p2.y - p1.y);
589 out->p1.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (top - line->p1.y));
590 out->p2.x = _cairo_fixed_16_16_from_double (p1.x + m * _cairo_fixed_to_double (bottom - line->p1.y));
593 void
594 _pixman_image_add_traps (pixman_image_t *image,
595 int dst_x, int dst_y,
596 cairo_traps_t *traps)
598 cairo_trapezoid_t *t = traps->traps;
599 int num_traps = traps->num_traps;
600 while (num_traps--) {
601 pixman_trapezoid_t trap;
603 /* top/bottom will be clamped to surface bounds */
604 trap.top = _cairo_fixed_to_16_16 (t->top);
605 trap.bottom = _cairo_fixed_to_16_16 (t->bottom);
607 /* However, all the other coordinates will have been left untouched so
608 * as not to introduce numerical error. Recompute them if they
609 * exceed the 16.16 limits.
611 if (unlikely (line_exceeds_16_16 (&t->left))) {
612 project_line_x_onto_16_16 (&t->left, t->top, t->bottom, &trap.left);
613 trap.left.p1.y = trap.top;
614 trap.left.p2.y = trap.bottom;
615 } else {
616 trap.left.p1.x = _cairo_fixed_to_16_16 (t->left.p1.x);
617 trap.left.p1.y = _cairo_fixed_to_16_16 (t->left.p1.y);
618 trap.left.p2.x = _cairo_fixed_to_16_16 (t->left.p2.x);
619 trap.left.p2.y = _cairo_fixed_to_16_16 (t->left.p2.y);
622 if (unlikely (line_exceeds_16_16 (&t->right))) {
623 project_line_x_onto_16_16 (&t->right, t->top, t->bottom, &trap.right);
624 trap.right.p1.y = trap.top;
625 trap.right.p2.y = trap.bottom;
626 } else {
627 trap.right.p1.x = _cairo_fixed_to_16_16 (t->right.p1.x);
628 trap.right.p1.y = _cairo_fixed_to_16_16 (t->right.p1.y);
629 trap.right.p2.x = _cairo_fixed_to_16_16 (t->right.p2.x);
630 trap.right.p2.y = _cairo_fixed_to_16_16 (t->right.p2.y);
633 pixman_rasterize_trapezoid (image, &trap, -dst_x, -dst_y);
634 t++;
638 static cairo_int_status_t
639 composite_traps (void *_dst,
640 cairo_operator_t op,
641 cairo_surface_t *abstract_src,
642 int src_x,
643 int src_y,
644 int dst_x,
645 int dst_y,
646 const cairo_rectangle_int_t *extents,
647 cairo_antialias_t antialias,
648 cairo_traps_t *traps)
650 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
651 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
652 cairo_int_status_t status;
653 pixman_image_t *mask;
654 pixman_format_code_t format;
656 TRACE ((stderr, "%s\n", __FUNCTION__));
658 /* pixman doesn't eliminate self-intersecting trapezoids/edges */
659 status = _cairo_bentley_ottmann_tessellate_traps (traps,
660 CAIRO_FILL_RULE_WINDING);
661 if (status != CAIRO_INT_STATUS_SUCCESS)
662 return status;
664 /* Special case adding trapezoids onto a mask surface; we want to avoid
665 * creating an intermediate temporary mask unnecessarily.
667 * We make the assumption here that the portion of the trapezoids
668 * contained within the surface is bounded by [dst_x,dst_y,width,height];
669 * the Cairo core code passes bounds based on the trapezoid extents.
671 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
672 if (dst->pixman_format == format &&
673 (abstract_src == NULL ||
674 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
676 _pixman_image_add_traps (dst->pixman_image, dst_x, dst_y, traps);
677 return CAIRO_STATUS_SUCCESS;
680 mask = pixman_image_create_bits (format,
681 extents->width, extents->height,
682 NULL, 0);
683 if (unlikely (mask == NULL))
684 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
686 _pixman_image_add_traps (mask, extents->x, extents->y, traps);
687 pixman_image_composite32 (_pixman_operator (op),
688 src->pixman_image, mask, dst->pixman_image,
689 extents->x + src_x, extents->y + src_y,
690 0, 0,
691 extents->x - dst_x, extents->y - dst_y,
692 extents->width, extents->height);
694 pixman_image_unref (mask);
696 return CAIRO_STATUS_SUCCESS;
699 #if PIXMAN_VERSION >= PIXMAN_VERSION_ENCODE(0,22,0)
700 static void
701 set_point (pixman_point_fixed_t *p, cairo_point_t *c)
703 p->x = _cairo_fixed_to_16_16 (c->x);
704 p->y = _cairo_fixed_to_16_16 (c->y);
707 void
708 _pixman_image_add_tristrip (pixman_image_t *image,
709 int dst_x, int dst_y,
710 cairo_tristrip_t *strip)
712 pixman_triangle_t tri;
713 pixman_point_fixed_t *p[3] = {&tri.p1, &tri.p2, &tri.p3 };
714 int n;
716 set_point (p[0], &strip->points[0]);
717 set_point (p[1], &strip->points[1]);
718 set_point (p[2], &strip->points[2]);
719 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
720 for (n = 3; n < strip->num_points; n++) {
721 set_point (p[n%3], &strip->points[n]);
722 pixman_add_triangles (image, -dst_x, -dst_y, 1, &tri);
726 static cairo_int_status_t
727 composite_tristrip (void *_dst,
728 cairo_operator_t op,
729 cairo_surface_t *abstract_src,
730 int src_x,
731 int src_y,
732 int dst_x,
733 int dst_y,
734 const cairo_rectangle_int_t *extents,
735 cairo_antialias_t antialias,
736 cairo_tristrip_t *strip)
738 cairo_image_surface_t *dst = (cairo_image_surface_t *) _dst;
739 cairo_image_source_t *src = (cairo_image_source_t *) abstract_src;
740 pixman_image_t *mask;
741 pixman_format_code_t format;
743 TRACE ((stderr, "%s\n", __FUNCTION__));
745 if (strip->num_points < 3)
746 return CAIRO_STATUS_SUCCESS;
748 if (1) { /* pixman doesn't eliminate self-intersecting triangles/edges */
749 cairo_int_status_t status;
750 cairo_traps_t traps;
751 int n;
753 _cairo_traps_init (&traps);
754 for (n = 0; n < strip->num_points; n++) {
755 cairo_point_t p[4];
757 p[0] = strip->points[0];
758 p[1] = strip->points[1];
759 p[2] = strip->points[2];
760 p[3] = strip->points[0];
762 _cairo_traps_tessellate_convex_quad (&traps, p);
764 status = composite_traps (_dst, op, abstract_src,
765 src_x, src_y,
766 dst_x, dst_y,
767 extents, antialias, &traps);
768 _cairo_traps_fini (&traps);
770 return status;
773 format = antialias == CAIRO_ANTIALIAS_NONE ? PIXMAN_a1 : PIXMAN_a8;
774 if (dst->pixman_format == format &&
775 (abstract_src == NULL ||
776 (op == CAIRO_OPERATOR_ADD && src->is_opaque_solid)))
778 _pixman_image_add_tristrip (dst->pixman_image, dst_x, dst_y, strip);
779 return CAIRO_STATUS_SUCCESS;
782 mask = pixman_image_create_bits (format,
783 extents->width, extents->height,
784 NULL, 0);
785 if (unlikely (mask == NULL))
786 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
788 _pixman_image_add_tristrip (mask, extents->x, extents->y, strip);
789 pixman_image_composite32 (_pixman_operator (op),
790 src->pixman_image, mask, dst->pixman_image,
791 extents->x + src_x, extents->y + src_y,
792 0, 0,
793 extents->x - dst_x, extents->y - dst_y,
794 extents->width, extents->height);
796 pixman_image_unref (mask);
798 return CAIRO_STATUS_SUCCESS;
800 #endif
802 static cairo_int_status_t
803 check_composite_glyphs (const cairo_composite_rectangles_t *extents,
804 cairo_scaled_font_t *scaled_font,
805 cairo_glyph_t *glyphs,
806 int *num_glyphs)
808 return CAIRO_STATUS_SUCCESS;
811 #if HAS_PIXMAN_GLYPHS
812 static pixman_glyph_cache_t *global_glyph_cache;
814 static inline pixman_glyph_cache_t *
815 get_glyph_cache (void)
817 if (!global_glyph_cache)
818 global_glyph_cache = pixman_glyph_cache_create ();
820 return global_glyph_cache;
823 void
824 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
825 cairo_scaled_glyph_t *scaled_glyph)
827 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
829 if (global_glyph_cache) {
830 pixman_glyph_cache_remove (
831 global_glyph_cache, scaled_font,
832 (void *)_cairo_scaled_glyph_index (scaled_glyph));
835 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
838 static cairo_int_status_t
839 composite_glyphs (void *_dst,
840 cairo_operator_t op,
841 cairo_surface_t *_src,
842 int src_x,
843 int src_y,
844 int dst_x,
845 int dst_y,
846 cairo_composite_glyphs_info_t *info)
848 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
849 pixman_glyph_cache_t *glyph_cache;
850 pixman_glyph_t pglyphs_stack[CAIRO_STACK_ARRAY_LENGTH (pixman_glyph_t)];
851 pixman_glyph_t *pglyphs = pglyphs_stack;
852 pixman_glyph_t *pg;
853 int i;
855 TRACE ((stderr, "%s\n", __FUNCTION__));
857 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
859 glyph_cache = get_glyph_cache();
860 if (unlikely (glyph_cache == NULL)) {
861 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
862 goto out_unlock;
865 pixman_glyph_cache_freeze (glyph_cache);
867 if (info->num_glyphs > ARRAY_LENGTH (pglyphs_stack)) {
868 pglyphs = _cairo_malloc_ab (info->num_glyphs, sizeof (pixman_glyph_t));
869 if (unlikely (pglyphs == NULL)) {
870 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
871 goto out_thaw;
875 pg = pglyphs;
876 for (i = 0; i < info->num_glyphs; i++) {
877 unsigned long index = info->glyphs[i].index;
878 const void *glyph;
880 glyph = pixman_glyph_cache_lookup (glyph_cache, info->font, (void *)index);
881 if (!glyph) {
882 cairo_scaled_glyph_t *scaled_glyph;
883 cairo_image_surface_t *glyph_surface;
885 /* This call can actually end up recursing, so we have to
886 * drop the mutex around it.
888 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
889 status = _cairo_scaled_glyph_lookup (info->font, index,
890 CAIRO_SCALED_GLYPH_INFO_SURFACE,
891 &scaled_glyph);
892 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex);
894 if (unlikely (status))
895 goto out_thaw;
897 glyph_surface = scaled_glyph->surface;
898 glyph = pixman_glyph_cache_insert (glyph_cache, info->font, (void *)index,
899 glyph_surface->base.device_transform.x0,
900 glyph_surface->base.device_transform.y0,
901 glyph_surface->pixman_image);
902 if (unlikely (!glyph)) {
903 status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
904 goto out_thaw;
908 pg->x = _cairo_lround (info->glyphs[i].x);
909 pg->y = _cairo_lround (info->glyphs[i].y);
910 pg->glyph = glyph;
911 pg++;
914 if (info->use_mask) {
915 pixman_format_code_t mask_format;
917 mask_format = pixman_glyph_get_mask_format (glyph_cache, pg - pglyphs, pglyphs);
919 pixman_composite_glyphs (_pixman_operator (op),
920 ((cairo_image_source_t *)_src)->pixman_image,
921 to_pixman_image (_dst),
922 mask_format,
923 info->extents.x + src_x, info->extents.y + src_y,
924 info->extents.x, info->extents.y,
925 info->extents.x - dst_x, info->extents.y - dst_y,
926 info->extents.width, info->extents.height,
927 glyph_cache, pg - pglyphs, pglyphs);
928 } else {
929 pixman_composite_glyphs_no_mask (_pixman_operator (op),
930 ((cairo_image_source_t *)_src)->pixman_image,
931 to_pixman_image (_dst),
932 src_x, src_y,
933 - dst_x, - dst_y,
934 glyph_cache, pg - pglyphs, pglyphs);
937 out_thaw:
938 pixman_glyph_cache_thaw (glyph_cache);
940 if (pglyphs != pglyphs_stack)
941 free(pglyphs);
943 out_unlock:
944 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex);
945 return status;
947 #else
948 void
949 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t *scaled_font,
950 cairo_scaled_glyph_t *scaled_glyph)
954 static cairo_int_status_t
955 composite_one_glyph (void *_dst,
956 cairo_operator_t op,
957 cairo_surface_t *_src,
958 int src_x,
959 int src_y,
960 int dst_x,
961 int dst_y,
962 cairo_composite_glyphs_info_t *info)
964 cairo_image_surface_t *glyph_surface;
965 cairo_scaled_glyph_t *scaled_glyph;
966 cairo_status_t status;
967 int x, y;
969 TRACE ((stderr, "%s\n", __FUNCTION__));
971 status = _cairo_scaled_glyph_lookup (info->font,
972 info->glyphs[0].index,
973 CAIRO_SCALED_GLYPH_INFO_SURFACE,
974 &scaled_glyph);
976 if (unlikely (status))
977 return status;
979 glyph_surface = scaled_glyph->surface;
980 if (glyph_surface->width == 0 || glyph_surface->height == 0)
981 return CAIRO_INT_STATUS_NOTHING_TO_DO;
983 /* round glyph locations to the nearest pixel */
984 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
985 x = _cairo_lround (info->glyphs[0].x -
986 glyph_surface->base.device_transform.x0);
987 y = _cairo_lround (info->glyphs[0].y -
988 glyph_surface->base.device_transform.y0);
990 pixman_image_composite32 (_pixman_operator (op),
991 ((cairo_image_source_t *)_src)->pixman_image,
992 glyph_surface->pixman_image,
993 to_pixman_image (_dst),
994 x + src_x, y + src_y,
995 0, 0,
996 x - dst_x, y - dst_y,
997 glyph_surface->width,
998 glyph_surface->height);
1000 return CAIRO_INT_STATUS_SUCCESS;
1003 static cairo_int_status_t
1004 composite_glyphs_via_mask (void *_dst,
1005 cairo_operator_t op,
1006 cairo_surface_t *_src,
1007 int src_x,
1008 int src_y,
1009 int dst_x,
1010 int dst_y,
1011 cairo_composite_glyphs_info_t *info)
1013 cairo_scaled_glyph_t *glyph_cache[64];
1014 pixman_image_t *white = _pixman_image_for_color (CAIRO_COLOR_WHITE);
1015 cairo_scaled_glyph_t *scaled_glyph;
1016 uint8_t buf[2048];
1017 pixman_image_t *mask;
1018 pixman_format_code_t format;
1019 cairo_status_t status;
1020 int i;
1022 TRACE ((stderr, "%s\n", __FUNCTION__));
1024 if (unlikely (white == NULL))
1025 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1027 /* XXX convert the glyphs to common formats a8/a8r8g8b8 to hit
1028 * optimised paths through pixman. Should we increase the bit
1029 * depth of the target surface, we should reconsider the appropriate
1030 * mask formats.
1033 status = _cairo_scaled_glyph_lookup (info->font,
1034 info->glyphs[0].index,
1035 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1036 &scaled_glyph);
1037 if (unlikely (status)) {
1038 pixman_image_unref (white);
1039 return status;
1042 memset (glyph_cache, 0, sizeof (glyph_cache));
1043 glyph_cache[info->glyphs[0].index % ARRAY_LENGTH (glyph_cache)] = scaled_glyph;
1045 format = PIXMAN_a8;
1046 i = (info->extents.width + 3) & ~3;
1047 if (scaled_glyph->surface->base.content & CAIRO_CONTENT_COLOR) {
1048 format = PIXMAN_a8r8g8b8;
1049 i = info->extents.width * 4;
1052 if (i * info->extents.height > (int) sizeof (buf)) {
1053 mask = pixman_image_create_bits (format,
1054 info->extents.width,
1055 info->extents.height,
1056 NULL, 0);
1057 } else {
1058 memset (buf, 0, i * info->extents.height);
1059 mask = pixman_image_create_bits (format,
1060 info->extents.width,
1061 info->extents.height,
1062 (uint32_t *)buf, i);
1064 if (unlikely (mask == NULL)) {
1065 pixman_image_unref (white);
1066 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1069 status = CAIRO_STATUS_SUCCESS;
1070 for (i = 0; i < info->num_glyphs; i++) {
1071 unsigned long glyph_index = info->glyphs[i].index;
1072 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1073 cairo_image_surface_t *glyph_surface;
1074 int x, y;
1076 scaled_glyph = glyph_cache[cache_index];
1077 if (scaled_glyph == NULL ||
1078 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1080 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1081 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1082 &scaled_glyph);
1084 if (unlikely (status)) {
1085 pixman_image_unref (mask);
1086 pixman_image_unref (white);
1087 return status;
1090 glyph_cache[cache_index] = scaled_glyph;
1093 glyph_surface = scaled_glyph->surface;
1094 if (glyph_surface->width && glyph_surface->height) {
1095 if (glyph_surface->base.content & CAIRO_CONTENT_COLOR &&
1096 format == PIXMAN_a8) {
1097 pixman_image_t *ca_mask;
1099 format = PIXMAN_a8r8g8b8;
1100 ca_mask = pixman_image_create_bits (format,
1101 info->extents.width,
1102 info->extents.height,
1103 NULL, 0);
1104 if (unlikely (ca_mask == NULL)) {
1105 pixman_image_unref (mask);
1106 pixman_image_unref (white);
1107 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1110 pixman_image_composite32 (PIXMAN_OP_SRC,
1111 white, mask, ca_mask,
1112 0, 0,
1113 0, 0,
1114 0, 0,
1115 info->extents.width,
1116 info->extents.height);
1117 pixman_image_unref (mask);
1118 mask = ca_mask;
1121 /* round glyph locations to the nearest pixel */
1122 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1123 x = _cairo_lround (info->glyphs[i].x -
1124 glyph_surface->base.device_transform.x0);
1125 y = _cairo_lround (info->glyphs[i].y -
1126 glyph_surface->base.device_transform.y0);
1128 if (glyph_surface->pixman_format == format) {
1129 pixman_image_composite32 (PIXMAN_OP_ADD,
1130 glyph_surface->pixman_image, NULL, mask,
1131 0, 0,
1132 0, 0,
1133 x - info->extents.x, y - info->extents.y,
1134 glyph_surface->width,
1135 glyph_surface->height);
1136 } else {
1137 pixman_image_composite32 (PIXMAN_OP_ADD,
1138 white, glyph_surface->pixman_image, mask,
1139 0, 0,
1140 0, 0,
1141 x - info->extents.x, y - info->extents.y,
1142 glyph_surface->width,
1143 glyph_surface->height);
1148 if (format == PIXMAN_a8r8g8b8)
1149 pixman_image_set_component_alpha (mask, TRUE);
1151 pixman_image_composite32 (_pixman_operator (op),
1152 ((cairo_image_source_t *)_src)->pixman_image,
1153 mask,
1154 to_pixman_image (_dst),
1155 info->extents.x + src_x, info->extents.y + src_y,
1156 0, 0,
1157 info->extents.x - dst_x, info->extents.y - dst_y,
1158 info->extents.width, info->extents.height);
1159 pixman_image_unref (mask);
1160 pixman_image_unref (white);
1162 return CAIRO_STATUS_SUCCESS;
/* composite_glyphs():
 * Composite each glyph of @info directly onto the destination, one
 * pixman_image_composite32() call per glyph, using the glyph's own
 * image as the mask for the source pattern.  Delegates to the
 * single-glyph / via-mask paths first.  Returns CAIRO_STATUS_SUCCESS
 * or the first glyph-lookup failure.
 * NOTE(review): blank and brace-only lines were lost when this listing
 * was extracted; the remaining tokens are unmodified upstream code. */
1165 static cairo_int_status_t
1166 composite_glyphs (void *_dst,
1167 cairo_operator_t op,
1168 cairo_surface_t *_src,
1169 int src_x,
1170 int src_y,
1171 int dst_x,
1172 int dst_y,
1173 cairo_composite_glyphs_info_t *info)
1175 cairo_scaled_glyph_t *glyph_cache[64];
1176 pixman_image_t *dst, *src;
1177 cairo_status_t status;
1178 int i;
1180 TRACE ((stderr, "%s\n", __FUNCTION__));
1182 if (info->num_glyphs == 1)
1183 return composite_one_glyph(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1185 if (info->use_mask)
1186 return composite_glyphs_via_mask(_dst, op, _src, src_x, src_y, dst_x, dst_y, info);
1188 op = _pixman_operator (op);
1189 dst = to_pixman_image (_dst);
1190 src = ((cairo_image_source_t *)_src)->pixman_image;
/* Small direct-mapped cache of resolved glyphs, keyed by index modulo
 * the cache size; avoids repeated lookups for runs of the same glyph. */
1192 memset (glyph_cache, 0, sizeof (glyph_cache));
1193 status = CAIRO_STATUS_SUCCESS;
1195 for (i = 0; i < info->num_glyphs; i++) {
1196 int x, y;
1197 cairo_image_surface_t *glyph_surface;
1198 cairo_scaled_glyph_t *scaled_glyph;
1199 unsigned long glyph_index = info->glyphs[i].index;
1200 int cache_index = glyph_index % ARRAY_LENGTH (glyph_cache);
1202 scaled_glyph = glyph_cache[cache_index];
1203 if (scaled_glyph == NULL ||
1204 _cairo_scaled_glyph_index (scaled_glyph) != glyph_index)
1206 status = _cairo_scaled_glyph_lookup (info->font, glyph_index,
1207 CAIRO_SCALED_GLYPH_INFO_SURFACE,
1208 &scaled_glyph);
1210 if (unlikely (status))
1211 break;
1213 glyph_cache[cache_index] = scaled_glyph;
1216 glyph_surface = scaled_glyph->surface;
1217 if (glyph_surface->width && glyph_surface->height) {
1218 /* round glyph locations to the nearest pixel */
1219 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1220 x = _cairo_lround (info->glyphs[i].x -
1221 glyph_surface->base.device_transform.x0);
1222 y = _cairo_lround (info->glyphs[i].y -
1223 glyph_surface->base.device_transform.y0);
1225 pixman_image_composite32 (op, src, glyph_surface->pixman_image, dst,
1226 x + src_x, y + src_y,
1227 0, 0,
1228 x - dst_x, y - dst_y,
1229 glyph_surface->width,
1230 glyph_surface->height);
1234 return status;
1236 #endif
/* check_composite():
 * Trivial acceptance hook for the compositor vtables below: the image
 * backend can composite any operator/pattern combination, so always
 * report success. */
1238 static cairo_int_status_t
1239 check_composite (const cairo_composite_rectangles_t *extents)
1241 return CAIRO_STATUS_SUCCESS;
/* _cairo_image_traps_compositor_get():
 * Return the singleton traps compositor for image surfaces, lazily
 * populating its vtable on first use (delegate==NULL is the sentinel
 * for "not yet initialised").
 * NOTE(review): not thread-safe by itself; presumably guarded by a
 * caller-side once/lock — confirm against the rest of the file. */
1244 const cairo_compositor_t *
1245 _cairo_image_traps_compositor_get (void)
1247 static cairo_traps_compositor_t compositor;
1249 if (compositor.base.delegate == NULL) {
1250 _cairo_traps_compositor_init (&compositor,
1251 &__cairo_no_compositor);
1252 compositor.acquire = acquire;
1253 compositor.release = release;
1254 compositor.set_clip_region = set_clip_region;
1255 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1256 compositor.draw_image_boxes = draw_image_boxes;
1257 //compositor.copy_boxes = copy_boxes;
1258 compositor.fill_boxes = fill_boxes;
1259 compositor.check_composite = check_composite;
1260 compositor.composite = composite;
1261 compositor.lerp = lerp;
1262 //compositor.check_composite_boxes = check_composite_boxes;
1263 compositor.composite_boxes = composite_boxes;
1264 //compositor.check_composite_traps = check_composite_traps;
1265 compositor.composite_traps = composite_traps;
1266 //compositor.check_composite_tristrip = check_composite_traps;
1267 #if PIXMAN_VERSION >= PIXMAN_VERSION_ENCODE(0,22,0)
1268 compositor.composite_tristrip = composite_tristrip;
1269 #endif
1270 compositor.check_composite_glyphs = check_composite_glyphs;
1271 compositor.composite_glyphs = composite_glyphs;
1274 return &compositor.base;
/* _cairo_image_mask_compositor_get():
 * Return the singleton mask compositor for image surfaces; it chains
 * to the traps compositor as its delegate for operations it does not
 * implement.  Lazily initialised on first call, same sentinel scheme
 * as _cairo_image_traps_compositor_get(). */
1277 const cairo_compositor_t *
1278 _cairo_image_mask_compositor_get (void)
1280 static cairo_mask_compositor_t compositor;
1282 if (compositor.base.delegate == NULL) {
1283 _cairo_mask_compositor_init (&compositor,
1284 _cairo_image_traps_compositor_get ());
1285 compositor.acquire = acquire;
1286 compositor.release = release;
1287 compositor.set_clip_region = set_clip_region;
1288 compositor.pattern_to_surface = _cairo_image_source_create_for_pattern;
1289 compositor.draw_image_boxes = draw_image_boxes;
1290 compositor.fill_rectangles = fill_rectangles;
1291 compositor.fill_boxes = fill_boxes;
1292 compositor.check_composite = check_composite;
1293 compositor.composite = composite;
1294 //compositor.lerp = lerp;
1295 //compositor.check_composite_boxes = check_composite_boxes;
1296 compositor.composite_boxes = composite_boxes;
1297 compositor.check_composite_glyphs = check_composite_glyphs;
1298 compositor.composite_glyphs = composite_glyphs;
1301 return &compositor.base;
/* Span renderer state when pixman provides an incremental compositor
 * (PIXMAN_HAS_COMPOSITOR): spans are fed straight to
 * pixman_image_compositor_blt().  Must fit inside the abstract
 * renderer's opaque storage (checked below). */
1304 #if PIXMAN_HAS_COMPOSITOR
1305 typedef struct _cairo_image_span_renderer {
1306 cairo_span_renderer_t base;
1308 pixman_image_compositor_t *compositor;
1309 pixman_image_t *src, *mask;
1310 float opacity;
1311 cairo_rectangle_int_t extents;
1312 } cairo_image_span_renderer_t;
1313 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* render_rows callback: bounded extents, opacity == 1.  Blt each
 * covered span at its raw coverage.  @spans is a half-open list, so
 * the loop stops at the final sentinel entry (num_spans > 1). */
1315 static cairo_status_t
1316 _cairo_image_bounded_opaque_spans (void *abstract_renderer,
1317 int y, int height,
1318 const cairo_half_open_span_t *spans,
1319 unsigned num_spans)
1321 cairo_image_span_renderer_t *r = abstract_renderer;
1323 if (num_spans == 0)
1324 return CAIRO_STATUS_SUCCESS;
1326 do {
1327 if (spans[0].coverage)
1328 pixman_image_compositor_blt (r->compositor,
1329 spans[0].x, y,
1330 spans[1].x - spans[0].x, height,
1331 spans[0].coverage);
1332 spans++;
1333 } while (--num_spans > 1);
1335 return CAIRO_STATUS_SUCCESS;
/* render_rows callback: bounded extents with a constant mask opacity;
 * identical to the opaque variant except coverage is scaled by
 * r->opacity before the blt. */
1338 static cairo_status_t
1339 _cairo_image_bounded_spans (void *abstract_renderer,
1340 int y, int height,
1341 const cairo_half_open_span_t *spans,
1342 unsigned num_spans)
1344 cairo_image_span_renderer_t *r = abstract_renderer;
1346 if (num_spans == 0)
1347 return CAIRO_STATUS_SUCCESS;
1349 do {
1350 if (spans[0].coverage) {
1351 pixman_image_compositor_blt (r->compositor,
1352 spans[0].x, y,
1353 spans[1].x - spans[0].x, height,
1354 r->opacity * spans[0].coverage);
1356 spans++;
1357 } while (--num_spans > 1);
1359 return CAIRO_STATUS_SUCCESS;
/* render_rows callback: unbounded operators.  Rows and row-segments
 * that carry no spans must still be written (with zero coverage), so
 * this fills the gap above the current row, the gaps left/right of the
 * span list, then the spans themselves, and tracks progress in
 * r->extents.y for the finish callback.
 * NOTE(review): the zero-coverage blt calls lost their final argument
 * lines in this extraction (e.g. after line 1374); the surrounding
 * tokens are otherwise unmodified. */
1362 static cairo_status_t
1363 _cairo_image_unbounded_spans (void *abstract_renderer,
1364 int y, int height,
1365 const cairo_half_open_span_t *spans,
1366 unsigned num_spans)
1368 cairo_image_span_renderer_t *r = abstract_renderer;
1370 assert (y + height <= r->extents.height);
1371 if (y > r->extents.y) {
1372 pixman_image_compositor_blt (r->compositor,
1373 r->extents.x, r->extents.y,
1374 r->extents.width, y - r->extents.y,
1378 if (num_spans == 0) {
1379 pixman_image_compositor_blt (r->compositor,
1380 r->extents.x, y,
1381 r->extents.width, height,
1383 } else {
1384 if (spans[0].x != r->extents.x) {
1385 pixman_image_compositor_blt (r->compositor,
1386 r->extents.x, y,
1387 spans[0].x - r->extents.x,
1388 height,
1392 do {
1393 assert (spans[0].x < r->extents.x + r->extents.width);
1394 pixman_image_compositor_blt (r->compositor,
1395 spans[0].x, y,
1396 spans[1].x - spans[0].x, height,
1397 r->opacity * spans[0].coverage);
1398 spans++;
1399 } while (--num_spans > 1);
1401 if (spans[0].x != r->extents.x + r->extents.width) {
1402 assert (spans[0].x < r->extents.x + r->extents.width);
1403 pixman_image_compositor_blt (r->compositor,
1404 spans[0].x, y,
1405 r->extents.x + r->extents.width - spans[0].x, height,
1410 r->extents.y = y + height;
1411 return CAIRO_STATUS_SUCCESS;
/* render_rows callback: unbounded operator with a clip.  Only spans
 * not marked "inverse" (i.e. inside the clip) are composited; progress
 * is still recorded in r->extents.y for the finish pass. */
1414 static cairo_status_t
1415 _cairo_image_clipped_spans (void *abstract_renderer,
1416 int y, int height,
1417 const cairo_half_open_span_t *spans,
1418 unsigned num_spans)
1420 cairo_image_span_renderer_t *r = abstract_renderer;
1422 assert (num_spans);
1424 do {
1425 if (! spans[0].inverse)
1426 pixman_image_compositor_blt (r->compositor,
1427 spans[0].x, y,
1428 spans[1].x - spans[0].x, height,
1429 r->opacity * spans[0].coverage);
1430 spans++;
1431 } while (--num_spans > 1);
1433 r->extents.y = y + height;
1434 return CAIRO_STATUS_SUCCESS;
/* finish callback for the unbounded variants: write out (zero
 * coverage) whatever vertical region below the last rendered row was
 * never visited.  r->extents.height was pre-biased by extents.y in
 * span_renderer_init, so it is an absolute bottom coordinate here.
 * NOTE(review): the blt's final coverage argument line was dropped by
 * the extraction (after line 1446). */
1437 static cairo_status_t
1438 _cairo_image_finish_unbounded_spans (void *abstract_renderer)
1440 cairo_image_span_renderer_t *r = abstract_renderer;
1442 if (r->extents.y < r->extents.height) {
1443 pixman_image_compositor_blt (r->compositor,
1444 r->extents.x, r->extents.y,
1445 r->extents.width,
1446 r->extents.height - r->extents.y,
1450 return CAIRO_STATUS_SUCCESS;
/* span_renderer_init():
 * Set up the pixman-compositor-backed span renderer: map the cairo
 * operator to a pixman op (with LERP variants for unbounded CLEAR and
 * SOURCE), realise source and mask patterns as pixman images, pick the
 * row callbacks by boundedness/opacity/clip, and create the pixman
 * incremental compositor over the unbounded extents.
 * Returns NO_MEMORY on pattern realisation failure and
 * NOTHING_TO_DO if pixman declines the compositor.
 * NOTE(review): on those error returns the already-created r->src /
 * r->mask are not unreffed here — presumably span_renderer_fini is
 * always run by the caller; confirm. */
1453 static cairo_int_status_t
1454 span_renderer_init (cairo_abstract_span_renderer_t *_r,
1455 const cairo_composite_rectangles_t *composite,
1456 cairo_bool_t needs_clip)
1458 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
1459 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
1460 const cairo_pattern_t *source = &composite->source_pattern.base;
1461 cairo_operator_t op = composite->op;
1462 int src_x, src_y;
1463 int mask_x, mask_y;
1465 TRACE ((stderr, "%s\n", __FUNCTION__));
1467 if (op == CAIRO_OPERATOR_CLEAR) {
1468 op = PIXMAN_OP_LERP_CLEAR;
1469 } else if (dst->base.is_clear &&
1470 (op == CAIRO_OPERATOR_SOURCE ||
1471 op == CAIRO_OPERATOR_OVER ||
1472 op == CAIRO_OPERATOR_ADD)) {
1473 op = PIXMAN_OP_SRC;
1474 } else if (op == CAIRO_OPERATOR_SOURCE) {
1475 op = PIXMAN_OP_LERP_SRC;
1476 } else {
1477 op = _pixman_operator (op);
1480 r->compositor = NULL;
1481 r->mask = NULL;
1482 r->src = _pixman_image_for_pattern (dst, source, FALSE,
1483 &composite->unbounded,
1484 &composite->source_sample_area,
1485 &src_x, &src_y);
1486 if (unlikely (r->src == NULL))
1487 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* A solid mask collapses to a constant opacity multiplier; any other
 * mask pattern needs its own pixman image. */
1489 r->opacity = 1.0;
1490 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
1491 r->opacity = composite->mask_pattern.solid.color.alpha;
1492 } else {
1493 r->mask = _pixman_image_for_pattern (dst,
1494 &composite->mask_pattern.base,
1495 TRUE,
1496 &composite->unbounded,
1497 &composite->mask_sample_area,
1498 &mask_x, &mask_y);
1499 if (unlikely (r->mask == NULL))
1500 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
1502 /* XXX Component-alpha? */
1503 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
1504 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
1506 pixman_image_unref (r->src);
1507 r->src = r->mask;
1508 src_x = mask_x;
1509 src_y = mask_y;
1510 r->mask = NULL;
1514 if (composite->is_bounded) {
1515 if (r->opacity == 1.)
1516 r->base.render_rows = _cairo_image_bounded_opaque_spans;
1517 else
1518 r->base.render_rows = _cairo_image_bounded_spans;
1519 r->base.finish = NULL;
1520 } else {
1521 if (needs_clip)
1522 r->base.render_rows = _cairo_image_clipped_spans;
1523 else
1524 r->base.render_rows = _cairo_image_unbounded_spans;
1525 r->base.finish = _cairo_image_finish_unbounded_spans;
1526 r->extents = composite->unbounded;
/* Bias height by y so extents.height acts as an absolute bottom edge
 * in the unbounded row/finish callbacks. */
1527 r->extents.height += r->extents.y;
1530 r->compositor =
1531 pixman_image_create_compositor (op, r->src, r->mask, dst->pixman_image,
1532 composite->unbounded.x + src_x,
1533 composite->unbounded.y + src_y,
1534 composite->unbounded.x + mask_x,
1535 composite->unbounded.y + mask_y,
1536 composite->unbounded.x,
1537 composite->unbounded.y,
1538 composite->unbounded.width,
1539 composite->unbounded.height);
1540 if (unlikely (r->compositor == NULL))
1541 return CAIRO_INT_STATUS_NOTHING_TO_DO;
1543 return CAIRO_STATUS_SUCCESS;
/* span_renderer_fini():
 * Tear down the renderer: run the finish callback only on a fully
 * successful composite, then release the pixman compositor and the
 * source/mask images (which may be NULL after a failed init). */
1546 static void
1547 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
1548 cairo_int_status_t status)
1550 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
1552 TRACE ((stderr, "%s\n", __FUNCTION__));
1554 if (status == CAIRO_INT_STATUS_SUCCESS && r->base.finish)
1555 r->base.finish (r);
1557 if (r->compositor)
1558 pixman_image_compositor_destroy (r->compositor);
1560 if (r->src)
1561 pixman_image_unref (r->src);
1562 if (r->mask)
1563 pixman_image_unref (r->mask);
1565 #else
/* Fallback span renderer state (no pixman incremental compositor):
 * the union selects one strategy per composite — solid fill, direct
 * blit, per-span pixman composite, or accumulation into an a8 mask
 * ("finish"/mask member).  The trailing flexible buffer reuses the
 * slack space of cairo_abstract_span_renderer_t for the mask pixels. */
1566 typedef struct _cairo_image_span_renderer {
1567 cairo_span_renderer_t base;
1569 const cairo_composite_rectangles_t *composite;
1571 float opacity;
1572 uint8_t op;
1573 int bpp;
1575 pixman_image_t *src, *mask;
1576 union {
1577 struct fill {
1578 int stride;
1579 uint8_t *data;
1580 uint32_t pixel;
1581 } fill;
1582 struct blit {
1583 int stride;
1584 uint8_t *data;
1585 int src_stride;
1586 uint8_t *src_data;
1587 } blit;
1588 struct composite {
1589 pixman_image_t *dst;
1590 int src_x, src_y;
1591 int mask_x, mask_y;
1592 int run_length;
1593 } composite;
1594 struct finish {
1595 cairo_rectangle_int_t extents;
1596 int src_x, src_y;
1597 int stride;
1598 uint8_t *data;
1599 } mask;
1600 } u;
1601 uint8_t _buf[0];
1602 #define SZ_BUF (int)(sizeof (cairo_abstract_span_renderer_t) - sizeof (cairo_image_span_renderer_t))
1603 } cairo_image_span_renderer_t;
1604 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
/* _cairo_image_spans():
 * Rasterise one row-run of spans into the a8 mask buffer
 * (r->u.mask): write the first row by expanding each span's
 * opacity-scaled coverage with memset, then replicate that row for
 * the remaining @height rows with memcpy. */
1606 static cairo_status_t
1607 _cairo_image_spans (void *abstract_renderer,
1608 int y, int height,
1609 const cairo_half_open_span_t *spans,
1610 unsigned num_spans)
1612 cairo_image_span_renderer_t *r = abstract_renderer;
1613 uint8_t *mask, *row;
1614 int len;
1616 if (num_spans == 0)
1617 return CAIRO_STATUS_SUCCESS;
1619 mask = r->u.mask.data + (y - r->u.mask.extents.y) * r->u.mask.stride;
1620 mask += spans[0].x - r->u.mask.extents.x;
1621 row = mask;
1623 do {
1624 len = spans[1].x - spans[0].x;
1625 if (spans[0].coverage) {
1626 *row++ = r->opacity * spans[0].coverage;
/* Propagate the first byte across the rest of the span. */
1627 if (--len)
1628 memset (row, row[-1], len);
1630 row += len;
1631 spans++;
1632 } while (--num_spans > 1);
1634 len = row - mask;
1635 row = mask;
1636 while (--height) {
1637 mask += r->u.mask.stride;
1638 memcpy (mask, row, len);
1641 return CAIRO_STATUS_SUCCESS;
/* _cairo_image_spans_and_zero():
 * Like _cairo_image_spans() but for unbounded composites: rows the
 * rasteriser skipped, and the margins left/right of the span list,
 * are explicitly zeroed so the whole mask rectangle ends up defined.
 * r->u.mask.data and .extents.y advance past what has been written;
 * the finish callback zeroes any remaining tail. */
1644 static cairo_status_t
1645 _cairo_image_spans_and_zero (void *abstract_renderer,
1646 int y, int height,
1647 const cairo_half_open_span_t *spans,
1648 unsigned num_spans)
1650 cairo_image_span_renderer_t *r = abstract_renderer;
1651 uint8_t *mask;
1652 int len;
1654 mask = r->u.mask.data;
1655 if (y > r->u.mask.extents.y) {
1656 len = (y - r->u.mask.extents.y) * r->u.mask.stride;
1657 memset (mask, 0, len);
1658 mask += len;
1661 r->u.mask.extents.y = y + height;
1662 r->u.mask.data = mask + height * r->u.mask.stride;
1663 if (num_spans == 0) {
1664 memset (mask, 0, height * r->u.mask.stride);
1665 } else {
1666 uint8_t *row = mask;
1668 if (spans[0].x != r->u.mask.extents.x) {
1669 len = spans[0].x - r->u.mask.extents.x;
1670 memset (row, 0, len);
1671 row += len;
1674 do {
1675 len = spans[1].x - spans[0].x;
1676 *row++ = r->opacity * spans[0].coverage;
1677 if (len > 1) {
1678 memset (row, row[-1], --len);
1679 row += len;
1681 spans++;
1682 } while (--num_spans > 1);
1684 if (spans[0].x != r->u.mask.extents.x + r->u.mask.extents.width) {
1685 len = r->u.mask.extents.x + r->u.mask.extents.width - spans[0].x;
1686 memset (row, 0, len);
/* Replicate the completed first row down the rest of the band. */
1689 row = mask;
1690 while (--height) {
1691 mask += r->u.mask.stride;
1692 memcpy (mask, row, r->u.mask.extents.width);
1696 return CAIRO_STATUS_SUCCESS;
/* finish callback paired with _cairo_image_spans_and_zero(): zero any
 * rows below the last band written (extents.height is the absolute
 * bottom edge here, matching the init-time bias). */
1699 static cairo_status_t
1700 _cairo_image_finish_spans_and_zero (void *abstract_renderer)
1702 cairo_image_span_renderer_t *r = abstract_renderer;
1704 if (r->u.mask.extents.y < r->u.mask.extents.height)
1705 memset (r->u.mask.data, 0, (r->u.mask.extents.height - r->u.mask.extents.y) * r->u.mask.stride);
1707 return CAIRO_STATUS_SUCCESS;
/* _fill8_spans():
 * Mono (coverage is 0 or full) solid fill of an 8bpp destination with
 * the precomputed r->u.fill.pixel.  h==1 is the hot path; taller
 * bands repeat the fill row by row. */
1710 static cairo_status_t
1711 _fill8_spans (void *abstract_renderer, int y, int h,
1712 const cairo_half_open_span_t *spans, unsigned num_spans)
1714 cairo_image_span_renderer_t *r = abstract_renderer;
1716 if (num_spans == 0)
1717 return CAIRO_STATUS_SUCCESS;
1719 if (likely(h == 1)) {
1720 do {
1721 if (spans[0].coverage) {
1722 int len = spans[1].x - spans[0].x;
1723 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
1724 if (len == 1)
1725 *d = r->u.fill.pixel;
1726 else
1727 memset(d, r->u.fill.pixel, len);
1729 spans++;
1730 } while (--num_spans > 1);
1731 } else {
1732 do {
1733 if (spans[0].coverage) {
1734 int yy = y, hh = h;
1735 do {
1736 int len = spans[1].x - spans[0].x;
1737 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
1738 if (len == 1)
1739 *d = r->u.fill.pixel;
1740 else
1741 memset(d, r->u.fill.pixel, len);
1742 yy++;
1743 } while (--hh);
1745 spans++;
1746 } while (--num_spans > 1);
1749 return CAIRO_STATUS_SUCCESS;
/* _fill16_spans():
 * Mono solid fill for 16bpp destinations; same structure as
 * _fill8_spans but stores uint16_t pixels one at a time (no memset
 * equivalent for 16-bit values). */
1752 static cairo_status_t
1753 _fill16_spans (void *abstract_renderer, int y, int h,
1754 const cairo_half_open_span_t *spans, unsigned num_spans)
1756 cairo_image_span_renderer_t *r = abstract_renderer;
1758 if (num_spans == 0)
1759 return CAIRO_STATUS_SUCCESS;
1761 if (likely(h == 1)) {
1762 do {
1763 if (spans[0].coverage) {
1764 int len = spans[1].x - spans[0].x;
1765 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*2);
1766 while (len--)
1767 *d++ = r->u.fill.pixel;
1769 spans++;
1770 } while (--num_spans > 1);
1771 } else {
1772 do {
1773 if (spans[0].coverage) {
1774 int yy = y, hh = h;
1775 do {
1776 int len = spans[1].x - spans[0].x;
1777 uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*2);
1778 while (len--)
1779 *d++ = r->u.fill.pixel;
1780 yy++;
1781 } while (--hh);
1783 spans++;
1784 } while (--num_spans > 1);
1787 return CAIRO_STATUS_SUCCESS;
/* _fill32_spans():
 * Mono solid fill for 32bpp destinations.  Long spans are handed to
 * pixman_fill (which may use SIMD); short spans are stored with a
 * plain store loop to avoid pixman's call overhead.
 * NOTE(review): the length thresholds differ (32 for h==1, 16 for
 * taller bands) — presumably tuned, not a typo; confirm upstream. */
1790 static cairo_status_t
1791 _fill32_spans (void *abstract_renderer, int y, int h,
1792 const cairo_half_open_span_t *spans, unsigned num_spans)
1794 cairo_image_span_renderer_t *r = abstract_renderer;
1796 if (num_spans == 0)
1797 return CAIRO_STATUS_SUCCESS;
1799 if (likely(h == 1)) {
1800 do {
1801 if (spans[0].coverage) {
1802 int len = spans[1].x - spans[0].x;
1803 if (len > 32) {
1804 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1805 spans[0].x, y, len, 1, r->u.fill.pixel);
1806 } else {
1807 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
1808 while (len--)
1809 *d++ = r->u.fill.pixel;
1812 spans++;
1813 } while (--num_spans > 1);
1814 } else {
1815 do {
1816 if (spans[0].coverage) {
1817 if (spans[1].x - spans[0].x > 16) {
1818 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
1819 spans[0].x, y, spans[1].x - spans[0].x, h,
1820 r->u.fill.pixel);
1821 } else {
1822 int yy = y, hh = h;
1823 do {
1824 int len = spans[1].x - spans[0].x;
1825 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
1826 while (len--)
1827 *d++ = r->u.fill.pixel;
1828 yy++;
1829 } while (--hh);
1832 spans++;
1833 } while (--num_spans > 1);
1836 return CAIRO_STATUS_SUCCESS;
/* Disabled (#if 0) generic pixman_fill-based span filler, superseded
 * by the bpp-specific _fill{8,16,32}_spans above.  Kept upstream for
 * reference only. */
1839 #if 0
1840 static cairo_status_t
1841 _fill_spans (void *abstract_renderer, int y, int h,
1842 const cairo_half_open_span_t *spans, unsigned num_spans)
1844 cairo_image_span_renderer_t *r = abstract_renderer;
1846 if (num_spans == 0)
1847 return CAIRO_STATUS_SUCCESS;
1849 do {
1850 if (spans[0].coverage) {
1851 pixman_fill ((uint32_t *) r->data, r->stride, r->bpp,
1852 spans[0].x, y,
1853 spans[1].x - spans[0].x, h,
1854 r->pixel);
1856 spans++;
1857 } while (--num_spans > 1);
1859 return CAIRO_STATUS_SUCCESS;
1861 #endif
/* _blit_spans():
 * Mono direct copy from a same-format source image to the
 * destination (set up by mono_renderer_init's integer-translation
 * fast path).  Small power-of-two byte counts are copied with single
 * loads/stores, larger runs with memcpy.
 * NOTE(review): the fixed-size stores cast byte pointers to wider
 * types — relies on pixman row alignment; inherited from upstream. */
1863 static cairo_status_t
1864 _blit_spans (void *abstract_renderer, int y, int h,
1865 const cairo_half_open_span_t *spans, unsigned num_spans)
1867 cairo_image_span_renderer_t *r = abstract_renderer;
1868 int cpp;
1870 if (num_spans == 0)
1871 return CAIRO_STATUS_SUCCESS;
1873 cpp = r->bpp/8;
1874 if (likely (h == 1)) {
1875 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
1876 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
1877 do {
1878 if (spans[0].coverage) {
1879 void *s = src + spans[0].x*cpp;
1880 void *d = dst + spans[0].x*cpp;
1881 int len = (spans[1].x - spans[0].x) * cpp;
1882 switch (len) {
1883 case 1:
1884 *(uint8_t *)d = *(uint8_t *)s;
1885 break;
1886 case 2:
1887 *(uint16_t *)d = *(uint16_t *)s;
1888 break;
1889 case 4:
1890 *(uint32_t *)d = *(uint32_t *)s;
1891 break;
1892 #if HAVE_UINT64_T
1893 case 8:
1894 *(uint64_t *)d = *(uint64_t *)s;
1895 break;
1896 #endif
1897 default:
1898 memcpy(d, s, len);
1899 break;
1902 spans++;
1903 } while (--num_spans > 1);
1904 } else {
1905 do {
1906 if (spans[0].coverage) {
1907 int yy = y, hh = h;
1908 do {
1909 void *src = r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x*cpp;
1910 void *dst = r->u.blit.data + yy*r->u.blit.stride + spans[0].x*cpp;
1911 int len = (spans[1].x - spans[0].x) * cpp;
1912 switch (len) {
1913 case 1:
1914 *(uint8_t *)dst = *(uint8_t *)src;
1915 break;
1916 case 2:
1917 *(uint16_t *)dst = *(uint16_t *)src;
1918 break;
1919 case 4:
1920 *(uint32_t *)dst = *(uint32_t *)src;
1921 break;
1922 #if HAVE_UINT64_T
1923 case 8:
1924 *(uint64_t *)dst = *(uint64_t *)src;
1925 break;
1926 #endif
1927 default:
1928 memcpy(dst, src, len);
1929 break;
1931 yy++;
1932 } while (--hh);
1934 spans++;
1935 } while (--num_spans > 1);
1938 return CAIRO_STATUS_SUCCESS;
/* _mono_spans():
 * Mono bounded fallback: one pixman_image_composite32() per covered
 * span, with r->op and the source offsets prepared by
 * mono_renderer_init. */
1941 static cairo_status_t
1942 _mono_spans (void *abstract_renderer, int y, int h,
1943 const cairo_half_open_span_t *spans, unsigned num_spans)
1945 cairo_image_span_renderer_t *r = abstract_renderer;
1947 if (num_spans == 0)
1948 return CAIRO_STATUS_SUCCESS;
1950 do {
1951 if (spans[0].coverage) {
1952 pixman_image_composite32 (r->op,
1953 r->src, NULL, r->u.composite.dst,
1954 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1955 0, 0,
1956 spans[0].x, y,
1957 spans[1].x - spans[0].x, h);
1959 spans++;
1960 } while (--num_spans > 1);
1962 return CAIRO_STATUS_SUCCESS;
/* _mono_unbounded_spans():
 * Mono unbounded fallback: uncovered regions must be cleared.  Clears
 * the skipped rows above this band, the left/right margins, then
 * composites each span with r->op (covered) or PIXMAN_OP_CLEAR
 * (uncovered).  u.composite.mask_y tracks the last row written so the
 * finish callback can clear the remainder. */
1965 static cairo_status_t
1966 _mono_unbounded_spans (void *abstract_renderer, int y, int h,
1967 const cairo_half_open_span_t *spans, unsigned num_spans)
1969 cairo_image_span_renderer_t *r = abstract_renderer;
1971 if (num_spans == 0) {
1972 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1973 r->src, NULL, r->u.composite.dst,
1974 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1975 0, 0,
1976 r->composite->unbounded.x, y,
1977 r->composite->unbounded.width, h);
1978 r->u.composite.mask_y = y + h;
1979 return CAIRO_STATUS_SUCCESS;
1982 if (y != r->u.composite.mask_y) {
1983 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1984 r->src, NULL, r->u.composite.dst,
1985 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1986 0, 0,
1987 r->composite->unbounded.x, r->u.composite.mask_y,
1988 r->composite->unbounded.width, y - r->u.composite.mask_y);
1991 if (spans[0].x != r->composite->unbounded.x) {
1992 pixman_image_composite32 (PIXMAN_OP_CLEAR,
1993 r->src, NULL, r->u.composite.dst,
1994 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
1995 0, 0,
1996 r->composite->unbounded.x, y,
1997 spans[0].x - r->composite->unbounded.x, h);
2000 do {
2001 int op = spans[0].coverage ? r->op : PIXMAN_OP_CLEAR;
2002 pixman_image_composite32 (op,
2003 r->src, NULL, r->u.composite.dst,
2004 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2005 0, 0,
2006 spans[0].x, y,
2007 spans[1].x - spans[0].x, h);
2008 spans++;
2009 } while (--num_spans > 1);
2011 if (spans[0].x != r->composite->unbounded.x + r->composite->unbounded.width) {
2012 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2013 r->src, NULL, r->u.composite.dst,
2014 spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
2015 0, 0,
2016 spans[0].x, y,
2017 r->composite->unbounded.x + r->composite->unbounded.width - spans[0].x, h);
2020 r->u.composite.mask_y = y + h;
2021 return CAIRO_STATUS_SUCCESS;
/* finish callback for _mono_unbounded_spans(): clear every row below
 * the last band that was composited, down to the bottom of the
 * unbounded extents. */
2024 static cairo_status_t
2025 _mono_finish_unbounded_spans (void *abstract_renderer)
2027 cairo_image_span_renderer_t *r = abstract_renderer;
2029 if (r->u.composite.mask_y < r->composite->unbounded.y + r->composite->unbounded.height) {
2030 pixman_image_composite32 (PIXMAN_OP_CLEAR,
2031 r->src, NULL, r->u.composite.dst,
2032 r->composite->unbounded.x + r->u.composite.src_x, r->u.composite.mask_y + r->u.composite.src_y,
2033 0, 0,
2034 r->composite->unbounded.x, r->u.composite.mask_y,
2035 r->composite->unbounded.width,
2036 r->composite->unbounded.y + r->composite->unbounded.height - r->u.composite.mask_y);
2039 return CAIRO_STATUS_SUCCESS;
/* mono_renderer_init():
 * Configure the renderer for ANTIALIAS_NONE with an opaque solid
 * mask.  Fast paths, in order: (1) solid source whose operator
 * reduces to a raw pixel write -> bpp-specific _fill*_spans;
 * (2) same-format image source at an in-bounds integer translation ->
 * _blit_spans; (3) otherwise generic per-span pixman compositing
 * (_mono_spans / _mono_unbounded_spans).  Returns UNSUPPORTED to let
 * the caller fall back to the general renderer. */
2042 static cairo_int_status_t
2043 mono_renderer_init (cairo_image_span_renderer_t *r,
2044 const cairo_composite_rectangles_t *composite,
2045 cairo_antialias_t antialias,
2046 cairo_bool_t needs_clip)
2048 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2050 if (antialias != CAIRO_ANTIALIAS_NONE)
2051 return CAIRO_INT_STATUS_UNSUPPORTED;
2053 if (!_cairo_pattern_is_opaque_solid (&composite->mask_pattern.base))
2054 return CAIRO_INT_STATUS_UNSUPPORTED;
2056 r->base.render_rows = NULL;
2057 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2058 const cairo_color_t *color;
2060 color = &composite->source_pattern.solid.color;
2061 if (composite->op == CAIRO_OPERATOR_CLEAR)
2062 color = CAIRO_COLOR_TRANSPARENT;
2064 if (fill_reduces_to_source (composite->op, color, dst, &r->u.fill.pixel)) {
2065 /* Use plain C for the fill operations as the span length is
2066 * typically small, too small to payback the startup overheads of
2067 * using SSE2 etc.
2069 switch (PIXMAN_FORMAT_BPP(dst->pixman_format)) {
2070 case 8: r->base.render_rows = _fill8_spans; break;
2071 case 16: r->base.render_rows = _fill16_spans; break;
2072 case 32: r->base.render_rows = _fill32_spans; break;
2073 default: break;
2075 r->u.fill.data = dst->data;
2076 r->u.fill.stride = dst->stride;
2078 } else if ((composite->op == CAIRO_OPERATOR_SOURCE ||
2079 (composite->op == CAIRO_OPERATOR_OVER &&
2080 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2081 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2082 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2083 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2085 cairo_image_surface_t *src =
2086 to_image_surface(composite->source_pattern.surface.surface);
2087 int tx, ty;
2089 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2090 &tx, &ty) &&
2091 composite->bounded.x + tx >= 0 &&
2092 composite->bounded.y + ty >= 0 &&
2093 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2094 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2096 r->u.blit.stride = dst->stride;
2097 r->u.blit.data = dst->data;
2098 r->u.blit.src_stride = src->stride;
/* NOTE(review): "* 4" assumes a 4-byte pixel here even though
 * _blit_spans derives cpp from r->bpp — presumably this path is only
 * reached for 32bpp formats; confirm upstream. */
2099 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2100 r->base.render_rows = _blit_spans;
2104 if (r->base.render_rows == NULL) {
2105 r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
2106 &composite->unbounded,
2107 &composite->source_sample_area,
2108 &r->u.composite.src_x, &r->u.composite.src_y);
2109 if (unlikely (r->src == NULL))
2110 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2112 r->u.composite.dst = to_pixman_image (composite->surface);
2113 r->op = _pixman_operator (composite->op);
2114 if (composite->is_bounded == 0) {
2115 r->base.render_rows = _mono_unbounded_spans;
2116 r->base.finish = _mono_finish_unbounded_spans;
2117 r->u.composite.mask_y = composite->unbounded.y;
2118 } else
2119 r->base.render_rows = _mono_spans;
2121 r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
2123 return CAIRO_INT_STATUS_SUCCESS;
/* Fixed-point 8-bit channel arithmetic for the lerp span fillers.
 * The "RB" helpers operate on two 8-bit channels packed into the even
 * bytes of a 32-bit word (bytes 0 and 2), matching pixman's a8r8g8b8
 * layout when processed as two interleaved pairs. */
#define ONE_HALF 0x7f
#define RB_MASK 0x00ff00ff
#define RB_ONE_HALF 0x007f007f
#define RB_MASK_PLUS_ONE 0x01000100
#define G_SHIFT 8

/* Multiply both packed channels of @a by the 8-bit factor @b with
 * round-to-nearest (per channel: x*b/255). */
static inline uint32_t
mul8x2_8 (uint32_t a, uint8_t b)
{
    uint32_t acc = (a & RB_MASK) * b + RB_ONE_HALF;

    acc += (acc >> G_SHIFT) & RB_MASK;
    return (acc >> G_SHIFT) & RB_MASK;
}

/* Per-channel saturating add of two RB-packed words. */
static inline uint32_t
add8x2_8x2 (uint32_t a, uint32_t b)
{
    uint32_t sum = a + b;

    /* A channel that overflowed leaves a carry in the odd byte above
     * it; turn that carry into a saturated 0xff for the channel. */
    sum |= RB_MASK_PLUS_ONE - ((sum >> G_SHIFT) & RB_MASK);
    return sum & RB_MASK;
}

/* Rounded scalar 8-bit multiply: a*b/255. */
static inline uint8_t
mul8_8 (uint8_t a, uint8_t b)
{
    uint16_t prod = a * (uint16_t)b + ONE_HALF;

    return ((prod >> G_SHIFT) + prod) >> G_SHIFT;
}

/* Linear interpolation of all four channels at once:
 * result = src*a/255 + dst*(255-a)/255, computed on the even-byte and
 * odd-byte channel pairs separately and recombined. */
static inline uint32_t
lerp8x4 (uint32_t src, uint8_t a, uint32_t dst)
{
    uint32_t even = add8x2_8x2 (mul8x2_8 (src, a),
				mul8x2_8 (dst, ~a));
    uint32_t odd = add8x2_8x2 (mul8x2_8 (src >> G_SHIFT, a),
			       mul8x2_8 (dst >> G_SHIFT, ~a));

    return even | (odd << G_SHIFT);
}
/* _fill_a8_lerp_opaque_spans():
 * Antialiased solid fill of an a8 destination with an opaque source:
 * full-coverage spans become a memset; partial coverage lerps the
 * existing byte toward the source pixel (d' = d*(255-a)/255 +
 * pixel*a/255, here precomputed as t + s). */
2162 static cairo_status_t
2163 _fill_a8_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2164 const cairo_half_open_span_t *spans, unsigned num_spans)
2166 cairo_image_span_renderer_t *r = abstract_renderer;
2168 if (num_spans == 0)
2169 return CAIRO_STATUS_SUCCESS;
2171 if (likely(h == 1)) {
2172 uint8_t *d = r->u.fill.data + r->u.fill.stride*y;
2173 do {
2174 uint8_t a = spans[0].coverage;
2175 if (a) {
2176 int len = spans[1].x - spans[0].x;
2177 if (a == 0xff) {
2178 memset(d + spans[0].x, r->u.fill.pixel, len);
2179 } else {
2180 uint8_t s = mul8_8(a, r->u.fill.pixel);
2181 uint8_t *dst = d + spans[0].x;
2182 a = ~a;
2183 while (len--) {
2184 uint8_t t = mul8_8(*dst, a);
2185 *dst++ = t + s;
2189 spans++;
2190 } while (--num_spans > 1);
2191 } else {
2192 do {
2193 uint8_t a = spans[0].coverage;
2194 if (a) {
2195 int yy = y, hh = h;
2196 if (a == 0xff) {
2197 do {
2198 int len = spans[1].x - spans[0].x;
2199 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2200 memset(d, r->u.fill.pixel, len);
2201 yy++;
2202 } while (--hh);
2203 } else {
2204 uint8_t s = mul8_8(a, r->u.fill.pixel);
2205 a = ~a;
2206 do {
2207 int len = spans[1].x - spans[0].x;
2208 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2209 while (len--) {
2210 uint8_t t = mul8_8(*d, a);
2211 *d++ = t + s;
2213 yy++;
2214 } while (--hh);
2217 spans++;
2218 } while (--num_spans > 1);
2221 return CAIRO_STATUS_SUCCESS;
/* _fill_xrgb32_lerp_opaque_spans():
 * Antialiased solid fill of a 32-bit destination with an opaque
 * source: full-coverage spans use pixman_fill (long) or a store loop
 * (short); partial coverage blends each pixel with lerp8x4. */
2224 static cairo_status_t
2225 _fill_xrgb32_lerp_opaque_spans (void *abstract_renderer, int y, int h,
2226 const cairo_half_open_span_t *spans, unsigned num_spans)
2228 cairo_image_span_renderer_t *r = abstract_renderer;
2230 if (num_spans == 0)
2231 return CAIRO_STATUS_SUCCESS;
2233 if (likely(h == 1)) {
2234 do {
2235 uint8_t a = spans[0].coverage;
2236 if (a) {
2237 int len = spans[1].x - spans[0].x;
2238 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2239 if (a == 0xff) {
2240 if (len > 31) {
2241 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), 32,
2242 spans[0].x, y, len, 1, r->u.fill.pixel);
2243 } else {
/* NOTE(review): this inner `d` shadows the outer one above —
 * harmless, both point at the same pixels. */
2244 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2245 while (len-- > 0)
2246 *d++ = r->u.fill.pixel;
2248 } else while (len-- > 0) {
2249 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2250 d++;
2253 spans++;
2254 } while (--num_spans > 1);
2255 } else {
2256 do {
2257 uint8_t a = spans[0].coverage;
2258 if (a) {
2259 if (a == 0xff) {
2260 if (spans[1].x - spans[0].x > 16) {
2261 pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), 32,
2262 spans[0].x, y, spans[1].x - spans[0].x, h,
2263 r->u.fill.pixel);
2264 } else {
2265 int yy = y, hh = h;
2266 do {
2267 int len = spans[1].x - spans[0].x;
2268 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2269 while (len--)
2270 *d++ = r->u.fill.pixel;
2271 yy++;
2272 } while (--hh);
2274 } else {
2275 int yy = y, hh = h;
2276 do {
2277 int len = spans[1].x - spans[0].x;
2278 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2279 while (len--) {
2280 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2281 d++;
2283 yy++;
2284 } while (--hh);
2287 spans++;
2288 } while (--num_spans > 1);
2291 return CAIRO_STATUS_SUCCESS;
2294 static cairo_status_t
/* Fill coverage spans of an A8 surface with the constant alpha value
 * r->u.fill.pixel, scaling each span's coverage by r->bpp (which here
 * holds the solid mask's 8-bit opacity — see inplace_renderer_init).
 * The blend is a fixed-point lerp: d = d*(255-a)/255 + pixel*a/255. */
2295 _fill_a8_lerp_spans (void *abstract_renderer, int y, int h,
2296 const cairo_half_open_span_t *spans, unsigned num_spans)
2298 cairo_image_span_renderer_t *r = abstract_renderer;
2300 if (num_spans == 0)
2301 return CAIRO_STATUS_SUCCESS;
2303 if (likely(h == 1)) {
2304 do {
/* Effective alpha = span coverage * mask opacity, both 8-bit. */
2305 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2306 if (a) {
2307 int len = spans[1].x - spans[0].x;
2308 uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
/* p pre-adds 0x7f for round-to-nearest in the /255 approximation below. */
2309 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2310 uint16_t ia = ~a;
2311 while (len--) {
2312 uint16_t t = *d*ia + p;
/* (t + (t>>8)) >> 8 is the standard fast approximation of t/255. */
2313 *d++ = (t + (t>>8)) >> 8;
2316 spans++;
2317 } while (--num_spans > 1);
2318 } else {
2319 do {
2320 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2321 if (a) {
2322 int yy = y, hh = h;
2323 uint16_t p = (uint16_t)a * r->u.fill.pixel + 0x7f;
2324 uint16_t ia = ~a;
2325 do {
2326 int len = spans[1].x - spans[0].x;
2327 uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
2328 while (len--) {
2329 uint16_t t = *d*ia + p;
2330 *d++ = (t + (t>>8)) >> 8;
2332 yy++;
2333 } while (--hh);
2335 spans++;
2336 } while (--num_spans > 1);
2339 return CAIRO_STATUS_SUCCESS;
2342 static cairo_status_t
/* Fill coverage spans of an xRGB32/ARGB32 surface with the constant pixel
 * r->u.fill.pixel, with coverage additionally scaled by the solid mask
 * opacity stored in r->bpp.  Unlike the *_opaque variant there is no
 * full-coverage pixman_fill shortcut, since a < 0xff in general. */
2343 _fill_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2344 const cairo_half_open_span_t *spans, unsigned num_spans)
2346 cairo_image_span_renderer_t *r = abstract_renderer;
2348 if (num_spans == 0)
2349 return CAIRO_STATUS_SUCCESS;
2351 if (likely(h == 1)) {
2352 do {
2353 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2354 if (a) {
2355 int len = spans[1].x - spans[0].x;
2356 uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
2357 while (len--) {
/* Per-channel lerp of the fill pixel over the destination. */
2358 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2359 d++;
2362 spans++;
2363 } while (--num_spans > 1);
2364 } else {
2365 do {
2366 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2367 if (a) {
2368 int yy = y, hh = h;
2369 do {
2370 int len = spans[1].x - spans[0].x;
2371 uint32_t *d = (uint32_t *)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
2372 while (len--) {
2373 *d = lerp8x4 (r->u.fill.pixel, a, *d);
2374 d++;
2376 yy++;
2377 } while (--hh);
2379 spans++;
2380 } while (--num_spans > 1);
2383 return CAIRO_STATUS_SUCCESS;
2386 static cairo_status_t
/* Blit coverage spans from a same-format 32-bit source image onto the
 * destination (the SOURCE/OVER-on-clear fast path set up by
 * inplace_renderer_init).  Full-coverage spans become memcpy; partial
 * coverage lerps source over destination. */
2387 _blit_xrgb32_lerp_spans (void *abstract_renderer, int y, int h,
2388 const cairo_half_open_span_t *spans, unsigned num_spans)
2390 cairo_image_span_renderer_t *r = abstract_renderer;
2392 if (num_spans == 0)
2393 return CAIRO_STATUS_SUCCESS;
2395 if (likely(h == 1)) {
/* Row base pointers hoisted out of the span loop for the h==1 case. */
2396 uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
2397 uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
2398 do {
2399 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2400 if (a) {
2401 uint32_t *s = (uint32_t*)src + spans[0].x;
2402 uint32_t *d = (uint32_t*)dst + spans[0].x;
2403 int len = spans[1].x - spans[0].x;
2404 if (a == 0xff) {
/* Straight copy; single pixel avoids memcpy call overhead. */
2405 if (len == 1)
2406 *d = *s;
2407 else
2408 memcpy(d, s, len*4);
2409 } else {
2410 while (len--) {
2411 *d = lerp8x4 (*s, a, *d);
2412 s++, d++;
2416 spans++;
2417 } while (--num_spans > 1);
2418 } else {
2419 do {
2420 uint8_t a = mul8_8 (spans[0].coverage, r->bpp);
2421 if (a) {
2422 int yy = y, hh = h;
2423 do {
2424 uint32_t *s = (uint32_t *)(r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x * 4);
2425 uint32_t *d = (uint32_t *)(r->u.blit.data + yy*r->u.blit.stride + spans[0].x * 4);
2426 int len = spans[1].x - spans[0].x;
2427 if (a == 0xff) {
2428 if (len == 1)
2429 *d = *s;
2430 else
2431 memcpy(d, s, len * 4);
2432 } else {
2433 while (len--) {
2434 *d = lerp8x4 (*s, a, *d);
2435 s++, d++;
2438 yy++;
2439 } while (--hh);
2441 spans++;
2442 } while (--num_spans > 1);
2445 return CAIRO_STATUS_SUCCESS;
2448 static cairo_status_t
/* Generic in-place span renderer: builds a one-row a8 coverage mask in
 * r->mask and composites it with pixman.  Long fully-opaque or fully-clear
 * runs are flushed early so they can be composited without a mask (or
 * skipped), bounded by the r->u.composite.run_length heuristic. */
2449 _inplace_spans (void *abstract_renderer,
2450 int y, int h,
2451 const cairo_half_open_span_t *spans,
2452 unsigned num_spans)
2454 cairo_image_span_renderer_t *r = abstract_renderer;
2455 uint8_t *mask;
2456 int x0, x1;
2458 if (num_spans == 0)
2459 return CAIRO_STATUS_SUCCESS;
/* Single fully-opaque span: composite directly, no mask needed. */
2461 if (num_spans == 2 && spans[0].coverage == 0xff) {
2462 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2463 spans[0].x + r->u.composite.src_x,
2464 y + r->u.composite.src_y,
2465 0, 0,
2466 spans[0].x, y,
2467 spans[1].x - spans[0].x, h);
2468 return CAIRO_STATUS_SUCCESS;
2471 mask = (uint8_t *)pixman_image_get_data (r->mask);
/* [x0, x1) tracks the accumulated-but-not-yet-composited mask range. */
2472 x1 = x0 = spans[0].x;
2473 do {
2474 int len = spans[1].x - spans[0].x;
2475 *mask++ = spans[0].coverage;
2476 if (len > 1) {
/* Long opaque run: flush the pending masked range, then composite the
 * opaque run unmasked, and restart mask accumulation after it. */
2477 if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
2478 if (x1 != x0) {
2479 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2480 x0 + r->u.composite.src_x,
2481 y + r->u.composite.src_y,
2482 0, 0,
2483 x0, y,
2484 x1 - x0, h);
2486 pixman_image_composite32 (r->op, r->src, NULL, r->u.composite.dst,
2487 spans[0].x + r->u.composite.src_x,
2488 y + r->u.composite.src_y,
2489 0, 0,
2490 spans[0].x, y,
2491 len, h);
2492 mask = (uint8_t *)pixman_image_get_data (r->mask);
2493 x0 = spans[1].x;
/* Long clear run after enough accumulated coverage: flush and skip it. */
2494 } else if (spans[0].coverage == 0x0 &&
2495 x1 - x0 > r->u.composite.run_length) {
2496 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2497 x0 + r->u.composite.src_x,
2498 y + r->u.composite.src_y,
2499 0, 0,
2500 x0, y,
2501 x1 - x0, h);
2502 mask = (uint8_t *)pixman_image_get_data (r->mask);
2503 x0 = spans[1].x;
2504 }else {
/* --len because the first mask byte was already written above. */
2505 memset (mask, spans[0].coverage, --len);
2506 mask += len;
2509 x1 = spans[1].x;
2510 spans++;
2511 } while (--num_spans > 1);
/* Composite whatever mask range remains. */
2513 if (x1 != x0) {
2514 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2515 x0 + r->u.composite.src_x,
2516 y + r->u.composite.src_y,
2517 0, 0,
2518 x0, y,
2519 x1 - x0, h);
2522 return CAIRO_STATUS_SUCCESS;
2525 static cairo_status_t
/* Like _inplace_spans, but each span's coverage is first multiplied by the
 * solid mask opacity in r->bpp, so there is no "fully opaque" unmasked
 * shortcut — only the long-clear-run early flush survives. */
2526 _inplace_opacity_spans (void *abstract_renderer, int y, int h,
2527 const cairo_half_open_span_t *spans,
2528 unsigned num_spans)
2530 cairo_image_span_renderer_t *r = abstract_renderer;
2531 uint8_t *mask;
2532 int x0, x1;
2534 if (num_spans == 0)
2535 return CAIRO_STATUS_SUCCESS;
2537 mask = (uint8_t *)pixman_image_get_data (r->mask);
2538 x1 = x0 = spans[0].x;
2539 do {
2540 int len = spans[1].x - spans[0].x;
/* Modulate coverage by the mask opacity before writing it out. */
2541 uint8_t m = mul8_8(spans[0].coverage, r->bpp);
2542 *mask++ = m;
2543 if (len > 1) {
/* Long zero run: flush the pending range and skip over the hole. */
2544 if (m == 0 &&
2545 x1 - x0 > r->u.composite.run_length) {
2546 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2547 x0 + r->u.composite.src_x,
2548 y + r->u.composite.src_y,
2549 0, 0,
2550 x0, y,
2551 x1 - x0, h);
2552 mask = (uint8_t *)pixman_image_get_data (r->mask);
2553 x0 = spans[1].x;
2554 }else {
2555 memset (mask, m, --len);
2556 mask += len;
2559 x1 = spans[1].x;
2560 spans++;
2561 } while (--num_spans > 1);
2563 if (x1 != x0) {
2564 pixman_image_composite32 (r->op, r->src, r->mask, r->u.composite.dst,
2565 x0 + r->u.composite.src_x,
2566 y + r->u.composite.src_y,
2567 0, 0,
2568 x0, y,
2569 x1 - x0, h);
2572 return CAIRO_STATUS_SUCCESS;
2575 static cairo_status_t
/* In-place span renderer for the unbounded SOURCE operator.  Accumulates
 * coverage into r->_buf; the masked region must *replace* the destination,
 * so each flush uses PIXMAN_OP_LERP_SRC when pixman provides it, or the
 * two-pass OUT_REVERSE (clear under mask) + ADD emulation otherwise. */
2576 _inplace_src_spans (void *abstract_renderer, int y, int h,
2577 const cairo_half_open_span_t *spans,
2578 unsigned num_spans)
2580 cairo_image_span_renderer_t *r = abstract_renderer;
2581 uint8_t *m;
2582 int x0;
2584 if (num_spans == 0)
2585 return CAIRO_STATUS_SUCCESS;
2587 x0 = spans[0].x;
2588 m = r->_buf;
2589 do {
2590 int len = spans[1].x - spans[0].x;
/* Long fully-opaque run: flush the accumulated masked range, then blast
 * the run with plain OP_SRC (no mask needed at full coverage). */
2591 if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
2592 if (spans[0].x != x0) {
2593 #if PIXMAN_HAS_OP_LERP
2594 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2595 r->src, r->mask, r->u.composite.dst,
2596 x0 + r->u.composite.src_x,
2597 y + r->u.composite.src_y,
2598 0, 0,
2599 x0, y,
2600 spans[0].x - x0, h);
2601 #else
/* Emulate LERP_SRC: knock out dst under the mask, then add masked src. */
2602 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2603 r->mask, NULL, r->u.composite.dst,
2604 0, 0,
2605 0, 0,
2606 x0, y,
2607 spans[0].x - x0, h);
2608 pixman_image_composite32 (PIXMAN_OP_ADD,
2609 r->src, r->mask, r->u.composite.dst,
2610 x0 + r->u.composite.src_x,
2611 y + r->u.composite.src_y,
2612 0, 0,
2613 x0, y,
2614 spans[0].x - x0, h);
2615 #endif
2618 pixman_image_composite32 (PIXMAN_OP_SRC,
2619 r->src, NULL, r->u.composite.dst,
2620 spans[0].x + r->u.composite.src_x,
2621 y + r->u.composite.src_y,
2622 0, 0,
2623 spans[0].x, y,
2624 spans[1].x - spans[0].x, h);
2626 m = r->_buf;
2627 x0 = spans[1].x;
/* Zero-coverage span: flush pending range; under unbounded SOURCE the
 * uncovered area is handled by the surrounding composite machinery. */
2628 } else if (spans[0].coverage == 0x0) {
2629 if (spans[0].x != x0) {
2630 #if PIXMAN_HAS_OP_LERP
2631 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2632 r->src, r->mask, r->u.composite.dst,
2633 x0 + r->u.composite.src_x,
2634 y + r->u.composite.src_y,
2635 0, 0,
2636 x0, y,
2637 spans[0].x - x0, h);
2638 #else
2639 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2640 r->mask, NULL, r->u.composite.dst,
2641 0, 0,
2642 0, 0,
2643 x0, y,
2644 spans[0].x - x0, h);
2645 pixman_image_composite32 (PIXMAN_OP_ADD,
2646 r->src, r->mask, r->u.composite.dst,
2647 x0 + r->u.composite.src_x,
2648 y + r->u.composite.src_y,
2649 0, 0,
2650 x0, y,
2651 spans[0].x - x0, h);
2652 #endif
2655 m = r->_buf;
2656 x0 = spans[1].x;
2657 } else {
/* Ordinary partial coverage: append to the scratch mask buffer. */
2658 *m++ = spans[0].coverage;
2659 if (len > 1) {
2660 memset (m, spans[0].coverage, --len);
2661 m += len;
2664 spans++;
2665 } while (--num_spans > 1);
/* Flush the tail of the accumulated mask, if any. */
2667 if (spans[0].x != x0) {
2668 #if PIXMAN_HAS_OP_LERP
2669 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2670 r->src, r->mask, r->u.composite.dst,
2671 x0 + r->u.composite.src_x,
2672 y + r->u.composite.src_y,
2673 0, 0,
2674 x0, y,
2675 spans[0].x - x0, h);
2676 #else
2677 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2678 r->mask, NULL, r->u.composite.dst,
2679 0, 0,
2680 0, 0,
2681 x0, y,
2682 spans[0].x - x0, h);
2683 pixman_image_composite32 (PIXMAN_OP_ADD,
2684 r->src, r->mask, r->u.composite.dst,
2685 x0 + r->u.composite.src_x,
2686 y + r->u.composite.src_y,
2687 0, 0,
2688 x0, y,
2689 spans[0].x - x0, h);
2690 #endif
2693 return CAIRO_STATUS_SUCCESS;
2696 static cairo_status_t
/* SOURCE-operator variant with a non-opaque solid mask: coverage is scaled
 * by r->bpp, so there is no fully-opaque fast path — mask bytes are
 * accumulated and flushed with LERP_SRC (or the OUT_REVERSE + ADD
 * emulation) whenever a zero-coverage span is hit. */
2697 _inplace_src_opacity_spans (void *abstract_renderer, int y, int h,
2698 const cairo_half_open_span_t *spans,
2699 unsigned num_spans)
2701 cairo_image_span_renderer_t *r = abstract_renderer;
2702 uint8_t *mask;
2703 int x0;
2705 if (num_spans == 0)
2706 return CAIRO_STATUS_SUCCESS;
2708 x0 = spans[0].x;
2709 mask = (uint8_t *)pixman_image_get_data (r->mask);
2710 do {
2711 int len = spans[1].x - spans[0].x;
2712 uint8_t m = mul8_8(spans[0].coverage, r->bpp);
2713 if (m == 0) {
/* Coverage dropped to zero: flush the pending masked range. */
2714 if (spans[0].x != x0) {
2715 #if PIXMAN_HAS_OP_LERP
2716 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2717 r->src, r->mask, r->u.composite.dst,
2718 x0 + r->u.composite.src_x,
2719 y + r->u.composite.src_y,
2720 0, 0,
2721 x0, y,
2722 spans[0].x - x0, h);
2723 #else
/* Emulate LERP_SRC: clear dst under the mask, then add masked src. */
2724 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2725 r->mask, NULL, r->u.composite.dst,
2726 0, 0,
2727 0, 0,
2728 x0, y,
2729 spans[0].x - x0, h);
2730 pixman_image_composite32 (PIXMAN_OP_ADD,
2731 r->src, r->mask, r->u.composite.dst,
2732 x0 + r->u.composite.src_x,
2733 y + r->u.composite.src_y,
2734 0, 0,
2735 x0, y,
2736 spans[0].x - x0, h);
2737 #endif
2740 mask = (uint8_t *)pixman_image_get_data (r->mask);
2741 x0 = spans[1].x;
2742 } else {
2743 *mask++ = m;
2744 if (len > 1) {
2745 memset (mask, m, --len);
2746 mask += len;
2749 spans++;
2750 } while (--num_spans > 1);
/* Flush any remaining accumulated mask range. */
2752 if (spans[0].x != x0) {
2753 #if PIXMAN_HAS_OP_LERP
2754 pixman_image_composite32 (PIXMAN_OP_LERP_SRC,
2755 r->src, r->mask, r->u.composite.dst,
2756 x0 + r->u.composite.src_x,
2757 y + r->u.composite.src_y,
2758 0, 0,
2759 x0, y,
2760 spans[0].x - x0, h);
2761 #else
2762 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE,
2763 r->mask, NULL, r->u.composite.dst,
2764 0, 0,
2765 0, 0,
2766 x0, y,
2767 spans[0].x - x0, h);
2768 pixman_image_composite32 (PIXMAN_OP_ADD,
2769 r->src, r->mask, r->u.composite.dst,
2770 x0 + r->u.composite.src_x,
2771 y + r->u.composite.src_y,
2772 0, 0,
2773 x0, y,
2774 spans[0].x - x0, h);
2775 #endif
2778 return CAIRO_STATUS_SUCCESS;
/* pixman destroy-function callback: frees the malloc'ed mask bits attached
 * via pixman_image_set_destroy_function() in inplace_renderer_init. */
2781 static void free_pixels (pixman_image_t *image, void *data)
2783 free (data);
2786 static cairo_int_status_t
/* Configure r for in-place span rendering onto dst.  Requires a solid mask
 * pattern.  Chooses, in order of preference:
 *   1. direct fill (solid source that reduces to a single pixel value),
 *   2. direct blit (same-format image source at an integer translation),
 *   3. generic pixman compositing through a scratch a8 mask.
 * Returns CAIRO_INT_STATUS_UNSUPPORTED when this renderer cannot handle
 * the composite, letting the caller fall back to another path. */
2787 inplace_renderer_init (cairo_image_span_renderer_t *r,
2788 const cairo_composite_rectangles_t *composite,
2789 cairo_antialias_t antialias,
2790 cairo_bool_t needs_clip)
2792 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2793 uint8_t *buf;
2795 if (composite->mask_pattern.base.type != CAIRO_PATTERN_TYPE_SOLID)
2796 return CAIRO_INT_STATUS_UNSUPPORTED;
2798 r->base.render_rows = NULL;
/* Stash the solid mask's 8-bit opacity in r->bpp; 0xff selects the
 * "opaque" span callbacks below. */
2799 r->bpp = composite->mask_pattern.solid.color.alpha_short >> 8;
2801 if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
2802 const cairo_color_t *color;
2804 color = &composite->source_pattern.solid.color;
2805 if (composite->op == CAIRO_OPERATOR_CLEAR)
2806 color = CAIRO_COLOR_TRANSPARENT;
2808 if (fill_reduces_to_source (composite->op, color, dst, &r->u.fill.pixel)) {
2809 /* Use plain C for the fill operations as the span length is
2810 * typically small, too small to payback the startup overheads of
2811 * using SSE2 etc.
2813 if (r->bpp == 0xff) {
2814 switch (dst->format) {
2815 case CAIRO_FORMAT_A8:
2816 r->base.render_rows = _fill_a8_lerp_opaque_spans;
2817 break;
2818 case CAIRO_FORMAT_RGB24:
2819 case CAIRO_FORMAT_ARGB32:
2820 r->base.render_rows = _fill_xrgb32_lerp_opaque_spans;
2821 break;
2822 case CAIRO_FORMAT_A1:
2823 case CAIRO_FORMAT_RGB16_565:
2824 case CAIRO_FORMAT_RGB30:
2825 case CAIRO_FORMAT_INVALID:
2826 default: break;
2828 } else {
2829 switch (dst->format) {
2830 case CAIRO_FORMAT_A8:
2831 r->base.render_rows = _fill_a8_lerp_spans;
2832 break;
2833 case CAIRO_FORMAT_RGB24:
2834 case CAIRO_FORMAT_ARGB32:
2835 r->base.render_rows = _fill_xrgb32_lerp_spans;
2836 break;
2837 case CAIRO_FORMAT_A1:
2838 case CAIRO_FORMAT_RGB16_565:
2839 case CAIRO_FORMAT_RGB30:
2840 case CAIRO_FORMAT_INVALID:
2841 default: break;
2844 r->u.fill.data = dst->data;
2845 r->u.fill.stride = dst->stride;
/* Direct blit path: same-format image source, SOURCE (or OVER onto a
 * surface with no effective alpha), positioned at an integer translation
 * that keeps the bounded extents inside the source. */
2847 } else if ((dst->format == CAIRO_FORMAT_ARGB32 || dst->format == CAIRO_FORMAT_RGB24) &&
2848 (composite->op == CAIRO_OPERATOR_SOURCE ||
2849 (composite->op == CAIRO_OPERATOR_OVER &&
2850 (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
2851 composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
2852 composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
2853 to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
2855 cairo_image_surface_t *src =
2856 to_image_surface(composite->source_pattern.surface.surface);
2857 int tx, ty;
2859 if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
2860 &tx, &ty) &&
2861 composite->bounded.x + tx >= 0 &&
2862 composite->bounded.y + ty >= 0 &&
2863 composite->bounded.x + composite->bounded.width + tx <= src->width &&
2864 composite->bounded.y + composite->bounded.height + ty <= src->height) {
2866 assert(PIXMAN_FORMAT_BPP(dst->pixman_format) == 32);
2867 r->u.blit.stride = dst->stride;
2868 r->u.blit.data = dst->data;
2869 r->u.blit.src_stride = src->stride;
2870 r->u.blit.src_data = src->data + src->stride * ty + tx * 4;
2871 r->base.render_rows = _blit_xrgb32_lerp_spans;
/* Fallback: generic pixman compositing through a scratch a8 mask. */
2874 if (r->base.render_rows == NULL) {
2875 const cairo_pattern_t *src = &composite->source_pattern.base;
2876 unsigned int width;
2878 if (composite->is_bounded == 0)
2879 return CAIRO_INT_STATUS_UNSUPPORTED;
2881 r->base.render_rows = r->bpp == 0xff ? _inplace_spans : _inplace_opacity_spans;
/* Round the mask width up to a multiple of 4 bytes. */
2882 width = (composite->bounded.width + 3) & ~3;
/* Gradients are expensive per composite call, so require a much longer
 * run before flushing early (see _inplace_spans). */
2884 r->u.composite.run_length = 8;
2885 if (src->type == CAIRO_PATTERN_TYPE_LINEAR ||
2886 src->type == CAIRO_PATTERN_TYPE_RADIAL)
2887 r->u.composite.run_length = 256;
2888 if (dst->base.is_clear &&
2889 (composite->op == CAIRO_OPERATOR_SOURCE ||
2890 composite->op == CAIRO_OPERATOR_OVER ||
2891 composite->op == CAIRO_OPERATOR_ADD)) {
2892 r->op = PIXMAN_OP_SRC;
2893 } else if (composite->op == CAIRO_OPERATOR_SOURCE) {
/* Unbounded SOURCE needs the special replace-under-mask renderers. */
2894 r->base.render_rows = r->bpp == 0xff ? _inplace_src_spans : _inplace_src_opacity_spans;
2895 r->u.composite.mask_y = r->composite->unbounded.y;
2896 width = (composite->unbounded.width + 3) & ~3;
2897 } else if (composite->op == CAIRO_OPERATOR_CLEAR) {
2898 r->op = PIXMAN_OP_OUT_REVERSE;
2899 src = NULL;
2900 } else {
2901 r->op = _pixman_operator (composite->op);
2904 r->src = _pixman_image_for_pattern (dst, src, FALSE,
2905 &composite->bounded,
2906 &composite->source_sample_area,
2907 &r->u.composite.src_x, &r->u.composite.src_y);
2908 if (unlikely (r->src == NULL))
2909 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
2911 /* Create an effectively unbounded mask by repeating the single line */
2912 buf = r->_buf;
/* Fall back to the heap when the mask row exceeds the inline buffer. */
2913 if (width > SZ_BUF) {
2914 buf = malloc (width);
2915 if (unlikely (buf == NULL)) {
2916 pixman_image_unref (r->src);
2917 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
/* stride 0 repeats the single row for every y — the "unbounded" mask. */
2920 r->mask = pixman_image_create_bits (PIXMAN_a8,
2921 width, composite->unbounded.height,
2922 (uint32_t *)buf, 0);
2923 if (unlikely (r->mask == NULL)) {
2924 pixman_image_unref (r->src);
2925 if (buf != r->_buf)
2926 free (buf);
2927 return _cairo_error(CAIRO_STATUS_NO_MEMORY);
2930 if (buf != r->_buf)
2931 pixman_image_set_destroy_function (r->mask, free_pixels, buf);
2933 r->u.composite.dst = dst->pixman_image;
2936 return CAIRO_INT_STATUS_SUCCESS;
2939 static cairo_int_status_t
/* Top-level span-renderer constructor for the image spans compositor.
 * Tries the specialized mono and in-place renderers first; otherwise sets
 * up the generic path: resolve the pixman operator, realize the source
 * (and possibly mask) patterns as pixman images, and allocate an a8 mask
 * that the span callbacks fill before the final composite in
 * span_renderer_fini. */
2940 span_renderer_init (cairo_abstract_span_renderer_t *_r,
2941 const cairo_composite_rectangles_t *composite,
2942 cairo_antialias_t antialias,
2943 cairo_bool_t needs_clip)
2945 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *)_r;
2946 cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
2947 const cairo_pattern_t *source = &composite->source_pattern.base;
2948 cairo_operator_t op = composite->op;
2949 cairo_int_status_t status;
2951 TRACE ((stderr, "%s: antialias=%d, needs_clip=%d\n", __FUNCTION__,
2952 antialias, needs_clip));
2954 if (needs_clip)
2955 return CAIRO_INT_STATUS_UNSUPPORTED;
2957 r->composite = composite;
2958 r->mask = NULL;
2959 r->src = NULL;
2960 r->base.finish = NULL;
/* Prefer the specialized renderers; anything but UNSUPPORTED is final. */
2962 status = mono_renderer_init (r, composite, antialias, needs_clip);
2963 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
2964 return status;
2966 status = inplace_renderer_init (r, composite, antialias, needs_clip);
2967 if (status != CAIRO_INT_STATUS_UNSUPPORTED)
2968 return status;
/* bpp == 0 marks the generic path for span_renderer_fini. */
2970 r->bpp = 0;
/* Map the cairo operator onto a pixman operator, with CLEAR/SOURCE
 * rewritten in terms of LERP ops where pixman provides them. */
2972 if (op == CAIRO_OPERATOR_CLEAR) {
2973 #if PIXMAN_HAS_OP_LERP
2974 op = PIXMAN_OP_LERP_CLEAR;
2975 #else
/* Without LERP_CLEAR: white source + OUT_REVERSE erases under the mask. */
2976 source = &_cairo_pattern_white.base;
2977 op = PIXMAN_OP_OUT_REVERSE;
2978 #endif
2979 } else if (dst->base.is_clear &&
2980 (op == CAIRO_OPERATOR_SOURCE ||
2981 op == CAIRO_OPERATOR_OVER ||
2982 op == CAIRO_OPERATOR_ADD)) {
2983 op = PIXMAN_OP_SRC;
2984 } else if (op == CAIRO_OPERATOR_SOURCE) {
/* Opaque source: SOURCE degenerates to OVER. */
2985 if (_cairo_pattern_is_opaque (&composite->source_pattern.base,
2986 &composite->source_sample_area))
2988 op = PIXMAN_OP_OVER;
2990 else
2992 #if PIXMAN_HAS_OP_LERP
2993 op = PIXMAN_OP_LERP_SRC;
2994 #else
2995 return CAIRO_INT_STATUS_UNSUPPORTED;
2996 #endif
2998 } else {
2999 op = _pixman_operator (op);
3001 r->op = op;
3003 r->src = _pixman_image_for_pattern (dst, source, FALSE,
3004 &composite->unbounded,
3005 &composite->source_sample_area,
3006 &r->u.mask.src_x, &r->u.mask.src_y);
3007 if (unlikely (r->src == NULL))
3008 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3010 r->opacity = 1.0;
3011 if (composite->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
3012 r->opacity = composite->mask_pattern.solid.color.alpha;
3013 } else {
3014 pixman_image_t *mask;
3015 int mask_x, mask_y;
3017 mask = _pixman_image_for_pattern (dst,
3018 &composite->mask_pattern.base,
3019 TRUE,
3020 &composite->unbounded,
3021 &composite->mask_sample_area,
3022 &mask_x, &mask_y);
3023 if (unlikely (mask == NULL))
3024 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3026 /* XXX Component-alpha? */
/* Alpha-only destination + opaque source: the mask alone determines the
 * result, so substitute it for the source image. */
3027 if ((dst->base.content & CAIRO_CONTENT_COLOR) == 0 &&
3028 _cairo_pattern_is_opaque (source, &composite->source_sample_area))
3030 pixman_image_unref (r->src);
3031 r->src = mask;
3032 r->u.mask.src_x = mask_x;
3033 r->u.mask.src_y = mask_y;
3034 mask = NULL;
3037 if (mask) {
3038 pixman_image_unref (mask);
3039 return CAIRO_INT_STATUS_UNSUPPORTED;
3043 r->u.mask.extents = composite->unbounded;
/* Row stride rounded up to 4 bytes for the a8 mask. */
3044 r->u.mask.stride = (r->u.mask.extents.width + 3) & ~3;
/* Large masks get pixman-owned storage; small ones reuse the inline
 * buffer and are zeroed incrementally by the spans-and-zero callbacks. */
3045 if (r->u.mask.extents.height * r->u.mask.stride > SZ_BUF) {
3046 r->mask = pixman_image_create_bits (PIXMAN_a8,
3047 r->u.mask.extents.width,
3048 r->u.mask.extents.height,
3049 NULL, 0);
3051 r->base.render_rows = _cairo_image_spans;
3052 r->base.finish = NULL;
3053 } else {
3054 r->mask = pixman_image_create_bits (PIXMAN_a8,
3055 r->u.mask.extents.width,
3056 r->u.mask.extents.height,
3057 (uint32_t *)r->_buf, r->u.mask.stride);
3059 r->base.render_rows = _cairo_image_spans_and_zero;
3060 r->base.finish = _cairo_image_finish_spans_and_zero;
3062 if (unlikely (r->mask == NULL))
3063 return _cairo_error (CAIRO_STATUS_NO_MEMORY);
3065 r->u.mask.data = (uint8_t *) pixman_image_get_data (r->mask);
3066 r->u.mask.stride = pixman_image_get_stride (r->mask);
/* Convert height into an exclusive bottom edge for the row callbacks. */
3068 r->u.mask.extents.height += r->u.mask.extents.y;
3069 return CAIRO_STATUS_SUCCESS;
3072 static void
/* Tear down a span renderer.  On success, flush any deferred work
 * (r->base.finish) and — for the generic path (bpp == 0) — perform the
 * single final composite of src through the accumulated mask onto the
 * destination.  Always releases the src and mask pixman images. */
3073 span_renderer_fini (cairo_abstract_span_renderer_t *_r,
3074 cairo_int_status_t status)
3076 cairo_image_span_renderer_t *r = (cairo_image_span_renderer_t *) _r;
3078 TRACE ((stderr, "%s\n", __FUNCTION__));
3080 if (likely (status == CAIRO_INT_STATUS_SUCCESS)) {
3081 if (r->base.finish)
3082 r->base.finish (r);
/* bpp == 0 identifies the generic mask-accumulating path set up in
 * span_renderer_init; the in-place renderers already wrote to dst. */
3084 if (likely (status == CAIRO_INT_STATUS_SUCCESS && r->bpp == 0)) {
3085 const cairo_composite_rectangles_t *composite = r->composite;
3087 pixman_image_composite32 (r->op, r->src, r->mask,
3088 to_pixman_image (composite->surface),
3089 composite->unbounded.x + r->u.mask.src_x,
3090 composite->unbounded.y + r->u.mask.src_y,
3091 0, 0,
3092 composite->unbounded.x,
3093 composite->unbounded.y,
3094 composite->unbounded.width,
3095 composite->unbounded.height);
3098 if (r->src)
3099 pixman_image_unref (r->src);
3100 if (r->mask)
3101 pixman_image_unref (r->mask);
3103 #endif
3105 const cairo_compositor_t *
/* Return the singleton spans compositor for image surfaces.  Lazily
 * initialized on first call (guarded by the delegate pointer); delegates
 * unsupported operations to a shape-mask compositor built on the traps
 * compositor.  NOTE(review): not thread-safe — presumably callers
 * serialize the first call; confirm against cairo's init conventions. */
3106 _cairo_image_spans_compositor_get (void)
3108 static cairo_spans_compositor_t spans;
3109 static cairo_compositor_t shape;
3111 if (spans.base.delegate == NULL) {
3112 _cairo_shape_mask_compositor_init (&shape,
3113 _cairo_image_traps_compositor_get());
3114 shape.glyphs = NULL;
3116 _cairo_spans_compositor_init (&spans, &shape);
3118 spans.flags = 0;
3119 #if PIXMAN_HAS_OP_LERP
3120 spans.flags |= CAIRO_SPANS_COMPOSITOR_HAS_LERP;
3121 #endif
3123 //spans.acquire = acquire;
3124 //spans.release = release;
3125 spans.fill_boxes = fill_boxes;
3126 spans.draw_image_boxes = draw_image_boxes;
3127 //spans.copy_boxes = copy_boxes;
3128 spans.pattern_to_surface = _cairo_image_source_create_for_pattern;
3129 //spans.check_composite_boxes = check_composite_boxes;
3130 spans.composite_boxes = composite_boxes;
3131 //spans.check_span_renderer = check_span_renderer;
3132 spans.renderer_init = span_renderer_init;
3133 spans.renderer_fini = span_renderer_fini;
3136 return &spans.base;