1 /* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
2 /* cairo - a vector graphics library with display and print output
4 * Copyright © 2003 University of Southern California
5 * Copyright © 2009,2010,2011 Intel Corporation
7 * This library is free software; you can redistribute it and/or
8 * modify it either under the terms of the GNU Lesser General Public
9 * License version 2.1 as published by the Free Software Foundation
10 * (the "LGPL") or, at your option, under the terms of the Mozilla
11 * Public License Version 1.1 (the "MPL"). If you do not alter this
12 * notice, a recipient may use your version of this file under either
13 * the MPL or the LGPL.
15 * You should have received a copy of the LGPL along with this library
16 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
18 * You should have received a copy of the MPL along with this library
19 * in the file COPYING-MPL-1.1
21 * The contents of this file are subject to the Mozilla Public License
22 * Version 1.1 (the "License"); you may not use this file except in
23 * compliance with the License. You may obtain a copy of the License at
24 * http://www.mozilla.org/MPL/
26 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
27 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
28 * the specific language governing rights and limitations.
30 * The Original Code is the cairo graphics library.
32 * The Initial Developer of the Original Code is University of Southern
36 * Carl D. Worth <cworth@cworth.org>
37 * Chris Wilson <chris@chris-wilson.co.uk>
40 /* The primarily reason for keeping a traps-compositor around is
41 * for validating cairo-xlib (which currently also uses traps).
46 #include "cairo-image-surface-private.h"
48 #include "cairo-compositor-private.h"
49 #include "cairo-spans-compositor-private.h"
51 #include "cairo-region-private.h"
52 #include "cairo-traps-private.h"
53 #include "cairo-tristrip-private.h"
55 #include "cairo-pixman-private.h"
57 static pixman_image_t
*
58 to_pixman_image (cairo_surface_t
*s
)
60 return ((cairo_image_surface_t
*)s
)->pixman_image
;
63 static cairo_int_status_t
64 acquire (void *abstract_dst
)
66 return CAIRO_STATUS_SUCCESS
;
69 static cairo_int_status_t
70 release (void *abstract_dst
)
72 return CAIRO_STATUS_SUCCESS
;
75 static cairo_int_status_t
76 set_clip_region (void *_surface
,
77 cairo_region_t
*region
)
79 cairo_image_surface_t
*surface
= _surface
;
80 pixman_region32_t
*rgn
= region
? ®ion
->rgn
: NULL
;
82 if (! pixman_image_set_clip_region32 (surface
->pixman_image
, rgn
))
83 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
85 return CAIRO_STATUS_SUCCESS
;
88 static cairo_int_status_t
89 draw_image_boxes (void *_dst
,
90 cairo_image_surface_t
*image
,
94 cairo_image_surface_t
*dst
= _dst
;
95 struct _cairo_boxes_chunk
*chunk
;
98 TRACE ((stderr
, "%s x %d\n", __FUNCTION__
, boxes
->num_boxes
));
100 for (chunk
= &boxes
->chunks
; chunk
; chunk
= chunk
->next
) {
101 for (i
= 0; i
< chunk
->count
; i
++) {
102 cairo_box_t
*b
= &chunk
->base
[i
];
103 int x
= _cairo_fixed_integer_part (b
->p1
.x
);
104 int y
= _cairo_fixed_integer_part (b
->p1
.y
);
105 int w
= _cairo_fixed_integer_part (b
->p2
.x
) - x
;
106 int h
= _cairo_fixed_integer_part (b
->p2
.y
) - y
;
107 if (dst
->pixman_format
!= image
->pixman_format
||
108 ! pixman_blt ((uint32_t *)image
->data
, (uint32_t *)dst
->data
,
109 image
->stride
/ sizeof (uint32_t),
110 dst
->stride
/ sizeof (uint32_t),
111 PIXMAN_FORMAT_BPP (image
->pixman_format
),
112 PIXMAN_FORMAT_BPP (dst
->pixman_format
),
117 pixman_image_composite32 (PIXMAN_OP_SRC
,
118 image
->pixman_image
, NULL
, dst
->pixman_image
,
126 return CAIRO_STATUS_SUCCESS
;
129 static inline uint32_t
130 color_to_uint32 (const cairo_color_t
*color
)
133 (color
->alpha_short
>> 8 << 24) |
134 (color
->red_short
>> 8 << 16) |
135 (color
->green_short
& 0xff00) |
136 (color
->blue_short
>> 8);
139 static inline cairo_bool_t
140 color_to_pixel (const cairo_color_t
*color
,
141 pixman_format_code_t format
,
146 if (!(format
== PIXMAN_a8r8g8b8
||
147 format
== PIXMAN_x8r8g8b8
||
148 format
== PIXMAN_a8b8g8r8
||
149 format
== PIXMAN_x8b8g8r8
||
150 format
== PIXMAN_b8g8r8a8
||
151 format
== PIXMAN_b8g8r8x8
||
152 format
== PIXMAN_r5g6b5
||
153 format
== PIXMAN_b5g6r5
||
154 format
== PIXMAN_a8
))
159 c
= color_to_uint32 (color
);
161 if (PIXMAN_FORMAT_TYPE (format
) == PIXMAN_TYPE_ABGR
) {
162 c
= ((c
& 0xff000000) >> 0) |
163 ((c
& 0x00ff0000) >> 16) |
164 ((c
& 0x0000ff00) >> 0) |
165 ((c
& 0x000000ff) << 16);
168 if (PIXMAN_FORMAT_TYPE (format
) == PIXMAN_TYPE_BGRA
) {
169 c
= ((c
& 0xff000000) >> 24) |
170 ((c
& 0x00ff0000) >> 8) |
171 ((c
& 0x0000ff00) << 8) |
172 ((c
& 0x000000ff) << 24);
175 if (format
== PIXMAN_a8
) {
177 } else if (format
== PIXMAN_r5g6b5
|| format
== PIXMAN_b5g6r5
) {
178 c
= ((((c
) >> 3) & 0x001f) |
179 (((c
) >> 5) & 0x07e0) |
180 (((c
) >> 8) & 0xf800));
188 _pixman_operator (cairo_operator_t op
)
191 case CAIRO_OPERATOR_CLEAR
:
192 return PIXMAN_OP_CLEAR
;
194 case CAIRO_OPERATOR_SOURCE
:
195 return PIXMAN_OP_SRC
;
196 case CAIRO_OPERATOR_OVER
:
197 return PIXMAN_OP_OVER
;
198 case CAIRO_OPERATOR_IN
:
200 case CAIRO_OPERATOR_OUT
:
201 return PIXMAN_OP_OUT
;
202 case CAIRO_OPERATOR_ATOP
:
203 return PIXMAN_OP_ATOP
;
205 case CAIRO_OPERATOR_DEST
:
206 return PIXMAN_OP_DST
;
207 case CAIRO_OPERATOR_DEST_OVER
:
208 return PIXMAN_OP_OVER_REVERSE
;
209 case CAIRO_OPERATOR_DEST_IN
:
210 return PIXMAN_OP_IN_REVERSE
;
211 case CAIRO_OPERATOR_DEST_OUT
:
212 return PIXMAN_OP_OUT_REVERSE
;
213 case CAIRO_OPERATOR_DEST_ATOP
:
214 return PIXMAN_OP_ATOP_REVERSE
;
216 case CAIRO_OPERATOR_XOR
:
217 return PIXMAN_OP_XOR
;
218 case CAIRO_OPERATOR_ADD
:
219 return PIXMAN_OP_ADD
;
220 case CAIRO_OPERATOR_SATURATE
:
221 return PIXMAN_OP_SATURATE
;
223 case CAIRO_OPERATOR_MULTIPLY
:
224 return PIXMAN_OP_MULTIPLY
;
225 case CAIRO_OPERATOR_SCREEN
:
226 return PIXMAN_OP_SCREEN
;
227 case CAIRO_OPERATOR_OVERLAY
:
228 return PIXMAN_OP_OVERLAY
;
229 case CAIRO_OPERATOR_DARKEN
:
230 return PIXMAN_OP_DARKEN
;
231 case CAIRO_OPERATOR_LIGHTEN
:
232 return PIXMAN_OP_LIGHTEN
;
233 case CAIRO_OPERATOR_COLOR_DODGE
:
234 return PIXMAN_OP_COLOR_DODGE
;
235 case CAIRO_OPERATOR_COLOR_BURN
:
236 return PIXMAN_OP_COLOR_BURN
;
237 case CAIRO_OPERATOR_HARD_LIGHT
:
238 return PIXMAN_OP_HARD_LIGHT
;
239 case CAIRO_OPERATOR_SOFT_LIGHT
:
240 return PIXMAN_OP_SOFT_LIGHT
;
241 case CAIRO_OPERATOR_DIFFERENCE
:
242 return PIXMAN_OP_DIFFERENCE
;
243 case CAIRO_OPERATOR_EXCLUSION
:
244 return PIXMAN_OP_EXCLUSION
;
245 case CAIRO_OPERATOR_HSL_HUE
:
246 return PIXMAN_OP_HSL_HUE
;
247 case CAIRO_OPERATOR_HSL_SATURATION
:
248 return PIXMAN_OP_HSL_SATURATION
;
249 case CAIRO_OPERATOR_HSL_COLOR
:
250 return PIXMAN_OP_HSL_COLOR
;
251 case CAIRO_OPERATOR_HSL_LUMINOSITY
:
252 return PIXMAN_OP_HSL_LUMINOSITY
;
256 return PIXMAN_OP_OVER
;
261 __fill_reduces_to_source (cairo_operator_t op
,
262 const cairo_color_t
*color
,
263 const cairo_image_surface_t
*dst
)
265 if (op
== CAIRO_OPERATOR_SOURCE
|| op
== CAIRO_OPERATOR_CLEAR
)
267 if (op
== CAIRO_OPERATOR_OVER
&& CAIRO_COLOR_IS_OPAQUE (color
))
269 if (dst
->base
.is_clear
)
270 return op
== CAIRO_OPERATOR_OVER
|| op
== CAIRO_OPERATOR_ADD
;
276 fill_reduces_to_source (cairo_operator_t op
,
277 const cairo_color_t
*color
,
278 const cairo_image_surface_t
*dst
,
281 if (__fill_reduces_to_source (op
, color
, dst
)) {
282 return color_to_pixel (color
, dst
->pixman_format
, pixel
);
288 static cairo_int_status_t
289 fill_rectangles (void *_dst
,
291 const cairo_color_t
*color
,
292 cairo_rectangle_int_t
*rects
,
295 cairo_image_surface_t
*dst
= _dst
;
299 TRACE ((stderr
, "%s\n", __FUNCTION__
));
301 if (fill_reduces_to_source (op
, color
, dst
, &pixel
)) {
302 for (i
= 0; i
< num_rects
; i
++) {
303 pixman_fill ((uint32_t *) dst
->data
, dst
->stride
/ sizeof (uint32_t),
304 PIXMAN_FORMAT_BPP (dst
->pixman_format
),
305 rects
[i
].x
, rects
[i
].y
,
306 rects
[i
].width
, rects
[i
].height
,
310 pixman_image_t
*src
= _pixman_image_for_color (color
);
311 if (unlikely (src
== NULL
))
312 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
314 op
= _pixman_operator (op
);
315 for (i
= 0; i
< num_rects
; i
++) {
316 pixman_image_composite32 (op
,
317 src
, NULL
, dst
->pixman_image
,
320 rects
[i
].x
, rects
[i
].y
,
321 rects
[i
].width
, rects
[i
].height
);
324 pixman_image_unref (src
);
327 return CAIRO_STATUS_SUCCESS
;
330 static cairo_int_status_t
331 fill_boxes (void *_dst
,
333 const cairo_color_t
*color
,
334 cairo_boxes_t
*boxes
)
336 cairo_image_surface_t
*dst
= _dst
;
337 struct _cairo_boxes_chunk
*chunk
;
341 TRACE ((stderr
, "%s x %d\n", __FUNCTION__
, boxes
->num_boxes
));
343 if (fill_reduces_to_source (op
, color
, dst
, &pixel
)) {
344 for (chunk
= &boxes
->chunks
; chunk
; chunk
= chunk
->next
) {
345 for (i
= 0; i
< chunk
->count
; i
++) {
346 int x
= _cairo_fixed_integer_part (chunk
->base
[i
].p1
.x
);
347 int y
= _cairo_fixed_integer_part (chunk
->base
[i
].p1
.y
);
348 int w
= _cairo_fixed_integer_part (chunk
->base
[i
].p2
.x
) - x
;
349 int h
= _cairo_fixed_integer_part (chunk
->base
[i
].p2
.y
) - y
;
350 pixman_fill ((uint32_t *) dst
->data
,
351 dst
->stride
/ sizeof (uint32_t),
352 PIXMAN_FORMAT_BPP (dst
->pixman_format
),
359 pixman_image_t
*src
= _pixman_image_for_color (color
);
360 if (unlikely (src
== NULL
))
361 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
363 op
= _pixman_operator (op
);
364 for (chunk
= &boxes
->chunks
; chunk
; chunk
= chunk
->next
) {
365 for (i
= 0; i
< chunk
->count
; i
++) {
366 int x1
= _cairo_fixed_integer_part (chunk
->base
[i
].p1
.x
);
367 int y1
= _cairo_fixed_integer_part (chunk
->base
[i
].p1
.y
);
368 int x2
= _cairo_fixed_integer_part (chunk
->base
[i
].p2
.x
);
369 int y2
= _cairo_fixed_integer_part (chunk
->base
[i
].p2
.y
);
370 pixman_image_composite32 (op
,
371 src
, NULL
, dst
->pixman_image
,
379 pixman_image_unref (src
);
382 return CAIRO_STATUS_SUCCESS
;
385 static cairo_int_status_t
386 composite (void *_dst
,
388 cairo_surface_t
*abstract_src
,
389 cairo_surface_t
*abstract_mask
,
399 cairo_image_source_t
*src
= (cairo_image_source_t
*)abstract_src
;
400 cairo_image_source_t
*mask
= (cairo_image_source_t
*)abstract_mask
;
402 TRACE ((stderr
, "%s\n", __FUNCTION__
));
405 pixman_image_composite32 (_pixman_operator (op
),
406 src
->pixman_image
, mask
->pixman_image
, to_pixman_image (_dst
),
412 pixman_image_composite32 (_pixman_operator (op
),
413 src
->pixman_image
, NULL
, to_pixman_image (_dst
),
420 return CAIRO_STATUS_SUCCESS
;
423 static cairo_int_status_t
425 cairo_surface_t
*abstract_src
,
426 cairo_surface_t
*abstract_mask
,
436 cairo_image_surface_t
*dst
= _dst
;
437 cairo_image_source_t
*src
= (cairo_image_source_t
*)abstract_src
;
438 cairo_image_source_t
*mask
= (cairo_image_source_t
*)abstract_mask
;
440 TRACE ((stderr
, "%s\n", __FUNCTION__
));
442 #if PIXMAN_HAS_OP_LERP
443 pixman_image_composite32 (PIXMAN_OP_LERP_SRC
,
444 src
->pixman_image
, mask
->pixman_image
, dst
->pixman_image
,
450 /* Punch the clip out of the destination */
451 TRACE ((stderr
, "%s - OUT_REVERSE (mask=%d/%p, dst=%d/%p)\n",
453 mask
->base
.unique_id
, mask
->pixman_image
,
454 dst
->base
.unique_id
, dst
->pixman_image
));
455 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE
,
456 mask
->pixman_image
, NULL
, dst
->pixman_image
,
462 /* Now add the two results together */
463 TRACE ((stderr
, "%s - ADD (src=%d/%p, mask=%d/%p, dst=%d/%p)\n",
465 src
->base
.unique_id
, src
->pixman_image
,
466 mask
->base
.unique_id
, mask
->pixman_image
,
467 dst
->base
.unique_id
, dst
->pixman_image
));
468 pixman_image_composite32 (PIXMAN_OP_ADD
,
469 src
->pixman_image
, mask
->pixman_image
, dst
->pixman_image
,
476 return CAIRO_STATUS_SUCCESS
;
479 static cairo_int_status_t
480 composite_boxes (void *_dst
,
482 cairo_surface_t
*abstract_src
,
483 cairo_surface_t
*abstract_mask
,
490 cairo_boxes_t
*boxes
,
491 const cairo_rectangle_int_t
*extents
)
493 pixman_image_t
*dst
= to_pixman_image (_dst
);
494 pixman_image_t
*src
= ((cairo_image_source_t
*)abstract_src
)->pixman_image
;
495 pixman_image_t
*mask
= abstract_mask
? ((cairo_image_source_t
*)abstract_mask
)->pixman_image
: NULL
;
496 pixman_image_t
*free_src
= NULL
;
497 struct _cairo_boxes_chunk
*chunk
;
500 /* XXX consider using a region? saves multiple prepare-composite */
501 TRACE ((stderr
, "%s x %d\n", __FUNCTION__
, boxes
->num_boxes
));
503 if (((cairo_surface_t
*)_dst
)->is_clear
&&
504 (op
== CAIRO_OPERATOR_SOURCE
||
505 op
== CAIRO_OPERATOR_OVER
||
506 op
== CAIRO_OPERATOR_ADD
)) {
509 if (op
== CAIRO_OPERATOR_CLEAR
) {
510 #if PIXMAN_HAS_OP_LERP
511 op
= PIXMAN_OP_LERP_CLEAR
;
513 free_src
= src
= _pixman_image_for_color (CAIRO_COLOR_WHITE
);
514 if (unlikely (src
== NULL
))
515 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
516 op
= PIXMAN_OP_OUT_REVERSE
;
518 } else if (op
== CAIRO_OPERATOR_SOURCE
) {
519 #if PIXMAN_HAS_OP_LERP
520 op
= PIXMAN_OP_LERP_SRC
;
522 return CAIRO_INT_STATUS_UNSUPPORTED
;
525 op
= _pixman_operator (op
);
528 op
= _pixman_operator (op
);
531 for (chunk
= &boxes
->chunks
; chunk
; chunk
= chunk
->next
) {
532 for (i
= 0; i
< chunk
->count
; i
++) {
533 int x1
= _cairo_fixed_integer_part (chunk
->base
[i
].p1
.x
);
534 int y1
= _cairo_fixed_integer_part (chunk
->base
[i
].p1
.y
);
535 int x2
= _cairo_fixed_integer_part (chunk
->base
[i
].p2
.x
);
536 int y2
= _cairo_fixed_integer_part (chunk
->base
[i
].p2
.y
);
538 pixman_image_composite32 (op
, src
, mask
, dst
,
539 x1
+ src_x
, y1
+ src_y
,
540 x1
+ mask_x
, y1
+ mask_y
,
541 x1
+ dst_x
, y1
+ dst_y
,
547 pixman_image_unref (free_src
);
549 return CAIRO_STATUS_SUCCESS
;
552 #define CAIRO_FIXED_16_16_MIN _cairo_fixed_from_int (-32768)
553 #define CAIRO_FIXED_16_16_MAX _cairo_fixed_from_int (32767)
556 line_exceeds_16_16 (const cairo_line_t
*line
)
559 line
->p1
.x
<= CAIRO_FIXED_16_16_MIN
||
560 line
->p1
.x
>= CAIRO_FIXED_16_16_MAX
||
562 line
->p2
.x
<= CAIRO_FIXED_16_16_MIN
||
563 line
->p2
.x
>= CAIRO_FIXED_16_16_MAX
||
565 line
->p1
.y
<= CAIRO_FIXED_16_16_MIN
||
566 line
->p1
.y
>= CAIRO_FIXED_16_16_MAX
||
568 line
->p2
.y
<= CAIRO_FIXED_16_16_MIN
||
569 line
->p2
.y
>= CAIRO_FIXED_16_16_MAX
;
573 project_line_x_onto_16_16 (const cairo_line_t
*line
,
575 cairo_fixed_t bottom
,
576 pixman_line_fixed_t
*out
)
578 /* XXX use fixed-point arithmetic? */
579 cairo_point_double_t p1
, p2
;
582 p1
.x
= _cairo_fixed_to_double (line
->p1
.x
);
583 p1
.y
= _cairo_fixed_to_double (line
->p1
.y
);
585 p2
.x
= _cairo_fixed_to_double (line
->p2
.x
);
586 p2
.y
= _cairo_fixed_to_double (line
->p2
.y
);
588 m
= (p2
.x
- p1
.x
) / (p2
.y
- p1
.y
);
589 out
->p1
.x
= _cairo_fixed_16_16_from_double (p1
.x
+ m
* _cairo_fixed_to_double (top
- line
->p1
.y
));
590 out
->p2
.x
= _cairo_fixed_16_16_from_double (p1
.x
+ m
* _cairo_fixed_to_double (bottom
- line
->p1
.y
));
594 _pixman_image_add_traps (pixman_image_t
*image
,
595 int dst_x
, int dst_y
,
596 cairo_traps_t
*traps
)
598 cairo_trapezoid_t
*t
= traps
->traps
;
599 int num_traps
= traps
->num_traps
;
600 while (num_traps
--) {
601 pixman_trapezoid_t trap
;
603 /* top/bottom will be clamped to surface bounds */
604 trap
.top
= _cairo_fixed_to_16_16 (t
->top
);
605 trap
.bottom
= _cairo_fixed_to_16_16 (t
->bottom
);
607 /* However, all the other coordinates will have been left untouched so
608 * as not to introduce numerical error. Recompute them if they
609 * exceed the 16.16 limits.
611 if (unlikely (line_exceeds_16_16 (&t
->left
))) {
612 project_line_x_onto_16_16 (&t
->left
, t
->top
, t
->bottom
, &trap
.left
);
613 trap
.left
.p1
.y
= trap
.top
;
614 trap
.left
.p2
.y
= trap
.bottom
;
616 trap
.left
.p1
.x
= _cairo_fixed_to_16_16 (t
->left
.p1
.x
);
617 trap
.left
.p1
.y
= _cairo_fixed_to_16_16 (t
->left
.p1
.y
);
618 trap
.left
.p2
.x
= _cairo_fixed_to_16_16 (t
->left
.p2
.x
);
619 trap
.left
.p2
.y
= _cairo_fixed_to_16_16 (t
->left
.p2
.y
);
622 if (unlikely (line_exceeds_16_16 (&t
->right
))) {
623 project_line_x_onto_16_16 (&t
->right
, t
->top
, t
->bottom
, &trap
.right
);
624 trap
.right
.p1
.y
= trap
.top
;
625 trap
.right
.p2
.y
= trap
.bottom
;
627 trap
.right
.p1
.x
= _cairo_fixed_to_16_16 (t
->right
.p1
.x
);
628 trap
.right
.p1
.y
= _cairo_fixed_to_16_16 (t
->right
.p1
.y
);
629 trap
.right
.p2
.x
= _cairo_fixed_to_16_16 (t
->right
.p2
.x
);
630 trap
.right
.p2
.y
= _cairo_fixed_to_16_16 (t
->right
.p2
.y
);
633 pixman_rasterize_trapezoid (image
, &trap
, -dst_x
, -dst_y
);
638 static cairo_int_status_t
639 composite_traps (void *_dst
,
641 cairo_surface_t
*abstract_src
,
646 const cairo_rectangle_int_t
*extents
,
647 cairo_antialias_t antialias
,
648 cairo_traps_t
*traps
)
650 cairo_image_surface_t
*dst
= (cairo_image_surface_t
*) _dst
;
651 cairo_image_source_t
*src
= (cairo_image_source_t
*) abstract_src
;
652 cairo_int_status_t status
;
653 pixman_image_t
*mask
;
654 pixman_format_code_t format
;
656 TRACE ((stderr
, "%s\n", __FUNCTION__
));
658 /* pixman doesn't eliminate self-intersecting trapezoids/edges */
659 status
= _cairo_bentley_ottmann_tessellate_traps (traps
,
660 CAIRO_FILL_RULE_WINDING
);
661 if (status
!= CAIRO_INT_STATUS_SUCCESS
)
664 /* Special case adding trapezoids onto a mask surface; we want to avoid
665 * creating an intermediate temporary mask unnecessarily.
667 * We make the assumption here that the portion of the trapezoids
668 * contained within the surface is bounded by [dst_x,dst_y,width,height];
669 * the Cairo core code passes bounds based on the trapezoid extents.
671 format
= antialias
== CAIRO_ANTIALIAS_NONE
? PIXMAN_a1
: PIXMAN_a8
;
672 if (dst
->pixman_format
== format
&&
673 (abstract_src
== NULL
||
674 (op
== CAIRO_OPERATOR_ADD
&& src
->is_opaque_solid
)))
676 _pixman_image_add_traps (dst
->pixman_image
, dst_x
, dst_y
, traps
);
677 return CAIRO_STATUS_SUCCESS
;
680 mask
= pixman_image_create_bits (format
,
681 extents
->width
, extents
->height
,
683 if (unlikely (mask
== NULL
))
684 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
686 _pixman_image_add_traps (mask
, extents
->x
, extents
->y
, traps
);
687 pixman_image_composite32 (_pixman_operator (op
),
688 src
->pixman_image
, mask
, dst
->pixman_image
,
689 extents
->x
+ src_x
, extents
->y
+ src_y
,
691 extents
->x
- dst_x
, extents
->y
- dst_y
,
692 extents
->width
, extents
->height
);
694 pixman_image_unref (mask
);
696 return CAIRO_STATUS_SUCCESS
;
699 #if PIXMAN_VERSION >= PIXMAN_VERSION_ENCODE(0,22,0)
701 set_point (pixman_point_fixed_t
*p
, cairo_point_t
*c
)
703 p
->x
= _cairo_fixed_to_16_16 (c
->x
);
704 p
->y
= _cairo_fixed_to_16_16 (c
->y
);
708 _pixman_image_add_tristrip (pixman_image_t
*image
,
709 int dst_x
, int dst_y
,
710 cairo_tristrip_t
*strip
)
712 pixman_triangle_t tri
;
713 pixman_point_fixed_t
*p
[3] = {&tri
.p1
, &tri
.p2
, &tri
.p3
};
716 set_point (p
[0], &strip
->points
[0]);
717 set_point (p
[1], &strip
->points
[1]);
718 set_point (p
[2], &strip
->points
[2]);
719 pixman_add_triangles (image
, -dst_x
, -dst_y
, 1, &tri
);
720 for (n
= 3; n
< strip
->num_points
; n
++) {
721 set_point (p
[n
%3], &strip
->points
[n
]);
722 pixman_add_triangles (image
, -dst_x
, -dst_y
, 1, &tri
);
726 static cairo_int_status_t
727 composite_tristrip (void *_dst
,
729 cairo_surface_t
*abstract_src
,
734 const cairo_rectangle_int_t
*extents
,
735 cairo_antialias_t antialias
,
736 cairo_tristrip_t
*strip
)
738 cairo_image_surface_t
*dst
= (cairo_image_surface_t
*) _dst
;
739 cairo_image_source_t
*src
= (cairo_image_source_t
*) abstract_src
;
740 pixman_image_t
*mask
;
741 pixman_format_code_t format
;
743 TRACE ((stderr
, "%s\n", __FUNCTION__
));
745 if (strip
->num_points
< 3)
746 return CAIRO_STATUS_SUCCESS
;
748 if (1) { /* pixman doesn't eliminate self-intersecting triangles/edges */
749 cairo_int_status_t status
;
753 _cairo_traps_init (&traps
);
754 for (n
= 0; n
< strip
->num_points
; n
++) {
757 p
[0] = strip
->points
[0];
758 p
[1] = strip
->points
[1];
759 p
[2] = strip
->points
[2];
760 p
[3] = strip
->points
[0];
762 _cairo_traps_tessellate_convex_quad (&traps
, p
);
764 status
= composite_traps (_dst
, op
, abstract_src
,
767 extents
, antialias
, &traps
);
768 _cairo_traps_fini (&traps
);
773 format
= antialias
== CAIRO_ANTIALIAS_NONE
? PIXMAN_a1
: PIXMAN_a8
;
774 if (dst
->pixman_format
== format
&&
775 (abstract_src
== NULL
||
776 (op
== CAIRO_OPERATOR_ADD
&& src
->is_opaque_solid
)))
778 _pixman_image_add_tristrip (dst
->pixman_image
, dst_x
, dst_y
, strip
);
779 return CAIRO_STATUS_SUCCESS
;
782 mask
= pixman_image_create_bits (format
,
783 extents
->width
, extents
->height
,
785 if (unlikely (mask
== NULL
))
786 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
788 _pixman_image_add_tristrip (mask
, extents
->x
, extents
->y
, strip
);
789 pixman_image_composite32 (_pixman_operator (op
),
790 src
->pixman_image
, mask
, dst
->pixman_image
,
791 extents
->x
+ src_x
, extents
->y
+ src_y
,
793 extents
->x
- dst_x
, extents
->y
- dst_y
,
794 extents
->width
, extents
->height
);
796 pixman_image_unref (mask
);
798 return CAIRO_STATUS_SUCCESS
;
802 static cairo_int_status_t
803 check_composite_glyphs (const cairo_composite_rectangles_t
*extents
,
804 cairo_scaled_font_t
*scaled_font
,
805 cairo_glyph_t
*glyphs
,
808 return CAIRO_STATUS_SUCCESS
;
811 #if HAS_PIXMAN_GLYPHS
812 static pixman_glyph_cache_t
*global_glyph_cache
;
814 static inline pixman_glyph_cache_t
*
815 get_glyph_cache (void)
817 if (!global_glyph_cache
)
818 global_glyph_cache
= pixman_glyph_cache_create ();
820 return global_glyph_cache
;
824 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t
*scaled_font
,
825 cairo_scaled_glyph_t
*scaled_glyph
)
827 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex
);
829 if (global_glyph_cache
) {
830 pixman_glyph_cache_remove (
831 global_glyph_cache
, scaled_font
,
832 (void *)_cairo_scaled_glyph_index (scaled_glyph
));
835 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex
);
838 static cairo_int_status_t
839 composite_glyphs (void *_dst
,
841 cairo_surface_t
*_src
,
846 cairo_composite_glyphs_info_t
*info
)
848 cairo_int_status_t status
= CAIRO_INT_STATUS_SUCCESS
;
849 pixman_glyph_cache_t
*glyph_cache
;
850 pixman_glyph_t pglyphs_stack
[CAIRO_STACK_ARRAY_LENGTH (pixman_glyph_t
)];
851 pixman_glyph_t
*pglyphs
= pglyphs_stack
;
855 TRACE ((stderr
, "%s\n", __FUNCTION__
));
857 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex
);
859 glyph_cache
= get_glyph_cache();
860 if (unlikely (glyph_cache
== NULL
)) {
861 status
= _cairo_error (CAIRO_STATUS_NO_MEMORY
);
865 pixman_glyph_cache_freeze (glyph_cache
);
867 if (info
->num_glyphs
> ARRAY_LENGTH (pglyphs_stack
)) {
868 pglyphs
= _cairo_malloc_ab (info
->num_glyphs
, sizeof (pixman_glyph_t
));
869 if (unlikely (pglyphs
== NULL
)) {
870 status
= _cairo_error (CAIRO_STATUS_NO_MEMORY
);
876 for (i
= 0; i
< info
->num_glyphs
; i
++) {
877 unsigned long index
= info
->glyphs
[i
].index
;
880 glyph
= pixman_glyph_cache_lookup (glyph_cache
, info
->font
, (void *)index
);
882 cairo_scaled_glyph_t
*scaled_glyph
;
883 cairo_image_surface_t
*glyph_surface
;
885 /* This call can actually end up recursing, so we have to
886 * drop the mutex around it.
888 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex
);
889 status
= _cairo_scaled_glyph_lookup (info
->font
, index
,
890 CAIRO_SCALED_GLYPH_INFO_SURFACE
,
892 CAIRO_MUTEX_LOCK (_cairo_glyph_cache_mutex
);
894 if (unlikely (status
))
897 glyph_surface
= scaled_glyph
->surface
;
898 glyph
= pixman_glyph_cache_insert (glyph_cache
, info
->font
, (void *)index
,
899 glyph_surface
->base
.device_transform
.x0
,
900 glyph_surface
->base
.device_transform
.y0
,
901 glyph_surface
->pixman_image
);
902 if (unlikely (!glyph
)) {
903 status
= _cairo_error (CAIRO_STATUS_NO_MEMORY
);
908 pg
->x
= _cairo_lround (info
->glyphs
[i
].x
);
909 pg
->y
= _cairo_lround (info
->glyphs
[i
].y
);
914 if (info
->use_mask
) {
915 pixman_format_code_t mask_format
;
917 mask_format
= pixman_glyph_get_mask_format (glyph_cache
, pg
- pglyphs
, pglyphs
);
919 pixman_composite_glyphs (_pixman_operator (op
),
920 ((cairo_image_source_t
*)_src
)->pixman_image
,
921 to_pixman_image (_dst
),
923 info
->extents
.x
+ src_x
, info
->extents
.y
+ src_y
,
924 info
->extents
.x
, info
->extents
.y
,
925 info
->extents
.x
- dst_x
, info
->extents
.y
- dst_y
,
926 info
->extents
.width
, info
->extents
.height
,
927 glyph_cache
, pg
- pglyphs
, pglyphs
);
929 pixman_composite_glyphs_no_mask (_pixman_operator (op
),
930 ((cairo_image_source_t
*)_src
)->pixman_image
,
931 to_pixman_image (_dst
),
934 glyph_cache
, pg
- pglyphs
, pglyphs
);
938 pixman_glyph_cache_thaw (glyph_cache
);
940 if (pglyphs
!= pglyphs_stack
)
944 CAIRO_MUTEX_UNLOCK (_cairo_glyph_cache_mutex
);
949 _cairo_image_scaled_glyph_fini (cairo_scaled_font_t
*scaled_font
,
950 cairo_scaled_glyph_t
*scaled_glyph
)
954 static cairo_int_status_t
955 composite_one_glyph (void *_dst
,
957 cairo_surface_t
*_src
,
962 cairo_composite_glyphs_info_t
*info
)
964 cairo_image_surface_t
*glyph_surface
;
965 cairo_scaled_glyph_t
*scaled_glyph
;
966 cairo_status_t status
;
969 TRACE ((stderr
, "%s\n", __FUNCTION__
));
971 status
= _cairo_scaled_glyph_lookup (info
->font
,
972 info
->glyphs
[0].index
,
973 CAIRO_SCALED_GLYPH_INFO_SURFACE
,
976 if (unlikely (status
))
979 glyph_surface
= scaled_glyph
->surface
;
980 if (glyph_surface
->width
== 0 || glyph_surface
->height
== 0)
981 return CAIRO_INT_STATUS_NOTHING_TO_DO
;
983 /* round glyph locations to the nearest pixel */
984 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
985 x
= _cairo_lround (info
->glyphs
[0].x
-
986 glyph_surface
->base
.device_transform
.x0
);
987 y
= _cairo_lround (info
->glyphs
[0].y
-
988 glyph_surface
->base
.device_transform
.y0
);
990 pixman_image_composite32 (_pixman_operator (op
),
991 ((cairo_image_source_t
*)_src
)->pixman_image
,
992 glyph_surface
->pixman_image
,
993 to_pixman_image (_dst
),
994 x
+ src_x
, y
+ src_y
,
996 x
- dst_x
, y
- dst_y
,
997 glyph_surface
->width
,
998 glyph_surface
->height
);
1000 return CAIRO_INT_STATUS_SUCCESS
;
1003 static cairo_int_status_t
1004 composite_glyphs_via_mask (void *_dst
,
1005 cairo_operator_t op
,
1006 cairo_surface_t
*_src
,
1011 cairo_composite_glyphs_info_t
*info
)
1013 cairo_scaled_glyph_t
*glyph_cache
[64];
1014 pixman_image_t
*white
= _pixman_image_for_color (CAIRO_COLOR_WHITE
);
1015 cairo_scaled_glyph_t
*scaled_glyph
;
1017 pixman_image_t
*mask
;
1018 pixman_format_code_t format
;
1019 cairo_status_t status
;
1022 TRACE ((stderr
, "%s\n", __FUNCTION__
));
1024 if (unlikely (white
== NULL
))
1025 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
1027 /* XXX convert the glyphs to common formats a8/a8r8g8b8 to hit
1028 * optimised paths through pixman. Should we increase the bit
1029 * depth of the target surface, we should reconsider the appropriate
1033 status
= _cairo_scaled_glyph_lookup (info
->font
,
1034 info
->glyphs
[0].index
,
1035 CAIRO_SCALED_GLYPH_INFO_SURFACE
,
1037 if (unlikely (status
)) {
1038 pixman_image_unref (white
);
1042 memset (glyph_cache
, 0, sizeof (glyph_cache
));
1043 glyph_cache
[info
->glyphs
[0].index
% ARRAY_LENGTH (glyph_cache
)] = scaled_glyph
;
1046 i
= (info
->extents
.width
+ 3) & ~3;
1047 if (scaled_glyph
->surface
->base
.content
& CAIRO_CONTENT_COLOR
) {
1048 format
= PIXMAN_a8r8g8b8
;
1049 i
= info
->extents
.width
* 4;
1052 if (i
* info
->extents
.height
> (int) sizeof (buf
)) {
1053 mask
= pixman_image_create_bits (format
,
1054 info
->extents
.width
,
1055 info
->extents
.height
,
1058 memset (buf
, 0, i
* info
->extents
.height
);
1059 mask
= pixman_image_create_bits (format
,
1060 info
->extents
.width
,
1061 info
->extents
.height
,
1062 (uint32_t *)buf
, i
);
1064 if (unlikely (mask
== NULL
)) {
1065 pixman_image_unref (white
);
1066 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
1069 status
= CAIRO_STATUS_SUCCESS
;
1070 for (i
= 0; i
< info
->num_glyphs
; i
++) {
1071 unsigned long glyph_index
= info
->glyphs
[i
].index
;
1072 int cache_index
= glyph_index
% ARRAY_LENGTH (glyph_cache
);
1073 cairo_image_surface_t
*glyph_surface
;
1076 scaled_glyph
= glyph_cache
[cache_index
];
1077 if (scaled_glyph
== NULL
||
1078 _cairo_scaled_glyph_index (scaled_glyph
) != glyph_index
)
1080 status
= _cairo_scaled_glyph_lookup (info
->font
, glyph_index
,
1081 CAIRO_SCALED_GLYPH_INFO_SURFACE
,
1084 if (unlikely (status
)) {
1085 pixman_image_unref (mask
);
1086 pixman_image_unref (white
);
1090 glyph_cache
[cache_index
] = scaled_glyph
;
1093 glyph_surface
= scaled_glyph
->surface
;
1094 if (glyph_surface
->width
&& glyph_surface
->height
) {
1095 if (glyph_surface
->base
.content
& CAIRO_CONTENT_COLOR
&&
1096 format
== PIXMAN_a8
) {
1097 pixman_image_t
*ca_mask
;
1099 format
= PIXMAN_a8r8g8b8
;
1100 ca_mask
= pixman_image_create_bits (format
,
1101 info
->extents
.width
,
1102 info
->extents
.height
,
1104 if (unlikely (ca_mask
== NULL
)) {
1105 pixman_image_unref (mask
);
1106 pixman_image_unref (white
);
1107 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
1110 pixman_image_composite32 (PIXMAN_OP_SRC
,
1111 white
, mask
, ca_mask
,
1115 info
->extents
.width
,
1116 info
->extents
.height
);
1117 pixman_image_unref (mask
);
1121 /* round glyph locations to the nearest pixel */
1122 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1123 x
= _cairo_lround (info
->glyphs
[i
].x
-
1124 glyph_surface
->base
.device_transform
.x0
);
1125 y
= _cairo_lround (info
->glyphs
[i
].y
-
1126 glyph_surface
->base
.device_transform
.y0
);
1128 if (glyph_surface
->pixman_format
== format
) {
1129 pixman_image_composite32 (PIXMAN_OP_ADD
,
1130 glyph_surface
->pixman_image
, NULL
, mask
,
1133 x
- info
->extents
.x
, y
- info
->extents
.y
,
1134 glyph_surface
->width
,
1135 glyph_surface
->height
);
1137 pixman_image_composite32 (PIXMAN_OP_ADD
,
1138 white
, glyph_surface
->pixman_image
, mask
,
1141 x
- info
->extents
.x
, y
- info
->extents
.y
,
1142 glyph_surface
->width
,
1143 glyph_surface
->height
);
1148 if (format
== PIXMAN_a8r8g8b8
)
1149 pixman_image_set_component_alpha (mask
, TRUE
);
1151 pixman_image_composite32 (_pixman_operator (op
),
1152 ((cairo_image_source_t
*)_src
)->pixman_image
,
1154 to_pixman_image (_dst
),
1155 info
->extents
.x
+ src_x
, info
->extents
.y
+ src_y
,
1157 info
->extents
.x
- dst_x
, info
->extents
.y
- dst_y
,
1158 info
->extents
.width
, info
->extents
.height
);
1159 pixman_image_unref (mask
);
1160 pixman_image_unref (white
);
1162 return CAIRO_STATUS_SUCCESS
;
1165 static cairo_int_status_t
1166 composite_glyphs (void *_dst
,
1167 cairo_operator_t op
,
1168 cairo_surface_t
*_src
,
1173 cairo_composite_glyphs_info_t
*info
)
1175 cairo_scaled_glyph_t
*glyph_cache
[64];
1176 pixman_image_t
*dst
, *src
;
1177 cairo_status_t status
;
1180 TRACE ((stderr
, "%s\n", __FUNCTION__
));
1182 if (info
->num_glyphs
== 1)
1183 return composite_one_glyph(_dst
, op
, _src
, src_x
, src_y
, dst_x
, dst_y
, info
);
1186 return composite_glyphs_via_mask(_dst
, op
, _src
, src_x
, src_y
, dst_x
, dst_y
, info
);
1188 op
= _pixman_operator (op
);
1189 dst
= to_pixman_image (_dst
);
1190 src
= ((cairo_image_source_t
*)_src
)->pixman_image
;
1192 memset (glyph_cache
, 0, sizeof (glyph_cache
));
1193 status
= CAIRO_STATUS_SUCCESS
;
1195 for (i
= 0; i
< info
->num_glyphs
; i
++) {
1197 cairo_image_surface_t
*glyph_surface
;
1198 cairo_scaled_glyph_t
*scaled_glyph
;
1199 unsigned long glyph_index
= info
->glyphs
[i
].index
;
1200 int cache_index
= glyph_index
% ARRAY_LENGTH (glyph_cache
);
1202 scaled_glyph
= glyph_cache
[cache_index
];
1203 if (scaled_glyph
== NULL
||
1204 _cairo_scaled_glyph_index (scaled_glyph
) != glyph_index
)
1206 status
= _cairo_scaled_glyph_lookup (info
->font
, glyph_index
,
1207 CAIRO_SCALED_GLYPH_INFO_SURFACE
,
1210 if (unlikely (status
))
1213 glyph_cache
[cache_index
] = scaled_glyph
;
1216 glyph_surface
= scaled_glyph
->surface
;
1217 if (glyph_surface
->width
&& glyph_surface
->height
) {
1218 /* round glyph locations to the nearest pixel */
1219 /* XXX: FRAGILE: We're ignoring device_transform scaling here. A bug? */
1220 x
= _cairo_lround (info
->glyphs
[i
].x
-
1221 glyph_surface
->base
.device_transform
.x0
);
1222 y
= _cairo_lround (info
->glyphs
[i
].y
-
1223 glyph_surface
->base
.device_transform
.y0
);
1225 pixman_image_composite32 (op
, src
, glyph_surface
->pixman_image
, dst
,
1226 x
+ src_x
, y
+ src_y
,
1228 x
- dst_x
, y
- dst_y
,
1229 glyph_surface
->width
,
1230 glyph_surface
->height
);
1238 static cairo_int_status_t
1239 check_composite (const cairo_composite_rectangles_t
*extents
)
1241 return CAIRO_STATUS_SUCCESS
;
1244 const cairo_compositor_t
*
1245 _cairo_image_traps_compositor_get (void)
1247 static cairo_traps_compositor_t compositor
;
1249 if (compositor
.base
.delegate
== NULL
) {
1250 _cairo_traps_compositor_init (&compositor
,
1251 &__cairo_no_compositor
);
1252 compositor
.acquire
= acquire
;
1253 compositor
.release
= release
;
1254 compositor
.set_clip_region
= set_clip_region
;
1255 compositor
.pattern_to_surface
= _cairo_image_source_create_for_pattern
;
1256 compositor
.draw_image_boxes
= draw_image_boxes
;
1257 //compositor.copy_boxes = copy_boxes;
1258 compositor
.fill_boxes
= fill_boxes
;
1259 compositor
.check_composite
= check_composite
;
1260 compositor
.composite
= composite
;
1261 compositor
.lerp
= lerp
;
1262 //compositor.check_composite_boxes = check_composite_boxes;
1263 compositor
.composite_boxes
= composite_boxes
;
1264 //compositor.check_composite_traps = check_composite_traps;
1265 compositor
.composite_traps
= composite_traps
;
1266 //compositor.check_composite_tristrip = check_composite_traps;
1267 #if PIXMAN_VERSION >= PIXMAN_VERSION_ENCODE(0,22,0)
1268 compositor
.composite_tristrip
= composite_tristrip
;
1270 compositor
.check_composite_glyphs
= check_composite_glyphs
;
1271 compositor
.composite_glyphs
= composite_glyphs
;
1274 return &compositor
.base
;
1277 const cairo_compositor_t
*
1278 _cairo_image_mask_compositor_get (void)
1280 static cairo_mask_compositor_t compositor
;
1282 if (compositor
.base
.delegate
== NULL
) {
1283 _cairo_mask_compositor_init (&compositor
,
1284 _cairo_image_traps_compositor_get ());
1285 compositor
.acquire
= acquire
;
1286 compositor
.release
= release
;
1287 compositor
.set_clip_region
= set_clip_region
;
1288 compositor
.pattern_to_surface
= _cairo_image_source_create_for_pattern
;
1289 compositor
.draw_image_boxes
= draw_image_boxes
;
1290 compositor
.fill_rectangles
= fill_rectangles
;
1291 compositor
.fill_boxes
= fill_boxes
;
1292 compositor
.check_composite
= check_composite
;
1293 compositor
.composite
= composite
;
1294 //compositor.lerp = lerp;
1295 //compositor.check_composite_boxes = check_composite_boxes;
1296 compositor
.composite_boxes
= composite_boxes
;
1297 compositor
.check_composite_glyphs
= check_composite_glyphs
;
1298 compositor
.composite_glyphs
= composite_glyphs
;
1301 return &compositor
.base
;
1304 #if PIXMAN_HAS_COMPOSITOR
1305 typedef struct _cairo_image_span_renderer
{
1306 cairo_span_renderer_t base
;
1308 pixman_image_compositor_t
*compositor
;
1309 pixman_image_t
*src
, *mask
;
1311 cairo_rectangle_int_t extents
;
1312 } cairo_image_span_renderer_t
;
1313 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t
) <= sizeof (cairo_abstract_span_renderer_t
));
1315 static cairo_status_t
1316 _cairo_image_bounded_opaque_spans (void *abstract_renderer
,
1318 const cairo_half_open_span_t
*spans
,
1321 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1324 return CAIRO_STATUS_SUCCESS
;
1327 if (spans
[0].coverage
)
1328 pixman_image_compositor_blt (r
->compositor
,
1330 spans
[1].x
- spans
[0].x
, height
,
1333 } while (--num_spans
> 1);
1335 return CAIRO_STATUS_SUCCESS
;
1338 static cairo_status_t
1339 _cairo_image_bounded_spans (void *abstract_renderer
,
1341 const cairo_half_open_span_t
*spans
,
1344 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1347 return CAIRO_STATUS_SUCCESS
;
1350 if (spans
[0].coverage
) {
1351 pixman_image_compositor_blt (r
->compositor
,
1353 spans
[1].x
- spans
[0].x
, height
,
1354 r
->opacity
* spans
[0].coverage
);
1357 } while (--num_spans
> 1);
1359 return CAIRO_STATUS_SUCCESS
;
1362 static cairo_status_t
1363 _cairo_image_unbounded_spans (void *abstract_renderer
,
1365 const cairo_half_open_span_t
*spans
,
1368 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1370 assert (y
+ height
<= r
->extents
.height
);
1371 if (y
> r
->extents
.y
) {
1372 pixman_image_compositor_blt (r
->compositor
,
1373 r
->extents
.x
, r
->extents
.y
,
1374 r
->extents
.width
, y
- r
->extents
.y
,
1378 if (num_spans
== 0) {
1379 pixman_image_compositor_blt (r
->compositor
,
1381 r
->extents
.width
, height
,
1384 if (spans
[0].x
!= r
->extents
.x
) {
1385 pixman_image_compositor_blt (r
->compositor
,
1387 spans
[0].x
- r
->extents
.x
,
1393 assert (spans
[0].x
< r
->extents
.x
+ r
->extents
.width
);
1394 pixman_image_compositor_blt (r
->compositor
,
1396 spans
[1].x
- spans
[0].x
, height
,
1397 r
->opacity
* spans
[0].coverage
);
1399 } while (--num_spans
> 1);
1401 if (spans
[0].x
!= r
->extents
.x
+ r
->extents
.width
) {
1402 assert (spans
[0].x
< r
->extents
.x
+ r
->extents
.width
);
1403 pixman_image_compositor_blt (r
->compositor
,
1405 r
->extents
.x
+ r
->extents
.width
- spans
[0].x
, height
,
1410 r
->extents
.y
= y
+ height
;
1411 return CAIRO_STATUS_SUCCESS
;
1414 static cairo_status_t
1415 _cairo_image_clipped_spans (void *abstract_renderer
,
1417 const cairo_half_open_span_t
*spans
,
1420 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1425 if (! spans
[0].inverse
)
1426 pixman_image_compositor_blt (r
->compositor
,
1428 spans
[1].x
- spans
[0].x
, height
,
1429 r
->opacity
* spans
[0].coverage
);
1431 } while (--num_spans
> 1);
1433 r
->extents
.y
= y
+ height
;
1434 return CAIRO_STATUS_SUCCESS
;
1437 static cairo_status_t
1438 _cairo_image_finish_unbounded_spans (void *abstract_renderer
)
1440 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1442 if (r
->extents
.y
< r
->extents
.height
) {
1443 pixman_image_compositor_blt (r
->compositor
,
1444 r
->extents
.x
, r
->extents
.y
,
1446 r
->extents
.height
- r
->extents
.y
,
1450 return CAIRO_STATUS_SUCCESS
;
1453 static cairo_int_status_t
1454 span_renderer_init (cairo_abstract_span_renderer_t
*_r
,
1455 const cairo_composite_rectangles_t
*composite
,
1456 cairo_bool_t needs_clip
)
1458 cairo_image_span_renderer_t
*r
= (cairo_image_span_renderer_t
*)_r
;
1459 cairo_image_surface_t
*dst
= (cairo_image_surface_t
*)composite
->surface
;
1460 const cairo_pattern_t
*source
= &composite
->source_pattern
.base
;
1461 cairo_operator_t op
= composite
->op
;
1465 TRACE ((stderr
, "%s\n", __FUNCTION__
));
1467 if (op
== CAIRO_OPERATOR_CLEAR
) {
1468 op
= PIXMAN_OP_LERP_CLEAR
;
1469 } else if (dst
->base
.is_clear
&&
1470 (op
== CAIRO_OPERATOR_SOURCE
||
1471 op
== CAIRO_OPERATOR_OVER
||
1472 op
== CAIRO_OPERATOR_ADD
)) {
1474 } else if (op
== CAIRO_OPERATOR_SOURCE
) {
1475 op
= PIXMAN_OP_LERP_SRC
;
1477 op
= _pixman_operator (op
);
1480 r
->compositor
= NULL
;
1482 r
->src
= _pixman_image_for_pattern (dst
, source
, FALSE
,
1483 &composite
->unbounded
,
1484 &composite
->source_sample_area
,
1486 if (unlikely (r
->src
== NULL
))
1487 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
1490 if (composite
->mask_pattern
.base
.type
== CAIRO_PATTERN_TYPE_SOLID
) {
1491 r
->opacity
= composite
->mask_pattern
.solid
.color
.alpha
;
1493 r
->mask
= _pixman_image_for_pattern (dst
,
1494 &composite
->mask_pattern
.base
,
1496 &composite
->unbounded
,
1497 &composite
->mask_sample_area
,
1499 if (unlikely (r
->mask
== NULL
))
1500 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
1502 /* XXX Component-alpha? */
1503 if ((dst
->base
.content
& CAIRO_CONTENT_COLOR
) == 0 &&
1504 _cairo_pattern_is_opaque (source
, &composite
->source_sample_area
))
1506 pixman_image_unref (r
->src
);
1514 if (composite
->is_bounded
) {
1515 if (r
->opacity
== 1.)
1516 r
->base
.render_rows
= _cairo_image_bounded_opaque_spans
;
1518 r
->base
.render_rows
= _cairo_image_bounded_spans
;
1519 r
->base
.finish
= NULL
;
1522 r
->base
.render_rows
= _cairo_image_clipped_spans
;
1524 r
->base
.render_rows
= _cairo_image_unbounded_spans
;
1525 r
->base
.finish
= _cairo_image_finish_unbounded_spans
;
1526 r
->extents
= composite
->unbounded
;
1527 r
->extents
.height
+= r
->extents
.y
;
1531 pixman_image_create_compositor (op
, r
->src
, r
->mask
, dst
->pixman_image
,
1532 composite
->unbounded
.x
+ src_x
,
1533 composite
->unbounded
.y
+ src_y
,
1534 composite
->unbounded
.x
+ mask_x
,
1535 composite
->unbounded
.y
+ mask_y
,
1536 composite
->unbounded
.x
,
1537 composite
->unbounded
.y
,
1538 composite
->unbounded
.width
,
1539 composite
->unbounded
.height
);
1540 if (unlikely (r
->compositor
== NULL
))
1541 return CAIRO_INT_STATUS_NOTHING_TO_DO
;
1543 return CAIRO_STATUS_SUCCESS
;
1547 span_renderer_fini (cairo_abstract_span_renderer_t
*_r
,
1548 cairo_int_status_t status
)
1550 cairo_image_span_renderer_t
*r
= (cairo_image_span_renderer_t
*) _r
;
1552 TRACE ((stderr
, "%s\n", __FUNCTION__
));
1554 if (status
== CAIRO_INT_STATUS_SUCCESS
&& r
->base
.finish
)
1558 pixman_image_compositor_destroy (r
->compositor
);
1561 pixman_image_unref (r
->src
);
1563 pixman_image_unref (r
->mask
);
1566 typedef struct _cairo_image_span_renderer
{
1567 cairo_span_renderer_t base
;
1569 const cairo_composite_rectangles_t
*composite
;
1575 pixman_image_t
*src
, *mask
;
1589 pixman_image_t
*dst
;
1595 cairo_rectangle_int_t extents
;
1602 #define SZ_BUF (int)(sizeof (cairo_abstract_span_renderer_t) - sizeof (cairo_image_span_renderer_t))
1603 } cairo_image_span_renderer_t
;
1604 COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t
) <= sizeof (cairo_abstract_span_renderer_t
));
1606 static cairo_status_t
1607 _cairo_image_spans (void *abstract_renderer
,
1609 const cairo_half_open_span_t
*spans
,
1612 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1613 uint8_t *mask
, *row
;
1617 return CAIRO_STATUS_SUCCESS
;
1619 mask
= r
->u
.mask
.data
+ (y
- r
->u
.mask
.extents
.y
) * r
->u
.mask
.stride
;
1620 mask
+= spans
[0].x
- r
->u
.mask
.extents
.x
;
1624 len
= spans
[1].x
- spans
[0].x
;
1625 if (spans
[0].coverage
) {
1626 *row
++ = r
->opacity
* spans
[0].coverage
;
1628 memset (row
, row
[-1], len
);
1632 } while (--num_spans
> 1);
1637 mask
+= r
->u
.mask
.stride
;
1638 memcpy (mask
, row
, len
);
1641 return CAIRO_STATUS_SUCCESS
;
1644 static cairo_status_t
1645 _cairo_image_spans_and_zero (void *abstract_renderer
,
1647 const cairo_half_open_span_t
*spans
,
1650 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1654 mask
= r
->u
.mask
.data
;
1655 if (y
> r
->u
.mask
.extents
.y
) {
1656 len
= (y
- r
->u
.mask
.extents
.y
) * r
->u
.mask
.stride
;
1657 memset (mask
, 0, len
);
1661 r
->u
.mask
.extents
.y
= y
+ height
;
1662 r
->u
.mask
.data
= mask
+ height
* r
->u
.mask
.stride
;
1663 if (num_spans
== 0) {
1664 memset (mask
, 0, height
* r
->u
.mask
.stride
);
1666 uint8_t *row
= mask
;
1668 if (spans
[0].x
!= r
->u
.mask
.extents
.x
) {
1669 len
= spans
[0].x
- r
->u
.mask
.extents
.x
;
1670 memset (row
, 0, len
);
1675 len
= spans
[1].x
- spans
[0].x
;
1676 *row
++ = r
->opacity
* spans
[0].coverage
;
1678 memset (row
, row
[-1], --len
);
1682 } while (--num_spans
> 1);
1684 if (spans
[0].x
!= r
->u
.mask
.extents
.x
+ r
->u
.mask
.extents
.width
) {
1685 len
= r
->u
.mask
.extents
.x
+ r
->u
.mask
.extents
.width
- spans
[0].x
;
1686 memset (row
, 0, len
);
1691 mask
+= r
->u
.mask
.stride
;
1692 memcpy (mask
, row
, r
->u
.mask
.extents
.width
);
1696 return CAIRO_STATUS_SUCCESS
;
1699 static cairo_status_t
1700 _cairo_image_finish_spans_and_zero (void *abstract_renderer
)
1702 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1704 if (r
->u
.mask
.extents
.y
< r
->u
.mask
.extents
.height
)
1705 memset (r
->u
.mask
.data
, 0, (r
->u
.mask
.extents
.height
- r
->u
.mask
.extents
.y
) * r
->u
.mask
.stride
);
1707 return CAIRO_STATUS_SUCCESS
;
1710 static cairo_status_t
1711 _fill8_spans (void *abstract_renderer
, int y
, int h
,
1712 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1714 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1717 return CAIRO_STATUS_SUCCESS
;
1719 if (likely(h
== 1)) {
1721 if (spans
[0].coverage
) {
1722 int len
= spans
[1].x
- spans
[0].x
;
1723 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
;
1725 *d
= r
->u
.fill
.pixel
;
1727 memset(d
, r
->u
.fill
.pixel
, len
);
1730 } while (--num_spans
> 1);
1733 if (spans
[0].coverage
) {
1736 int len
= spans
[1].x
- spans
[0].x
;
1737 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
;
1739 *d
= r
->u
.fill
.pixel
;
1741 memset(d
, r
->u
.fill
.pixel
, len
);
1746 } while (--num_spans
> 1);
1749 return CAIRO_STATUS_SUCCESS
;
1752 static cairo_status_t
1753 _fill16_spans (void *abstract_renderer
, int y
, int h
,
1754 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1756 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1759 return CAIRO_STATUS_SUCCESS
;
1761 if (likely(h
== 1)) {
1763 if (spans
[0].coverage
) {
1764 int len
= spans
[1].x
- spans
[0].x
;
1765 uint16_t *d
= (uint16_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
*2);
1767 *d
++ = r
->u
.fill
.pixel
;
1770 } while (--num_spans
> 1);
1773 if (spans
[0].coverage
) {
1776 int len
= spans
[1].x
- spans
[0].x
;
1777 uint16_t *d
= (uint16_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
*2);
1779 *d
++ = r
->u
.fill
.pixel
;
1784 } while (--num_spans
> 1);
1787 return CAIRO_STATUS_SUCCESS
;
1790 static cairo_status_t
1791 _fill32_spans (void *abstract_renderer
, int y
, int h
,
1792 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1794 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1797 return CAIRO_STATUS_SUCCESS
;
1799 if (likely(h
== 1)) {
1801 if (spans
[0].coverage
) {
1802 int len
= spans
[1].x
- spans
[0].x
;
1804 pixman_fill ((uint32_t *)r
->u
.fill
.data
, r
->u
.fill
.stride
/ sizeof(uint32_t), r
->bpp
,
1805 spans
[0].x
, y
, len
, 1, r
->u
.fill
.pixel
);
1807 uint32_t *d
= (uint32_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
*4);
1809 *d
++ = r
->u
.fill
.pixel
;
1813 } while (--num_spans
> 1);
1816 if (spans
[0].coverage
) {
1817 if (spans
[1].x
- spans
[0].x
> 16) {
1818 pixman_fill ((uint32_t *)r
->u
.fill
.data
, r
->u
.fill
.stride
/ sizeof(uint32_t), r
->bpp
,
1819 spans
[0].x
, y
, spans
[1].x
- spans
[0].x
, h
,
1824 int len
= spans
[1].x
- spans
[0].x
;
1825 uint32_t *d
= (uint32_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
*4);
1827 *d
++ = r
->u
.fill
.pixel
;
1833 } while (--num_spans
> 1);
1836 return CAIRO_STATUS_SUCCESS
;
1840 static cairo_status_t
1841 _fill_spans (void *abstract_renderer
, int y
, int h
,
1842 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1844 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1847 return CAIRO_STATUS_SUCCESS
;
1850 if (spans
[0].coverage
) {
1851 pixman_fill ((uint32_t *) r
->data
, r
->stride
, r
->bpp
,
1853 spans
[1].x
- spans
[0].x
, h
,
1857 } while (--num_spans
> 1);
1859 return CAIRO_STATUS_SUCCESS
;
1863 static cairo_status_t
1864 _blit_spans (void *abstract_renderer
, int y
, int h
,
1865 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1867 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1871 return CAIRO_STATUS_SUCCESS
;
1874 if (likely (h
== 1)) {
1875 uint8_t *src
= r
->u
.blit
.src_data
+ y
*r
->u
.blit
.src_stride
;
1876 uint8_t *dst
= r
->u
.blit
.data
+ y
*r
->u
.blit
.stride
;
1878 if (spans
[0].coverage
) {
1879 void *s
= src
+ spans
[0].x
*cpp
;
1880 void *d
= dst
+ spans
[0].x
*cpp
;
1881 int len
= (spans
[1].x
- spans
[0].x
) * cpp
;
1884 *(uint8_t *)d
= *(uint8_t *)s
;
1887 *(uint16_t *)d
= *(uint16_t *)s
;
1890 *(uint32_t *)d
= *(uint32_t *)s
;
1894 *(uint64_t *)d
= *(uint64_t *)s
;
1903 } while (--num_spans
> 1);
1906 if (spans
[0].coverage
) {
1909 void *src
= r
->u
.blit
.src_data
+ yy
*r
->u
.blit
.src_stride
+ spans
[0].x
*cpp
;
1910 void *dst
= r
->u
.blit
.data
+ yy
*r
->u
.blit
.stride
+ spans
[0].x
*cpp
;
1911 int len
= (spans
[1].x
- spans
[0].x
) * cpp
;
1914 *(uint8_t *)dst
= *(uint8_t *)src
;
1917 *(uint16_t *)dst
= *(uint16_t *)src
;
1920 *(uint32_t *)dst
= *(uint32_t *)src
;
1924 *(uint64_t *)dst
= *(uint64_t *)src
;
1928 memcpy(dst
, src
, len
);
1935 } while (--num_spans
> 1);
1938 return CAIRO_STATUS_SUCCESS
;
1941 static cairo_status_t
1942 _mono_spans (void *abstract_renderer
, int y
, int h
,
1943 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1945 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1948 return CAIRO_STATUS_SUCCESS
;
1951 if (spans
[0].coverage
) {
1952 pixman_image_composite32 (r
->op
,
1953 r
->src
, NULL
, r
->u
.composite
.dst
,
1954 spans
[0].x
+ r
->u
.composite
.src_x
, y
+ r
->u
.composite
.src_y
,
1957 spans
[1].x
- spans
[0].x
, h
);
1960 } while (--num_spans
> 1);
1962 return CAIRO_STATUS_SUCCESS
;
1965 static cairo_status_t
1966 _mono_unbounded_spans (void *abstract_renderer
, int y
, int h
,
1967 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
1969 cairo_image_span_renderer_t
*r
= abstract_renderer
;
1971 if (num_spans
== 0) {
1972 pixman_image_composite32 (PIXMAN_OP_CLEAR
,
1973 r
->src
, NULL
, r
->u
.composite
.dst
,
1974 spans
[0].x
+ r
->u
.composite
.src_x
, y
+ r
->u
.composite
.src_y
,
1976 r
->composite
->unbounded
.x
, y
,
1977 r
->composite
->unbounded
.width
, h
);
1978 r
->u
.composite
.mask_y
= y
+ h
;
1979 return CAIRO_STATUS_SUCCESS
;
1982 if (y
!= r
->u
.composite
.mask_y
) {
1983 pixman_image_composite32 (PIXMAN_OP_CLEAR
,
1984 r
->src
, NULL
, r
->u
.composite
.dst
,
1985 spans
[0].x
+ r
->u
.composite
.src_x
, y
+ r
->u
.composite
.src_y
,
1987 r
->composite
->unbounded
.x
, r
->u
.composite
.mask_y
,
1988 r
->composite
->unbounded
.width
, y
- r
->u
.composite
.mask_y
);
1991 if (spans
[0].x
!= r
->composite
->unbounded
.x
) {
1992 pixman_image_composite32 (PIXMAN_OP_CLEAR
,
1993 r
->src
, NULL
, r
->u
.composite
.dst
,
1994 spans
[0].x
+ r
->u
.composite
.src_x
, y
+ r
->u
.composite
.src_y
,
1996 r
->composite
->unbounded
.x
, y
,
1997 spans
[0].x
- r
->composite
->unbounded
.x
, h
);
2001 int op
= spans
[0].coverage
? r
->op
: PIXMAN_OP_CLEAR
;
2002 pixman_image_composite32 (op
,
2003 r
->src
, NULL
, r
->u
.composite
.dst
,
2004 spans
[0].x
+ r
->u
.composite
.src_x
, y
+ r
->u
.composite
.src_y
,
2007 spans
[1].x
- spans
[0].x
, h
);
2009 } while (--num_spans
> 1);
2011 if (spans
[0].x
!= r
->composite
->unbounded
.x
+ r
->composite
->unbounded
.width
) {
2012 pixman_image_composite32 (PIXMAN_OP_CLEAR
,
2013 r
->src
, NULL
, r
->u
.composite
.dst
,
2014 spans
[0].x
+ r
->u
.composite
.src_x
, y
+ r
->u
.composite
.src_y
,
2017 r
->composite
->unbounded
.x
+ r
->composite
->unbounded
.width
- spans
[0].x
, h
);
2020 r
->u
.composite
.mask_y
= y
+ h
;
2021 return CAIRO_STATUS_SUCCESS
;
2024 static cairo_status_t
2025 _mono_finish_unbounded_spans (void *abstract_renderer
)
2027 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2029 if (r
->u
.composite
.mask_y
< r
->composite
->unbounded
.y
+ r
->composite
->unbounded
.height
) {
2030 pixman_image_composite32 (PIXMAN_OP_CLEAR
,
2031 r
->src
, NULL
, r
->u
.composite
.dst
,
2032 r
->composite
->unbounded
.x
+ r
->u
.composite
.src_x
, r
->u
.composite
.mask_y
+ r
->u
.composite
.src_y
,
2034 r
->composite
->unbounded
.x
, r
->u
.composite
.mask_y
,
2035 r
->composite
->unbounded
.width
,
2036 r
->composite
->unbounded
.y
+ r
->composite
->unbounded
.height
- r
->u
.composite
.mask_y
);
2039 return CAIRO_STATUS_SUCCESS
;
2042 static cairo_int_status_t
2043 mono_renderer_init (cairo_image_span_renderer_t
*r
,
2044 const cairo_composite_rectangles_t
*composite
,
2045 cairo_antialias_t antialias
,
2046 cairo_bool_t needs_clip
)
2048 cairo_image_surface_t
*dst
= (cairo_image_surface_t
*)composite
->surface
;
2050 if (antialias
!= CAIRO_ANTIALIAS_NONE
)
2051 return CAIRO_INT_STATUS_UNSUPPORTED
;
2053 if (!_cairo_pattern_is_opaque_solid (&composite
->mask_pattern
.base
))
2054 return CAIRO_INT_STATUS_UNSUPPORTED
;
2056 r
->base
.render_rows
= NULL
;
2057 if (composite
->source_pattern
.base
.type
== CAIRO_PATTERN_TYPE_SOLID
) {
2058 const cairo_color_t
*color
;
2060 color
= &composite
->source_pattern
.solid
.color
;
2061 if (composite
->op
== CAIRO_OPERATOR_CLEAR
)
2062 color
= CAIRO_COLOR_TRANSPARENT
;
2064 if (fill_reduces_to_source (composite
->op
, color
, dst
, &r
->u
.fill
.pixel
)) {
2065 /* Use plain C for the fill operations as the span length is
2066 * typically small, too small to payback the startup overheads of
2069 switch (PIXMAN_FORMAT_BPP(dst
->pixman_format
)) {
2070 case 8: r
->base
.render_rows
= _fill8_spans
; break;
2071 case 16: r
->base
.render_rows
= _fill16_spans
; break;
2072 case 32: r
->base
.render_rows
= _fill32_spans
; break;
2075 r
->u
.fill
.data
= dst
->data
;
2076 r
->u
.fill
.stride
= dst
->stride
;
2078 } else if ((composite
->op
== CAIRO_OPERATOR_SOURCE
||
2079 (composite
->op
== CAIRO_OPERATOR_OVER
&&
2080 (dst
->base
.is_clear
|| (dst
->base
.content
& CAIRO_CONTENT_ALPHA
) == 0))) &&
2081 composite
->source_pattern
.base
.type
== CAIRO_PATTERN_TYPE_SURFACE
&&
2082 composite
->source_pattern
.surface
.surface
->backend
->type
== CAIRO_SURFACE_TYPE_IMAGE
&&
2083 to_image_surface(composite
->source_pattern
.surface
.surface
)->format
== dst
->format
)
2085 cairo_image_surface_t
*src
=
2086 to_image_surface(composite
->source_pattern
.surface
.surface
);
2089 if (_cairo_matrix_is_integer_translation(&composite
->source_pattern
.base
.matrix
,
2091 composite
->bounded
.x
+ tx
>= 0 &&
2092 composite
->bounded
.y
+ ty
>= 0 &&
2093 composite
->bounded
.x
+ composite
->bounded
.width
+ tx
<= src
->width
&&
2094 composite
->bounded
.y
+ composite
->bounded
.height
+ ty
<= src
->height
) {
2096 r
->u
.blit
.stride
= dst
->stride
;
2097 r
->u
.blit
.data
= dst
->data
;
2098 r
->u
.blit
.src_stride
= src
->stride
;
2099 r
->u
.blit
.src_data
= src
->data
+ src
->stride
* ty
+ tx
* 4;
2100 r
->base
.render_rows
= _blit_spans
;
2104 if (r
->base
.render_rows
== NULL
) {
2105 r
->src
= _pixman_image_for_pattern (dst
, &composite
->source_pattern
.base
, FALSE
,
2106 &composite
->unbounded
,
2107 &composite
->source_sample_area
,
2108 &r
->u
.composite
.src_x
, &r
->u
.composite
.src_y
);
2109 if (unlikely (r
->src
== NULL
))
2110 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
2112 r
->u
.composite
.dst
= to_pixman_image (composite
->surface
);
2113 r
->op
= _pixman_operator (composite
->op
);
2114 if (composite
->is_bounded
== 0) {
2115 r
->base
.render_rows
= _mono_unbounded_spans
;
2116 r
->base
.finish
= _mono_finish_unbounded_spans
;
2117 r
->u
.composite
.mask_y
= composite
->unbounded
.y
;
2119 r
->base
.render_rows
= _mono_spans
;
2121 r
->bpp
= PIXMAN_FORMAT_BPP(dst
->pixman_format
);
2123 return CAIRO_INT_STATUS_SUCCESS
;
/* Constants for 8-bit fixed-point channel arithmetic.  ONE_HALF is the
 * rounding bias added before the (t + (t >> 8)) >> 8 divide-by-255 step
 * (see mul8_8).  The RB_* constants act on two packed 8-bit channels at
 * once — the byte lanes selected by 0x00ff00ff in a 32-bit pixel — as
 * used by mul8x2_8/add8x2_8x2 below. */
2126 #define ONE_HALF 0x7f
2127 #define RB_MASK 0x00ff00ff
2128 #define RB_ONE_HALF 0x007f007f
2129 #define RB_MASK_PLUS_ONE 0x01000100
2131 static inline uint32_t
2132 mul8x2_8 (uint32_t a
, uint8_t b
)
2134 uint32_t t
= (a
& RB_MASK
) * b
+ RB_ONE_HALF
;
2135 return ((t
+ ((t
>> G_SHIFT
) & RB_MASK
)) >> G_SHIFT
) & RB_MASK
;
2138 static inline uint32_t
2139 add8x2_8x2 (uint32_t a
, uint32_t b
)
2142 t
|= RB_MASK_PLUS_ONE
- ((t
>> G_SHIFT
) & RB_MASK
);
2146 static inline uint8_t
2147 mul8_8 (uint8_t a
, uint8_t b
)
2149 uint16_t t
= a
* (uint16_t)b
+ ONE_HALF
;
2150 return ((t
>> G_SHIFT
) + t
) >> G_SHIFT
;
2153 static inline uint32_t
2154 lerp8x4 (uint32_t src
, uint8_t a
, uint32_t dst
)
2156 return (add8x2_8x2 (mul8x2_8 (src
, a
),
2157 mul8x2_8 (dst
, ~a
)) |
2158 add8x2_8x2 (mul8x2_8 (src
>> G_SHIFT
, a
),
2159 mul8x2_8 (dst
>> G_SHIFT
, ~a
)) << G_SHIFT
);
2162 static cairo_status_t
2163 _fill_a8_lerp_opaque_spans (void *abstract_renderer
, int y
, int h
,
2164 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
2166 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2169 return CAIRO_STATUS_SUCCESS
;
2171 if (likely(h
== 1)) {
2172 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
;
2174 uint8_t a
= spans
[0].coverage
;
2176 int len
= spans
[1].x
- spans
[0].x
;
2178 memset(d
+ spans
[0].x
, r
->u
.fill
.pixel
, len
);
2180 uint8_t s
= mul8_8(a
, r
->u
.fill
.pixel
);
2181 uint8_t *dst
= d
+ spans
[0].x
;
2184 uint8_t t
= mul8_8(*dst
, a
);
2190 } while (--num_spans
> 1);
2193 uint8_t a
= spans
[0].coverage
;
2198 int len
= spans
[1].x
- spans
[0].x
;
2199 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
;
2200 memset(d
, r
->u
.fill
.pixel
, len
);
2204 uint8_t s
= mul8_8(a
, r
->u
.fill
.pixel
);
2207 int len
= spans
[1].x
- spans
[0].x
;
2208 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
;
2210 uint8_t t
= mul8_8(*d
, a
);
2218 } while (--num_spans
> 1);
2221 return CAIRO_STATUS_SUCCESS
;
2224 static cairo_status_t
2225 _fill_xrgb32_lerp_opaque_spans (void *abstract_renderer
, int y
, int h
,
2226 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
2228 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2231 return CAIRO_STATUS_SUCCESS
;
2233 if (likely(h
== 1)) {
2235 uint8_t a
= spans
[0].coverage
;
2237 int len
= spans
[1].x
- spans
[0].x
;
2238 uint32_t *d
= (uint32_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
*4);
2241 pixman_fill ((uint32_t *)r
->u
.fill
.data
, r
->u
.fill
.stride
/ sizeof(uint32_t), 32,
2242 spans
[0].x
, y
, len
, 1, r
->u
.fill
.pixel
);
2244 uint32_t *d
= (uint32_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
*4);
2246 *d
++ = r
->u
.fill
.pixel
;
2248 } else while (len
-- > 0) {
2249 *d
= lerp8x4 (r
->u
.fill
.pixel
, a
, *d
);
2254 } while (--num_spans
> 1);
2257 uint8_t a
= spans
[0].coverage
;
2260 if (spans
[1].x
- spans
[0].x
> 16) {
2261 pixman_fill ((uint32_t *)r
->u
.fill
.data
, r
->u
.fill
.stride
/ sizeof(uint32_t), 32,
2262 spans
[0].x
, y
, spans
[1].x
- spans
[0].x
, h
,
2267 int len
= spans
[1].x
- spans
[0].x
;
2268 uint32_t *d
= (uint32_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
*4);
2270 *d
++ = r
->u
.fill
.pixel
;
2277 int len
= spans
[1].x
- spans
[0].x
;
2278 uint32_t *d
= (uint32_t *)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
*4);
2280 *d
= lerp8x4 (r
->u
.fill
.pixel
, a
, *d
);
2288 } while (--num_spans
> 1);
2291 return CAIRO_STATUS_SUCCESS
;
2294 static cairo_status_t
2295 _fill_a8_lerp_spans (void *abstract_renderer
, int y
, int h
,
2296 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
2298 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2301 return CAIRO_STATUS_SUCCESS
;
2303 if (likely(h
== 1)) {
2305 uint8_t a
= mul8_8 (spans
[0].coverage
, r
->bpp
);
2307 int len
= spans
[1].x
- spans
[0].x
;
2308 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
;
2309 uint16_t p
= (uint16_t)a
* r
->u
.fill
.pixel
+ 0x7f;
2312 uint16_t t
= *d
*ia
+ p
;
2313 *d
++ = (t
+ (t
>>8)) >> 8;
2317 } while (--num_spans
> 1);
2320 uint8_t a
= mul8_8 (spans
[0].coverage
, r
->bpp
);
2323 uint16_t p
= (uint16_t)a
* r
->u
.fill
.pixel
+ 0x7f;
2326 int len
= spans
[1].x
- spans
[0].x
;
2327 uint8_t *d
= r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
;
2329 uint16_t t
= *d
*ia
+ p
;
2330 *d
++ = (t
+ (t
>>8)) >> 8;
2336 } while (--num_spans
> 1);
2339 return CAIRO_STATUS_SUCCESS
;
2342 static cairo_status_t
2343 _fill_xrgb32_lerp_spans (void *abstract_renderer
, int y
, int h
,
2344 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
2346 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2349 return CAIRO_STATUS_SUCCESS
;
2351 if (likely(h
== 1)) {
2353 uint8_t a
= mul8_8 (spans
[0].coverage
, r
->bpp
);
2355 int len
= spans
[1].x
- spans
[0].x
;
2356 uint32_t *d
= (uint32_t*)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*y
+ spans
[0].x
*4);
2358 *d
= lerp8x4 (r
->u
.fill
.pixel
, a
, *d
);
2363 } while (--num_spans
> 1);
2366 uint8_t a
= mul8_8 (spans
[0].coverage
, r
->bpp
);
2370 int len
= spans
[1].x
- spans
[0].x
;
2371 uint32_t *d
= (uint32_t *)(r
->u
.fill
.data
+ r
->u
.fill
.stride
*yy
+ spans
[0].x
*4);
2373 *d
= lerp8x4 (r
->u
.fill
.pixel
, a
, *d
);
2380 } while (--num_spans
> 1);
2383 return CAIRO_STATUS_SUCCESS
;
2386 static cairo_status_t
2387 _blit_xrgb32_lerp_spans (void *abstract_renderer
, int y
, int h
,
2388 const cairo_half_open_span_t
*spans
, unsigned num_spans
)
2390 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2393 return CAIRO_STATUS_SUCCESS
;
2395 if (likely(h
== 1)) {
2396 uint8_t *src
= r
->u
.blit
.src_data
+ y
*r
->u
.blit
.src_stride
;
2397 uint8_t *dst
= r
->u
.blit
.data
+ y
*r
->u
.blit
.stride
;
2399 uint8_t a
= mul8_8 (spans
[0].coverage
, r
->bpp
);
2401 uint32_t *s
= (uint32_t*)src
+ spans
[0].x
;
2402 uint32_t *d
= (uint32_t*)dst
+ spans
[0].x
;
2403 int len
= spans
[1].x
- spans
[0].x
;
2408 memcpy(d
, s
, len
*4);
2411 *d
= lerp8x4 (*s
, a
, *d
);
2417 } while (--num_spans
> 1);
2420 uint8_t a
= mul8_8 (spans
[0].coverage
, r
->bpp
);
2424 uint32_t *s
= (uint32_t *)(r
->u
.blit
.src_data
+ yy
*r
->u
.blit
.src_stride
+ spans
[0].x
* 4);
2425 uint32_t *d
= (uint32_t *)(r
->u
.blit
.data
+ yy
*r
->u
.blit
.stride
+ spans
[0].x
* 4);
2426 int len
= spans
[1].x
- spans
[0].x
;
2431 memcpy(d
, s
, len
* 4);
2434 *d
= lerp8x4 (*s
, a
, *d
);
2442 } while (--num_spans
> 1);
2445 return CAIRO_STATUS_SUCCESS
;
2448 static cairo_status_t
2449 _inplace_spans (void *abstract_renderer
,
2451 const cairo_half_open_span_t
*spans
,
2454 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2459 return CAIRO_STATUS_SUCCESS
;
2461 if (num_spans
== 2 && spans
[0].coverage
== 0xff) {
2462 pixman_image_composite32 (r
->op
, r
->src
, NULL
, r
->u
.composite
.dst
,
2463 spans
[0].x
+ r
->u
.composite
.src_x
,
2464 y
+ r
->u
.composite
.src_y
,
2467 spans
[1].x
- spans
[0].x
, h
);
2468 return CAIRO_STATUS_SUCCESS
;
2471 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2472 x1
= x0
= spans
[0].x
;
2474 int len
= spans
[1].x
- spans
[0].x
;
2475 *mask
++ = spans
[0].coverage
;
2477 if (len
>= r
->u
.composite
.run_length
&& spans
[0].coverage
== 0xff) {
2479 pixman_image_composite32 (r
->op
, r
->src
, r
->mask
, r
->u
.composite
.dst
,
2480 x0
+ r
->u
.composite
.src_x
,
2481 y
+ r
->u
.composite
.src_y
,
2486 pixman_image_composite32 (r
->op
, r
->src
, NULL
, r
->u
.composite
.dst
,
2487 spans
[0].x
+ r
->u
.composite
.src_x
,
2488 y
+ r
->u
.composite
.src_y
,
2492 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2494 } else if (spans
[0].coverage
== 0x0 &&
2495 x1
- x0
> r
->u
.composite
.run_length
) {
2496 pixman_image_composite32 (r
->op
, r
->src
, r
->mask
, r
->u
.composite
.dst
,
2497 x0
+ r
->u
.composite
.src_x
,
2498 y
+ r
->u
.composite
.src_y
,
2502 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2505 memset (mask
, spans
[0].coverage
, --len
);
2511 } while (--num_spans
> 1);
2514 pixman_image_composite32 (r
->op
, r
->src
, r
->mask
, r
->u
.composite
.dst
,
2515 x0
+ r
->u
.composite
.src_x
,
2516 y
+ r
->u
.composite
.src_y
,
2522 return CAIRO_STATUS_SUCCESS
;
2525 static cairo_status_t
2526 _inplace_opacity_spans (void *abstract_renderer
, int y
, int h
,
2527 const cairo_half_open_span_t
*spans
,
2530 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2535 return CAIRO_STATUS_SUCCESS
;
2537 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2538 x1
= x0
= spans
[0].x
;
2540 int len
= spans
[1].x
- spans
[0].x
;
2541 uint8_t m
= mul8_8(spans
[0].coverage
, r
->bpp
);
2545 x1
- x0
> r
->u
.composite
.run_length
) {
2546 pixman_image_composite32 (r
->op
, r
->src
, r
->mask
, r
->u
.composite
.dst
,
2547 x0
+ r
->u
.composite
.src_x
,
2548 y
+ r
->u
.composite
.src_y
,
2552 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2555 memset (mask
, m
, --len
);
2561 } while (--num_spans
> 1);
2564 pixman_image_composite32 (r
->op
, r
->src
, r
->mask
, r
->u
.composite
.dst
,
2565 x0
+ r
->u
.composite
.src_x
,
2566 y
+ r
->u
.composite
.src_y
,
2572 return CAIRO_STATUS_SUCCESS
;
2575 static cairo_status_t
2576 _inplace_src_spans (void *abstract_renderer
, int y
, int h
,
2577 const cairo_half_open_span_t
*spans
,
2580 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2585 return CAIRO_STATUS_SUCCESS
;
2590 int len
= spans
[1].x
- spans
[0].x
;
2591 if (len
>= r
->u
.composite
.run_length
&& spans
[0].coverage
== 0xff) {
2592 if (spans
[0].x
!= x0
) {
2593 #if PIXMAN_HAS_OP_LERP
2594 pixman_image_composite32 (PIXMAN_OP_LERP_SRC
,
2595 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2596 x0
+ r
->u
.composite
.src_x
,
2597 y
+ r
->u
.composite
.src_y
,
2600 spans
[0].x
- x0
, h
);
2602 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE
,
2603 r
->mask
, NULL
, r
->u
.composite
.dst
,
2607 spans
[0].x
- x0
, h
);
2608 pixman_image_composite32 (PIXMAN_OP_ADD
,
2609 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2610 x0
+ r
->u
.composite
.src_x
,
2611 y
+ r
->u
.composite
.src_y
,
2614 spans
[0].x
- x0
, h
);
2618 pixman_image_composite32 (PIXMAN_OP_SRC
,
2619 r
->src
, NULL
, r
->u
.composite
.dst
,
2620 spans
[0].x
+ r
->u
.composite
.src_x
,
2621 y
+ r
->u
.composite
.src_y
,
2624 spans
[1].x
- spans
[0].x
, h
);
2628 } else if (spans
[0].coverage
== 0x0) {
2629 if (spans
[0].x
!= x0
) {
2630 #if PIXMAN_HAS_OP_LERP
2631 pixman_image_composite32 (PIXMAN_OP_LERP_SRC
,
2632 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2633 x0
+ r
->u
.composite
.src_x
,
2634 y
+ r
->u
.composite
.src_y
,
2637 spans
[0].x
- x0
, h
);
2639 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE
,
2640 r
->mask
, NULL
, r
->u
.composite
.dst
,
2644 spans
[0].x
- x0
, h
);
2645 pixman_image_composite32 (PIXMAN_OP_ADD
,
2646 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2647 x0
+ r
->u
.composite
.src_x
,
2648 y
+ r
->u
.composite
.src_y
,
2651 spans
[0].x
- x0
, h
);
2658 *m
++ = spans
[0].coverage
;
2660 memset (m
, spans
[0].coverage
, --len
);
2665 } while (--num_spans
> 1);
2667 if (spans
[0].x
!= x0
) {
2668 #if PIXMAN_HAS_OP_LERP
2669 pixman_image_composite32 (PIXMAN_OP_LERP_SRC
,
2670 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2671 x0
+ r
->u
.composite
.src_x
,
2672 y
+ r
->u
.composite
.src_y
,
2675 spans
[0].x
- x0
, h
);
2677 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE
,
2678 r
->mask
, NULL
, r
->u
.composite
.dst
,
2682 spans
[0].x
- x0
, h
);
2683 pixman_image_composite32 (PIXMAN_OP_ADD
,
2684 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2685 x0
+ r
->u
.composite
.src_x
,
2686 y
+ r
->u
.composite
.src_y
,
2689 spans
[0].x
- x0
, h
);
2693 return CAIRO_STATUS_SUCCESS
;
2696 static cairo_status_t
2697 _inplace_src_opacity_spans (void *abstract_renderer
, int y
, int h
,
2698 const cairo_half_open_span_t
*spans
,
2701 cairo_image_span_renderer_t
*r
= abstract_renderer
;
2706 return CAIRO_STATUS_SUCCESS
;
2709 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2711 int len
= spans
[1].x
- spans
[0].x
;
2712 uint8_t m
= mul8_8(spans
[0].coverage
, r
->bpp
);
2714 if (spans
[0].x
!= x0
) {
2715 #if PIXMAN_HAS_OP_LERP
2716 pixman_image_composite32 (PIXMAN_OP_LERP_SRC
,
2717 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2718 x0
+ r
->u
.composite
.src_x
,
2719 y
+ r
->u
.composite
.src_y
,
2722 spans
[0].x
- x0
, h
);
2724 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE
,
2725 r
->mask
, NULL
, r
->u
.composite
.dst
,
2729 spans
[0].x
- x0
, h
);
2730 pixman_image_composite32 (PIXMAN_OP_ADD
,
2731 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2732 x0
+ r
->u
.composite
.src_x
,
2733 y
+ r
->u
.composite
.src_y
,
2736 spans
[0].x
- x0
, h
);
2740 mask
= (uint8_t *)pixman_image_get_data (r
->mask
);
2745 memset (mask
, m
, --len
);
2750 } while (--num_spans
> 1);
2752 if (spans
[0].x
!= x0
) {
2753 #if PIXMAN_HAS_OP_LERP
2754 pixman_image_composite32 (PIXMAN_OP_LERP_SRC
,
2755 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2756 x0
+ r
->u
.composite
.src_x
,
2757 y
+ r
->u
.composite
.src_y
,
2760 spans
[0].x
- x0
, h
);
2762 pixman_image_composite32 (PIXMAN_OP_OUT_REVERSE
,
2763 r
->mask
, NULL
, r
->u
.composite
.dst
,
2767 spans
[0].x
- x0
, h
);
2768 pixman_image_composite32 (PIXMAN_OP_ADD
,
2769 r
->src
, r
->mask
, r
->u
.composite
.dst
,
2770 x0
+ r
->u
.composite
.src_x
,
2771 y
+ r
->u
.composite
.src_y
,
2774 spans
[0].x
- x0
, h
);
2778 return CAIRO_STATUS_SUCCESS
;
2781 static void free_pixels (pixman_image_t
*image
, void *data
)
2786 static cairo_int_status_t
2787 inplace_renderer_init (cairo_image_span_renderer_t
*r
,
2788 const cairo_composite_rectangles_t
*composite
,
2789 cairo_antialias_t antialias
,
2790 cairo_bool_t needs_clip
)
2792 cairo_image_surface_t
*dst
= (cairo_image_surface_t
*)composite
->surface
;
2795 if (composite
->mask_pattern
.base
.type
!= CAIRO_PATTERN_TYPE_SOLID
)
2796 return CAIRO_INT_STATUS_UNSUPPORTED
;
2798 r
->base
.render_rows
= NULL
;
2799 r
->bpp
= composite
->mask_pattern
.solid
.color
.alpha_short
>> 8;
2801 if (composite
->source_pattern
.base
.type
== CAIRO_PATTERN_TYPE_SOLID
) {
2802 const cairo_color_t
*color
;
2804 color
= &composite
->source_pattern
.solid
.color
;
2805 if (composite
->op
== CAIRO_OPERATOR_CLEAR
)
2806 color
= CAIRO_COLOR_TRANSPARENT
;
2808 if (fill_reduces_to_source (composite
->op
, color
, dst
, &r
->u
.fill
.pixel
)) {
2809 /* Use plain C for the fill operations as the span length is
2810 * typically small, too small to payback the startup overheads of
2813 if (r
->bpp
== 0xff) {
2814 switch (dst
->format
) {
2815 case CAIRO_FORMAT_A8
:
2816 r
->base
.render_rows
= _fill_a8_lerp_opaque_spans
;
2818 case CAIRO_FORMAT_RGB24
:
2819 case CAIRO_FORMAT_ARGB32
:
2820 r
->base
.render_rows
= _fill_xrgb32_lerp_opaque_spans
;
2822 case CAIRO_FORMAT_A1
:
2823 case CAIRO_FORMAT_RGB16_565
:
2824 case CAIRO_FORMAT_RGB30
:
2825 case CAIRO_FORMAT_INVALID
:
2829 switch (dst
->format
) {
2830 case CAIRO_FORMAT_A8
:
2831 r
->base
.render_rows
= _fill_a8_lerp_spans
;
2833 case CAIRO_FORMAT_RGB24
:
2834 case CAIRO_FORMAT_ARGB32
:
2835 r
->base
.render_rows
= _fill_xrgb32_lerp_spans
;
2837 case CAIRO_FORMAT_A1
:
2838 case CAIRO_FORMAT_RGB16_565
:
2839 case CAIRO_FORMAT_RGB30
:
2840 case CAIRO_FORMAT_INVALID
:
2844 r
->u
.fill
.data
= dst
->data
;
2845 r
->u
.fill
.stride
= dst
->stride
;
2847 } else if ((dst
->format
== CAIRO_FORMAT_ARGB32
|| dst
->format
== CAIRO_FORMAT_RGB24
) &&
2848 (composite
->op
== CAIRO_OPERATOR_SOURCE
||
2849 (composite
->op
== CAIRO_OPERATOR_OVER
&&
2850 (dst
->base
.is_clear
|| (dst
->base
.content
& CAIRO_CONTENT_ALPHA
) == 0))) &&
2851 composite
->source_pattern
.base
.type
== CAIRO_PATTERN_TYPE_SURFACE
&&
2852 composite
->source_pattern
.surface
.surface
->backend
->type
== CAIRO_SURFACE_TYPE_IMAGE
&&
2853 to_image_surface(composite
->source_pattern
.surface
.surface
)->format
== dst
->format
)
2855 cairo_image_surface_t
*src
=
2856 to_image_surface(composite
->source_pattern
.surface
.surface
);
2859 if (_cairo_matrix_is_integer_translation(&composite
->source_pattern
.base
.matrix
,
2861 composite
->bounded
.x
+ tx
>= 0 &&
2862 composite
->bounded
.y
+ ty
>= 0 &&
2863 composite
->bounded
.x
+ composite
->bounded
.width
+ tx
<= src
->width
&&
2864 composite
->bounded
.y
+ composite
->bounded
.height
+ ty
<= src
->height
) {
2866 assert(PIXMAN_FORMAT_BPP(dst
->pixman_format
) == 32);
2867 r
->u
.blit
.stride
= dst
->stride
;
2868 r
->u
.blit
.data
= dst
->data
;
2869 r
->u
.blit
.src_stride
= src
->stride
;
2870 r
->u
.blit
.src_data
= src
->data
+ src
->stride
* ty
+ tx
* 4;
2871 r
->base
.render_rows
= _blit_xrgb32_lerp_spans
;
2874 if (r
->base
.render_rows
== NULL
) {
2875 const cairo_pattern_t
*src
= &composite
->source_pattern
.base
;
2878 if (composite
->is_bounded
== 0)
2879 return CAIRO_INT_STATUS_UNSUPPORTED
;
2881 r
->base
.render_rows
= r
->bpp
== 0xff ? _inplace_spans
: _inplace_opacity_spans
;
2882 width
= (composite
->bounded
.width
+ 3) & ~3;
2884 r
->u
.composite
.run_length
= 8;
2885 if (src
->type
== CAIRO_PATTERN_TYPE_LINEAR
||
2886 src
->type
== CAIRO_PATTERN_TYPE_RADIAL
)
2887 r
->u
.composite
.run_length
= 256;
2888 if (dst
->base
.is_clear
&&
2889 (composite
->op
== CAIRO_OPERATOR_SOURCE
||
2890 composite
->op
== CAIRO_OPERATOR_OVER
||
2891 composite
->op
== CAIRO_OPERATOR_ADD
)) {
2892 r
->op
= PIXMAN_OP_SRC
;
2893 } else if (composite
->op
== CAIRO_OPERATOR_SOURCE
) {
2894 r
->base
.render_rows
= r
->bpp
== 0xff ? _inplace_src_spans
: _inplace_src_opacity_spans
;
2895 r
->u
.composite
.mask_y
= r
->composite
->unbounded
.y
;
2896 width
= (composite
->unbounded
.width
+ 3) & ~3;
2897 } else if (composite
->op
== CAIRO_OPERATOR_CLEAR
) {
2898 r
->op
= PIXMAN_OP_OUT_REVERSE
;
2901 r
->op
= _pixman_operator (composite
->op
);
2904 r
->src
= _pixman_image_for_pattern (dst
, src
, FALSE
,
2905 &composite
->bounded
,
2906 &composite
->source_sample_area
,
2907 &r
->u
.composite
.src_x
, &r
->u
.composite
.src_y
);
2908 if (unlikely (r
->src
== NULL
))
2909 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
2911 /* Create an effectively unbounded mask by repeating the single line */
2913 if (width
> SZ_BUF
) {
2914 buf
= malloc (width
);
2915 if (unlikely (buf
== NULL
)) {
2916 pixman_image_unref (r
->src
);
2917 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
2920 r
->mask
= pixman_image_create_bits (PIXMAN_a8
,
2921 width
, composite
->unbounded
.height
,
2922 (uint32_t *)buf
, 0);
2923 if (unlikely (r
->mask
== NULL
)) {
2924 pixman_image_unref (r
->src
);
2927 return _cairo_error(CAIRO_STATUS_NO_MEMORY
);
2931 pixman_image_set_destroy_function (r
->mask
, free_pixels
, buf
);
2933 r
->u
.composite
.dst
= dst
->pixman_image
;
2936 return CAIRO_INT_STATUS_SUCCESS
;
2939 static cairo_int_status_t
2940 span_renderer_init (cairo_abstract_span_renderer_t
*_r
,
2941 const cairo_composite_rectangles_t
*composite
,
2942 cairo_antialias_t antialias
,
2943 cairo_bool_t needs_clip
)
2945 cairo_image_span_renderer_t
*r
= (cairo_image_span_renderer_t
*)_r
;
2946 cairo_image_surface_t
*dst
= (cairo_image_surface_t
*)composite
->surface
;
2947 const cairo_pattern_t
*source
= &composite
->source_pattern
.base
;
2948 cairo_operator_t op
= composite
->op
;
2949 cairo_int_status_t status
;
2951 TRACE ((stderr
, "%s: antialias=%d, needs_clip=%d\n", __FUNCTION__
,
2952 antialias
, needs_clip
));
2955 return CAIRO_INT_STATUS_UNSUPPORTED
;
2957 r
->composite
= composite
;
2960 r
->base
.finish
= NULL
;
2962 status
= mono_renderer_init (r
, composite
, antialias
, needs_clip
);
2963 if (status
!= CAIRO_INT_STATUS_UNSUPPORTED
)
2966 status
= inplace_renderer_init (r
, composite
, antialias
, needs_clip
);
2967 if (status
!= CAIRO_INT_STATUS_UNSUPPORTED
)
2972 if (op
== CAIRO_OPERATOR_CLEAR
) {
2973 #if PIXMAN_HAS_OP_LERP
2974 op
= PIXMAN_OP_LERP_CLEAR
;
2976 source
= &_cairo_pattern_white
.base
;
2977 op
= PIXMAN_OP_OUT_REVERSE
;
2979 } else if (dst
->base
.is_clear
&&
2980 (op
== CAIRO_OPERATOR_SOURCE
||
2981 op
== CAIRO_OPERATOR_OVER
||
2982 op
== CAIRO_OPERATOR_ADD
)) {
2984 } else if (op
== CAIRO_OPERATOR_SOURCE
) {
2985 if (_cairo_pattern_is_opaque (&composite
->source_pattern
.base
,
2986 &composite
->source_sample_area
))
2988 op
= PIXMAN_OP_OVER
;
2992 #if PIXMAN_HAS_OP_LERP
2993 op
= PIXMAN_OP_LERP_SRC
;
2995 return CAIRO_INT_STATUS_UNSUPPORTED
;
2999 op
= _pixman_operator (op
);
3003 r
->src
= _pixman_image_for_pattern (dst
, source
, FALSE
,
3004 &composite
->unbounded
,
3005 &composite
->source_sample_area
,
3006 &r
->u
.mask
.src_x
, &r
->u
.mask
.src_y
);
3007 if (unlikely (r
->src
== NULL
))
3008 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
3011 if (composite
->mask_pattern
.base
.type
== CAIRO_PATTERN_TYPE_SOLID
) {
3012 r
->opacity
= composite
->mask_pattern
.solid
.color
.alpha
;
3014 pixman_image_t
*mask
;
3017 mask
= _pixman_image_for_pattern (dst
,
3018 &composite
->mask_pattern
.base
,
3020 &composite
->unbounded
,
3021 &composite
->mask_sample_area
,
3023 if (unlikely (mask
== NULL
))
3024 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
3026 /* XXX Component-alpha? */
3027 if ((dst
->base
.content
& CAIRO_CONTENT_COLOR
) == 0 &&
3028 _cairo_pattern_is_opaque (source
, &composite
->source_sample_area
))
3030 pixman_image_unref (r
->src
);
3032 r
->u
.mask
.src_x
= mask_x
;
3033 r
->u
.mask
.src_y
= mask_y
;
3038 pixman_image_unref (mask
);
3039 return CAIRO_INT_STATUS_UNSUPPORTED
;
3043 r
->u
.mask
.extents
= composite
->unbounded
;
3044 r
->u
.mask
.stride
= (r
->u
.mask
.extents
.width
+ 3) & ~3;
3045 if (r
->u
.mask
.extents
.height
* r
->u
.mask
.stride
> SZ_BUF
) {
3046 r
->mask
= pixman_image_create_bits (PIXMAN_a8
,
3047 r
->u
.mask
.extents
.width
,
3048 r
->u
.mask
.extents
.height
,
3051 r
->base
.render_rows
= _cairo_image_spans
;
3052 r
->base
.finish
= NULL
;
3054 r
->mask
= pixman_image_create_bits (PIXMAN_a8
,
3055 r
->u
.mask
.extents
.width
,
3056 r
->u
.mask
.extents
.height
,
3057 (uint32_t *)r
->_buf
, r
->u
.mask
.stride
);
3059 r
->base
.render_rows
= _cairo_image_spans_and_zero
;
3060 r
->base
.finish
= _cairo_image_finish_spans_and_zero
;
3062 if (unlikely (r
->mask
== NULL
))
3063 return _cairo_error (CAIRO_STATUS_NO_MEMORY
);
3065 r
->u
.mask
.data
= (uint8_t *) pixman_image_get_data (r
->mask
);
3066 r
->u
.mask
.stride
= pixman_image_get_stride (r
->mask
);
3068 r
->u
.mask
.extents
.height
+= r
->u
.mask
.extents
.y
;
3069 return CAIRO_STATUS_SUCCESS
;
3073 span_renderer_fini (cairo_abstract_span_renderer_t
*_r
,
3074 cairo_int_status_t status
)
3076 cairo_image_span_renderer_t
*r
= (cairo_image_span_renderer_t
*) _r
;
3078 TRACE ((stderr
, "%s\n", __FUNCTION__
));
3080 if (likely (status
== CAIRO_INT_STATUS_SUCCESS
)) {
3084 if (likely (status
== CAIRO_INT_STATUS_SUCCESS
&& r
->bpp
== 0)) {
3085 const cairo_composite_rectangles_t
*composite
= r
->composite
;
3087 pixman_image_composite32 (r
->op
, r
->src
, r
->mask
,
3088 to_pixman_image (composite
->surface
),
3089 composite
->unbounded
.x
+ r
->u
.mask
.src_x
,
3090 composite
->unbounded
.y
+ r
->u
.mask
.src_y
,
3092 composite
->unbounded
.x
,
3093 composite
->unbounded
.y
,
3094 composite
->unbounded
.width
,
3095 composite
->unbounded
.height
);
3099 pixman_image_unref (r
->src
);
3101 pixman_image_unref (r
->mask
);
3105 const cairo_compositor_t
*
3106 _cairo_image_spans_compositor_get (void)
3108 static cairo_spans_compositor_t spans
;
3109 static cairo_compositor_t shape
;
3111 if (spans
.base
.delegate
== NULL
) {
3112 _cairo_shape_mask_compositor_init (&shape
,
3113 _cairo_image_traps_compositor_get());
3114 shape
.glyphs
= NULL
;
3116 _cairo_spans_compositor_init (&spans
, &shape
);
3119 #if PIXMAN_HAS_OP_LERP
3120 spans
.flags
|= CAIRO_SPANS_COMPOSITOR_HAS_LERP
;
3123 //spans.acquire = acquire;
3124 //spans.release = release;
3125 spans
.fill_boxes
= fill_boxes
;
3126 spans
.draw_image_boxes
= draw_image_boxes
;
3127 //spans.copy_boxes = copy_boxes;
3128 spans
.pattern_to_surface
= _cairo_image_source_create_for_pattern
;
3129 //spans.check_composite_boxes = check_composite_boxes;
3130 spans
.composite_boxes
= composite_boxes
;
3131 //spans.check_span_renderer = check_span_renderer;
3132 spans
.renderer_init
= span_renderer_init
;
3133 spans
.renderer_fini
= span_renderer_fini
;