/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the limbs.  */
enum bitint_prec_kind
{
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
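
/* E.g. with a 64-bit limb (DImode) and MAX_FIXED_MODE_SIZE of 128 as on
   x86_64, small_max_prec ends up as 64 and huge_min_prec as 256 (4 limbs),
   so _BitInt(64) is small, _BitInt(100) is middle and _BitInt(512) is
   huge, with large covering the precisions in between.  */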
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}
/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  min_prec = min_prec2;
	  ext = -1;
	}
    }
  return min_prec;
}
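
/* E.g. the constant 5 in a signed _BitInt(256) yields min_prec 3 with
   ext 0, while an all-ones unsigned _BitInt(256) constant yields
   min_prec 0 with ext -1, so every limb is just the constant -1.  */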
/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

static tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
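
/* E.g. on x86_64 _BitInt(100) is middle, so an operand of that type is
   replaced here by a cast to the matching 100-bit integer type (backed
   by TImode) on which the arithmetic can be performed directly.  */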
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

static bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
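
/* E.g. a = (b + c) & d on _BitInt(512) is fully mergeable into one
   loop over the limbs, while multiplication is not and is lowered
   through a libgcc helper call instead.  */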
/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

static int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;
  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	  realpart = g;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	  cast = use_stmt;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
	 if both appear, the lowering wants to emit all the code
	 at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
	    return 0;
	  if (gsi_stmt (gsi) == stmt)
	    break;
	  /* If realpart is too far from stmt, punt as well.
	     Usually it will appear right after it.  */
	  if (++cnt == 32)
	    return 0;
	}
      while (1);
    }
  return 2;
}
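
/* E.g. the common

     if (__builtin_add_overflow (a, b, &r))

   pattern matches this shape: .ADD_OVERFLOW whose REALPART_EXPR is
   stored to r and whose IMAGPART_EXPR has a single use cast to a
   non-_BitInt integral type, so 2 is returned for it.  */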
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

static tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
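
/* A minimal sketch of the m_data protocol described above: a handler
   that needs two stashed trees does

     if (m_first)
       {
	 m_data.safe_push (t1);
	 m_data.safe_push (t2);
       }
     tree u1 = m_data[m_data_cnt];
     tree u2 = m_data[m_data_cnt + 1];
     m_data_cnt += 2;

   bumping m_data_cnt by the same amount whether or not m_first is set.  */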
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
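
/* E.g. for _BitInt(135) with a 64-bit limb, idx 0 and 1 yield the full
   m_limb_type while idx 2 yields a 7-bit integer type (135 % 64).  */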
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
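
/* E.g. for a decl with 64-bit limbs and constant idx 2 this builds
   MEM_REF <&var, offset 16>; with a non-constant idx the object is
   viewed as an array of limbs and accessed with ARRAY_REF instead.  */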
/* Emit a half diamond, i.e. if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond, i.e. if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with a full diamond in it, i.e.
   if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 for contiguous memory.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	{
	  t = m_data[m_data_cnt + 1];
	  if (m_data[m_data_cnt + 1] == integer_type_node)
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	      unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	      int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	      tree c = m_data[m_data_cnt];
	      unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      if (min_prec > (unsigned) limb_prec)
		{
		  c = limb_access (TREE_TYPE (op), c, idx, false);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
		  insert_before (g);
		  c = gimple_assign_lhs (g);
		}
	      tree c2 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      t = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (t, edge_true->dest);
	      add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	      m_data[m_data_cnt + 1] = t;
	    }
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);
  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
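
/* E.g. on targets providing the uaddc<mode>5 pattern each limb of a
   _BitInt addition becomes a single .UADDC (limb1, limb2, carry_in)
   call whose REALPART_EXPR is the result limb and whose IMAGPART_EXPR
   is the carry passed on to the next, more significant limb.  */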
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);
  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
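
/* E.g. for x << 3 each result limb is the current rhs1 limb shifted
   left by 3, ORed with the 3 most significant bits (data_in shifted
   right by limb_prec - 3) of the previous, less significant limb.  */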
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision that is not the case.  */
	  || (!m_var_msb
	      && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
	      && (!m_upwards_2limb
		  || (m_upwards_2limb * limb_prec
		      < TYPE_PRECISION (lhs_type)))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && (m_upwards_2limb * limb_prec
		  <= ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type))))
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idx, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      if (!save_cast_conditional)
		{
		  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
		  insert_before (g);
		}
	      else
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = (e2 ? m_data[save_data_cnt + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else if (m_bb && m_data[save_data_cnt])
		t = m_data[save_data_cnt];
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1, rext);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2, rext);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
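
/* E.g. for a cast of _BitInt(128) to _BitInt(256) with 64-bit limbs,
   limbs 0 and 1 come directly from the operand and limbs 2 and 3 are
   filled with the extension: zero for unsigned casts, a broadcast of
   the sign bit of limb 1 for signed ones.  */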
/* Helper function for handle_stmt method, handle a load from memory.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
	  break;
    }
  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
	goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
	 handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	goto normal_load;
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
	{
	  bitoffset = 0;
	  var_field_off = true;
	}
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      if (m_first)
	{
	  if (m_upwards)
	    {
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
	      tree iv = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv, t);
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_edge)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
			= profile_probability::very_unlikely ();
		      m_gsi = gsi_after_labels (e->dest);
		      if (gsi_bb (save_gsi) == e->src)
			{
			  if (gsi_end_p (save_gsi))
			    save_gsi = gsi_end_bb (e->dest);
			  else
			    save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
			}
		      if (m_preheader_bb == e->src)
			m_preheader_bb = e->dest;
		    }
		}
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	      tree out;
	      prepare_data_in_out (iv, idx, &out);
	      out = m_data[m_data_cnt];
	      m_data.safe_push (out);
	    }
	  else
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
	{
	  gcc_assert (!m_bitfld_load);
	  m_bitfld_load = m_data_cnt;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  gcc_assert (i * limb_prec < prec);
	  nidx1 = size_int (i + bo_idx + 1);
	  if ((i + 1) * limb_prec > prec)
	    {
	      prec %= limb_prec;
	      if (prec + bo_bit <= (unsigned) limb_prec)
		nidx1 = NULL_TREE;
	    }
	  if (!iv)
	    nidx0 = size_int (i + bo_idx);
	}
      else
	{
	  if (!iv)
	    {
	      if (bo_idx == 0)
		nidx0 = idx;
	      else
		{
		  nidx0 = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  nidx1 = make_ssa_name (sizetype);
	  g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
				   size_int (bo_idx + 1));
	  insert_before (g);
	}

      tree iv2 = NULL_TREE;
      if (nidx0)
	{
	  tree t = limb_access (rhs_type, nrhs1, nidx0, true);
	  iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	  insert_before (g);
	  gcc_assert (!eh);
	}
      if (nidx1)
	{
	  bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  if (conditional)
	    if ((prec % limb_prec) == 0
		|| ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
	      conditional = false;
	  edge edge_true = NULL, edge_false = NULL;
	  if (conditional)
	    {
	      g = gimple_build_cond (NE_EXPR, idx,
				     size_int (prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      if_then (g, profile_probability::likely (),
		       edge_true, edge_false);
	    }
	  tree t = limb_access (rhs_type, nrhs1, nidx1, true);
	  if (m_upwards_2limb
	      && !m_first
	      && !m_bitfld_load
	      && !tree_fits_uhwi_p (idx))
	    iv2 = m_data[m_data_cnt + 1];
	  else
	    {
	      iv2 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv2, t);
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_edge)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      m_gsi = gsi_after_labels (e->dest);
		      make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
			= profile_probability::very_unlikely ();
		    }
		}
	    }
	  if (conditional)
	    {
	      tree iv3 = make_ssa_name (m_limb_type);
	      if (eh)
		edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
	      gphi *phi = create_phi_node (iv3, edge_true->dest);
	      add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, build_zero_cst (m_limb_type),
			   edge_false, UNKNOWN_LOCATION);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      iv2 = iv3;
	    }
	}
      g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
			       iv, build_int_cst (unsigned_type_node, bo_bit));
      insert_before (g);
      iv = gimple_assign_lhs (g);
      if (iv2)
	{
	  g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
				   iv2, build_int_cst (unsigned_type_node,
						       limb_prec - bo_bit));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
				   gimple_assign_lhs (g), iv);
	  insert_before (g);
	  iv = gimple_assign_lhs (g);
	  if (m_data[m_data_cnt])
	    m_data[m_data_cnt] = iv2;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
	    iv = add_cast (atype, iv);
	}
      m_data_cnt += 3;
      return iv;
    }

normal_load:
  /* Use write_p = true for loads with EH edges to make
     sure limb_access doesn't add a cast as separate
     statement after it.  */
  rhs1 = limb_access (rhs_type, rhs1, idx, eh);
  tree ret = make_ssa_name (TREE_TYPE (rhs1));
  g = gimple_build_assign (ret, rhs1);
  insert_before (g);
  if (eh)
    {
      maybe_duplicate_eh_stmt (g, stmt);
      if (eh_edge)
	{
	  edge e = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e->dest);
	  make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
	    = profile_probability::very_unlikely ();
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
	    ret = add_cast (atype, ret);
	}
    }
  return ret;
}
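
/* E.g. a _BitInt bit-field that starts at bit 3 within its
   DECL_BIT_FIELD_REPRESENTATIVE is read limb by limb through the
   representative: each result limb is the current representative limb
   shifted right by 3, ORed with the next one shifted left by
   limb_prec - 3.  */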
/* Return a limb IDX from a mergeable statement STMT.  */

tree
bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
{
  tree lhs, rhs1, rhs2 = NULL_TREE;
  gimple *g;
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_load_p (stmt))
	return handle_load (stmt, idx);
      switch (gimple_assign_rhs_code (stmt))
	{
	case BIT_AND_EXPR:
	case BIT_IOR_EXPR:
	case BIT_XOR_EXPR:
	  rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
	  /* FALLTHRU */
	case BIT_NOT_EXPR:
	  rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  lhs = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
				   rhs1, rhs2);
	  insert_before (g);
	  return lhs;
	case PLUS_EXPR:
	case MINUS_EXPR:
	  rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
	  return handle_plus_minus (gimple_assign_rhs_code (stmt),
				    rhs1, rhs2, idx);
	case NEGATE_EXPR:
	  rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  rhs1 = build_zero_cst (TREE_TYPE (rhs2));
	  return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
	case LSHIFT_EXPR:
	  return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
						idx),
				gimple_assign_rhs2 (stmt), idx);
	case SSA_NAME:
	case INTEGER_CST:
	  return handle_operand (gimple_assign_rhs1 (stmt), idx);
	CASE_CONVERT:
	  return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt), idx);
	case VIEW_CONVERT_EXPR:
	  return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
			      TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
			      idx);
	default:
	  break;
	}
      break;
    default:
      break;
    }
  gcc_unreachable ();
}
/* Return minimum precision of OP at STMT.
   Positive value is minimum precision above which all bits
   are zero, negative means all bits above negation of the
   value are copies of the sign bit.  */

static int
range_to_prec (tree op, gimple *stmt)
{
  int_range_max r;
  wide_int w;
  tree type = TREE_TYPE (op);
  unsigned int prec = TYPE_PRECISION (type);

  if (!optimize
      || !get_range_query (cfun)->range_of_expr (r, op, stmt)
      || r.undefined_p ())
    {
      if (TYPE_UNSIGNED (type))
	return prec;
      else
	return MIN ((int) -prec, -2);
    }

  if (!TYPE_UNSIGNED (TREE_TYPE (op)))
    {
      w = r.lower_bound ();
      if (wi::neg_p (w))
	{
	  int min_prec1 = wi::min_precision (w, SIGNED);
	  w = r.upper_bound ();
	  int min_prec2 = wi::min_precision (w, SIGNED);
	  int min_prec = MAX (min_prec1, min_prec2);
	  return MIN (-min_prec, -2);
	}
    }

  w = r.upper_bound ();
  int min_prec = wi::min_precision (w, UNSIGNED);
  return MAX (min_prec, 1);
}
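
/* E.g. if the range query proves an operand is always in [0, 1000],
   this returns 10; for a signed _BitInt(256) operand with no range
   information it returns -256.  */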
2035 /* Return address of the first limb of OP and write into *PREC
2036 its precision. If positive, the operand is zero extended
2037 from that precision, if it is negative, the operand is sign-extended
2038 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2039 otherwise *PREC_STORED is prec from the innermost call without
2040 range optimizations. */
tree
bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
					int *prec_stored, int *prec)
{
  location_t loc_save = m_loc;
  if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
       || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
      && TREE_CODE (op) != INTEGER_CST)
    {
    do_int:
      *prec = range_to_prec (op, stmt);
      bitint_prec_kind kind = bitint_prec_small;
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
      if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
	kind = bitint_precision_kind (TREE_TYPE (op));
      if (kind == bitint_prec_middle)
	{
	  tree type = NULL_TREE;
	  op = maybe_cast_middle_bitint (&m_gsi, op, type);
	}
      tree op_type = TREE_TYPE (op);
      unsigned HOST_WIDE_INT nelts
	= CEIL (TYPE_PRECISION (op_type), limb_prec);
      /* Add support for 3 or more limbs filled in from normal
	 integral type if this assert fails.  If no target chooses
	 limb mode smaller than half of largest supported normal
	 integral type, this will not be needed.  */
      gcc_assert (nelts <= 2);
      if (prec_stored)
	*prec_stored = (TYPE_UNSIGNED (op_type)
			? TYPE_PRECISION (op_type)
			: -TYPE_PRECISION (op_type));
      if (*prec <= limb_prec && *prec >= -limb_prec)
	{
	  nelts = 1;
	  if (prec_stored)
	    {
	      if (TYPE_UNSIGNED (op_type))
		{
		  if (*prec_stored > limb_prec)
		    *prec_stored = limb_prec;
		}
	      else if (*prec_stored < -limb_prec)
		*prec_stored = -limb_prec;
	    }
	}
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      tree var = create_tmp_var (atype);
      TREE_ADDRESSABLE (var) = 1;
      tree t1 = op;
      if (!useless_type_conversion_p (m_limb_type, op_type))
	t1 = add_cast (m_limb_type, t1);
      tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
		       NULL_TREE, NULL_TREE);
      gimple *g = gimple_build_assign (v, t1);
      insert_before (g);
      if (nelts > 1)
	{
	  tree lp = build_int_cst (unsigned_type_node, limb_prec);
	  g = gimple_build_assign (make_ssa_name (op_type),
				   RSHIFT_EXPR, op, lp);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  t2 = add_cast (m_limb_type, t2);
	  v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (v, t2);
	  insert_before (g);
	}
      tree ret = build_fold_addr_expr (var);
      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
	{
	  tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
	  g = gimple_build_assign (var, clobber);
	  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
	}
      m_loc = loc_save;
      return ret;
    }
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  gimple *g = SSA_NAME_DEF_STMT (op);
	  tree ret;
	  m_loc = gimple_location (g);
	  if (gimple_assign_load_p (g))
	    {
	      *prec = range_to_prec (op, NULL);
	      if (prec_stored)
		*prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
				? TYPE_PRECISION (TREE_TYPE (op))
				: -TYPE_PRECISION (TREE_TYPE (op)));
	      ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
	      ret = force_gimple_operand_gsi (&m_gsi, ret, true,
					      NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else if (gimple_code (g) == GIMPLE_NOP)
	    {
	      *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
	      if (prec_stored)
		*prec_stored = *prec;
	      tree var = create_tmp_var (m_limb_type);
	      TREE_ADDRESSABLE (var) = 1;
	      ret = build_fold_addr_expr (var);
	      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
		{
		  tree clobber = build_clobber (m_limb_type,
						CLOBBER_STORAGE_END);
		  g = gimple_build_assign (var, clobber);
		  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      gcc_assert (gimple_assign_cast_p (g));
	      tree rhs1 = gimple_assign_rhs1 (g);
	      bitint_prec_kind kind = bitint_prec_small;
	      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
	      if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
		kind = bitint_precision_kind (TREE_TYPE (rhs1));
	      if (kind >= bitint_prec_large)
		{
		  tree lhs_type = TREE_TYPE (op);
		  tree rhs_type = TREE_TYPE (rhs1);
		  int prec_stored_val = 0;
		  ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
		  if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
		    {
		      if (TYPE_UNSIGNED (lhs_type)
			  && !TYPE_UNSIGNED (rhs_type))
			gcc_assert (*prec >= 0 || prec_stored == NULL);
		    }
		  else
		    {
		      if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
			;
		      else if (TYPE_UNSIGNED (lhs_type))
			{
			  gcc_assert (*prec > 0
				      || prec_stored_val > 0
				      || (-prec_stored_val
					  >= TYPE_PRECISION (lhs_type)));
			  *prec = TYPE_PRECISION (lhs_type);
			}
		      else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
			;
		      else
			*prec = -TYPE_PRECISION (lhs_type);
		    }
		}
	      else
		{
		  op = rhs1;
		  stmt = g;
		  goto do_int;
		}
	    }
	  m_loc = loc_save;
	  return ret;
	}
      else
	{
	  int p = var_to_partition (m_map, op);
	  gcc_assert (m_vars[p] != NULL_TREE);
	  *prec = range_to_prec (op, stmt);
	  if (prec_stored)
	    *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
			    ? TYPE_PRECISION (TREE_TYPE (op))
			    : -TYPE_PRECISION (TREE_TYPE (op)));
	  return build_fold_addr_expr (m_vars[p]);
	}
    case INTEGER_CST:
      {
	unsigned int min_prec, mp;
	tree type;
	wide_int w = wi::to_wide (op);
	if (tree_int_cst_sgn (op) >= 0)
	  {
	    min_prec = wi::min_precision (w, UNSIGNED);
	    *prec = MAX (min_prec, 1);
	  }
	else
	  {
	    min_prec = wi::min_precision (w, SIGNED);
	    *prec = MIN ((int) -min_prec, -2);
	  }
	mp = CEIL (min_prec, limb_prec) * limb_prec;
	if (mp == 0)
	  mp = 1;
	if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
	    && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
		|| TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
	  type = TREE_TYPE (op);
	else
	  type = build_bitint_type (mp, 1);
	if (TREE_CODE (type) != BITINT_TYPE
	    || bitint_precision_kind (type) == bitint_prec_small)
	  {
	    if (TYPE_PRECISION (type) <= limb_prec)
	      type = m_limb_type;
	    else
	      {
		while (bitint_precision_kind (mp) == bitint_prec_small)
		  mp += limb_prec;
		/* This case is for targets which e.g. have 64-bit
		   limb but categorize up to 128-bits _BitInts as
		   small.  We could use type of m_limb_type[2] and
		   similar instead to save space.  */
		type = build_bitint_type (mp, 1);
	      }
	  }
	if (prec_stored)
	  {
	    if (tree_int_cst_sgn (op) >= 0)
	      *prec_stored = MAX (TYPE_PRECISION (type), 1);
	    else
	      *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
	  }
	op = tree_output_constant_def (fold_convert (type, op));
	return build_fold_addr_expr (op);
      }
    default:
      gcc_unreachable ();
    }
}
/* Helper function, create a loop before the current location,
   start with sizetype INIT value from the preheader edge.  Return
   a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
   from the latch edge.  */
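
/* The emitted CFG looks roughly like this (illustrative sketch only;
   the loop body and exit condition are filled in later by the caller
   at m_gsi):

     preheader:
       ...
     header:
       idx = PHI <init (preheader), idx_next (latch)>
       ...caller-emitted body...
       if (cond) goto header; else goto exit;  */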
tree
bitint_large_huge::create_loop (tree init, tree *idx_next)
{
  if (!gsi_end_p (m_gsi))
    gsi_prev (&m_gsi);
  else
    m_gsi = gsi_last_bb (gsi_bb (m_gsi));
  edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
  e3->probability = profile_probability::very_unlikely ();
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = e3->probability.invert ();
  tree idx = make_ssa_name (sizetype);
  gphi *phi = create_phi_node (idx, e1->dest);
  add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
  *idx_next = make_ssa_name (sizetype);
  add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
  m_gsi = gsi_after_labels (e1->dest);
  m_bb = e1->dest;
  m_preheader_bb = e1->src;
  class loop *loop = alloc_loop ();
  loop->header = e1->dest;
  add_loop (loop, e1->src->loop_father);
  return idx;
}
/* Lower large/huge _BitInt statement mergeable or similar STMT which can be
   lowered using iteration from the least significant limb up to the most
   significant limb.  For large _BitInt it is emitted as straight line code
   before current location, for huge _BitInt as a loop handling two limbs
   at once, followed by handling up to limbs in straight line code (at most
   one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
   comparisons, in that case CMP_CODE should be the comparison code and
   CMP_OP1/CMP_OP2 the comparison operands.  */
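
/* As an illustration (not the emitted GIMPLE itself), for a huge
   _BitInt with prec == 9 * limb_prec + 17 the emitted structure is
   roughly:

     for (idx = 0; idx != 8; idx += 2)
       {
	 lhs[idx] = ...;	// limb idx
	 lhs[idx + 1] = ...;	// limb idx + 1
       }
     lhs[8] = ...;		// full limb, straight line code
     lhs[9] = ...;		// partial (17 bit) limb, straight line  */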
tree
bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
					 tree cmp_op1, tree cmp_op2)
{
  bool eq_p = cmp_code != ERROR_MARK;
  tree type;
  if (eq_p)
    type = TREE_TYPE (cmp_op1);
  else
    type = TREE_TYPE (gimple_assign_lhs (stmt));
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  tree lhs = gimple_get_lhs (stmt);
  tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[p] != NULL_TREE);
      m_lhs = lhs = m_vars[p];
    }
  unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  bool sext = false;
  tree ext = NULL_TREE, store_operand = NULL_TREE;
  bool eh = false;
  basic_block eh_pad = NULL;
  tree nlhs = NULL_TREE;
  unsigned HOST_WIDE_INT bo_idx = 0;
  unsigned HOST_WIDE_INT bo_bit = 0;
  tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
  if (gimple_store_p (stmt))
    {
      store_operand = gimple_assign_rhs1 (stmt);
      eh = stmt_ends_bb_p (stmt);
      if (eh)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		eh_pad = e->dest;
		break;
	      }
	}
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	{
	  tree fld = TREE_OPERAND (lhs, 1);
	  gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
	  poly_int64 bitoffset;
	  poly_uint64 field_offset, repr_offset;
	  if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT)
	      == 0)
	    nlhs = lhs;
	  else
	    {
	      bool var_field_off = false;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		{
		  bitoffset = 0;
		  var_field_off = true;
		}
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
			     TREE_OPERAND (lhs, 0), repr,
			     var_field_off
			     ? TREE_OPERAND (lhs, 2) : NULL_TREE);
	      HOST_WIDE_INT bo = bitoffset.to_constant ();
	      bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
	      bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
	    }
	}
    }
  if ((store_operand
       && TREE_CODE (store_operand) == SSA_NAME
       && (m_names == NULL
	   || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
       && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
      || gimple_assign_cast_p (stmt))
    {
      rhs1 = gimple_assign_rhs1 (store_operand
				 ? SSA_NAME_DEF_STMT (store_operand)
				 : stmt);
      /* Optimize mergeable ops ending with widening cast to _BitInt
	 (or followed by store).  We can lower just the limbs of the
	 cast operand and widen afterwards.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
	  && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	  && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
		    limb_prec) < CEIL (prec, limb_prec)
	      || (kind == bitint_prec_huge
		  && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
	{
	  store_operand = rhs1;
	  prec = TYPE_PRECISION (TREE_TYPE (rhs1));
	  kind = bitint_precision_kind (TREE_TYPE (rhs1));
	  if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	    sext = true;
	}
    }
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL (prec, limb_prec);
  else
    {
      rem = (prec % (2 * limb_prec));
      end = (prec - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec);
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  basic_block edge_bb = NULL;
  if (eq_p)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      if (kind == bitint_prec_large)
	m_gsi = gsi_end_bb (edge_bb);
    }
  else
    m_after_stmt = stmt;
  if (kind != bitint_prec_large)
    m_upwards_2limb = end;
  m_upwards = true;

  bool separate_ext
    = (prec != (unsigned) TYPE_PRECISION (type)
       && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
	   > CEIL (prec, limb_prec)));

  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (end + (i > 2));
      if (eq_p)
	{
	  rhs1 = handle_operand (cmp_op1, idx);
	  tree rhs2 = handle_operand (cmp_op2, idx);
	  g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e1 = split_block (gsi_bb (m_gsi), g);
	  e1->flags = EDGE_FALSE_VALUE;
	  edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	  e1->probability = profile_probability::unlikely ();
	  e2->probability = e1->probability.invert ();
	  if (i == 0)
	    set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	  m_gsi = gsi_after_labels (e1->dest);
	}
      else
	{
	  if (store_operand)
	    rhs1 = handle_operand (store_operand, idx);
	  else
	    rhs1 = handle_stmt (stmt, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (sext && i == cnt - 1)
	    ext = rhs1;
	  tree nidx = idx;
	  if (bo_idx)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  basic_block new_bb = NULL;
	  /* Handle stores into bit-fields.  */
	  if (bo_bit)
	    {
	      if (i == 0)
		{
		  edge e2 = NULL;
		  if (kind != bitint_prec_large)
		    {
		      prepare_data_in_out (build_zero_cst (m_limb_type),
					   idx, &bf_next);
		      bf_next = m_data.pop ();
		      bf_cur = m_data.pop ();
		      g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
					     NULL_TREE, NULL_TREE);
		      edge edge_true;
		      if_then_else (g, profile_probability::unlikely (),
				    edge_true, e2);
		      new_bb = e2->dest;
		    }
		  tree ftype
		    = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
		  tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
				     bitsize_int (limb_prec - bo_bit),
				     bitsize_int (bo_idx * limb_prec + bo_bit));
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  insert_before (g);
		  if (eh)
		    {
		      maybe_duplicate_eh_stmt (g, stmt);
		      if (eh_pad)
			{
			  edge e = split_block (gsi_bb (m_gsi), g);
			  m_gsi = gsi_after_labels (e->dest);
			  make_edge (e->src, eh_pad, EDGE_EH)->probability
			    = profile_probability::very_unlikely ();
			}
		    }
		  if (kind == bitint_prec_large)
		    {
		      bf_cur = rhs1;
		      done = true;
		    }
		  else if (e2)
		    m_gsi = gsi_after_labels (e2->src);
		}
	      if (!done)
		{
		  tree t1 = make_ssa_name (m_limb_type);
		  tree t2 = make_ssa_name (m_limb_type);
		  tree t3 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
					   build_int_cst (unsigned_type_node,
							  limb_prec - bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
					   build_int_cst (unsigned_type_node,
							  bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		  if (bf_next && i == 1)
		    {
		      g = gimple_build_assign (bf_next, bf_cur);
		      insert_before (g);
		    }
		  bf_cur = rhs1;
		  rhs1 = t3;
		}
	    }
	  if (!done)
	    {
	      /* Handle bit-field access to partial last limb if needed.  */
	      if (nlhs
		  && i == cnt - 1
		  && tree_fits_uhwi_p (idx))
		{
		  unsigned int tprec = TYPE_PRECISION (type);
		  unsigned int rprec = tprec % limb_prec;
		  if (rprec + bo_bit < (unsigned) limb_prec)
		    {
		      tree ftype
			= build_nonstandard_integer_type (rprec + bo_bit, 1);
		      tree bfr = build3 (BIT_FIELD_REF, ftype,
					 unshare_expr (nlhs),
					 bitsize_int (rprec + bo_bit),
					 bitsize_int ((bo_idx
						       + tprec / limb_prec)
						      * limb_prec));
		      tree t = add_cast (ftype, rhs1);
		      g = gimple_build_assign (bfr, t);
		      done = true;
		      bf_cur = NULL_TREE;
		    }
		  else if (rprec + bo_bit == (unsigned) limb_prec)
		    bf_cur = NULL_TREE;
		}
	      /* Otherwise, stores to any other lhs.  */
	      if (!done)
		{
		  tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
					nidx, true);
		  g = gimple_build_assign (l, rhs1);
		}
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_pad)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      m_gsi = gsi_after_labels (e->dest);
		      make_edge (e->src, eh_pad, EDGE_EH)->probability
			= profile_probability::very_unlikely ();
		    }
		}
	      if (new_bb)
		m_gsi = gsi_after_labels (new_bb);
	    }
	}
      m_first = false;
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (2));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      if (eq_p)
		m_gsi = gsi_after_labels (edge_bb);
	      else
		m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }

  if (separate_ext)
    {
      if (sext)
	{
	  ext = add_cast (signed_type_for (m_limb_type), ext);
	  tree lpm1 = build_int_cst (unsigned_type_node,
				     limb_prec - 1);
	  tree n = make_ssa_name (TREE_TYPE (ext));
	  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	  insert_before (g);
	  ext = add_cast (m_limb_type, n);
	}
      else
	ext = build_zero_cst (m_limb_type);
      kind = bitint_precision_kind (type);
      unsigned start = CEIL (prec, limb_prec);
      prec = TYPE_PRECISION (type);
      idx = idx_first = idx_next = NULL_TREE;
      if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
	kind = bitint_prec_large;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec) - start;
      else
	{
	  rem = prec % limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = (bo_bit != 0) + 1 + (rem != 0);
	}
      for (unsigned i = 0; i < cnt; i++)
	{
	  if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
	    idx = size_int (start + i);
	  else if (i == cnt - 1 && (rem != 0))
	    idx = size_int (end);
	  else if (i == (bo_bit != 0))
	    idx = create_loop (size_int (start + i), &idx_next);
	  rhs1 = ext;
	  if (bf_cur != NULL_TREE && bf_cur != ext)
	    {
	      tree t1 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_bit));
	      insert_before (g);
	      if (integer_zerop (ext))
		rhs1 = t1;
	      else
		{
		  tree t2 = make_ssa_name (m_limb_type);
		  rhs1 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
					   build_int_cst (unsigned_type_node,
							  bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		}
	      bf_cur = ext;
	    }
	  tree nidx = idx;
	  if (bo_idx)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (nlhs && i == cnt - 1)
	    {
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = tprec % limb_prec;
	      if (rprec + bo_bit < (unsigned) limb_prec)
		{
		  tree ftype
		    = build_nonstandard_integer_type (rprec + bo_bit, 1);
		  tree bfr = build3 (BIT_FIELD_REF, ftype,
				     unshare_expr (nlhs),
				     bitsize_int (rprec + bo_bit),
				     bitsize_int ((bo_idx + tprec / limb_prec)
						  * limb_prec));
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  done = true;
		  bf_cur = NULL_TREE;
		}
	      else if (rprec + bo_bit == (unsigned) limb_prec)
		bf_cur = NULL_TREE;
	    }
	  /* Otherwise, stores to any other lhs.  */
	  if (!done)
	    {
	      tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
	      g = gimple_build_assign (l, rhs1);
	    }
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_pad)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  make_edge (e->src, eh_pad, EDGE_EH)->probability
		    = profile_probability::very_unlikely ();
		}
	    }
	  if (kind == bitint_prec_huge && i == (bo_bit != 0))
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }
  if (bf_cur != NULL_TREE)
    {
      unsigned int tprec = TYPE_PRECISION (type);
      unsigned int rprec = tprec % limb_prec;
      tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
      tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
			 bitsize_int (rprec + bo_bit),
			 bitsize_int ((bo_idx + tprec / limb_prec)
				      * limb_prec));
      rhs1 = bf_cur;
      if (bf_cur != ext)
	{
	  rhs1 = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
				   build_int_cst (unsigned_type_node,
						  limb_prec - bo_bit));
	  insert_before (g);
	}
      rhs1 = add_cast (ftype, rhs1);
      g = gimple_build_assign (bfr, rhs1);
      insert_before (g);
      if (eh)
	{
	  maybe_duplicate_eh_stmt (g, stmt);
	  if (eh_pad)
	    {
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      make_edge (e->src, eh_pad, EDGE_EH)->probability
		= profile_probability::very_unlikely ();
	    }
	}
    }

  if (gimple_store_p (stmt))
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (gimple_vdef (stmt));
      gsi_remove (&m_gsi, true);
    }
  if (eq_p)
    {
      lhs = make_ssa_name (boolean_type_node);
      basic_block bb = gimple_bb (stmt);
      gphi *phi = create_phi_node (lhs, bb);
      edge e = find_edge (gsi_bb (m_gsi), bb);
      unsigned int n = EDGE_COUNT (bb->preds);
      for (unsigned int i = 0; i < n; i++)
	{
	  edge e2 = EDGE_PRED (bb, i);
	  add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
		       e2, UNKNOWN_LOCATION);
	}
      cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      return lhs;
    }
  return NULL_TREE;
}
/* Handle a large/huge _BitInt comparison statement STMT other than
   EQ_EXPR/NE_EXPR.  CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
   lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR comparisons are
   lowered by iteration from the most significant limb downwards to
   the least significant one, for large _BitInt in straight line code,
   otherwise with most significant limb handled in
   straight line code followed by a loop handling one limb at a time.
   Comparisons with unsigned huge _BitInt with precisions which are
   multiples of limb precision can use just the loop and don't need to
   handle most significant limb before the loop.  The loop or straight
   line code jumps to final basic block if a particular pair of limbs
   is not equal.  */
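
/* Illustrative sketch only (the lowering emits GIMPLE conditions and
   CFG edges, not C); assuming signed operands x, y with most
   significant limb index p:

     if ((signed limb) x[p] > (signed limb) y[p]) goto decided;
     if ((signed limb) x[p] < (signed limb) y[p]) goto decided;
     for (idx = p - 1; ; --idx)
       {
	 if (x[idx] > y[idx]) goto decided;
	 if (x[idx] < y[idx]) goto decided;
	 if (idx == 0) break;
       }
     // all limbs equal: result is true for GE/LE, false for GT/LT  */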
tree
bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
					  tree cmp_op1, tree cmp_op2)
{
  tree type = TREE_TYPE (cmp_op1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  if (!TYPE_UNSIGNED (type)
      && integer_zerop (cmp_op2)
      && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
    {
      unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
      tree idx = size_int (end);
      m_data_cnt = 0;
      tree rhs1 = handle_operand (cmp_op1, idx);
      if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	}
      tree lhs = make_ssa_name (boolean_type_node);
      g = gimple_build_assign (lhs, cmp_code, rhs1,
			       build_zero_cst (TREE_TYPE (rhs1)));
      insert_before (g);
      cmp_code = NE_EXPR;
      return lhs;
    }

  unsigned cnt, rem = 0, end = 0;
  tree idx = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
  else
    {
      rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
      if (rem == 0 && !TYPE_UNSIGNED (type))
	rem = limb_prec;
      end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
      cnt = 1 + (rem != 0);
    }

  basic_block edge_bb = NULL;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_prev (&gsi);
  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
  edge_bb = e->src;
  m_gsi = gsi_end_bb (edge_bb);

  edge *edges = XALLOCAVEC (edge, cnt * 2);
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (cnt - i - 1);
      else if (i == cnt - 1)
	idx = create_loop (size_int (end - 1), &idx_next);
      else
	idx = size_int (end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      tree rhs2 = handle_operand (cmp_op2, idx);
      if (i == 0
	  && !TYPE_UNSIGNED (type)
	  && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	  rhs2 = add_cast (stype, rhs2);
	}
      g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      edge e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::likely ();
      e2->probability = e1->probability.invert ();
      if (i == 0)
	set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i] = e2;
      g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::unlikely ();
      e2->probability = e1->probability.invert ();
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i + 1] = e2;
      m_first = false;
      if (kind == bitint_prec_huge && i == cnt - 1)
	{
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  insert_before (g);
	  g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gsi_bb (m_gsi),
					       &true_edge, &false_edge);
	  m_gsi = gsi_after_labels (false_edge->dest);
	  m_bb = NULL;
	}
    }

  tree lhs = make_ssa_name (boolean_type_node);
  basic_block bb = gimple_bb (stmt);
  gphi *phi = create_phi_node (lhs, bb);
  for (unsigned int i = 0; i < cnt * 2; i++)
    {
      tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
		  ^ (i & 1)) ? boolean_true_node : boolean_false_node;
      add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
    }
  add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
		    ? boolean_true_node : boolean_false_node,
	       find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
  cmp_code = NE_EXPR;
  return lhs;
}
/* Lower large/huge _BitInt left and right shift except for left
   shift by < limb_prec constant.  */
void
bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gimple *final_stmt = gsi_stmt (m_gsi);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type);
  tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
  gimple *g;
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  /* Preparation code common for both left and right shifts.
     unsigned n1 = n % limb_prec;
     size_t n2 = n / limb_prec;
     size_t n3 = n1 != 0;
     unsigned n4 = (limb_prec - n1) % limb_prec;
     (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)).  */
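  /* E.g. (illustrative) with limb_prec == 64 and n == 70:
     n1 == 6, n2 == 1, n3 == 1 and n4 == 58; with n == 128:
     n1 == 0, n2 == 2, n3 == 0 and n4 == 0.  */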
  if (TREE_CODE (n) == INTEGER_CST)
    {
      tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
      n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
      n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
      n3 = size_int (!integer_zerop (n1));
      n4 = int_const_binop (TRUNC_MOD_EXPR,
			    int_const_binop (MINUS_EXPR, lp, n1), lp);
    }
  else
    {
      n1 = make_ssa_name (TREE_TYPE (n));
      n2 = make_ssa_name (sizetype);
      n3 = make_ssa_name (sizetype);
      n4 = make_ssa_name (TREE_TYPE (n));
      if (pow2p_hwi (limb_prec))
	{
	  tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
	  g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   RSHIFT_EXPR, n,
				   build_int_cst (TREE_TYPE (n),
						  exact_log2 (limb_prec)));
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   NEGATE_EXPR, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
				   lpm1);
	  insert_before (g);
	}
      else
	{
	  tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
	  g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   TRUNC_DIV_EXPR, n, lp);
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   MINUS_EXPR, lp, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
				   lp);
	  insert_before (g);
	}
      g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
			       build_zero_cst (TREE_TYPE (n)));
      insert_before (g);
      g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
      insert_before (g);
    }
  tree p = build_int_cst (sizetype,
			  prec / limb_prec - (prec % limb_prec == 0));
  if (rhs_code == RSHIFT_EXPR)
    {
      /* Lower
	   dst = src >> n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t idx;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   int signed_p = (typeof (src) -1) < 0;
	   for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
				 ? p : p - n3); ++idx)
	     dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
	   limb_type ext;
	   if (prec % limb_prec == 0)
	     ext = src[p];
	   else if (signed_p)
	     ext = ((signed limb_type) (src[p] << (limb_prec
						   - (prec % limb_prec))))
		   >> (limb_prec - (prec % limb_prec));
	   else
	     ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
	   if (!signed_p && (prec % limb_prec == 0))
	     ;
	   else if (idx < prec / 64)
	     {
	       dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
	       ++idx;
	     }
	   idx -= n2;
	   if (signed_p)
	     {
	       dst[idx] = ((signed limb_type) ext) >> n1;
	       ext = ((signed limb_type) ext) >> (limb_prec - 1);
	     }
	   else
	     {
	       dst[idx] = ext >> n1;
	       ext = 0;
	     }
	   for (++idx; idx <= p; ++idx)
	     dst[idx] = ext;  */
      tree pmn3;
      if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	pmn3 = p;
      else if (TREE_CODE (n3) == INTEGER_CST)
	pmn3 = int_const_binop (MINUS_EXPR, p, n3);
      else
	{
	  pmn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
	  insert_before (g);
	}
      g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (n2, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxpn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idx);
      m_first = false;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       RSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxpn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      insert_before (g);
      g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
      insert_before (g);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      m_data_cnt = 0;
      tree ms = handle_operand (rhs1, p);
      tree ext = ms;
      if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
	ext = add_cast (m_limb_type, ms);
      if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	  && !integer_zerop (n3))
	{
	  g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idx);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, ext, n4);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  insert_before (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      tree sext = ext;
      if (!TYPE_UNSIGNED (type))
	sext = add_cast (signed_type_for (m_limb_type), ext);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
			       RSHIFT_EXPR, sext, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!TYPE_UNSIGNED (type))
	{
	  t1 = add_cast (m_limb_type, t1);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
				   RSHIFT_EXPR, sext,
				   build_int_cst (TREE_TYPE (n),
						  limb_prec - 1));
	  insert_before (g);
	  ext = add_cast (m_limb_type, gimple_assign_lhs (g));
	}
      else
	ext = build_zero_cst (m_limb_type);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
			       size_one_node);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, ext);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      insert_before (g);
      g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
      insert_before (g);
    }
  else
    {
      /* Lower
	   dst = src << n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t idx;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
	     dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
	   if (n1)
	     {
	       dst[idx] = src[idx - n2] << n1;
	       --idx;
	     }
	   for (; (ssize_t) idx >= 0; --idx)
	     dst[idx] = 0;  */
      tree n2pn3;
      if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
	n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
      else
	{
	  n2pn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
	  insert_before (g);
	}
      /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
	 idx even to access the most significant partial limb.  */
      m_var_msb = true;
      if (integer_zerop (n3))
	/* For n3 == 0 p >= n2 + n3 is always true for all valid shift
	   counts.  Emit if (true) condition that can be optimized later.  */
	g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
			       NULL_TREE, NULL_TREE);
      else
	g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (p, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxmn2mn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idxmn2);
      m_first = false;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxmn2mn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      insert_before (g);
      tree sn2pn3 = add_cast (ssizetype, n2pn3);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
			     NULL_TREE, NULL_TREE);
      insert_before (g);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      if (!integer_zerop (n3))
	{
	  g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  insert_before (g);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idxmn2);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, build_zero_cst (m_limb_type));
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      insert_before (g);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      insert_before (g);
    }
}
/* Lower large/huge _BitInt multiplication or division.  */
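
/* For example (illustrative only): a large _BitInt multiplication
   lhs = a * b is replaced by a call to a libgcc helper emitted as
     .MULBITINT (&lhs, prec, &a, prec1, &b, prec2);
   where prec1/prec2 encode zero- or sign-extension of the arguments
   as computed by handle_operand_addr.  */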
void
bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type), prec1, prec2;
  rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
  rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      lhs = build_fold_addr_expr (obj);
    }
  else
    {
      lhs = build_fold_addr_expr (obj);
      lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
				      NULL_TREE, true, GSI_SAME_STMT);
    }
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  switch (rhs_code)
    {
    case MULT_EXPR:
      g = gimple_build_call_internal (IFN_MULBITINT, 6,
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      break;
    case TRUNC_DIV_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
				      lhs, build_int_cst (sitype, prec),
				      null_pointer_node,
				      build_int_cst (sitype, 0),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      break;
    case TRUNC_MOD_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
				      build_int_cst (sitype, 0),
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      break;
    default:
      gcc_unreachable ();
    }
  insert_before (g);
  if (stmt_ends_bb_p (stmt))
    {
      maybe_duplicate_eh_stmt (g, stmt);
      edge e1;
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (e1, ei, bb->succs)
	if (e1->flags & EDGE_EH)
	  break;
      if (e1)
	{
	  edge e2 = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e2->dest);
	  make_edge (e2->src, e1->dest, EDGE_EH)->probability
	    = profile_probability::very_unlikely ();
	}
    }
}
/* Lower large/huge _BitInt conversion to/from floating point.  */
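
/* E.g. (illustrative only): lhs = (_BitInt(256)) d for double d is
   lowered to
     .FLOATTOBITINT (&lhs, -256, d);
   (negative precision meaning a signed result), while f = (double) x
   for a large _BitInt x becomes
     f = .BITINTTOFLOAT (&x, prec);  */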
void
bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  if (rhs_code == FIX_TRUNC_EXPR)
    {
      int prec = TYPE_PRECISION (TREE_TYPE (lhs));
      if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
	prec = -prec;
      if (obj == NULL_TREE)
	{
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  obj = m_vars[part];
	  lhs = build_fold_addr_expr (obj);
	}
      else
	{
	  lhs = build_fold_addr_expr (obj);
	  lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
					  NULL_TREE, true, GSI_SAME_STMT);
	}
      scalar_mode from_mode
	= as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
      /* IEEE single is a full superset of both IEEE half and
	 bfloat formats, convert to float first and then to _BitInt
	 to avoid the need of another 2 library routines.  */
      if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
	   || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
	  && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
	{
	  tree type = lang_hooks.types.type_for_mode (SFmode, 0);
	  if (type)
	    rhs1 = add_cast (type, rhs1);
	}
      g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
				      lhs, build_int_cst (sitype, prec),
				      rhs1);
      insert_before (g);
    }
  else
    {
      int prec;
      rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
      g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
				      rhs1, build_int_cst (sitype, prec));
      gimple_call_set_lhs (g, lhs);
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      gsi_replace (&m_gsi, g, true);
    }
}
/* Helper method for lower_addsub_overflow and lower_mul_overflow.
   If check_zero is true, caller wants to check if all bits in [start, end)
   are zero, otherwise if bits in [start, end) are either all zero or
   all ones.  L is the limb with index LIMB, START and END are measured
   in bits.  */
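
/* For instance (illustrative only): with limb_prec == 64, start == 70
   and end == 128, only limb 1 is interesting; for check_zero the call
   masks away the bits below bit 6 of that limb, otherwise it
   arithmetically shifts the limb right by 6, so the result is all
   zeros or all ones iff the original bits [70, 128) were.  */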
tree
bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
						unsigned int end, tree l,
						unsigned int limb,
						bool check_zero)
{
  unsigned startlimb = start / limb_prec;
  unsigned endlimb = (end - 1) / limb_prec;
  gimple *g;

  if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
    return l;
  if (startlimb == endlimb && limb == startlimb)
    {
      if (check_zero)
	{
	  wide_int w = wi::shifted_mask (start % limb_prec,
					 end - start, false, limb_prec);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_AND_EXPR, l,
				   wide_int_to_tree (m_limb_type, w));
	  insert_before (g);
	  return gimple_assign_lhs (g);
	}
      unsigned int shift = start % limb_prec;
      if ((end % limb_prec) != 0)
	{
	  unsigned int lshift = (-end) % limb_prec;
	  shift += lshift;
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, l,
				   build_int_cst (unsigned_type_node,
						  lshift));
	  insert_before (g);
	  l = gimple_assign_lhs (g);
	}
      l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      return add_cast (m_limb_type, gimple_assign_lhs (g));
    }
  else if (limb == startlimb)
    {
      if ((start % limb_prec) == 0)
	return l;
      if (!check_zero)
	l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node,
					      start % limb_prec));
      insert_before (g);
      l = gimple_assign_lhs (g);
      if (!check_zero)
	l = add_cast (m_limb_type, l);
      return l;
    }
  else if (limb == endlimb)
    {
      if ((end % limb_prec) == 0)
	return l;
      if (check_zero)
	{
	  wide_int w = wi::mask (end % limb_prec, false, limb_prec);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_AND_EXPR, l,
				   wide_int_to_tree (m_limb_type, w));
	  insert_before (g);
	  return gimple_assign_lhs (g);
	}
      unsigned int shift = (-end) % limb_prec;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      return add_cast (m_limb_type, gimple_assign_lhs (g));
    }
  return l;
}
/* Helper method for lower_addsub_overflow and lower_mul_overflow.  Store
   result including overflow flag into the right locations.  */
void
bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
					  tree ovf, tree lhs, tree orig_obj,
					  gimple *stmt, tree_code code)
{
  gimple *g;

  if (obj == NULL_TREE
      && (TREE_CODE (type) != BITINT_TYPE
	  || bitint_precision_kind (type) < bitint_prec_large))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
      tree lhs_type = type;
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) == bitint_prec_middle)
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
						   TYPE_UNSIGNED (type));
      tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
      g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
      insert_before (g);
      r1 = gimple_assign_lhs (g);
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
	  insert_before (g);
	  r2 = gimple_assign_lhs (g);
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != type)
	r1 = add_cast (type, r1);
      ovf = add_cast (lhs_type, ovf);
      if (lhs_type != type)
	ovf = add_cast (type, ovf);
      g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
      m_gsi = gsi_for_stmt (stmt);
      gsi_replace (&m_gsi, g, true);
    }
  else
    {
      unsigned HOST_WIDE_INT nelts = 0;
      tree atype = NULL_TREE;
      if (obj)
	{
	  nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
	  if (orig_obj == NULL_TREE)
	    nelts >>= 1;
	  atype = build_array_type_nelts (m_limb_type, nelts);
	}
      if (var && obj)
	{
	  tree v1, v2;
	  tree zero;
	  if (orig_obj == NULL_TREE)
	    {
	      zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
	      v1 = build2 (MEM_REF, atype,
			   build_fold_addr_expr (unshare_expr (obj)), zero);
	    }
	  else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
	    v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
	  else
	    v1 = unshare_expr (obj);
	  zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
	  v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
	  g = gimple_build_assign (v1, v2);
	  insert_before (g);
	}
      if (orig_obj == NULL_TREE && obj)
	{
	  ovf = add_cast (m_limb_type, ovf);
	  tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
	  g = gimple_build_assign (l, ovf);
	  insert_before (g);
	  if (nelts > 1)
	    {
	      atype = build_array_type_nelts (m_limb_type, nelts - 1);
	      tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
					(nelts + 1) * m_limb_size);
	      tree v1 = build2 (MEM_REF, atype,
				build_fold_addr_expr (unshare_expr (obj)),
				off);
	      g = gimple_build_assign (v1, build_zero_cst (atype));
	      insert_before (g);
	    }
	}
      else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
	{
	  imm_use_iterator ui;
	  use_operand_p use_p;
	  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
	    {
	      g = USE_STMT (use_p);
	      if (!is_gimple_assign (g)
		  || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
		continue;
	      tree lhs2 = gimple_assign_lhs (g);
	      gimple *use_stmt;
	      single_imm_use (lhs2, &use_p, &use_stmt);
	      lhs2 = gimple_assign_lhs (use_stmt);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      if (useless_type_conversion_p (TREE_TYPE (lhs2),
					     TREE_TYPE (ovf)))
		g = gimple_build_assign (lhs2, ovf);
	      else
		g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
	      gsi_replace (&gsi, g, true);
	      if (gsi_stmt (m_gsi) == use_stmt)
		m_gsi = gsi_for_stmt (g);
	      break;
	    }
	}
      else if (ovf != boolean_false_node)
	{
	  g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, profile_probability::very_unlikely (),
		   edge_true, edge_false);
	  tree zero = build_zero_cst (TREE_TYPE (lhs));
	  tree fn = ubsan_build_overflow_builtin (code, m_loc,
						  TREE_TYPE (lhs),
						  zero, zero, NULL);
	  force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
				    true, GSI_SAME_STMT);
	  m_gsi = gsi_after_labels (edge_true->dest);
	}
    }
  if (var)
    {
      tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
      g = gimple_build_assign (var, clobber);
      gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
    }
}
/* Helper function for lower_addsub_overflow and lower_mul_overflow.
   Given precisions of result TYPE (PREC), argument 0 precision PREC0,
   argument 1 precision PREC1 and minimum precision for the result
   PREC2, compute *START, *END, *CHECK_ZERO and return OVF.  */
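
/* Example (illustrative only): for an unsigned result TYPE with
   prec == 8, PREC0 == PREC1 == 8 and PLUS_EXPR, prec2 == 9, so
   overflow happens iff bit 8 (the carry) of the infinite precision
   result is non-zero: *START == 8, *END == 9, *CHECK_ZERO == true.
   If prec were 16 instead, 16 >= 9 means overflow is impossible and
   boolean_false_node is returned.  */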
static tree
arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
		int prec2, unsigned *start, unsigned *end, bool *check_zero)
{
  *start = 0;
  *end = 0;
  *check_zero = true;
  /* Ignore this special rule for subtraction, even if both
     prec0 >= 0 and prec1 >= 0, their subtraction can be negative
     in infinite precision.  */
  if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
    {
      /* Result in [0, prec2) is unsigned, if prec > prec2,
	 all bits above it will be zero.  */
      if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
	return boolean_false_node;
      else
	{
	  /* ovf if any of bits in [start, end) is non-zero.  */
	  *start = prec - !TYPE_UNSIGNED (type);
	  *end = prec2;
	}
    }
  else if (TYPE_UNSIGNED (type))
    {
      /* If result in [0, prec2) is signed and if prec > prec2,
	 all bits above it will be sign bit copies.  */
      if (prec >= prec2)
	{
	  /* ovf if bit prec - 1 is non-zero.  */
	  *start = prec - 1;
	  *end = prec;
	}
      else
	{
	  /* ovf if any of bits in [start, end) is non-zero.  */
	  *start = prec - 1;
	  *end = prec2;
	}
    }
  else if (prec >= prec2)
    return boolean_false_node;
  else
    {
      /* ovf if [start, end) bits aren't all zeros or all ones.  */
      *start = prec - 1;
      *end = prec2;
      *check_zero = false;
    }
  return NULL_TREE;
}
/* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */

void
bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type);
  int prec0 = range_to_prec (arg0, stmt);
  int prec1 = range_to_prec (arg1, stmt);
  /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
     the minimum unsigned precision of any possible operation's
     result, otherwise it is minimum signed precision.

     If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
     if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
     if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
     if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
     PREC0  CODE  PREC1  RESULT          PREC2  SIGNED vs. UNSIGNED
      8      +     8     [0, 0x1fe]       9     UNSIGNED
      8      +     10    [0, 0x4fe]      11     UNSIGNED
     -8      +    -8     [-0x100, 0xfe]   9     SIGNED
     -8      +    -10    [-0x280, 0x27e] 11     SIGNED
      8      +    -8     [-0x80, 0x17e]  10     SIGNED
      8      +    -10    [-0x200, 0x2fe] 11     SIGNED
      10     +    -8     [-0x80, 0x47e]  12     SIGNED
      8      -     8     [-0xff, 0xff]    9     SIGNED
      8      -     10    [-0x3ff, 0xff]  11     SIGNED
      10     -     8     [-0xff, 0x3ff]  11     SIGNED
     -8      -    -8     [-0xff, 0xff]    9     SIGNED
     -8      -    -10    [-0x27f, 0x27f] 11     SIGNED
     -10     -    -8     [-0x27f, 0x27f] 11     SIGNED
      8      -    -8     [-0x7f, 0x17f]  10     SIGNED
      8      -    -10    [-0x1ff, 0x2ff] 11     SIGNED
      10     -    -8     [-0x7f, 0x47f]  12     SIGNED
     -8      -     8     [-0x17f, 0x7f]  10     SIGNED
     -8      -     10    [-0x47f, 0x7f]  12     SIGNED
     -10     -     8     [-0x2ff, 0x1ff] 11     SIGNED  */
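  /* E.g. (illustrative): for PREC0 == 8 and PREC1 == -10 the formula
     below starts with prec2 = MAX (8, 10) == 10; the operands differ
     in signedness and the signed one has the larger precision, so a
     single extra bit suffices and prec2 becomes 11, matching the
     "8 + -10" row of the table above.  */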
  int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  /* If operands are either both signed or both unsigned,
     we need just one additional bit.  */
  prec2 = (((prec0 < 0) == (prec1 < 0)
	    /* If one operand is signed and one unsigned and
	       the signed one has larger precision, we need
	       just one extra bit, otherwise two.  */
	    || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
		: (prec2 == -prec1 && prec2 != prec0)))
	   ? prec2 + 1 : prec2 + 2);
  int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  prec3 = MAX (prec3, prec);
  tree var = NULL_TREE;
  tree orig_obj = obj;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    {
      unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }

  enum tree_code code;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_UBSAN_CHECK_ADD:
      code = PLUS_EXPR;
      break;
    case IFN_SUB_OVERFLOW:
    case IFN_UBSAN_CHECK_SUB:
      code = MINUS_EXPR;
      break;
    default:
      gcc_unreachable ();
    }
  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);

  unsigned startlimb, endlimb;
  if (ovf)
    {
      startlimb = ~0U;
      endlimb = ~0U;
    }
  else
    {
      startlimb = start / limb_prec;
      endlimb = (end - 1) / limb_prec;
    }

  int prec4 = ovf != NULL_TREE ? prec : prec3;
  bitint_prec_kind kind = bitint_precision_kind (prec4);
  unsigned cnt, rem = 0, fin = 0;
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  bool last_ovf = (ovf == NULL_TREE
		   && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
  if (kind != bitint_prec_huge)
    cnt = CEIL (prec4, limb_prec) + last_ovf;
  else
    {
      rem = (prec4 % (2 * limb_prec));
      fin = (prec4 - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  if (kind == bitint_prec_huge)
    m_upwards_2limb = fin;
  m_upwards = true;

  tree type0 = TREE_TYPE (arg0);
  tree type1 = TREE_TYPE (arg1);
  int prec5 = prec3;
  if (bitint_precision_kind (prec5) < bitint_prec_large)
    prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
  if (TYPE_PRECISION (type0) < prec5)
    {
      type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
      if (TREE_CODE (arg0) == INTEGER_CST)
	arg0 = fold_convert (type0, arg0);
    }
  if (TYPE_PRECISION (type1) < prec5)
    {
      type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
      if (TREE_CODE (arg1) == INTEGER_CST)
	arg1 = fold_convert (type1, arg1);
    }
  unsigned int data_cnt = 0;
  tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
  tree cmp = build_zero_cst (m_limb_type);
  unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
  tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      tree rhs1, rhs2;
      if (kind != bitint_prec_huge)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (fin + (i > 2));
      if (!last_ovf || i < cnt - 1)
	{
	  if (type0 != TREE_TYPE (arg0))
	    rhs1 = handle_cast (type0, arg0, idx);
	  else
	    rhs1 = handle_operand (arg0, idx);
	  if (type1 != TREE_TYPE (arg1))
	    rhs2 = handle_cast (type1, arg1, idx);
	  else
	    rhs2 = handle_operand (arg1, idx);
	  if (i == 0)
	    data_cnt = m_data_cnt;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
	    rhs2 = add_cast (m_limb_type, rhs2);
	  last_rhs1 = rhs1;
	  last_rhs2 = rhs2;
	}
      else
	{
	  m_data_cnt = data_cnt;
	  if (TYPE_UNSIGNED (type0))
	    rhs1 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
	      if (TREE_CODE (rhs1) == INTEGER_CST)
		rhs1 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   RSHIFT_EXPR, rhs1, lpm1);
		  insert_before (g);
		  rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	  if (TYPE_UNSIGNED (type1))
	    rhs2 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
	      if (TREE_CODE (rhs2) == INTEGER_CST)
		rhs2 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
					   RSHIFT_EXPR, rhs2, lpm1);
		  insert_before (g);
		  rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	}
      tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
      if (ovf != boolean_false_node)
	{
	  if (tree_fits_uhwi_p (idx))
	    {
	      unsigned limb = tree_to_uhwi (idx);
	      if (limb >= startlimb && limb <= endlimb)
		{
		  tree l = arith_overflow_extract_bits (start, end, rhs,
							limb, check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (ovf == NULL_TREE && !check_zero)
		    {
		      cmp = l;
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (ovf == NULL_TREE)
		    ovf = this_ovf;
		  else
		    {
		      tree b = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      ovf = b;
		    }
		}
	    }
	  else if (startlimb < fin)
	    {
	      if (m_first && startlimb + 2 < fin)
		{
		  tree data_out;
		  ovf = prepare_data_in_out (boolean_false_node, idx,
					     &data_out);
		  ovf_out = m_data.pop ();
		  m_data.pop ();
		  if (!check_zero)
		    {
		      cmp = prepare_data_in_out (cmp, idx, &data_out);
		      cmp_out = m_data.pop ();
		      m_data.pop ();
		    }
		}
	      if (i != 0 || startlimb != fin - 1)
		{
		  tree_code cmp_code;
		  bool single_comparison
		    = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
		  if (!single_comparison)
		    {
		      cmp_code = GE_EXPR;
		      if (!check_zero && (start % limb_prec) == 0)
			single_comparison = true;
		    }
		  else if ((startlimb & 1) == (i & 1))
		    cmp_code = EQ_EXPR;
		  else
		    cmp_code = GT_EXPR;
		  g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
					 NULL_TREE, NULL_TREE);
		  edge edge_true_true, edge_true_false, edge_false;
		  gimple *g2 = NULL;
		  if (!single_comparison)
		    g2 = gimple_build_cond (NE_EXPR, idx,
					    size_int (startlimb), NULL_TREE,
					    NULL_TREE);
		  if_then_if_then_else (g, g2, profile_probability::likely (),
					profile_probability::likely (),
					edge_true_true, edge_true_false,
					edge_false);
		  unsigned tidx = startlimb + (cmp_code == GT_EXPR);
		  tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
							check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (cmp_code != GT_EXPR && !check_zero)
		    {
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (cmp_code == GT_EXPR)
		    {
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      this_ovf = t;
		    }
		  tree this_ovf2 = NULL_TREE;
		  if (!single_comparison)
		    {
		      m_gsi = gsi_after_labels (edge_true_true->src);
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
		      insert_before (g);
		      this_ovf2 = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
					       ovf, t);
		      insert_before (g);
		    }
		  m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4175 if (i
== 1 && ovf_out
)
4178 t
= make_ssa_name (boolean_type_node
);
4179 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
4180 add_phi_arg (phi
, this_ovf
, edge_true_false
,
4182 add_phi_arg (phi
, ovf
? ovf
4183 : boolean_false_node
, edge_false
,
4186 add_phi_arg (phi
, this_ovf2
, edge_true_true
,
4189 if (!check_zero
&& cmp_code
!= GT_EXPR
)
4191 t
= cmp_out
? cmp_out
: make_ssa_name (m_limb_type
);
4192 phi
= create_phi_node (t
, edge_true_false
->dest
);
4193 add_phi_arg (phi
, l
, edge_true_false
, UNKNOWN_LOCATION
);
4194 add_phi_arg (phi
, cmp
, edge_false
, UNKNOWN_LOCATION
);
4196 add_phi_arg (phi
, cmp
, edge_true_true
,
4206 if (tree_fits_uhwi_p (idx
) && tree_to_uhwi (idx
) >= prec_limbs
)
4208 else if (!tree_fits_uhwi_p (idx
)
4209 && (unsigned) prec
< (fin
- (i
== 0)) * limb_prec
)
4211 bool single_comparison
4212 = (((unsigned) prec
% limb_prec
) == 0
4213 || prec_limbs
+ 1 >= fin
4214 || (prec_limbs
& 1) == (i
& 1));
4215 g
= gimple_build_cond (LE_EXPR
, idx
, size_int (prec_limbs
- 1),
4216 NULL_TREE
, NULL_TREE
);
4218 if (!single_comparison
)
4219 g2
= gimple_build_cond (LT_EXPR
, idx
,
4220 size_int (prec_limbs
- 1),
4221 NULL_TREE
, NULL_TREE
);
4222 edge edge_true_true
, edge_true_false
, edge_false
;
4223 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4224 profile_probability::likely (),
4225 edge_true_true
, edge_true_false
,
4227 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4228 g
= gimple_build_assign (l
, rhs
);
4230 if (!single_comparison
)
4232 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4233 l
= limb_access (type
, var
? var
: obj
,
4234 size_int (prec_limbs
- 1), true);
4235 if (!useless_type_conversion_p (TREE_TYPE (l
),
4237 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4238 g
= gimple_build_assign (l
, rhs
);
4241 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4245 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4246 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4247 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4248 g
= gimple_build_assign (l
, rhs
);
4253 if (kind
== bitint_prec_huge
&& i
<= 1)
4257 idx
= make_ssa_name (sizetype
);
4258 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4264 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4267 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4268 NULL_TREE
, NULL_TREE
);
4270 m_gsi
= gsi_for_stmt (final_stmt
);
4276 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
/* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */
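/* As an illustration (64-bit limbs assumed), for

     _BitInt(256) r;
     bool ovf = __builtin_mul_overflow (a, b, &r);

   the product itself is computed by a single IFN_MULBITINT call (a
   libgcc __mulbitint3 call after expansion) into a wide enough
   temporary, and the overflow flag is then derived by checking that
   the limbs above the result precision are a proper extension of it.
   This is a sketch of the intent, not a transcript of the emitted
   GIMPLE.  */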
void
bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type), prec0, prec1;
  arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
  arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
  int prec2 = ((prec0 < 0 ? -prec0 : prec0)
	       + (prec1 < 0 ? -prec1 : prec1));
  if (prec0 == 1 || prec1 == 1)
    --prec2;
  tree var = NULL_TREE;
  tree orig_obj = obj;
  bool force_var = false;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  else if (obj != NULL_TREE && DECL_P (obj))
    {
      for (int i = 0; i < 2; ++i)
	{
	  tree arg = i ? arg1 : arg0;
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    arg = TREE_OPERAND (arg, 0);
	  if (get_base_address (arg) == obj)
	    {
	      force_var = true;
	      break;
	    }
	}
    }
  if (obj == NULL_TREE
      || force_var
      || TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large
      || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
    {
      unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  tree addr = build_fold_addr_expr (var ? var : obj);
  addr = force_gimple_operand_gsi (&m_gsi, addr, true,
				   NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g
    = gimple_build_call_internal (IFN_MULBITINT, 6,
				  addr, build_int_cst (sitype,
						       MAX (prec2, prec)),
				  arg0, build_int_cst (sitype, prec0),
				  arg1, build_int_cst (sitype, prec1));
  insert_before (g);

  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);
  if (ovf == NULL_TREE)
    {
      unsigned startlimb = start / limb_prec;
      unsigned endlimb = (end - 1) / limb_prec;
      unsigned cnt;
      bool use_loop = false;
      if (startlimb == endlimb)
	cnt = 1;
      else if (startlimb + 1 == endlimb)
	cnt = 2;
      else if ((end % limb_prec) == 0)
	{
	  cnt = 2;
	  use_loop = startlimb + 2 < endlimb;
	}
      else
	{
	  cnt = 3;
	  use_loop = startlimb + 2 < endlimb;
	}
      if (cnt == 1)
	{
	  tree l = limb_access (NULL_TREE, var ? var : obj,
				size_int (startlimb), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	  insert_before (g);
	  l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
					   startlimb, check_zero);
	  ovf = make_ssa_name (boolean_type_node);
	  if (check_zero)
	    g = gimple_build_assign (ovf, NE_EXPR, l,
				     build_zero_cst (m_limb_type));
	  else
	    {
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       PLUS_EXPR, l,
				       build_int_cst (m_limb_type, 1));
	      insert_before (g);
	      g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
				       build_int_cst (m_limb_type, 1));
	    }
	  insert_before (g);
	}
      else
	{
	  basic_block edge_bb = NULL;
	  gimple_stmt_iterator gsi = m_gsi;
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);

	  tree cmp = build_zero_cst (m_limb_type);
	  for (unsigned i = 0; i < cnt; i++)
	    {
	      tree idx, idx_next = NULL_TREE;
	      if (i == 0)
		idx = size_int (startlimb);
	      else if (i == 2)
		idx = size_int (endlimb);
	      else if (use_loop)
		idx = create_loop (size_int (startlimb + 1), &idx_next);
	      else
		idx = size_int (startlimb + 1);
	      tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
	      g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	      insert_before (g);
	      l = gimple_assign_lhs (g);
	      if (i == 0 || i == 2)
		l = arith_overflow_extract_bits (start, end, l,
						 tree_to_uhwi (idx),
						 check_zero);
	      if (i == 0 && !check_zero)
		{
		  cmp = l;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, l,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::likely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      if (i == 1 && use_loop)
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
					   size_one_node);
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next,
					 size_int (endlimb + (cnt == 1)),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge true_edge, false_edge;
		  extract_true_false_edges_from_block (gsi_bb (m_gsi),
						       &true_edge,
						       &false_edge);
		  m_gsi = gsi_after_labels (false_edge->dest);
		  m_bb = NULL;
		}
	    }

	  ovf = make_ssa_name (boolean_type_node);
	  basic_block bb = gimple_bb (final_stmt);
	  gphi *phi = create_phi_node (ovf, bb);
	  edge e1 = find_edge (gsi_bb (m_gsi), bb);
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      tree val = e == e1 ? boolean_false_node : boolean_true_node;
	      add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
	    }
	  m_gsi = gsi_for_stmt (final_stmt);
	  m_bb = NULL;
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
}

/* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
   .{ADD,SUB,MUL}_OVERFLOW call.  */
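/* E.g. for r = __builtin_add_overflow (...) lowered earlier, a
   REALPART_EXPR use copies the low nelts limbs of the partition
   variable holding the _Complex result into OBJ, while IMAGPART_EXPR
   copies the limbs starting at byte offset nelts * m_limb_size.
   Illustrative summary only.  */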
void
bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  rhs1 = TREE_OPERAND (rhs1, 0);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  if (TREE_CODE (rhs1) == SSA_NAME
      && (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
    {
      lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
      return;
    }
  int part = var_to_partition (m_map, rhs1);
  gcc_assert (m_vars[part] != NULL_TREE);
  tree var = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
    obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
  tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
			    gimple_assign_rhs_code (stmt) == REALPART_EXPR
			    ? 0 : nelts * m_limb_size);
  tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
  gimple *g = gimple_build_assign (obj, v2);
  insert_before (g);
}

/* Lower COMPLEX_EXPR stmt.  */
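/* I.e. lhs = COMPLEX_EXPR <rhs1, rhs2> becomes two limb-array block
   copies into the variable of lhs's partition: rhs1 at offset zero
   and rhs2 at TYPE_SIZE_UNIT (atype).  Illustrative summary only.  */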
void
bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  int part = var_to_partition (m_map, lhs);
  gcc_assert (m_vars[part] != NULL_TREE);
  lhs = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
  tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
  tree v2;
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs1);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs1))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs1);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  gimple *g = gimple_build_assign (v1, v2);
  insert_before (g);

  tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
			   TYPE_SIZE_UNIT (atype));
  v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
  if (TREE_CODE (rhs2) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs2);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs2))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs2);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  g = gimple_build_assign (v1, v2);
  insert_before (g);
}

/* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge
   _BitInt argument.  */
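/* For example (64-bit limbs assumed), .POPCOUNT of an unsigned
   _BitInt(256) argument is lowered to a walk over the four limbs
   summing __builtin_popcountll of each, while .CLZ scans from the
   most significant limb downwards and stops at the first non-zero
   limb.  Illustrative sketch of the shapes produced below.  */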
void
bitint_large_huge::lower_bit_query (gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = (gimple_call_num_args (stmt) == 2
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  tree type = TREE_TYPE (arg0);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  enum internal_fn ifn = gimple_call_internal_fn (stmt);
  enum built_in_function fcode = END_BUILTINS;
  gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
  switch (ifn)
    {
    case IFN_CLZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZL;
      else
	fcode = BUILT_IN_CLZLL;
      break;
    case IFN_FFS:
      /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
	 we don't add the addend at the end.  */
      arg1 = integer_zero_node;
      /* FALLTHRU */
    case IFN_CTZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZL;
      else
	fcode = BUILT_IN_CTZLL;
      m_upwards = true;
      break;
    case IFN_CLRSB:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSB;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSBL;
      else
	fcode = BUILT_IN_CLRSBLL;
      break;
    case IFN_PARITY:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITY;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITYL;
      else
	fcode = BUILT_IN_PARITYLL;
      m_upwards = true;
      break;
    case IFN_POPCOUNT:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNT;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNTL;
      else
	fcode = BUILT_IN_POPCOUNTLL;
      m_upwards = true;
      break;
    default:
      gcc_unreachable ();
    }
  tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
  unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  struct bq_details { edge e; tree val, addend; } *bqp = NULL;
  basic_block edge_bb = NULL;
  if (m_upwards)
    {
      tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = (prec % (2 * limb_prec));
	  end = (prec - rem) / limb_prec;
	  cnt = 2 + CEIL (rem, limb_prec);
	  idx = idx_first = create_loop (size_zero_node, &idx_next);
	}

      if (ifn == IFN_CTZ || ifn == IFN_FFS)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  if (kind == bitint_prec_large)
	    m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	m_after_stmt = stmt;
      if (kind != bitint_prec_large)
	m_upwards_2limb = end;

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (i);
	  else if (i >= 2)
	    idx = size_int (end + (i > 2));

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  tree in, out, tem;
	  if (ifn == IFN_PARITY)
	    in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	  else if (ifn == IFN_FFS)
	    in = prepare_data_in_out (integer_one_node, idx, &out);
	  else
	    in = prepare_data_in_out (integer_zero_node, idx, &out);

	  switch (ifn)
	    {
	    case IFN_CTZ:
	    case IFN_FFS:
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1, e2;
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	      if (tree_fits_uhwi_p (idx))
		bqp[i].addend
		  = build_int_cst (integer_type_node,
				   tree_to_uhwi (idx) * limb_prec
				   + (ifn == IFN_FFS));
	      else
		{
		  bqp[i].addend = in;
		  res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in,
					   build_int_cst (integer_type_node,
							  limb_prec));
		  insert_before (g);
		  m_data[m_data_cnt] = res;
		}
	      break;
	    case IFN_PARITY:
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
		  insert_before (g);
		}
	      else
		res = rhs1;
	      m_data[m_data_cnt] = res;
	      break;
	    case IFN_POPCOUNT:
	      g = gimple_build_call (fndecl, 1, rhs1);
	      tem = make_ssa_name (integer_type_node);
	      gimple_call_set_lhs (g, tem);
	      insert_before (g);
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in, tem);
		  insert_before (g);
		}
	      else
		res = tem;
	      m_data[m_data_cnt] = res;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i <= 1)
	    {
	      if (i == 0)
		{
		  idx = make_ssa_name (sizetype);
		  g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
					   size_one_node);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
					   size_int (2));
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  if (ifn == IFN_CTZ || ifn == IFN_FFS)
		    m_gsi = gsi_after_labels (edge_bb);
		  else
		    m_gsi = gsi_for_stmt (stmt);
		  m_bb = NULL;
		}
	    }
	}
    }
  else
    {
      tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
      int sub_one = 0;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = prec % limb_prec;
	  if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
	    rem = limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = 1 + (rem != 0);
	  if (ifn == IFN_CLRSB)
	    sub_one = 1;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      m_gsi = gsi_end_bb (edge_bb);

      if (ifn == IFN_CLZ)
	bqp = XALLOCAVEC (struct bq_details, cnt);
      else
	{
	  gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
	}

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (cnt - i - 1);
	  else if (i == cnt - 1)
	    idx = create_loop (size_int (end - 1), &idx_next);
	  else
	    idx = size_int (end);

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  if (ifn == IFN_CLZ)
	    {
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	    }
	  else
	    {
	      if (i == 0)
		{
		  first = rhs1;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, rhs1,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		{
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   BIT_XOR_EXPR, rhs1, first);
		  insert_before (g);
		  tree stype = signed_type_for (m_limb_type);
		  g = gimple_build_cond (LT_EXPR,
					 add_cast (stype,
						   gimple_assign_lhs (g)),
					 build_zero_cst (stype),
					 NULL_TREE, NULL_TREE);
		}
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[2 * i].e = e2;
	      g = gimple_build_cond (NE_EXPR, rhs1, first,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e3 = split_block (gsi_bb (m_gsi), g);
	      e3->flags = EDGE_FALSE_VALUE;
	      edge e4 = make_edge (e3->src, edge_bb, EDGE_TRUE_VALUE);
	      e3->probability = profile_probability::unlikely ();
	      e4->probability = e3->probability.invert ();
	      set_immediate_dominator (CDI_DOMINATORS, e4->dest, e4->src);
	      m_gsi = gsi_after_labels (e3->dest);
	      bqp[2 * i + 1].e = e4;
	      bqp[i].val = rhs1;
	    }
	  if (tree_fits_uhwi_p (idx))
	    bqp[i].addend
	      = build_int_cst (integer_type_node,
			       prec
			       - (((int) tree_to_uhwi (idx) + 1)
				  * limb_prec) - sub_one);
	  else
	    {
	      tree in, out;
	      in = build_int_cst (integer_type_node, rem - sub_one);
	      in = prepare_data_in_out (in, idx, &out);
	      out = m_data[m_data_cnt + 1];
	      bqp[i].addend = in;
	      g = gimple_build_assign (out, PLUS_EXPR, in,
				       build_int_cst (integer_type_node,
						      limb_prec));
	      insert_before (g);
	      m_data[m_data_cnt] = out;
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i == cnt - 1)
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_int (-1));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge true_edge, false_edge;
	      extract_true_false_edges_from_block (gsi_bb (m_gsi),
						   &true_edge, &false_edge);
	      m_gsi = gsi_after_labels (false_edge->dest);
	      m_bb = NULL;
	    }
	}
    }

  gphi *phi1, *phi2, *phi3;
  basic_block bb;
  if (ifn == IFN_CTZ || ifn == IFN_FFS)
    {
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, gimple_bb (stmt)));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      gimple_bb (stmt));
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
	}
      if (arg1 == NULL_TREE)
	{
	  g = gimple_build_builtin_unreachable (m_loc);
	  insert_before (g);
	}
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      if (arg1 == NULL_TREE)
	g = gimple_build_assign (lhs, PLUS_EXPR,
				 gimple_phi_result (phi2),
				 gimple_call_lhs (g));
      else
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_phi_result (phi2),
				   gimple_call_lhs (g));
	  insert_before (g);
	  edge e1 = split_block (gimple_bb (stmt), g);
	  edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
	  e2->probability = profile_probability::always ();
	  set_immediate_dominator (CDI_DOMINATORS, e1->dest,
				   get_immediate_dominator (CDI_DOMINATORS,
							    e1->src));
	  phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
	  add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
	  add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
	  m_gsi = gsi_for_stmt (stmt);
	  g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	}
      gsi_replace (&m_gsi, g, true);
    }
  else if (ifn == IFN_CLZ || ifn == IFN_CLRSB)
    {
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, edge_bb));
      edge e;
      e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
      set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
			       get_immediate_dominator (CDI_DOMINATORS,
							bb));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      edge_bb);
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      edge_bb);
      phi3 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  edge ae = ifn == IFN_CLZ ? bqp[i].e : bqp[2 * i + 1].e;
	  add_phi_arg (phi1, bqp[i].val, ae, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, ae, UNKNOWN_LOCATION);
	  if (ifn == IFN_CLRSB)
	    {
	      tree a = bqp[i].addend;
	      if (i && kind == bitint_prec_large)
		a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
	      if (i)
		add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
	    }
	}
      add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
		   UNKNOWN_LOCATION);
      m_gsi = gsi_after_labels (edge_bb);
      g = gimple_build_call (fndecl, 1,
			     add_cast (signed_type_for (m_limb_type),
				       gimple_phi_result (phi1)));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (integer_type_node),
			       PLUS_EXPR, gimple_call_lhs (g),
			       gimple_phi_result (phi2));
      insert_before (g);
      if (kind != bitint_prec_large)
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_assign_lhs (g),
				   build_int_cst (integer_type_node, 1));
	  insert_before (g);
	}
      add_phi_arg (phi3, gimple_assign_lhs (g),
		   find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_assign (lhs, gimple_phi_result (phi3));
      gsi_replace (&m_gsi, g, true);
    }
  else if (ifn == IFN_PARITY)
    {
      g = gimple_build_call (fndecl, 1, res);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&m_gsi, g, true);
    }
  else
    {
      g = gimple_build_assign (lhs, res);
      gsi_replace (&m_gsi, g, true);
    }
}

/* Lower a call statement with one or more large/huge _BitInt
   arguments or large/huge _BitInt return value.  */
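/* E.g. for a call foo (x) with x a huge _BitInt SSA_NAME, the
   argument is reloaded from the limb array of x's partition through
   a VIEW_CONVERT_EXPR into a fresh SSA_NAME right before the call,
   so the call statement itself keeps a scalar-typed operand.
   Illustrative summary only.  */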
void
bitint_large_huge::lower_call (tree obj, gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  unsigned int nargs = gimple_call_num_args (stmt);
  if (gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
	lower_addsub_overflow (obj, stmt);
	return;
      case IFN_MUL_OVERFLOW:
      case IFN_UBSAN_CHECK_MUL:
	lower_mul_overflow (obj, stmt);
	return;
      case IFN_CLZ:
      case IFN_CTZ:
      case IFN_CLRSB:
      case IFN_FFS:
      case IFN_PARITY:
      case IFN_POPCOUNT:
	lower_bit_query (stmt);
	return;
      default:
	break;
      }
  for (unsigned int i = 0; i < nargs; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (TREE_CODE (arg) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
	  || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
	continue;
      if (SSA_NAME_IS_DEFAULT_DEF (arg)
	  && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
	{
	  tree var = create_tmp_reg (TREE_TYPE (arg));
	  arg = get_or_create_ssa_default_def (cfun, var);
	}
      else
	{
	  int p = var_to_partition (m_map, arg);
	  tree v = m_vars[p];
	  gcc_assert (v != NULL_TREE);
	  if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
	    v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
	  arg = make_ssa_name (TREE_TYPE (arg));
	  gimple *g = gimple_build_assign (arg, v);
	  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	}
      gimple_call_set_arg (stmt, i, arg);
      if (m_preserved == NULL)
	m_preserved = BITMAP_ALLOC (NULL);
      bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
    }
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      tree v = m_vars[p];
      gcc_assert (v != NULL_TREE);
      if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
	v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
      gimple_call_set_lhs (stmt, v);
      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
    }
  update_stmt (stmt);
}

/* Lower __asm STMT which involves large/huge _BitInt values.  */
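/* No new code is emitted here: a large/huge _BitInt asm input or
   output merely has its TREE_VALUE replaced by the underlying
   limb-array VAR_DECL of the coalesced partition.  Illustrative
   summary only.  */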
void
bitint_large_huge::lower_asm (gimple *stmt)
{
  gasm *g = as_a <gasm *> (stmt);
  unsigned noutputs = gimple_asm_noutputs (g);
  unsigned ninputs = gimple_asm_ninputs (g);

  for (unsigned i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  int part = var_to_partition (m_map, s);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  TREE_VALUE (t) = m_vars[part];
	}
    }
  for (unsigned i = 0; i < ninputs; ++i)
    {
      tree t = gimple_asm_input_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  int part = var_to_partition (m_map, s);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  TREE_VALUE (t) = m_vars[part];
	}
    }
  update_stmt (stmt);
}

/* Lower statement STMT which involves large/huge _BitInt values
   into code accessing individual limbs.  */
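/* Illustrative dispatch overview: calls and asms are routed to
   lower_call / lower_asm; mergeable arithmetic, loads, stores, EQ/NE
   comparisons and widening casts go through lower_mergeable_stmt;
   relational comparisons through lower_comparison_stmt; shifts,
   multiplication/division, float conversions and complex part
   extraction have dedicated helpers invoked from the switch at the
   end of this function.  */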
void
bitint_large_huge::lower_stmt (gimple *stmt)
{
  m_first = true;
  m_data.truncate (0);
  m_data_cnt = 0;
  m_gsi = gsi_for_stmt (stmt);
  m_after_stmt = NULL;
  m_bb = NULL;
  m_init_gsi = m_gsi;
  gsi_prev (&m_init_gsi);
  m_preheader_bb = NULL;
  m_upwards_2limb = 0;
  m_upwards = false;
  m_var_msb = false;
  m_cast_conditional = false;
  m_bitfld_load = 0;
  m_loc = gimple_location (stmt);
  if (is_gimple_call (stmt))
    {
      lower_call (NULL_TREE, stmt);
      return;
    }
  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      lower_asm (stmt);
      return;
    }
  tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
  tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
  bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
  bool mergeable_cast_p = false;
  bool final_cast_p = false;
  if (gimple_assign_cast_p (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	mergeable_cast_p = true;
      else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	       && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
	{
	  final_cast_p = true;
	  if (TREE_CODE (rhs1) == SSA_NAME
	      && (m_names == NULL
		  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (rhs1);
	      if (is_gimple_assign (g)
		  && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
		{
		  tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
		  if (TREE_CODE (rhs2) == SSA_NAME
		      && (m_names == NULL
			  || !bitmap_bit_p (m_names,
					    SSA_NAME_VERSION (rhs2))))
		    {
		      g = SSA_NAME_DEF_STMT (rhs2);
		      int ovf = optimizable_arith_overflow (g);
		      if (ovf == 2)
			/* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
			   and IMAGPART_EXPR uses, where the latter is cast to
			   non-_BitInt, it will be optimized when handling
			   the REALPART_EXPR.  */
			return;
		      if (ovf == 1)
			{
			  lower_call (NULL_TREE, g);
			  return;
			}
		    }
		}
	    }
	}
    }
  if (gimple_store_p (stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	{
	  gimple *g = SSA_NAME_DEF_STMT (rhs1);
	  m_loc = gimple_location (g);
	  lhs = gimple_assign_lhs (stmt);
	  if (is_gimple_assign (g) && !mergeable_op (g))
	    switch (gimple_assign_rhs_code (g))
	      {
	      case LSHIFT_EXPR:
	      case RSHIFT_EXPR:
		lower_shift_stmt (lhs, g);
	      handled:
		m_gsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		release_ssa_name (gimple_vdef (stmt));
		gsi_remove (&m_gsi, true);
		return;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		lower_muldiv_stmt (lhs, g);
		goto handled;
	      case FIX_TRUNC_EXPR:
		lower_float_conv_stmt (lhs, g);
		goto handled;
	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		lower_cplxpart_stmt (lhs, g);
		goto handled;
	      default:
		break;
	      }
	  else if (optimizable_arith_overflow (g) == 3)
	    {
	      lower_call (lhs, g);
	      return;
	    }
	  m_loc = gimple_location (stmt);
	}
    }
  if (mergeable_op (stmt)
      || gimple_store_p (stmt)
      || gimple_assign_load_p (stmt)
      || eq_p
      || mergeable_cast_p)
    {
      lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
      if (!eq_p)
	return;
    }
  else if (cmp_code != ERROR_MARK)
    lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
  if (cmp_code != ERROR_MARK)
    {
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cstmt = as_a <gcond *> (stmt);
	  gimple_cond_set_lhs (cstmt, lhs);
	  gimple_cond_set_rhs (cstmt, boolean_false_node);
	  gimple_cond_set_code (cstmt, cmp_code);
	  update_stmt (stmt);
	  return;
	}
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree cond = build2 (cmp_code, boolean_type_node, lhs,
			      boolean_false_node);
	  gimple_assign_set_rhs1 (stmt, cond);
	  lhs = gimple_assign_lhs (stmt);
	  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		      || (bitint_precision_kind (TREE_TYPE (lhs))
			  <= bitint_prec_middle));
	}
      else
	{
	  gimple_assign_set_rhs1 (stmt, lhs);
	  gimple_assign_set_rhs2 (stmt, boolean_false_node);
	  gimple_assign_set_rhs_code (stmt, cmp_code);
	}
      update_stmt (stmt);
      return;
    }
  if (final_cast_p)
    {
      tree lhs_type = TREE_TYPE (lhs);
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
      gimple *g;
      if (TREE_CODE (lhs_type) == BITINT_TYPE
	  && bitint_precision_kind (lhs_type) == bitint_prec_middle)
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
						   TYPE_UNSIGNED (lhs_type));
      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree r1 = handle_operand (rhs1, size_int (0));
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  m_data_cnt = 0;
	  m_first = false;
	  tree r2 = handle_operand (rhs1, size_int (1));
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != TREE_TYPE (lhs))
	g = gimple_build_assign (lhs, NOP_EXPR, r1);
      else
	g = gimple_build_assign (lhs, r1);
      gsi_replace (&m_gsi, g, true);
      return;
    }
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case LSHIFT_EXPR:
      case RSHIFT_EXPR:
	lower_shift_stmt (NULL_TREE, stmt);
	return;
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
	lower_muldiv_stmt (NULL_TREE, stmt);
	return;
      case FIX_TRUNC_EXPR:
      case FLOAT_EXPR:
	lower_float_conv_stmt (NULL_TREE, stmt);
	return;
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	lower_cplxpart_stmt (NULL_TREE, stmt);
	return;
      case COMPLEX_EXPR:
	lower_complexexpr_stmt (stmt);
	return;
      default:
	break;
      }
  gcc_unreachable ();
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}

/* Return true if STMT uses a library function and needs to take
   address of its inputs.  We need to avoid bit-fields in those
   cases.  Similarly, we need to avoid overlap between destination
   and source limb arrays.  */
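/* E.g. a huge _BitInt multiplication passes the address of its
   operands and of the destination to the libgcc helper, which is
   free to read and write whole limbs, so operands living in
   bit-fields that do not start at a limb boundary (and destinations
   overlapping the sources) must be avoided.  Illustrative note.  */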
static bool
stmt_needs_operand_addr (gimple *stmt)
{
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
      case FLOAT_EXPR:
	return true;
      default:
	break;
      }
  else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
	   || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
    return true;

  return false;
}

/* Dominator walker used to discover which large/huge _BitInt
   loads could be sunk into all their uses.  */
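/* E.g. a huge _BitInt load whose uses are all mergeable statements
   can be re-read limb by limb directly inside the loops lowered for
   those uses, but only if no intervening store might clobber the
   loaded memory; the walk below uses the virtual operand chain to
   check that.  Illustrative note.  */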
class bitint_dom_walker : public dom_walker
{
public:
  bitint_dom_walker (bitmap names, bitmap loads)
    : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}

  edge before_dom_children (basic_block) final override;

private:
  bitmap m_names, m_loads;
};

edge
bitint_dom_walker::before_dom_children (basic_block bb)
{
  gphi *phi = get_virtual_phi (bb);
  tree vop;
  if (phi)
    vop = gimple_phi_result (phi);
  else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    vop = NULL_TREE;
  else
    vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;

  auto_vec<tree, 16> worklist;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;

      if (!vop && gimple_vuse (stmt))
	vop = gimple_vuse (stmt);

      tree cvop = vop;
      if (gimple_vdef (stmt))
	vop = gimple_vdef (stmt);

      tree lhs = gimple_get_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
	/* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
	   it means it will be handled in a loop or straight line code
	   at the location of its (ultimate) immediate use, so for
	   vop checking purposes check these only at the ultimate
	   immediate use.  */
	continue;

      ssa_op_iter oi;
      use_operand_p use_p;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
	{
	  tree s = USE_FROM_PTR (use_p);
	  if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	      && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	    worklist.safe_push (s);
	}

      bool needs_operand_addr = stmt_needs_operand_addr (stmt);
      while (worklist.length () > 0)
	{
	  tree s = worklist.pop ();

	  if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      needs_operand_addr |= stmt_needs_operand_addr (g);
	      FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
		      && (bitint_precision_kind (TREE_TYPE (s2))
			  >= bitint_prec_large))
		    worklist.safe_push (s2);
		}
	      continue;
	    }

	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
	    {
	      tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
		s = rhs;
	      else
		continue;
	    }
	  else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
	    continue;

	  gimple *g = SSA_NAME_DEF_STMT (s);
	  tree rhs1 = gimple_assign_rhs1 (g);
	  if (needs_operand_addr
	      && TREE_CODE (rhs1) == COMPONENT_REF
	      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
	    {
	      tree fld = TREE_OPERAND (rhs1, 1);
	      /* For little-endian, we can allow as inputs bit-fields
		 which start at a limb boundary.  */
	      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
		  && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
		  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		      % limb_prec) == 0)
		;
	      else
		{
		  bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
		  continue;
		}
	    }

	  ao_ref ref;
	  ao_ref_init (&ref, rhs1);
	  tree lvop = gimple_vuse (g);
	  unsigned limit = 64;
	  tree vuse = cvop;
	  if (vop != cvop
	      && is_gimple_assign (stmt)
	      && gimple_store_p (stmt)
	      && (needs_operand_addr
		  || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
	    vuse = vop;
	  if (vuse != lvop
	      && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
					 NULL, NULL, limit, lvop) == NULL)
	    bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
	}
    }

  bb->aux = (void *) vop;
  return NULL;
}

/* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
   build_ssa_conflict_graph.
   The differences are:
   1) don't process assignments with large/huge _BitInt lhs not in NAMES
   2) for large/huge _BitInt multiplication/division/modulo process def
      only after processing uses rather than before to make uses conflict
      with the definition
   3) for large/huge _BitInt uses not in NAMES mark the uses of their
      SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
      the final statement.  */
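/* Rationale for 2), as an illustration: for c = a * b on huge
   _BitInt the lowered code passes &c, &a and &b to a libgcc helper,
   so c must not be coalesced with a or b; processing the def after
   the uses makes the lhs conflict with both operands.  Illustrative
   note.  */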
void
build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
				 ssa_conflicts *graph, bitmap names,
				 void (*def) (live_track *, tree,
					      ssa_conflicts *),
				 void (*use) (live_track *, tree))
{
  bool muldiv_p = false;
  tree lhs = NULL_TREE;
  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
	{
	  if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
	    return;
	  switch (gimple_assign_rhs_code (stmt))
	    {
	    case MULT_EXPR:
	    case TRUNC_DIV_EXPR:
	    case TRUNC_MOD_EXPR:
	      muldiv_p = true;
	      break;
	    default:
	      break;
	    }
	}
    }

  ssa_op_iter iter;
  tree var;
  if (!muldiv_p)
    {
      /* For stmts with more than one SSA_NAME definition pretend all the
	 SSA_NAME outputs but the first one are live at this point, so
	 that conflicts are added in between all those even when they are
	 actually not really live after the asm, because expansion might
	 copy those into pseudos after the asm and if multiple outputs
	 share the same partition, it might overwrite those that should
	 be live.  E.g.
	 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
	 return a;  */
      bool first = true;
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	if (first)
	  first = false;
	else
	  use (live, var);
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	def (live, var, graph);
    }

  auto_vec<tree, 16> worklist;
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
	&& bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
      {
	if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	  use (live, var);
	else
	  worklist.safe_push (var);
      }

  while (worklist.length () > 0)
    {
      tree s = worklist.pop ();
      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
	if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
	    && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
	  {
	    if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	      use (live, var);
	    else
	      worklist.safe_push (var);
	  }
    }

  if (muldiv_p)
    def (live, lhs, graph);
}

/* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
   return the largest bitint_prec_kind of them, otherwise return
   bitint_prec_small.  */

static bitint_prec_kind
arith_overflow_arg_kind (gimple *stmt)
{
  bitint_prec_kind ret = bitint_prec_small;
  if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_MUL_OVERFLOW:
	for (int i = 0; i < 2; ++i)
	  {
	    tree a = gimple_call_arg (stmt, i);
	    if (TREE_CODE (a) == INTEGER_CST
		&& TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
	      {
		bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
		ret = MAX (ret, kind);
	      }
	  }
	break;
      default:
	break;
      }
  return ret;
}

/* Entry point for _BitInt(N) operation lowering during optimization.  */
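/* A rough sketch of the flow (illustrative): first scan SSA names to
   see whether any large/huge _BitInt is involved at all; unoptimize
   rotates, ABS/ABSU, MIN/MAX and COND_EXPR into simpler forms;
   collect the names which need to live in limb-array partitions and
   the loads which can be sunk to their uses; coalesce the partitions;
   and finally lower the remaining statements one by one via
   bitint_large_huge::lower_stmt.  */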
5752 gimple_lower_bitint (void)
5754 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
5758 for (i
= 0; i
< num_ssa_names
; ++i
)
5760 tree s
= ssa_name (i
);
5763 tree type
= TREE_TYPE (s
);
5764 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5766 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
5767 != bitint_prec_small
)
5769 type
= TREE_TYPE (type
);
5771 if (TREE_CODE (type
) == BITINT_TYPE
5772 && bitint_precision_kind (type
) != bitint_prec_small
)
5774 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5775 into memory. Such functions could have no large/huge SSA_NAMEs. */
5776 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5778 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5779 if (is_gimple_assign (g
) && gimple_store_p (g
))
5781 tree t
= gimple_assign_rhs1 (g
);
5782 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5783 && (bitint_precision_kind (TREE_TYPE (t
))
5784 >= bitint_prec_large
))
5788 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5789 to floating point types need to be rewritten. */
5790 else if (SCALAR_FLOAT_TYPE_P (type
))
5792 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5793 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5795 tree t
= gimple_assign_rhs1 (g
);
5796 if (TREE_CODE (t
) == INTEGER_CST
5797 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5798 && (bitint_precision_kind (TREE_TYPE (t
))
5799 != bitint_prec_small
))
5804 if (i
== num_ssa_names
)
5808 auto_vec
<gimple
*, 4> switch_statements
;
5809 FOR_EACH_BB_FN (bb
, cfun
)
5811 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
5813 tree idx
= gimple_switch_index (swtch
);
5814 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
5815 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
5819 group_case_labels_stmt (swtch
);
5820 switch_statements
.safe_push (swtch
);
5824 if (!switch_statements
.is_empty ())
5826 bool expanded
= false;
5830 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
5832 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
5833 tree_switch_conversion::switch_decision_tree
dt (swtch
);
5834 expanded
|= dt
.analyze_switch_statement ();
5839 free_dominance_info (CDI_DOMINATORS
);
5840 free_dominance_info (CDI_POST_DOMINATORS
);
5841 mark_virtual_operands_for_renaming (cfun
);
5842 cleanup_tree_cfg (TODO_update_ssa
);
5846 struct bitint_large_huge large_huge
;
5847 bool has_large_huge_parm_result
= false;
5848 bool has_large_huge
= false;
5849 unsigned int ret
= 0, first_large_huge
= ~0U;
5850 bool edge_insertions
= false;
5851 for (; i
< num_ssa_names
; ++i
)
5853 tree s
= ssa_name (i
);
5856 tree type
= TREE_TYPE (s
);
5857 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5859 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
5860 >= bitint_prec_large
)
5861 has_large_huge
= true;
5862 type
= TREE_TYPE (type
);
5864 if (TREE_CODE (type
) == BITINT_TYPE
5865 && bitint_precision_kind (type
) >= bitint_prec_large
)
5867 if (first_large_huge
== ~0U)
5868 first_large_huge
= i
;
5869 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
5870 gimple_stmt_iterator gsi
;
5872 /* Unoptimize certain constructs to simpler alternatives to
5873 avoid having to lower all of them. */
5874 if (is_gimple_assign (stmt
) && gimple_bb (stmt
))
5875 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
5882 first_large_huge
= 0;
5883 location_t loc
= gimple_location (stmt
);
5884 gsi
= gsi_for_stmt (stmt
);
5885 tree rhs1
= gimple_assign_rhs1 (stmt
);
5886 tree type
= TREE_TYPE (rhs1
);
5887 tree n
= gimple_assign_rhs2 (stmt
), m
;
5888 tree p
= build_int_cst (TREE_TYPE (n
),
5889 TYPE_PRECISION (type
));
5890 if (TREE_CODE (n
) == INTEGER_CST
)
5891 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
5894 m
= make_ssa_name (TREE_TYPE (n
));
5895 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
5896 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5897 gimple_set_location (g
, loc
);
5899 if (!TYPE_UNSIGNED (type
))
5901 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
5903 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5904 rhs1
= fold_convert (utype
, rhs1
);
5907 tree t
= make_ssa_name (type
);
5908 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
5909 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5910 gimple_set_location (g
, loc
);
5913 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5914 rhs_code
== LROTATE_EXPR
5915 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
5917 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5918 gimple_set_location (g
, loc
);
5919 tree op1
= gimple_assign_lhs (g
);
5920 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5921 rhs_code
== LROTATE_EXPR
5922 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
5924 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5925 gimple_set_location (g
, loc
);
5926 tree op2
= gimple_assign_lhs (g
);
5927 tree lhs
= gimple_assign_lhs (stmt
);
5928 if (!TYPE_UNSIGNED (type
))
5930 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
5931 BIT_IOR_EXPR
, op1
, op2
);
5932 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5933 gimple_set_location (g
, loc
);
5934 g
= gimple_build_assign (lhs
, NOP_EXPR
,
5935 gimple_assign_lhs (g
));
5938 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
5939 gsi_replace (&gsi
, g
, true);
5940 gimple_set_location (g
, loc
);
5948 first_large_huge
= 0;
5949 gsi
= gsi_for_stmt (stmt
);
5950 tree lhs
= gimple_assign_lhs (stmt
);
5951 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
5952 location_t loc
= gimple_location (stmt
);
5953 if (rhs_code
== ABS_EXPR
)
5954 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5955 build_zero_cst (TREE_TYPE (rhs1
)),
5956 NULL_TREE
, NULL_TREE
);
5957 else if (rhs_code
== ABSU_EXPR
)
5959 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
5960 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
5961 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5962 gimple_set_location (g
, loc
);
5963 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5964 build_zero_cst (TREE_TYPE (rhs1
)),
5965 NULL_TREE
, NULL_TREE
);
5968 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
5970 rhs2
= gimple_assign_rhs2 (stmt
);
5971 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5972 std::swap (rhs1
, rhs2
);
5973 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
5974 NULL_TREE
, NULL_TREE
);
5975 if (rhs_code
== MAX_EXPR
)
5976 std::swap (rhs1
, rhs2
);
5980 g
= gimple_build_cond (NE_EXPR
, rhs1
,
5981 build_zero_cst (TREE_TYPE (rhs1
)),
5982 NULL_TREE
, NULL_TREE
);
5983 rhs1
= gimple_assign_rhs2 (stmt
);
5984 rhs2
= gimple_assign_rhs3 (stmt
);
5986 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5987 gimple_set_location (g
, loc
);
5988 edge e1
= split_block (gsi_bb (gsi
), g
);
5989 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
5990 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
5991 e3
->probability
= profile_probability::even ();
5992 e1
->flags
= EDGE_TRUE_VALUE
;
5993 e1
->probability
= e3
->probability
.invert ();
5994 if (dom_info_available_p (CDI_DOMINATORS
))
5995 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
5996 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
5998 gsi
= gsi_after_labels (e1
->dest
);
5999 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6001 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6002 gimple_set_location (g
, loc
);
6003 rhs2
= gimple_assign_lhs (g
);
6004 std::swap (rhs1
, rhs2
);
6006 gsi
= gsi_for_stmt (stmt
);
6007 gsi_remove (&gsi
, true);
6008 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
6009 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
6010 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
6014 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6015 into memory. Such functions could have no large/huge SSA_NAMEs. */
6016 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6018 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6019 if (is_gimple_assign (g
) && gimple_store_p (g
))
6021 tree t
= gimple_assign_rhs1 (g
);
6022 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6023 && (bitint_precision_kind (TREE_TYPE (t
))
6024 >= bitint_prec_large
))
6025 has_large_huge
= true;
6028 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6029 to floating point types need to be rewritten. */
6030 else if (SCALAR_FLOAT_TYPE_P (type
))
6032 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6033 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
6035 tree t
= gimple_assign_rhs1 (g
);
6036 if (TREE_CODE (t
) == INTEGER_CST
6037 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6038 && (bitint_precision_kind (TREE_TYPE (t
))
6039 >= bitint_prec_large
))
6040 has_large_huge
= true;
6044 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
6046 tree s
= ssa_name (i
);
6049 tree type
= TREE_TYPE (s
);
6050 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6051 type
= TREE_TYPE (type
);
6052 if (TREE_CODE (type
) == BITINT_TYPE
6053 && bitint_precision_kind (type
) >= bitint_prec_large
)
6055 use_operand_p use_p
;
6057 has_large_huge
= true;
6059 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
6061 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6062 the same bb and could be handled in the same loop with the
6065 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6066 && single_imm_use (s
, &use_p
, &use_stmt
)
6067 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
6069 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
6071 if (mergeable_op (use_stmt
))
6073 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
6074 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
6076 if (gimple_assign_cast_p (use_stmt
))
6078 tree lhs
= gimple_assign_lhs (use_stmt
);
6079 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
6082 else if (gimple_store_p (use_stmt
)
6083 && is_gimple_assign (use_stmt
)
6084 && !gimple_has_volatile_ops (use_stmt
)
6085 && !stmt_ends_bb_p (use_stmt
))
6088 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
6090 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6091 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6092 && ((is_gimple_assign (use_stmt
)
6093 && (gimple_assign_rhs_code (use_stmt
)
6095 || gimple_code (use_stmt
) == GIMPLE_COND
)
6096 && (!gimple_store_p (use_stmt
)
6097 || (is_gimple_assign (use_stmt
)
6098 && !gimple_has_volatile_ops (use_stmt
)
6099 && !stmt_ends_bb_p (use_stmt
)))
6100 && (TREE_CODE (rhs1
) != SSA_NAME
6101 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
6103 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6104 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6105 < bitint_prec_large
))
6107 if (is_gimple_assign (use_stmt
))
6108 switch (gimple_assign_rhs_code (use_stmt
))
6111 case TRUNC_DIV_EXPR
:
6112 case TRUNC_MOD_EXPR
:
6114 /* Uses which use handle_operand_addr can't
6115 deal with nested casts. */
6116 if (TREE_CODE (rhs1
) == SSA_NAME
6117 && gimple_assign_cast_p
6118 (SSA_NAME_DEF_STMT (rhs1
))
6119 && has_single_use (rhs1
)
6120 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6121 == gimple_bb (SSA_NAME_DEF_STMT (s
))))
6127 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6128 >= TYPE_PRECISION (TREE_TYPE (s
)))
6129 && mergeable_op (use_stmt
))
6131 /* Prevent merging a widening non-mergeable cast
6132 on result of some narrower mergeable op
6133 together with later mergeable operations. E.g.
6134 result of _BitInt(223) addition shouldn't be
6135 sign-extended to _BitInt(513) and have another
6136 _BitInt(513) added to it, as handle_plus_minus
6137 with its PHI node handling inside of handle_cast
6138 will not work correctly. An exception is if
6139 use_stmt is a store, this is handled directly
6140 in lower_mergeable_stmt. */
6141 if (TREE_CODE (rhs1
) != SSA_NAME
6142 || !has_single_use (rhs1
)
6143 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6144 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6145 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
6146 || gimple_store_p (use_stmt
))
6148 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6149 < TYPE_PRECISION (TREE_TYPE (s
)))
6150 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
6152 /* Another exception is if the widening cast is
6153 from mergeable same precision cast from something
6156 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
6157 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
6158 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
6159 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
6161 if (TREE_CODE (rhs2
) != SSA_NAME
6162 || !has_single_use (rhs2
)
6163 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
6164 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6165 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
	      if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
		switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
		  {
		  case IMAGPART_EXPR:
		    {
		      tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
		      rhs1 = TREE_OPERAND (rhs1, 0);
		      if (TREE_CODE (rhs1) == SSA_NAME)
			{
			  gimple *g = SSA_NAME_DEF_STMT (rhs1);
			  if (optimizable_arith_overflow (g))
			    continue;
			}
		    }
		    /* FALLTHRU */
		  case LSHIFT_EXPR:
		  case RSHIFT_EXPR:
		  case MULT_EXPR:
		  case TRUNC_DIV_EXPR:
		  case TRUNC_MOD_EXPR:
		  case FIX_TRUNC_EXPR:
		  case REALPART_EXPR:
		    if (gimple_store_p (use_stmt)
			&& is_gimple_assign (use_stmt)
			&& !gimple_has_volatile_ops (use_stmt)
			&& !stmt_ends_bb_p (use_stmt))
		      {
			tree lhs = gimple_assign_lhs (use_stmt);
			/* As multiply/division passes address of the lhs
			   to library function and that assumes it can extend
			   it to whole number of limbs, avoid merging those
			   with bit-field stores.  Don't allow it for
			   shifts etc. either, so that the bit-field store
			   handling doesn't have to be done everywhere.  */
			if (TREE_CODE (lhs) == COMPONENT_REF
			    && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
			  break;
			continue;
		      }
		    break;
		  default:
		    break;
		  }
	    }
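	  /* The bit-field case above arises e.g. for (hypothetical):

	       struct S { unsigned _BitInt(135) b : 130; } *p;
	       p->b = x * y;

	     The multiplication libcall writes whole limbs through the
	     destination address it is given, which would clobber bits
	     sharing the last limb with the bit-field, so such a store
	     is lowered separately from the multiplication.  */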
	  /* Also ignore uninitialized uses.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (s)
	      && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
	    continue;

	force_name:
	  if (!large_huge.m_names)
	    large_huge.m_names = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
	  if (has_single_use (s))
	    {
	      if (!large_huge.m_single_use_names)
		large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (large_huge.m_single_use_names,
			      SSA_NAME_VERSION (s));
	    }
	  if (SSA_NAME_VAR (s)
	      && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
		   && SSA_NAME_IS_DEFAULT_DEF (s))
		  || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
	    has_large_huge_parm_result = true;
	  if (optimize
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
	      && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
	      && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
	    {
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool optimizable_load = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, s)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (gimple_code (use_stmt) == GIMPLE_PHI
		      || is_gimple_call (use_stmt))
		    {
		      optimizable_load = false;
		      break;
		    }
		}

	      ssa_op_iter oi;
	      FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
					oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
		    {
		      optimizable_load = false;
		      break;
		    }
		}

	      if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
		{
		  if (!large_huge.m_loads)
		    large_huge.m_loads = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
		}
	    }
	}
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
		has_large_huge = true;
	    }
	}
    }
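  /* A minimal example of that last case (hypothetical):

       void foo (_BitInt(512) *p) { *p = 1wb; }

     contains no large/huge SSA_NAMEs at all, yet the INTEGER_CST store
     still has to be expanded limb by limb, so has_large_huge is set
     from the store found through the virtual operand's def stmt.  */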
  if (large_huge.m_names || has_large_huge)
    {
      ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
      calculate_dominance_info (CDI_DOMINATORS);
      if (optimize)
	enable_ranger (cfun);
      if (large_huge.m_loads)
	{
	  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
	  entry->aux = NULL;
	  bitint_dom_walker (large_huge.m_names,
			     large_huge.m_loads).walk (entry);
	  bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
	  clear_aux_for_blocks ();
	  BITMAP_FREE (large_huge.m_loads);
	}
      large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
      large_huge.m_limb_size
	= tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
    }
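  /* E.g. with 64-bit limbs (as on x86_64) limb_prec is 64, so
     m_limb_type is a 64-bit unsigned INTEGER_TYPE, m_limb_size is 8
     and a _BitInt(512) object is manipulated as 8 such limbs.  */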
  if (large_huge.m_names)
    {
      large_huge.m_map
	= init_var_map (num_ssa_names, NULL, large_huge.m_names);
      coalesce_ssa_name (large_huge.m_map);
      partition_view_normal (large_huge.m_map);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "After Coalescing:\n");
	  dump_var_map (dump_file, large_huge.m_map);
	}
      large_huge.m_vars
	= XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
      bitmap_iterator bi;
      if (has_large_huge_parm_result)
	EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
	  {
	    tree s = ssa_name (i);
	    if (SSA_NAME_VAR (s)
		&& ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
		     && SSA_NAME_IS_DEFAULT_DEF (s))
		    || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
	      {
		int p = var_to_partition (large_huge.m_map, s);
		if (large_huge.m_vars[p] == NULL_TREE)
		  {
		    large_huge.m_vars[p] = SSA_NAME_VAR (s);
		    mark_addressable (SSA_NAME_VAR (s));
		  }
	      }
	  }
      tree atype = NULL_TREE;
      EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
	{
	  tree s = ssa_name (i);
	  int p = var_to_partition (large_huge.m_map, s);
	  if (large_huge.m_vars[p] != NULL_TREE)
	    continue;
	  if (atype == NULL_TREE
	      || !tree_int_cst_equal (TYPE_SIZE (atype),
				      TYPE_SIZE (TREE_TYPE (s))))
	    {
	      unsigned HOST_WIDE_INT nelts
		= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
	      atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
	    }
	  large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
	  mark_addressable (large_huge.m_vars[p]);
	}
    }
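  /* For example, with 64-bit limbs each partition containing
     _BitInt(192) SSA_NAMEs is backed by its own addressable 3-element
     array of limbs (a VAR_DECL named "bitint"); only the array type
     atype is cached and shared between same-sized partitions, while
     default-def PARM_DECL/RESULT_DECL partitions map back to the
     original decls above.  */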
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      gimple_stmt_iterator prev;
      for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
	   gsi = prev)
	{
	  prev = gsi;
	  gsi_prev (&prev);
	  ssa_op_iter iter;
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt))
	    continue;
	  bitint_prec_kind kind = bitint_prec_small;
	  tree t;
	  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
	    if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
	      {
		bitint_prec_kind this_kind
		  = bitint_precision_kind (TREE_TYPE (t));
		kind = MAX (kind, this_kind);
	      }
	  if (is_gimple_assign (stmt) && gimple_store_p (stmt))
	    {
	      t = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
		{
		  bitint_prec_kind this_kind
		    = bitint_precision_kind (TREE_TYPE (t));
		  kind = MAX (kind, this_kind);
		}
	    }
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
	    {
	      t = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && TREE_CODE (t) == INTEGER_CST)
		{
		  bitint_prec_kind this_kind
		    = bitint_precision_kind (TREE_TYPE (t));
		  kind = MAX (kind, this_kind);
		}
	    }
	  if (is_gimple_call (stmt))
	    {
	      t = gimple_call_lhs (stmt);
	      if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
		{
		  bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
		  kind = MAX (kind, this_kind);
		  if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
		    {
		      this_kind
			= bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
		      kind = MAX (kind, this_kind);
		    }
		}
	    }
	  if (kind == bitint_prec_small)
	    continue;
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_CALL:
	      /* For now.  We'll need to handle some internal functions and
		 perhaps some builtins.  */
	      if (kind == bitint_prec_middle)
		continue;
	      break;
	    case GIMPLE_ASM:
	      if (kind == bitint_prec_middle)
		continue;
	      break;
	    case GIMPLE_RETURN:
	      continue;
	    case GIMPLE_ASSIGN:
	      if (gimple_clobber_p (stmt))
		continue;
	      if (kind >= bitint_prec_large)
		break;
	      if (gimple_assign_single_p (stmt))
		/* No need to lower copies, loads or stores.  */
		continue;
	      if (gimple_assign_cast_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
		      && (TYPE_PRECISION (TREE_TYPE (lhs))
			  == TYPE_PRECISION (TREE_TYPE (rhs))))
		    /* No need to lower casts to same precision.  */
		    continue;
		}
	      break;
	    default:
	      break;
	    }

	  if (kind == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
		 with the same precision and back.  */
	      unsigned int nops = gimple_num_ops (stmt);
	      for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
		   i < nops; ++i)
		if (tree op = gimple_op (stmt, i))
		  {
		    tree nop = maybe_cast_middle_bitint (&gsi, op, type);
		    if (nop != op)
		      gimple_set_op (stmt, i, nop);
		    else if (COMPARISON_CLASS_P (op))
		      {
			TREE_OPERAND (op, 0)
			  = maybe_cast_middle_bitint (&gsi,
						      TREE_OPERAND (op, 0),
						      type);
			TREE_OPERAND (op, 1)
			  = maybe_cast_middle_bitint (&gsi,
						      TREE_OPERAND (op, 1),
						      type);
		      }
		    else if (TREE_CODE (op) == CASE_LABEL_EXPR)
		      {
			CASE_LOW (op)
			  = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
						      type);
			CASE_HIGH (op)
			  = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
						      type);
		      }
		  }
	      if (tree lhs = gimple_get_lhs (stmt))
		if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
		    && (bitint_precision_kind (TREE_TYPE (lhs))
			== bitint_prec_middle))
		  {
		    int prec = TYPE_PRECISION (TREE_TYPE (lhs));
		    int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
		    type = build_nonstandard_integer_type (prec, uns);
		    tree lhs2 = make_ssa_name (type);
		    gimple_set_lhs (stmt, lhs2);
		    gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
		    if (stmt_ends_bb_p (stmt))
		      {
			edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
			gsi_insert_on_edge_immediate (e, g);
		      }
		    else
		      gsi_insert_after (&gsi, g, GSI_SAME_STMT);
		  }
	      update_stmt (stmt);
	      continue;
	    }
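	  /* A hedged sketch of that rewrite (SSA names made up):

	       _1 = x_2 + y_3;		middle _BitInt(N) addition

	     becomes

	       _4 = (uintN) x_2;
	       _5 = (uintN) y_3;
	       _6 = _4 + _5;
	       _1 = (_BitInt(N)) _6;

	     where uintN stands for the INTEGER_TYPE with N bits of
	     precision built above (signedness taken from the lhs), which
	     expansion already knows how to handle.  */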
	  if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME)
	      {
		tree type = TREE_TYPE (lhs);
		if (TREE_CODE (type) == COMPLEX_TYPE)
		  type = TREE_TYPE (type);
		if (TREE_CODE (type) == BITINT_TYPE
		    && bitint_precision_kind (type) >= bitint_prec_large
		    && (large_huge.m_names == NULL
			|| !bitmap_bit_p (large_huge.m_names,
					  SSA_NAME_VERSION (lhs))))
		  continue;
	      }

	  large_huge.lower_stmt (stmt);
	}

      tree atype = NULL_TREE;
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree lhs = gimple_phi_result (phi);
	  if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
	      || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
	    continue;
	  int p1 = var_to_partition (large_huge.m_map, lhs);
	  gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
	  tree v1 = large_huge.m_vars[p1];
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      edge e = gimple_phi_arg_edge (phi, i);
	      gimple *g;
	      switch (TREE_CODE (arg))
		{
		case INTEGER_CST:
		  if (integer_zerop (arg) && VAR_P (v1))
		    {
		      tree zero = build_zero_cst (TREE_TYPE (v1));
		      g = gimple_build_assign (v1, zero);
		      gsi_insert_on_edge (e, g);
		      edge_insertions = true;
		      break;
		    }
		  int ext;
		  unsigned int min_prec, prec, rem;
		  tree c;
		  prec = TYPE_PRECISION (TREE_TYPE (arg));
		  rem = prec % (2 * limb_prec);
		  min_prec = bitint_min_cst_precision (arg, ext);
		  if (min_prec > prec - rem - 2 * limb_prec
		      && min_prec > (unsigned) limb_prec)
		    /* Constant which has enough significant bits that it
		       isn't worth trying to save .rodata space by extending
		       from smaller number.  */
		    min_prec = prec;
		  else
		    min_prec = CEIL (min_prec, limb_prec) * limb_prec;
		  if (min_prec == 0)
		    c = NULL_TREE;
		  else if (min_prec == prec)
		    c = tree_output_constant_def (arg);
		  else if (min_prec == (unsigned) limb_prec)
		    c = fold_convert (large_huge.m_limb_type, arg);
		  else
		    {
		      tree ctype = build_bitint_type (min_prec, 1);
		      c = tree_output_constant_def (fold_convert (ctype,
								  arg));
		    }
		  if (c)
		    {
		      if (VAR_P (v1) && min_prec == prec)
			{
			  tree v2 = build1 (VIEW_CONVERT_EXPR,
					    TREE_TYPE (v1), c);
			  g = gimple_build_assign (v1, v2);
			  gsi_insert_on_edge (e, g);
			  edge_insertions = true;
			  break;
			}
		      if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
			g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (c), v1),
						 c);
		      else
			{
			  unsigned HOST_WIDE_INT nelts
			    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
			      / limb_prec;
			  tree vtype
			    = build_array_type_nelts (large_huge.m_limb_type,
						      nelts);
			  g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							   vtype, v1),
						   build1 (VIEW_CONVERT_EXPR,
							   vtype, c));
			}
		      gsi_insert_on_edge (e, g);
		    }
		  if (ext == 0)
		    {
		      unsigned HOST_WIDE_INT nelts
			= (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
			   - min_prec) / limb_prec;
		      tree vtype
			= build_array_type_nelts (large_huge.m_limb_type,
						  nelts);
		      tree ptype = build_pointer_type (TREE_TYPE (v1));
		      tree off;
		      if (c)
			off = fold_convert (ptype,
					    TYPE_SIZE_UNIT (TREE_TYPE (c)));
		      else
			off = build_zero_cst (ptype);
		      tree vd = build2 (MEM_REF, vtype,
					build_fold_addr_expr (v1), off);
		      g = gimple_build_assign (vd, build_zero_cst (vtype));
		    }
		  else
		    {
		      tree vd = v1;
		      if (c)
			{
			  tree ptype = build_pointer_type (TREE_TYPE (v1));
			  tree off
			    = fold_convert (ptype,
					    TYPE_SIZE_UNIT (TREE_TYPE (c)));
			  vd = build2 (MEM_REF, large_huge.m_limb_type,
				       build_fold_addr_expr (v1), off);
			}
		      vd = build_fold_addr_expr (vd);
		      unsigned HOST_WIDE_INT nbytes
			= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
		      if (c)
			nbytes
			  -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
		      tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
		      g = gimple_build_call (fn, 3, vd,
					     integer_minus_one_node,
					     build_int_cst (sizetype,
							    nbytes));
		    }
		  gsi_insert_on_edge (e, g);
		  edge_insertions = true;
		  break;
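		  /* E.g. (a sketch assuming 64-bit limbs): for a PHI
		     argument -5wb of type _BitInt(512), min_prec rounds
		     up to a single limb, so just one limb holding -5 is
		     stored into the partition variable and, because ext
		     is -1, the remaining 56 bytes are filled with a
		     memset (..., -1, 56) call inserted on the edge.  */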
		default:
		  gcc_unreachable ();
		case SSA_NAME:
		  if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
		    {
		      if (large_huge.m_names == NULL
			  || !bitmap_bit_p (large_huge.m_names,
					    SSA_NAME_VERSION (arg)))
			continue;
		    }
		  int p2 = var_to_partition (large_huge.m_map, arg);
		  if (p1 == p2)
		    continue;
		  gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
		  tree v2 = large_huge.m_vars[p2];
		  if (VAR_P (v1) && VAR_P (v2))
		    g = gimple_build_assign (v1, v2);
		  else if (VAR_P (v1))
		    g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (v1), v2));
		  else if (VAR_P (v2))
		    g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
						     TREE_TYPE (v2), v1), v2);
		  else
		    {
		      if (atype == NULL_TREE
			  || !tree_int_cst_equal (TYPE_SIZE (atype),
						  TYPE_SIZE (TREE_TYPE (lhs))))
			{
			  unsigned HOST_WIDE_INT nelts
			    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
			      / limb_prec;
			  atype
			    = build_array_type_nelts (large_huge.m_limb_type,
						      nelts);
			}
		      g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
						       atype, v1),
					       build1 (VIEW_CONVERT_EXPR,
						       atype, v2));
		    }
		  gsi_insert_on_edge (e, g);
		  edge_insertions = true;
		  break;
		}
	    }
	}
    }
  if (large_huge.m_names || has_large_huge)
    {
      gimple *nop = NULL;
      for (i = 0; i < num_ssa_names; ++i)
	{
	  tree s = ssa_name (i);
	  if (s == NULL_TREE)
	    continue;
	  tree type = TREE_TYPE (s);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (large_huge.m_preserved
		  && bitmap_bit_p (large_huge.m_preserved,
				   SSA_NAME_VERSION (s)))
		continue;
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      if (gimple_code (g) == GIMPLE_NOP)
		{
		  if (SSA_NAME_VAR (s))
		    set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
		  release_ssa_name (s);
		  continue;
		}
	      if (gimple_bb (g) == NULL)
		{
		  release_ssa_name (s);
		  continue;
		}
	      if (gimple_code (g) != GIMPLE_ASM)
		{
		  gimple_stmt_iterator gsi = gsi_for_stmt (g);
		  bool save_vta = flag_var_tracking_assignments;
		  flag_var_tracking_assignments = false;
		  gsi_remove (&gsi, true);
		  flag_var_tracking_assignments = save_vta;
		}
	      if (nop == NULL)
		nop = gimple_build_nop ();
	      SSA_NAME_DEF_STMT (s) = nop;
	      release_ssa_name (s);
	    }
	}
      if (optimize)
	disable_ranger (cfun);
    }

  if (edge_insertions)
    gsi_commit_edge_inserts ();

  return ret;
}
namespace {

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}

namespace {

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}