1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
31 #include "fold-const.h"
33 #include "gimple-iterator.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
57 #include "gimple-lower-bitint.h"
59 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
60 target hook says it is a single limb, middle _BitInt which per ABI
61 does not, but there is some INTEGER_TYPE in which arithmetics can be
62 performed (operations on such _BitInt are lowered to casts to that
63 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
64 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
65 ones), large _BitInt which should be handled by straight line code and
66 finally huge _BitInt which should be handled by loops over the limbs. */
enum bitint_prec_kind
{
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  Zero means not yet
   computed.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;

/* Number of bits in one limb; set lazily from the target's limb mode
   the first time bitint_precision_kind needs it.  NOTE(review): this
   declaration was dropped by the extraction and is restored here —
   confirm against the upstream file.  */
static int limb_prec;
80 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
82 static bitint_prec_kind
83 bitint_precision_kind (int prec
)
85 if (prec
<= small_max_prec
)
86 return bitint_prec_small
;
87 if (huge_min_prec
&& prec
>= huge_min_prec
)
88 return bitint_prec_huge
;
89 if (large_min_prec
&& prec
>= large_min_prec
)
90 return bitint_prec_large
;
91 if (mid_min_prec
&& prec
>= mid_min_prec
)
92 return bitint_prec_middle
;
94 struct bitint_info info
;
95 bool ok
= targetm
.c
.bitint_type_info (prec
, &info
);
97 scalar_int_mode limb_mode
= as_a
<scalar_int_mode
> (info
.limb_mode
);
98 if (prec
<= GET_MODE_PRECISION (limb_mode
))
100 small_max_prec
= prec
;
101 return bitint_prec_small
;
104 && GET_MODE_PRECISION (limb_mode
) < MAX_FIXED_MODE_SIZE
)
105 large_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
107 limb_prec
= GET_MODE_PRECISION (limb_mode
);
110 if (4 * limb_prec
>= MAX_FIXED_MODE_SIZE
)
111 huge_min_prec
= 4 * limb_prec
;
113 huge_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
115 if (prec
<= MAX_FIXED_MODE_SIZE
)
117 if (!mid_min_prec
|| prec
< mid_min_prec
)
119 return bitint_prec_middle
;
121 if (large_min_prec
&& prec
<= large_min_prec
)
122 return bitint_prec_large
;
123 return bitint_prec_huge
;
126 /* Same for a TYPE. */
128 static bitint_prec_kind
129 bitint_precision_kind (tree type
)
131 return bitint_precision_kind (TYPE_PRECISION (type
));
134 /* Return minimum precision needed to describe INTEGER_CST
135 CST. All bits above that precision up to precision of
136 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
137 if EXT is set to -1. */
140 bitint_min_cst_precision (tree cst
, int &ext
)
142 ext
= tree_int_cst_sgn (cst
) < 0 ? -1 : 0;
143 wide_int w
= wi::to_wide (cst
);
144 unsigned min_prec
= wi::min_precision (w
, TYPE_SIGN (TREE_TYPE (cst
)));
145 /* For signed values, we don't need to count the sign bit,
146 we'll use constant 0 or -1 for the upper bits. */
147 if (!TYPE_UNSIGNED (TREE_TYPE (cst
)))
151 /* For unsigned values, also try signed min_precision
152 in case the constant has lots of most significant bits set. */
153 unsigned min_prec2
= wi::min_precision (w
, SIGNED
) - 1;
154 if (min_prec2
< min_prec
)
165 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
166 cached in TYPE and return it. */
169 maybe_cast_middle_bitint (gimple_stmt_iterator
*gsi
, tree op
, tree
&type
)
172 || TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
173 || bitint_precision_kind (TREE_TYPE (op
)) != bitint_prec_middle
)
176 int prec
= TYPE_PRECISION (TREE_TYPE (op
));
177 int uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
178 if (type
== NULL_TREE
179 || TYPE_PRECISION (type
) != prec
180 || TYPE_UNSIGNED (type
) != uns
)
181 type
= build_nonstandard_integer_type (prec
, uns
);
183 if (TREE_CODE (op
) != SSA_NAME
)
185 tree nop
= fold_convert (type
, op
);
186 if (is_gimple_val (nop
))
190 tree nop
= make_ssa_name (type
);
191 gimple
*g
= gimple_build_assign (nop
, NOP_EXPR
, op
);
192 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
196 /* Return true if STMT can be handled in a loop from least to most
197 significant limb together with its dependencies. */
200 mergeable_op (gimple
*stmt
)
202 if (!is_gimple_assign (stmt
))
204 switch (gimple_assign_rhs_code (stmt
))
218 tree cnt
= gimple_assign_rhs2 (stmt
);
219 if (tree_fits_uhwi_p (cnt
)
220 && tree_to_uhwi (cnt
) < (unsigned HOST_WIDE_INT
) limb_prec
)
225 case VIEW_CONVERT_EXPR
:
227 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
228 tree rhs_type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
229 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
230 && TREE_CODE (lhs_type
) == BITINT_TYPE
231 && TREE_CODE (rhs_type
) == BITINT_TYPE
232 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
233 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
234 && tree_int_cst_equal (TYPE_SIZE (lhs_type
), TYPE_SIZE (rhs_type
)))
236 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
))
238 if ((unsigned) TYPE_PRECISION (lhs_type
) % (2 * limb_prec
) != 0)
240 if (bitint_precision_kind (lhs_type
) == bitint_prec_large
)
251 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
252 _Complex large/huge _BitInt lhs which has at most two immediate uses,
253 at most one use in REALPART_EXPR stmt in the same bb and exactly one
254 IMAGPART_EXPR use in the same bb with a single use which casts it to
255 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
256 return 2. Such cases (most common uses of those builtins) can be
257 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
258 of REALPART_EXPR as not needed to be backed up by a stack variable.
259 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
262 optimizable_arith_overflow (gimple
*stmt
)
264 bool is_ubsan
= false;
265 if (!is_gimple_call (stmt
) || !gimple_call_internal_p (stmt
))
267 switch (gimple_call_internal_fn (stmt
))
269 case IFN_ADD_OVERFLOW
:
270 case IFN_SUB_OVERFLOW
:
271 case IFN_MUL_OVERFLOW
:
273 case IFN_UBSAN_CHECK_ADD
:
274 case IFN_UBSAN_CHECK_SUB
:
275 case IFN_UBSAN_CHECK_MUL
:
281 tree lhs
= gimple_call_lhs (stmt
);
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
286 tree type
= is_ubsan
? TREE_TYPE (lhs
) : TREE_TYPE (TREE_TYPE (lhs
));
287 if (TREE_CODE (type
) != BITINT_TYPE
288 || bitint_precision_kind (type
) < bitint_prec_large
)
295 if (!single_imm_use (lhs
, &use_p
, &use_stmt
)
296 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
297 || !gimple_store_p (use_stmt
)
298 || !is_gimple_assign (use_stmt
)
299 || gimple_has_volatile_ops (use_stmt
)
300 || stmt_ends_bb_p (use_stmt
))
308 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
310 gimple
*g
= USE_STMT (use_p
);
311 if (is_gimple_debug (g
))
313 if (!is_gimple_assign (g
) || gimple_bb (g
) != gimple_bb (stmt
))
315 if (gimple_assign_rhs_code (g
) == REALPART_EXPR
)
321 else if (gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
327 use_operand_p use2_p
;
329 tree lhs2
= gimple_assign_lhs (g
);
330 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2
))
332 if (!single_imm_use (lhs2
, &use2_p
, &use_stmt
)
333 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
334 || !gimple_assign_cast_p (use_stmt
))
337 lhs2
= gimple_assign_lhs (use_stmt
);
338 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2
))
339 || TREE_CODE (TREE_TYPE (lhs2
)) == BITINT_TYPE
)
347 return seen
== 3 ? 2 : 1;
350 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
351 comparing large/huge _BitInt types, return the comparison code and if
352 non-NULL fill in the comparison operands to *POP1 and *POP2. */
355 comparison_op (gimple
*stmt
, tree
*pop1
, tree
*pop2
)
357 tree op1
= NULL_TREE
, op2
= NULL_TREE
;
358 tree_code code
= ERROR_MARK
;
359 if (gimple_code (stmt
) == GIMPLE_COND
)
361 code
= gimple_cond_code (stmt
);
362 op1
= gimple_cond_lhs (stmt
);
363 op2
= gimple_cond_rhs (stmt
);
365 else if (is_gimple_assign (stmt
))
367 code
= gimple_assign_rhs_code (stmt
);
368 op1
= gimple_assign_rhs1 (stmt
);
369 if (TREE_CODE_CLASS (code
) == tcc_comparison
370 || TREE_CODE_CLASS (code
) == tcc_binary
)
371 op2
= gimple_assign_rhs2 (stmt
);
373 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
375 tree type
= TREE_TYPE (op1
);
376 if (TREE_CODE (type
) != BITINT_TYPE
377 || bitint_precision_kind (type
) < bitint_prec_large
)
387 /* Class used during large/huge _BitInt lowering containing all the
388 state for the methods. */
390 struct bitint_large_huge
393 : m_names (NULL
), m_loads (NULL
), m_preserved (NULL
),
394 m_single_use_names (NULL
), m_map (NULL
), m_vars (NULL
),
395 m_limb_type (NULL_TREE
), m_data (vNULL
) {}
397 ~bitint_large_huge ();
399 void insert_before (gimple
*);
400 tree
limb_access_type (tree
, tree
);
401 tree
limb_access (tree
, tree
, tree
, bool);
402 void if_then (gimple
*, profile_probability
, edge
&, edge
&);
403 void if_then_else (gimple
*, profile_probability
, edge
&, edge
&);
404 void if_then_if_then_else (gimple
*g
, gimple
*,
405 profile_probability
, profile_probability
,
406 edge
&, edge
&, edge
&);
407 tree
handle_operand (tree
, tree
);
408 tree
prepare_data_in_out (tree
, tree
, tree
*);
409 tree
add_cast (tree
, tree
);
410 tree
handle_plus_minus (tree_code
, tree
, tree
, tree
);
411 tree
handle_lshift (tree
, tree
, tree
);
412 tree
handle_cast (tree
, tree
, tree
);
413 tree
handle_load (gimple
*, tree
);
414 tree
handle_stmt (gimple
*, tree
);
415 tree
handle_operand_addr (tree
, gimple
*, int *, int *);
416 tree
create_loop (tree
, tree
*);
417 tree
lower_mergeable_stmt (gimple
*, tree_code
&, tree
, tree
);
418 tree
lower_comparison_stmt (gimple
*, tree_code
&, tree
, tree
);
419 void lower_shift_stmt (tree
, gimple
*);
420 void lower_muldiv_stmt (tree
, gimple
*);
421 void lower_float_conv_stmt (tree
, gimple
*);
422 tree
arith_overflow_extract_bits (unsigned int, unsigned int, tree
,
424 void finish_arith_overflow (tree
, tree
, tree
, tree
, tree
, tree
, gimple
*,
426 void lower_addsub_overflow (tree
, gimple
*);
427 void lower_mul_overflow (tree
, gimple
*);
428 void lower_cplxpart_stmt (tree
, gimple
*);
429 void lower_complexexpr_stmt (gimple
*);
430 void lower_bit_query (gimple
*);
431 void lower_call (tree
, gimple
*);
432 void lower_asm (gimple
*);
433 void lower_stmt (gimple
*);
435 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
436 merged with their uses. */
438 /* Subset of those for lhs of load statements. These will be
439 cleared in m_names if the loads will be mergeable with all
442 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
443 to later passes (arguments or return values of calls). */
445 /* Subset of m_names which have a single use. As the lowering
446 can replace various original statements with their lowered
447 form even before it is done iterating over all basic blocks,
448 testing has_single_use for the purpose of emitting clobbers
449 doesn't work properly. */
450 bitmap m_single_use_names
;
451 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
454 /* Mapping of the partitions to corresponding decls. */
456 /* Unsigned integer type with limb precision. */
458 /* Its TYPE_SIZE_UNIT. */
459 unsigned HOST_WIDE_INT m_limb_size
;
460 /* Location of a gimple stmt which is being currently lowered. */
462 /* Current stmt iterator where code is being lowered currently. */
463 gimple_stmt_iterator m_gsi
;
464 /* Statement after which any clobbers should be added if non-NULL. */
465 gimple
*m_after_stmt
;
466 /* Set when creating loops to the loop header bb and its preheader. */
467 basic_block m_bb
, m_preheader_bb
;
468 /* Stmt iterator after which initialization statements should be emitted. */
469 gimple_stmt_iterator m_init_gsi
;
470 /* Decl into which a mergeable statement stores result. */
472 /* handle_operand/handle_stmt can be invoked in various ways.
474 lower_mergeable_stmt for large _BitInt calls those with constant
475 idx only, expanding to straight line code, for huge _BitInt
476 emits a loop from least significant limb upwards, where each loop
477 iteration handles 2 limbs, plus there can be up to one full limb
478 and one partial limb processed after the loop, where handle_operand
479 and/or handle_stmt are called with constant idx. m_upwards_2limb
480 is set for this case, false otherwise. m_upwards is true if it
481 is either large or huge _BitInt handled by lower_mergeable_stmt,
482 i.e. indexes always increase.
484 Another way is used by lower_comparison_stmt, which walks limbs
485 from most significant to least significant, partial limb if any
486 processed first with constant idx and then loop processing a single
487 limb per iteration with non-constant idx.
489 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
490 destination limbs are processed from most significant to least
491 significant or for RSHIFT_EXPR the other way around, in loops or
492 straight line code, but idx usually is non-constant (so from
493 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
494 handling there can access even partial limbs using non-constant
495 idx (then m_var_msb should be true, for all the other cases
496 including lower_mergeable_stmt/lower_comparison_stmt that is
497 not the case and so m_var_msb should be false).
499 m_first should be set the first time handle_operand/handle_stmt
500 is called and clear when it is called for some other limb with
501 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
502 or statement (e.g. +/-/<< with < limb_prec constant) needs some
503 state between the different calls, when m_first is true it should
504 push some trees to m_data vector and also make sure m_data_cnt is
505 incremented by how many trees were pushed, and when m_first is
506 false, it can use the m_data[m_data_cnt] etc. data or update them,
507 just needs to bump m_data_cnt by the same amount as when it was
508 called with m_first set. The toplevel calls to
509 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
510 m_data vector when setting m_first to true.
512 m_cast_conditional and m_bitfld_load are used when handling a
513 bit-field load inside of a widening cast. handle_cast sometimes
514 needs to do runtime comparisons and handle_operand only conditionally
515 or even in two separate conditional blocks for one idx (once with
516 constant index after comparing the runtime one for equality with the
517 constant). In these cases, m_cast_conditional is set to true and
518 the bit-field load then communicates its m_data_cnt to handle_cast
519 using m_bitfld_load. */
522 unsigned m_upwards_2limb
;
524 bool m_cast_conditional
;
525 unsigned m_bitfld_load
;
527 unsigned int m_data_cnt
;
530 bitint_large_huge::~bitint_large_huge ()
532 BITMAP_FREE (m_names
);
533 BITMAP_FREE (m_loads
);
534 BITMAP_FREE (m_preserved
);
535 BITMAP_FREE (m_single_use_names
);
537 delete_var_map (m_map
);
542 /* Insert gimple statement G before current location
543 and set its gimple_location. */
546 bitint_large_huge::insert_before (gimple
*g
)
548 gimple_set_location (g
, m_loc
);
549 gsi_insert_before (&m_gsi
, g
, GSI_SAME_STMT
);
552 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
553 This is normally m_limb_type, except for a partial most
554 significant limb if any. */
557 bitint_large_huge::limb_access_type (tree type
, tree idx
)
559 if (type
== NULL_TREE
)
561 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
562 unsigned int prec
= TYPE_PRECISION (type
);
563 gcc_assert (i
* limb_prec
< prec
);
564 if ((i
+ 1) * limb_prec
<= prec
)
567 return build_nonstandard_integer_type (prec
% limb_prec
,
568 TYPE_UNSIGNED (type
));
571 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
572 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
575 bitint_large_huge::limb_access (tree type
, tree var
, tree idx
, bool write_p
)
577 tree atype
= (tree_fits_uhwi_p (idx
)
578 ? limb_access_type (type
, idx
) : m_limb_type
);
580 if (DECL_P (var
) && tree_fits_uhwi_p (idx
))
582 tree ptype
= build_pointer_type (strip_array_types (TREE_TYPE (var
)));
583 unsigned HOST_WIDE_INT off
= tree_to_uhwi (idx
) * m_limb_size
;
584 ret
= build2 (MEM_REF
, m_limb_type
,
585 build_fold_addr_expr (var
),
586 build_int_cst (ptype
, off
));
587 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
588 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
590 else if (TREE_CODE (var
) == MEM_REF
&& tree_fits_uhwi_p (idx
))
593 = build2 (MEM_REF
, m_limb_type
, TREE_OPERAND (var
, 0),
594 size_binop (PLUS_EXPR
, TREE_OPERAND (var
, 1),
595 build_int_cst (TREE_TYPE (TREE_OPERAND (var
, 1)),
598 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
599 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
600 TREE_THIS_NOTRAP (ret
) = TREE_THIS_NOTRAP (var
);
604 var
= unshare_expr (var
);
605 if (TREE_CODE (TREE_TYPE (var
)) != ARRAY_TYPE
606 || !useless_type_conversion_p (m_limb_type
,
607 TREE_TYPE (TREE_TYPE (var
))))
609 unsigned HOST_WIDE_INT nelts
610 = CEIL (tree_to_uhwi (TYPE_SIZE (type
)), limb_prec
);
611 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
612 var
= build1 (VIEW_CONVERT_EXPR
, atype
, var
);
614 ret
= build4 (ARRAY_REF
, m_limb_type
, var
, idx
, NULL_TREE
, NULL_TREE
);
616 if (!write_p
&& !useless_type_conversion_p (atype
, m_limb_type
))
618 gimple
*g
= gimple_build_assign (make_ssa_name (m_limb_type
), ret
);
620 ret
= gimple_assign_lhs (g
);
621 ret
= build1 (NOP_EXPR
, atype
, ret
);
626 /* Emit a half diamond,
635 or if (COND) new_bb1;
636 PROB is the probability that the condition is true.
637 Updates m_gsi to start of new_bb1.
638 Sets EDGE_TRUE to edge from new_bb1 to successor and
639 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
642 bitint_large_huge::if_then (gimple
*cond
, profile_probability prob
,
643 edge
&edge_true
, edge
&edge_false
)
645 insert_before (cond
);
646 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
647 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
648 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
649 e1
->flags
= EDGE_TRUE_VALUE
;
650 e1
->probability
= prob
;
651 e3
->probability
= prob
.invert ();
652 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
655 m_gsi
= gsi_after_labels (e1
->dest
);
658 /* Emit a full diamond,
667 or if (COND) new_bb2; else new_bb1;
668 PROB is the probability that the condition is true.
669 Updates m_gsi to start of new_bb2.
670 Sets EDGE_TRUE to edge from new_bb1 to successor and
671 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
674 bitint_large_huge::if_then_else (gimple
*cond
, profile_probability prob
,
675 edge
&edge_true
, edge
&edge_false
)
677 insert_before (cond
);
678 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
679 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
680 basic_block bb
= create_empty_bb (e1
->dest
);
681 add_bb_to_loop (bb
, e1
->dest
->loop_father
);
682 edge e3
= make_edge (e1
->src
, bb
, EDGE_TRUE_VALUE
);
683 e1
->flags
= EDGE_FALSE_VALUE
;
684 e3
->probability
= prob
;
685 e1
->probability
= prob
.invert ();
686 bb
->count
= e1
->src
->count
.apply_probability (prob
);
687 set_immediate_dominator (CDI_DOMINATORS
, bb
, e1
->src
);
688 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
689 edge_true
= make_single_succ_edge (bb
, e2
->dest
, EDGE_FALLTHRU
);
691 m_gsi
= gsi_after_labels (bb
);
694 /* Emit a half diamond with full diamond in it
708 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
709 PROB1 is the probability that the condition 1 is true.
710 PROB2 is the probability that the condition 2 is true.
711 Updates m_gsi to start of new_bb1.
712 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
713 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
714 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
715 If COND2 is NULL, this is equivalent to
716 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
717 EDGE_TRUE_TRUE = NULL; */
720 bitint_large_huge::if_then_if_then_else (gimple
*cond1
, gimple
*cond2
,
721 profile_probability prob1
,
722 profile_probability prob2
,
723 edge
&edge_true_true
,
724 edge
&edge_true_false
,
727 edge e2
, e3
, e4
= NULL
;
728 if_then (cond1
, prob1
, e2
, e3
);
731 edge_true_true
= NULL
;
732 edge_true_false
= e2
;
736 insert_before (cond2
);
737 e2
= split_block (gsi_bb (m_gsi
), cond2
);
738 basic_block bb
= create_empty_bb (e2
->dest
);
739 add_bb_to_loop (bb
, e2
->dest
->loop_father
);
740 e4
= make_edge (e2
->src
, bb
, EDGE_TRUE_VALUE
);
741 set_immediate_dominator (CDI_DOMINATORS
, bb
, e2
->src
);
742 e4
->probability
= prob2
;
743 e2
->flags
= EDGE_FALSE_VALUE
;
744 e2
->probability
= prob2
.invert ();
745 bb
->count
= e2
->src
->count
.apply_probability (prob2
);
746 e4
= make_single_succ_edge (bb
, e3
->dest
, EDGE_FALLTHRU
);
747 e2
= find_edge (e2
->dest
, e3
->dest
);
749 edge_true_false
= e2
;
751 m_gsi
= gsi_after_labels (e2
->src
);
754 /* Emit code to access limb IDX from OP. */
757 bitint_large_huge::handle_operand (tree op
, tree idx
)
759 switch (TREE_CODE (op
))
763 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
765 if (SSA_NAME_IS_DEFAULT_DEF (op
))
769 tree v
= create_tmp_reg (m_limb_type
);
770 if (SSA_NAME_VAR (op
) && VAR_P (SSA_NAME_VAR (op
)))
772 DECL_NAME (v
) = DECL_NAME (SSA_NAME_VAR (op
));
773 DECL_SOURCE_LOCATION (v
)
774 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op
));
776 v
= get_or_create_ssa_default_def (cfun
, v
);
777 m_data
.safe_push (v
);
779 tree ret
= m_data
[m_data_cnt
];
781 if (tree_fits_uhwi_p (idx
))
783 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
784 ret
= add_cast (type
, ret
);
788 location_t loc_save
= m_loc
;
789 m_loc
= gimple_location (SSA_NAME_DEF_STMT (op
));
790 tree ret
= handle_stmt (SSA_NAME_DEF_STMT (op
), idx
);
797 p
= var_to_partition (m_map
, op
);
798 gcc_assert (m_vars
[p
] != NULL_TREE
);
799 t
= limb_access (TREE_TYPE (op
), m_vars
[p
], idx
, false);
800 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
802 t
= gimple_assign_lhs (g
);
804 && m_single_use_names
805 && m_vars
[p
] != m_lhs
807 && bitmap_bit_p (m_single_use_names
, SSA_NAME_VERSION (op
)))
809 tree clobber
= build_clobber (TREE_TYPE (m_vars
[p
]), CLOBBER_EOL
);
810 g
= gimple_build_assign (m_vars
[p
], clobber
);
811 gimple_stmt_iterator gsi
= gsi_for_stmt (m_after_stmt
);
812 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
816 if (tree_fits_uhwi_p (idx
))
818 tree c
, type
= limb_access_type (TREE_TYPE (op
), idx
);
819 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
822 m_data
.safe_push (NULL_TREE
);
823 m_data
.safe_push (NULL_TREE
);
825 if (limb_prec
!= HOST_BITS_PER_WIDE_INT
)
827 wide_int w
= wi::rshift (wi::to_wide (op
), i
* limb_prec
,
828 TYPE_SIGN (TREE_TYPE (op
)));
829 c
= wide_int_to_tree (type
,
830 wide_int::from (w
, TYPE_PRECISION (type
),
833 else if (i
>= TREE_INT_CST_EXT_NUNITS (op
))
834 c
= build_int_cst (type
,
835 tree_int_cst_sgn (op
) < 0 ? -1 : 0);
837 c
= build_int_cst (type
, TREE_INT_CST_ELT (op
, i
));
842 || (m_data
[m_data_cnt
] == NULL_TREE
843 && m_data
[m_data_cnt
+ 1] == NULL_TREE
))
845 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
846 unsigned int rem
= prec
% (2 * limb_prec
);
848 unsigned min_prec
= bitint_min_cst_precision (op
, ext
);
851 m_data
.safe_push (NULL_TREE
);
852 m_data
.safe_push (NULL_TREE
);
854 if (integer_zerop (op
))
856 tree c
= build_zero_cst (m_limb_type
);
857 m_data
[m_data_cnt
] = c
;
858 m_data
[m_data_cnt
+ 1] = c
;
860 else if (integer_all_onesp (op
))
862 tree c
= build_all_ones_cst (m_limb_type
);
863 m_data
[m_data_cnt
] = c
;
864 m_data
[m_data_cnt
+ 1] = c
;
866 else if (m_upwards_2limb
&& min_prec
<= (unsigned) limb_prec
)
868 /* Single limb constant. Use a phi with that limb from
869 the preheader edge and 0 or -1 constant from the other edge
870 and for the second limb in the loop. */
872 gcc_assert (m_first
);
875 prepare_data_in_out (fold_convert (m_limb_type
, op
), idx
, &out
);
876 g
= gimple_build_assign (m_data
[m_data_cnt
+ 1],
877 build_int_cst (m_limb_type
, ext
));
879 m_data
[m_data_cnt
+ 1] = gimple_assign_rhs1 (g
);
881 else if (min_prec
> prec
- rem
- 2 * limb_prec
)
883 /* Constant which has enough significant bits that it isn't
884 worth trying to save .rodata space by extending from smaller
888 type
= TREE_TYPE (op
);
890 /* If we have a guarantee the most significant partial limb
891 (if any) will be only accessed through handle_operand
892 with INTEGER_CST idx, we don't need to include the partial
894 type
= build_bitint_type (prec
- rem
, 1);
895 tree c
= tree_output_constant_def (fold_convert (type
, op
));
896 m_data
[m_data_cnt
] = c
;
897 m_data
[m_data_cnt
+ 1] = NULL_TREE
;
899 else if (m_upwards_2limb
)
901 /* Constant with smaller number of bits. Trade conditional
902 code for .rodata space by extending from smaller number. */
903 min_prec
= CEIL (min_prec
, 2 * limb_prec
) * (2 * limb_prec
);
904 tree type
= build_bitint_type (min_prec
, 1);
905 tree c
= tree_output_constant_def (fold_convert (type
, op
));
906 tree idx2
= make_ssa_name (sizetype
);
907 g
= gimple_build_assign (idx2
, PLUS_EXPR
, idx
, size_one_node
);
909 g
= gimple_build_cond (LT_EXPR
, idx
,
910 size_int (min_prec
/ limb_prec
),
911 NULL_TREE
, NULL_TREE
);
912 edge edge_true
, edge_false
;
913 if_then (g
, (min_prec
>= (prec
- rem
) / 2
914 ? profile_probability::likely ()
915 : profile_probability::unlikely ()),
916 edge_true
, edge_false
);
917 tree c1
= limb_access (TREE_TYPE (op
), c
, idx
, false);
918 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c1
)), c1
);
920 c1
= gimple_assign_lhs (g
);
921 tree c2
= limb_access (TREE_TYPE (op
), c
, idx2
, false);
922 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c2
)), c2
);
924 c2
= gimple_assign_lhs (g
);
925 tree c3
= build_int_cst (m_limb_type
, ext
);
926 m_gsi
= gsi_after_labels (edge_true
->dest
);
927 m_data
[m_data_cnt
] = make_ssa_name (m_limb_type
);
928 m_data
[m_data_cnt
+ 1] = make_ssa_name (m_limb_type
);
929 gphi
*phi
= create_phi_node (m_data
[m_data_cnt
],
931 add_phi_arg (phi
, c1
, edge_true
, UNKNOWN_LOCATION
);
932 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
933 phi
= create_phi_node (m_data
[m_data_cnt
+ 1], edge_true
->dest
);
934 add_phi_arg (phi
, c2
, edge_true
, UNKNOWN_LOCATION
);
935 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
939 /* Constant with smaller number of bits. Trade conditional
940 code for .rodata space by extending from smaller number.
941 Version for loops with random access to the limbs or
943 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
945 if (min_prec
<= (unsigned) limb_prec
)
946 c
= fold_convert (m_limb_type
, op
);
949 tree type
= build_bitint_type (min_prec
, 1);
950 c
= tree_output_constant_def (fold_convert (type
, op
));
952 m_data
[m_data_cnt
] = c
;
953 m_data
[m_data_cnt
+ 1] = integer_type_node
;
955 t
= m_data
[m_data_cnt
];
956 if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
958 t
= limb_access (TREE_TYPE (op
), t
, idx
, false);
959 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
961 t
= gimple_assign_lhs (g
);
964 else if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
966 t
= limb_access (TREE_TYPE (op
), m_data
[m_data_cnt
], idx
, false);
967 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
969 t
= gimple_assign_lhs (g
);
972 t
= m_data
[m_data_cnt
+ 1];
973 if (m_data
[m_data_cnt
+ 1] == integer_type_node
)
975 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
976 unsigned rem
= prec
% (2 * limb_prec
);
977 int ext
= tree_int_cst_sgn (op
) < 0 ? -1 : 0;
978 tree c
= m_data
[m_data_cnt
];
979 unsigned min_prec
= TYPE_PRECISION (TREE_TYPE (c
));
980 g
= gimple_build_cond (LT_EXPR
, idx
,
981 size_int (min_prec
/ limb_prec
),
982 NULL_TREE
, NULL_TREE
);
983 edge edge_true
, edge_false
;
984 if_then (g
, (min_prec
>= (prec
- rem
) / 2
985 ? profile_probability::likely ()
986 : profile_probability::unlikely ()),
987 edge_true
, edge_false
);
988 if (min_prec
> (unsigned) limb_prec
)
990 c
= limb_access (TREE_TYPE (op
), c
, idx
, false);
991 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c
)), c
);
993 c
= gimple_assign_lhs (g
);
995 tree c2
= build_int_cst (m_limb_type
, ext
);
996 m_gsi
= gsi_after_labels (edge_true
->dest
);
997 t
= make_ssa_name (m_limb_type
);
998 gphi
*phi
= create_phi_node (t
, edge_true
->dest
);
999 add_phi_arg (phi
, c
, edge_true
, UNKNOWN_LOCATION
);
1000 add_phi_arg (phi
, c2
, edge_false
, UNKNOWN_LOCATION
);
1009 /* Helper method, add a PHI node with VAL from preheader edge if
1010 inside of a loop and m_first. Keep state in a pair of m_data
1014 bitint_large_huge::prepare_data_in_out (tree val
, tree idx
, tree
*data_out
)
1018 *data_out
= tree_fits_uhwi_p (idx
) ? NULL_TREE
: m_data
[m_data_cnt
+ 1];
1019 return m_data
[m_data_cnt
];
1022 *data_out
= NULL_TREE
;
1023 if (tree_fits_uhwi_p (idx
))
1025 m_data
.safe_push (val
);
1026 m_data
.safe_push (NULL_TREE
);
1030 tree in
= make_ssa_name (TREE_TYPE (val
));
1031 gphi
*phi
= create_phi_node (in
, m_bb
);
1032 edge e1
= find_edge (m_preheader_bb
, m_bb
);
1033 edge e2
= EDGE_PRED (m_bb
, 0);
1035 e2
= EDGE_PRED (m_bb
, 1);
1036 add_phi_arg (phi
, val
, e1
, UNKNOWN_LOCATION
);
1037 tree out
= make_ssa_name (TREE_TYPE (val
));
1038 add_phi_arg (phi
, out
, e2
, UNKNOWN_LOCATION
);
1039 m_data
.safe_push (in
);
1040 m_data
.safe_push (out
);
1044 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1045 convert it without emitting any code, otherwise emit
1046 the conversion statement before the current location. */
1049 bitint_large_huge::add_cast (tree type
, tree val
)
1051 if (TREE_CODE (val
) == INTEGER_CST
)
1052 return fold_convert (type
, val
);
1054 tree lhs
= make_ssa_name (type
);
1055 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, val
);
1060 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1063 bitint_large_huge::handle_plus_minus (tree_code code
, tree rhs1
, tree rhs2
,
1066 tree lhs
, data_out
, ctype
;
1067 tree rhs1_type
= TREE_TYPE (rhs1
);
1069 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1072 if (optab_handler (code
== PLUS_EXPR
? uaddc5_optab
: usubc5_optab
,
1073 TYPE_MODE (m_limb_type
)) != CODE_FOR_nothing
)
1075 ctype
= build_complex_type (m_limb_type
);
1076 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1078 if (!TYPE_UNSIGNED (rhs1_type
))
1080 tree type
= unsigned_type_for (rhs1_type
);
1081 rhs1
= add_cast (type
, rhs1
);
1082 rhs2
= add_cast (type
, rhs2
);
1084 rhs1
= add_cast (m_limb_type
, rhs1
);
1085 rhs2
= add_cast (m_limb_type
, rhs2
);
1087 lhs
= make_ssa_name (ctype
);
1088 g
= gimple_build_call_internal (code
== PLUS_EXPR
1089 ? IFN_UADDC
: IFN_USUBC
,
1090 3, rhs1
, rhs2
, data_in
);
1091 gimple_call_set_lhs (g
, lhs
);
1093 if (data_out
== NULL_TREE
)
1094 data_out
= make_ssa_name (m_limb_type
);
1095 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1096 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1099 else if (types_compatible_p (rhs1_type
, m_limb_type
))
1101 ctype
= build_complex_type (m_limb_type
);
1102 lhs
= make_ssa_name (ctype
);
1103 g
= gimple_build_call_internal (code
== PLUS_EXPR
1104 ? IFN_ADD_OVERFLOW
: IFN_SUB_OVERFLOW
,
1106 gimple_call_set_lhs (g
, lhs
);
1108 if (data_out
== NULL_TREE
)
1109 data_out
= make_ssa_name (m_limb_type
);
1110 if (!integer_zerop (data_in
))
1112 rhs1
= make_ssa_name (m_limb_type
);
1113 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1114 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1116 rhs2
= make_ssa_name (m_limb_type
);
1117 g
= gimple_build_assign (rhs2
, IMAGPART_EXPR
,
1118 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1120 lhs
= make_ssa_name (ctype
);
1121 g
= gimple_build_call_internal (code
== PLUS_EXPR
1125 gimple_call_set_lhs (g
, lhs
);
1127 data_in
= make_ssa_name (m_limb_type
);
1128 g
= gimple_build_assign (data_in
, IMAGPART_EXPR
,
1129 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1131 g
= gimple_build_assign (data_out
, PLUS_EXPR
, rhs2
, data_in
);
1136 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1137 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1143 tree in
= add_cast (rhs1_type
, data_in
);
1144 lhs
= make_ssa_name (rhs1_type
);
1145 g
= gimple_build_assign (lhs
, code
, rhs1
, rhs2
);
1147 rhs1
= make_ssa_name (rhs1_type
);
1148 g
= gimple_build_assign (rhs1
, code
, lhs
, in
);
1150 m_data
[m_data_cnt
] = NULL_TREE
;
1154 rhs1
= make_ssa_name (m_limb_type
);
1155 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1156 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1158 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1159 rhs1
= add_cast (rhs1_type
, rhs1
);
1160 m_data
[m_data_cnt
] = data_out
;
1165 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1166 count in [0, limb_prec - 1] range. */
1169 bitint_large_huge::handle_lshift (tree rhs1
, tree rhs2
, tree idx
)
1171 unsigned HOST_WIDE_INT cnt
= tree_to_uhwi (rhs2
);
1172 gcc_checking_assert (cnt
< (unsigned) limb_prec
);
1176 tree lhs
, data_out
, rhs1_type
= TREE_TYPE (rhs1
);
1178 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1181 if (!integer_zerop (data_in
))
1183 lhs
= make_ssa_name (m_limb_type
);
1184 g
= gimple_build_assign (lhs
, RSHIFT_EXPR
, data_in
,
1185 build_int_cst (unsigned_type_node
,
1188 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1189 lhs
= add_cast (rhs1_type
, lhs
);
1192 if (types_compatible_p (rhs1_type
, m_limb_type
))
1194 if (data_out
== NULL_TREE
)
1195 data_out
= make_ssa_name (m_limb_type
);
1196 g
= gimple_build_assign (data_out
, rhs1
);
1199 if (cnt
< (unsigned) TYPE_PRECISION (rhs1_type
))
1201 lhs
= make_ssa_name (rhs1_type
);
1202 g
= gimple_build_assign (lhs
, LSHIFT_EXPR
, rhs1
, rhs2
);
1204 if (!integer_zerop (data_in
))
1207 lhs
= make_ssa_name (rhs1_type
);
1208 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, rhs1
, data_in
);
1214 m_data
[m_data_cnt
] = data_out
;
1219 /* Helper function for handle_stmt method, handle an integral
1220 to integral conversion. */
1223 bitint_large_huge::handle_cast (tree lhs_type
, tree rhs1
, tree idx
)
1225 tree rhs_type
= TREE_TYPE (rhs1
);
1227 if (TREE_CODE (rhs1
) == SSA_NAME
1228 && TREE_CODE (lhs_type
) == BITINT_TYPE
1229 && TREE_CODE (rhs_type
) == BITINT_TYPE
1230 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1231 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
)
1233 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
)
1234 /* If lhs has bigger precision than rhs, we can use
1235 the simple case only if there is a guarantee that
1236 the most significant limb is handled in straight
1237 line code. If m_var_msb (on left shifts) or
1238 if m_upwards_2limb * limb_prec is equal to
1239 lhs precision that is not the case. */
1241 && tree_int_cst_equal (TYPE_SIZE (rhs_type
),
1242 TYPE_SIZE (lhs_type
))
1243 && (!m_upwards_2limb
1244 || (m_upwards_2limb
* limb_prec
1245 < TYPE_PRECISION (lhs_type
)))))
1247 rhs1
= handle_operand (rhs1
, idx
);
1248 if (tree_fits_uhwi_p (idx
))
1250 tree type
= limb_access_type (lhs_type
, idx
);
1251 if (!types_compatible_p (type
, TREE_TYPE (rhs1
)))
1252 rhs1
= add_cast (type
, rhs1
);
1257 /* Indexes lower than this don't need any special processing. */
1258 unsigned low
= ((unsigned) TYPE_PRECISION (rhs_type
)
1259 - !TYPE_UNSIGNED (rhs_type
)) / limb_prec
;
1260 /* Indexes >= than this always contain an extension. */
1261 unsigned high
= CEIL ((unsigned) TYPE_PRECISION (rhs_type
), limb_prec
);
1262 bool save_first
= m_first
;
1265 m_data
.safe_push (NULL_TREE
);
1266 m_data
.safe_push (NULL_TREE
);
1267 m_data
.safe_push (NULL_TREE
);
1268 if (TYPE_UNSIGNED (rhs_type
))
1269 /* No need to keep state between iterations. */
1271 else if (m_upwards
&& !m_upwards_2limb
)
1272 /* We need to keep state between iterations, but
1273 not within any loop, everything is straight line
1274 code with only increasing indexes. */
1276 else if (!m_upwards_2limb
)
1278 unsigned save_data_cnt
= m_data_cnt
;
1279 gimple_stmt_iterator save_gsi
= m_gsi
;
1281 if (gsi_end_p (m_gsi
))
1282 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1285 m_data_cnt
= save_data_cnt
+ 3;
1286 t
= handle_operand (rhs1
, size_int (low
));
1288 m_data
[save_data_cnt
+ 2]
1289 = build_int_cst (NULL_TREE
, m_data_cnt
);
1290 m_data_cnt
= save_data_cnt
;
1291 t
= add_cast (signed_type_for (m_limb_type
), t
);
1292 tree lpm1
= build_int_cst (unsigned_type_node
, limb_prec
- 1);
1293 tree n
= make_ssa_name (TREE_TYPE (t
));
1294 g
= gimple_build_assign (n
, RSHIFT_EXPR
, t
, lpm1
);
1296 m_data
[save_data_cnt
+ 1] = add_cast (m_limb_type
, n
);
1298 if (gsi_end_p (m_init_gsi
))
1299 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1301 gsi_prev (&m_init_gsi
);
1304 else if (m_upwards_2limb
* limb_prec
< TYPE_PRECISION (rhs_type
))
1305 /* We need to keep state between iterations, but
1306 fortunately not within the loop, only afterwards. */
1311 m_data
.truncate (m_data_cnt
);
1312 prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
1313 m_data
.safe_push (NULL_TREE
);
1317 unsigned save_data_cnt
= m_data_cnt
;
1319 if (!tree_fits_uhwi_p (idx
))
1322 && (m_upwards_2limb
* limb_prec
1323 <= ((unsigned) TYPE_PRECISION (rhs_type
)
1324 - !TYPE_UNSIGNED (rhs_type
))))
1326 rhs1
= handle_operand (rhs1
, idx
);
1328 m_data
[save_data_cnt
+ 2]
1329 = build_int_cst (NULL_TREE
, m_data_cnt
);
1330 m_first
= save_first
;
1333 bool single_comparison
1334 = low
== high
|| (m_upwards_2limb
&& (low
& 1) == m_first
);
1335 g
= gimple_build_cond (single_comparison
? LT_EXPR
: LE_EXPR
,
1336 idx
, size_int (low
), NULL_TREE
, NULL_TREE
);
1337 edge edge_true_true
, edge_true_false
, edge_false
;
1338 if_then_if_then_else (g
, (single_comparison
? NULL
1339 : gimple_build_cond (EQ_EXPR
, idx
,
1343 profile_probability::likely (),
1344 profile_probability::unlikely (),
1345 edge_true_true
, edge_true_false
, edge_false
);
1346 bool save_cast_conditional
= m_cast_conditional
;
1347 m_cast_conditional
= true;
1349 tree t1
= handle_operand (rhs1
, idx
), t2
= NULL_TREE
;
1351 m_data
[save_data_cnt
+ 2]
1352 = build_int_cst (NULL_TREE
, m_data_cnt
);
1353 tree ext
= NULL_TREE
;
1354 tree bitfld
= NULL_TREE
;
1355 if (!single_comparison
)
1357 m_gsi
= gsi_after_labels (edge_true_true
->src
);
1359 m_data_cnt
= save_data_cnt
+ 3;
1362 bitfld
= m_data
[m_bitfld_load
];
1363 m_data
[m_bitfld_load
] = m_data
[m_bitfld_load
+ 2];
1366 t2
= handle_operand (rhs1
, size_int (low
));
1367 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t2
)))
1368 t2
= add_cast (m_limb_type
, t2
);
1369 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards_2limb
)
1371 ext
= add_cast (signed_type_for (m_limb_type
), t2
);
1372 tree lpm1
= build_int_cst (unsigned_type_node
,
1374 tree n
= make_ssa_name (TREE_TYPE (ext
));
1375 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1377 ext
= add_cast (m_limb_type
, n
);
1381 if (TYPE_UNSIGNED (rhs_type
))
1382 t3
= build_zero_cst (m_limb_type
);
1383 else if (m_upwards_2limb
&& (save_first
|| ext
!= NULL_TREE
))
1384 t3
= m_data
[save_data_cnt
];
1386 t3
= m_data
[save_data_cnt
+ 1];
1387 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
1388 t
= make_ssa_name (m_limb_type
);
1389 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
1390 add_phi_arg (phi
, t1
, edge_true_false
, UNKNOWN_LOCATION
);
1391 add_phi_arg (phi
, t3
, edge_false
, UNKNOWN_LOCATION
);
1393 add_phi_arg (phi
, t2
, edge_true_true
, UNKNOWN_LOCATION
);
1396 tree t4
= make_ssa_name (m_limb_type
);
1397 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1398 add_phi_arg (phi
, build_zero_cst (m_limb_type
), edge_true_false
,
1400 add_phi_arg (phi
, m_data
[save_data_cnt
], edge_false
,
1402 add_phi_arg (phi
, ext
, edge_true_true
, UNKNOWN_LOCATION
);
1403 g
= gimple_build_assign (m_data
[save_data_cnt
+ 1], t4
);
1410 t4
= m_data
[m_bitfld_load
+ 1];
1412 t4
= make_ssa_name (m_limb_type
);
1413 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1415 edge_true_true
? bitfld
: m_data
[m_bitfld_load
],
1416 edge_true_false
, UNKNOWN_LOCATION
);
1417 add_phi_arg (phi
, m_data
[m_bitfld_load
+ 2],
1418 edge_false
, UNKNOWN_LOCATION
);
1420 add_phi_arg (phi
, m_data
[m_bitfld_load
], edge_true_true
,
1422 m_data
[m_bitfld_load
] = t4
;
1423 m_data
[m_bitfld_load
+ 2] = t4
;
1426 m_cast_conditional
= save_cast_conditional
;
1427 m_first
= save_first
;
1432 if (tree_to_uhwi (idx
) < low
)
1434 t
= handle_operand (rhs1
, idx
);
1436 m_data
[save_data_cnt
+ 2]
1437 = build_int_cst (NULL_TREE
, m_data_cnt
);
1439 else if (tree_to_uhwi (idx
) < high
)
1441 t
= handle_operand (rhs1
, size_int (low
));
1443 m_data
[save_data_cnt
+ 2]
1444 = build_int_cst (NULL_TREE
, m_data_cnt
);
1445 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t
)))
1446 t
= add_cast (m_limb_type
, t
);
1447 tree ext
= NULL_TREE
;
1448 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards
)
1450 ext
= add_cast (signed_type_for (m_limb_type
), t
);
1451 tree lpm1
= build_int_cst (unsigned_type_node
,
1453 tree n
= make_ssa_name (TREE_TYPE (ext
));
1454 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1456 ext
= add_cast (m_limb_type
, n
);
1457 m_data
[save_data_cnt
+ 1] = ext
;
1462 if (TYPE_UNSIGNED (rhs_type
) && m_first
)
1464 handle_operand (rhs1
, size_zero_node
);
1465 m_data
[save_data_cnt
+ 2]
1466 = build_int_cst (NULL_TREE
, m_data_cnt
);
1469 m_data_cnt
= tree_to_uhwi (m_data
[save_data_cnt
+ 2]);
1470 if (TYPE_UNSIGNED (rhs_type
))
1471 t
= build_zero_cst (m_limb_type
);
1473 t
= m_data
[save_data_cnt
+ 1];
1475 tree type
= limb_access_type (lhs_type
, idx
);
1476 if (!useless_type_conversion_p (type
, m_limb_type
))
1477 t
= add_cast (type
, t
);
1478 m_first
= save_first
;
1482 else if (TREE_CODE (lhs_type
) == BITINT_TYPE
1483 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1484 && INTEGRAL_TYPE_P (rhs_type
))
1486 /* Add support for 3 or more limbs filled in from normal integral
1487 type if this assert fails. If no target chooses limb mode smaller
1488 than half of largest supported normal integral type, this will not
1490 gcc_assert (TYPE_PRECISION (rhs_type
) <= 2 * limb_prec
);
1491 tree r1
= NULL_TREE
, r2
= NULL_TREE
, rext
= NULL_TREE
;
1494 gimple_stmt_iterator save_gsi
= m_gsi
;
1496 if (gsi_end_p (m_gsi
))
1497 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1500 if (TREE_CODE (rhs_type
) == BITINT_TYPE
1501 && bitint_precision_kind (rhs_type
) == bitint_prec_middle
)
1503 tree type
= NULL_TREE
;
1504 rhs1
= maybe_cast_middle_bitint (&m_gsi
, rhs1
, type
);
1505 rhs_type
= TREE_TYPE (rhs1
);
1508 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
1509 r1
= add_cast (m_limb_type
, rhs1
);
1510 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1512 g
= gimple_build_assign (make_ssa_name (rhs_type
),
1514 build_int_cst (unsigned_type_node
,
1517 r2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1519 if (TYPE_UNSIGNED (rhs_type
))
1520 rext
= build_zero_cst (m_limb_type
);
1523 rext
= add_cast (signed_type_for (m_limb_type
), r2
? r2
: r1
);
1524 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rext
)),
1526 build_int_cst (unsigned_type_node
,
1529 rext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1532 if (gsi_end_p (m_init_gsi
))
1533 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1535 gsi_prev (&m_init_gsi
);
1539 if (m_upwards_2limb
)
1544 prepare_data_in_out (r1
, idx
, &out1
);
1545 g
= gimple_build_assign (m_data
[m_data_cnt
+ 1], rext
);
1547 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1549 prepare_data_in_out (r2
, idx
, &out2
);
1550 g
= gimple_build_assign (m_data
[m_data_cnt
+ 3], rext
);
1554 m_data
[m_data_cnt
+ 1] = t
;
1557 m_data
[m_data_cnt
+ 1] = rext
;
1558 m_data
.safe_push (rext
);
1559 t
= m_data
[m_data_cnt
];
1561 else if (!tree_fits_uhwi_p (idx
))
1562 t
= m_data
[m_data_cnt
+ 1];
1565 tree type
= limb_access_type (lhs_type
, idx
);
1566 t
= m_data
[m_data_cnt
+ 2];
1567 if (!useless_type_conversion_p (type
, m_limb_type
))
1568 t
= add_cast (type
, t
);
1575 m_data
.safe_push (r1
);
1576 m_data
.safe_push (r2
);
1577 m_data
.safe_push (rext
);
1579 if (tree_fits_uhwi_p (idx
))
1581 tree type
= limb_access_type (lhs_type
, idx
);
1582 if (integer_zerop (idx
))
1583 t
= m_data
[m_data_cnt
];
1584 else if (TYPE_PRECISION (rhs_type
) > limb_prec
1585 && integer_onep (idx
))
1586 t
= m_data
[m_data_cnt
+ 1];
1588 t
= m_data
[m_data_cnt
+ 2];
1589 if (!useless_type_conversion_p (type
, m_limb_type
))
1590 t
= add_cast (type
, t
);
1594 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
1595 NULL_TREE
, NULL_TREE
);
1596 edge e2
, e3
, e4
= NULL
;
1597 if_then (g
, profile_probability::likely (), e2
, e3
);
1598 if (m_data
[m_data_cnt
+ 1])
1600 g
= gimple_build_cond (EQ_EXPR
, idx
, size_one_node
,
1601 NULL_TREE
, NULL_TREE
);
1603 edge e5
= split_block (gsi_bb (m_gsi
), g
);
1604 e4
= make_edge (e5
->src
, e2
->dest
, EDGE_TRUE_VALUE
);
1605 e2
= find_edge (e5
->dest
, e2
->dest
);
1606 e4
->probability
= profile_probability::unlikely ();
1607 e5
->flags
= EDGE_FALSE_VALUE
;
1608 e5
->probability
= e4
->probability
.invert ();
1610 m_gsi
= gsi_after_labels (e2
->dest
);
1611 t
= make_ssa_name (m_limb_type
);
1612 gphi
*phi
= create_phi_node (t
, e2
->dest
);
1613 add_phi_arg (phi
, m_data
[m_data_cnt
+ 2], e2
, UNKNOWN_LOCATION
);
1614 add_phi_arg (phi
, m_data
[m_data_cnt
], e3
, UNKNOWN_LOCATION
);
1616 add_phi_arg (phi
, m_data
[m_data_cnt
+ 1], e4
, UNKNOWN_LOCATION
);
1623 /* Helper function for handle_stmt method, handle a load from memory. */
1626 bitint_large_huge::handle_load (gimple
*stmt
, tree idx
)
1628 tree rhs1
= gimple_assign_rhs1 (stmt
);
1629 tree rhs_type
= TREE_TYPE (rhs1
);
1630 bool eh
= stmt_ends_bb_p (stmt
);
1631 edge eh_edge
= NULL
;
1637 basic_block bb
= gimple_bb (stmt
);
1639 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
1640 if (eh_edge
->flags
& EDGE_EH
)
1644 if (TREE_CODE (rhs1
) == COMPONENT_REF
1645 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
1647 tree fld
= TREE_OPERAND (rhs1
, 1);
1648 /* For little-endian, we can allow as inputs bit-fields
1649 which start at a limb boundary. */
1650 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
1651 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
1652 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % limb_prec
) == 0)
1654 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1655 handle it normally for now. */
1656 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
1658 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
1659 poly_int64 bitoffset
;
1660 poly_uint64 field_offset
, repr_offset
;
1661 bool var_field_off
= false;
1662 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
1663 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
1664 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
1668 var_field_off
= true;
1670 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
1671 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
1672 tree nrhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1673 TREE_OPERAND (rhs1
, 0), repr
,
1674 var_field_off
? TREE_OPERAND (rhs1
, 2) : NULL_TREE
);
1675 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
1676 unsigned bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
1677 unsigned bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
1682 gimple_stmt_iterator save_gsi
= m_gsi
;
1684 if (gsi_end_p (m_gsi
))
1685 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1688 tree t
= limb_access (rhs_type
, nrhs1
, size_int (bo_idx
), true);
1689 tree iv
= make_ssa_name (m_limb_type
);
1690 g
= gimple_build_assign (iv
, t
);
1694 maybe_duplicate_eh_stmt (g
, stmt
);
1697 edge e
= split_block (gsi_bb (m_gsi
), g
);
1698 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1699 = profile_probability::very_unlikely ();
1700 m_gsi
= gsi_after_labels (e
->dest
);
1701 if (gsi_bb (save_gsi
) == e
->src
)
1703 if (gsi_end_p (save_gsi
))
1704 save_gsi
= gsi_end_bb (e
->dest
);
1706 save_gsi
= gsi_for_stmt (gsi_stmt (save_gsi
));
1708 if (m_preheader_bb
== e
->src
)
1709 m_preheader_bb
= e
->dest
;
1713 if (gsi_end_p (m_init_gsi
))
1714 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1716 gsi_prev (&m_init_gsi
);
1719 prepare_data_in_out (iv
, idx
, &out
);
1720 out
= m_data
[m_data_cnt
];
1721 m_data
.safe_push (out
);
1725 m_data
.safe_push (NULL_TREE
);
1726 m_data
.safe_push (NULL_TREE
);
1727 m_data
.safe_push (NULL_TREE
);
1731 tree nidx0
= NULL_TREE
, nidx1
;
1732 tree iv
= m_data
[m_data_cnt
];
1733 if (m_cast_conditional
&& iv
)
1735 gcc_assert (!m_bitfld_load
);
1736 m_bitfld_load
= m_data_cnt
;
1738 if (tree_fits_uhwi_p (idx
))
1740 unsigned prec
= TYPE_PRECISION (rhs_type
);
1741 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
1742 gcc_assert (i
* limb_prec
< prec
);
1743 nidx1
= size_int (i
+ bo_idx
+ 1);
1744 if ((i
+ 1) * limb_prec
> prec
)
1747 if (prec
+ bo_bit
<= (unsigned) limb_prec
)
1751 nidx0
= size_int (i
+ bo_idx
);
1761 nidx0
= make_ssa_name (sizetype
);
1762 g
= gimple_build_assign (nidx0
, PLUS_EXPR
, idx
,
1767 nidx1
= make_ssa_name (sizetype
);
1768 g
= gimple_build_assign (nidx1
, PLUS_EXPR
, idx
,
1769 size_int (bo_idx
+ 1));
1773 tree iv2
= NULL_TREE
;
1776 tree t
= limb_access (rhs_type
, nrhs1
, nidx0
, true);
1777 iv
= make_ssa_name (m_limb_type
);
1778 g
= gimple_build_assign (iv
, t
);
1784 bool conditional
= m_var_msb
&& !tree_fits_uhwi_p (idx
);
1785 unsigned prec
= TYPE_PRECISION (rhs_type
);
1788 if ((prec
% limb_prec
) == 0
1789 || ((prec
% limb_prec
) + bo_bit
> (unsigned) limb_prec
))
1790 conditional
= false;
1792 edge edge_true
= NULL
, edge_false
= NULL
;
1795 g
= gimple_build_cond (NE_EXPR
, idx
,
1796 size_int (prec
/ limb_prec
),
1797 NULL_TREE
, NULL_TREE
);
1798 if_then (g
, profile_probability::likely (),
1799 edge_true
, edge_false
);
1801 tree t
= limb_access (rhs_type
, nrhs1
, nidx1
, true);
1805 && !tree_fits_uhwi_p (idx
))
1806 iv2
= m_data
[m_data_cnt
+ 1];
1808 iv2
= make_ssa_name (m_limb_type
);
1809 g
= gimple_build_assign (iv2
, t
);
1813 maybe_duplicate_eh_stmt (g
, stmt
);
1816 edge e
= split_block (gsi_bb (m_gsi
), g
);
1817 m_gsi
= gsi_after_labels (e
->dest
);
1818 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1819 = profile_probability::very_unlikely ();
1824 tree iv3
= make_ssa_name (m_limb_type
);
1826 edge_true
= find_edge (gsi_bb (m_gsi
), edge_false
->dest
);
1827 gphi
*phi
= create_phi_node (iv3
, edge_true
->dest
);
1828 add_phi_arg (phi
, iv2
, edge_true
, UNKNOWN_LOCATION
);
1829 add_phi_arg (phi
, build_zero_cst (m_limb_type
),
1830 edge_false
, UNKNOWN_LOCATION
);
1831 m_gsi
= gsi_after_labels (edge_true
->dest
);
1834 g
= gimple_build_assign (make_ssa_name (m_limb_type
), RSHIFT_EXPR
,
1835 iv
, build_int_cst (unsigned_type_node
, bo_bit
));
1837 iv
= gimple_assign_lhs (g
);
1840 g
= gimple_build_assign (make_ssa_name (m_limb_type
), LSHIFT_EXPR
,
1841 iv2
, build_int_cst (unsigned_type_node
,
1842 limb_prec
- bo_bit
));
1844 g
= gimple_build_assign (make_ssa_name (m_limb_type
), BIT_IOR_EXPR
,
1845 gimple_assign_lhs (g
), iv
);
1847 iv
= gimple_assign_lhs (g
);
1848 if (m_data
[m_data_cnt
])
1849 m_data
[m_data_cnt
] = iv2
;
1851 if (tree_fits_uhwi_p (idx
))
1853 tree atype
= limb_access_type (rhs_type
, idx
);
1854 if (!useless_type_conversion_p (atype
, TREE_TYPE (iv
)))
1855 iv
= add_cast (atype
, iv
);
1862 /* Use write_p = true for loads with EH edges to make
1863 sure limb_access doesn't add a cast as separate
1864 statement after it. */
1865 rhs1
= limb_access (rhs_type
, rhs1
, idx
, eh
);
1866 tree ret
= make_ssa_name (TREE_TYPE (rhs1
));
1867 g
= gimple_build_assign (ret
, rhs1
);
1871 maybe_duplicate_eh_stmt (g
, stmt
);
1874 edge e
= split_block (gsi_bb (m_gsi
), g
);
1875 m_gsi
= gsi_after_labels (e
->dest
);
1876 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1877 = profile_probability::very_unlikely ();
1879 if (tree_fits_uhwi_p (idx
))
1881 tree atype
= limb_access_type (rhs_type
, idx
);
1882 if (!useless_type_conversion_p (atype
, TREE_TYPE (rhs1
)))
1883 ret
= add_cast (atype
, ret
);
1889 /* Return a limb IDX from a mergeable statement STMT. */
1892 bitint_large_huge::handle_stmt (gimple
*stmt
, tree idx
)
1894 tree lhs
, rhs1
, rhs2
= NULL_TREE
;
1896 switch (gimple_code (stmt
))
1899 if (gimple_assign_load_p (stmt
))
1900 return handle_load (stmt
, idx
);
1901 switch (gimple_assign_rhs_code (stmt
))
1906 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
1909 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1910 lhs
= make_ssa_name (TREE_TYPE (rhs1
));
1911 g
= gimple_build_assign (lhs
, gimple_assign_rhs_code (stmt
),
1917 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1918 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
1919 return handle_plus_minus (gimple_assign_rhs_code (stmt
),
1922 rhs2
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1923 rhs1
= build_zero_cst (TREE_TYPE (rhs2
));
1924 return handle_plus_minus (MINUS_EXPR
, rhs1
, rhs2
, idx
);
1926 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt
),
1928 gimple_assign_rhs2 (stmt
), idx
);
1931 return handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1933 case VIEW_CONVERT_EXPR
:
1934 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
1935 gimple_assign_rhs1 (stmt
), idx
);
1946 /* Return minimum precision of OP at STMT.
1947 Positive value is minimum precision above which all bits
1948 are zero, negative means all bits above negation of the
1949 value are copies of the sign bit. */
1952 range_to_prec (tree op
, gimple
*stmt
)
1956 tree type
= TREE_TYPE (op
);
1957 unsigned int prec
= TYPE_PRECISION (type
);
1960 || !get_range_query (cfun
)->range_of_expr (r
, op
, stmt
)
1961 || r
.undefined_p ())
1963 if (TYPE_UNSIGNED (type
))
1966 return MIN ((int) -prec
, -2);
1969 if (!TYPE_UNSIGNED (TREE_TYPE (op
)))
1971 w
= r
.lower_bound ();
1974 int min_prec1
= wi::min_precision (w
, SIGNED
);
1975 w
= r
.upper_bound ();
1976 int min_prec2
= wi::min_precision (w
, SIGNED
);
1977 int min_prec
= MAX (min_prec1
, min_prec2
);
1978 return MIN (-min_prec
, -2);
1982 w
= r
.upper_bound ();
1983 int min_prec
= wi::min_precision (w
, UNSIGNED
);
1984 return MAX (min_prec
, 1);
1987 /* Return address of the first limb of OP and write into *PREC
1988 its precision. If positive, the operand is zero extended
1989 from that precision, if it is negative, the operand is sign-extended
1990 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
1991 otherwise *PREC_STORED is prec from the innermost call without
1992 range optimizations. */
1995 bitint_large_huge::handle_operand_addr (tree op
, gimple
*stmt
,
1996 int *prec_stored
, int *prec
)
1999 location_t loc_save
= m_loc
;
2000 if ((TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
2001 || bitint_precision_kind (TREE_TYPE (op
)) < bitint_prec_large
)
2002 && TREE_CODE (op
) != INTEGER_CST
)
2005 *prec
= range_to_prec (op
, stmt
);
2006 bitint_prec_kind kind
= bitint_prec_small
;
2007 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op
)));
2008 if (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
)
2009 kind
= bitint_precision_kind (TREE_TYPE (op
));
2010 if (kind
== bitint_prec_middle
)
2012 tree type
= NULL_TREE
;
2013 op
= maybe_cast_middle_bitint (&m_gsi
, op
, type
);
2015 tree op_type
= TREE_TYPE (op
);
2016 unsigned HOST_WIDE_INT nelts
2017 = CEIL (TYPE_PRECISION (op_type
), limb_prec
);
2018 /* Add support for 3 or more limbs filled in from normal
2019 integral type if this assert fails. If no target chooses
2020 limb mode smaller than half of largest supported normal
2021 integral type, this will not be needed. */
2022 gcc_assert (nelts
<= 2);
2024 *prec_stored
= (TYPE_UNSIGNED (op_type
)
2025 ? TYPE_PRECISION (op_type
)
2026 : -TYPE_PRECISION (op_type
));
2027 if (*prec
<= limb_prec
&& *prec
>= -limb_prec
)
2032 if (TYPE_UNSIGNED (op_type
))
2034 if (*prec_stored
> limb_prec
)
2035 *prec_stored
= limb_prec
;
2037 else if (*prec_stored
< -limb_prec
)
2038 *prec_stored
= -limb_prec
;
2041 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
2042 tree var
= create_tmp_var (atype
);
2044 if (!useless_type_conversion_p (m_limb_type
, op_type
))
2045 t1
= add_cast (m_limb_type
, t1
);
2046 tree v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_zero_node
,
2047 NULL_TREE
, NULL_TREE
);
2048 gimple
*g
= gimple_build_assign (v
, t1
);
2052 tree lp
= build_int_cst (unsigned_type_node
, limb_prec
);
2053 g
= gimple_build_assign (make_ssa_name (op_type
),
2054 RSHIFT_EXPR
, op
, lp
);
2056 tree t2
= gimple_assign_lhs (g
);
2057 t2
= add_cast (m_limb_type
, t2
);
2058 v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_one_node
,
2059 NULL_TREE
, NULL_TREE
);
2060 g
= gimple_build_assign (v
, t2
);
2063 tree ret
= build_fold_addr_expr (var
);
2064 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2066 tree clobber
= build_clobber (atype
, CLOBBER_EOL
);
2067 g
= gimple_build_assign (var
, clobber
);
2068 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2073 switch (TREE_CODE (op
))
2077 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
2079 gimple
*g
= SSA_NAME_DEF_STMT (op
);
2081 m_loc
= gimple_location (g
);
2082 if (gimple_assign_load_p (g
))
2084 *prec
= range_to_prec (op
, NULL
);
2086 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2087 ? TYPE_PRECISION (TREE_TYPE (op
))
2088 : -TYPE_PRECISION (TREE_TYPE (op
)));
2089 ret
= build_fold_addr_expr (gimple_assign_rhs1 (g
));
2090 ret
= force_gimple_operand_gsi (&m_gsi
, ret
, true,
2091 NULL_TREE
, true, GSI_SAME_STMT
);
2093 else if (gimple_code (g
) == GIMPLE_NOP
)
2095 *prec
= TYPE_UNSIGNED (TREE_TYPE (op
)) ? limb_prec
: -limb_prec
;
2097 *prec_stored
= *prec
;
2098 tree var
= create_tmp_var (m_limb_type
);
2099 TREE_ADDRESSABLE (var
) = 1;
2100 ret
= build_fold_addr_expr (var
);
2101 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2103 tree clobber
= build_clobber (m_limb_type
, CLOBBER_EOL
);
2104 g
= gimple_build_assign (var
, clobber
);
2105 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2110 gcc_assert (gimple_assign_cast_p (g
));
2111 tree rhs1
= gimple_assign_rhs1 (g
);
2112 bitint_prec_kind kind
= bitint_prec_small
;
2113 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)));
2114 if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
)
2115 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2116 if (kind
>= bitint_prec_large
)
2118 tree lhs_type
= TREE_TYPE (op
);
2119 tree rhs_type
= TREE_TYPE (rhs1
);
2120 int prec_stored_val
= 0;
2121 ret
= handle_operand_addr (rhs1
, g
, &prec_stored_val
, prec
);
2122 if (TYPE_PRECISION (lhs_type
) > TYPE_PRECISION (rhs_type
))
2124 if (TYPE_UNSIGNED (lhs_type
)
2125 && !TYPE_UNSIGNED (rhs_type
))
2126 gcc_assert (*prec
>= 0 || prec_stored
== NULL
);
2130 if (*prec
> 0 && *prec
< TYPE_PRECISION (lhs_type
))
2132 else if (TYPE_UNSIGNED (lhs_type
))
2134 gcc_assert (*prec
> 0
2135 || prec_stored_val
> 0
2136 || (-prec_stored_val
2137 >= TYPE_PRECISION (lhs_type
)));
2138 *prec
= TYPE_PRECISION (lhs_type
);
2140 else if (*prec
< 0 && -*prec
< TYPE_PRECISION (lhs_type
))
2143 *prec
= -TYPE_PRECISION (lhs_type
);
2158 int p
= var_to_partition (m_map
, op
);
2159 gcc_assert (m_vars
[p
] != NULL_TREE
);
2160 *prec
= range_to_prec (op
, stmt
);
2162 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2163 ? TYPE_PRECISION (TREE_TYPE (op
))
2164 : -TYPE_PRECISION (TREE_TYPE (op
)));
2165 return build_fold_addr_expr (m_vars
[p
]);
2168 unsigned int min_prec
, mp
;
2170 w
= wi::to_wide (op
);
2171 if (tree_int_cst_sgn (op
) >= 0)
2173 min_prec
= wi::min_precision (w
, UNSIGNED
);
2174 *prec
= MAX (min_prec
, 1);
2178 min_prec
= wi::min_precision (w
, SIGNED
);
2179 *prec
= MIN ((int) -min_prec
, -2);
2181 mp
= CEIL (min_prec
, limb_prec
) * limb_prec
;
2184 if (mp
>= (unsigned) TYPE_PRECISION (TREE_TYPE (op
)))
2185 type
= TREE_TYPE (op
);
2187 type
= build_bitint_type (mp
, 1);
2188 if (TREE_CODE (type
) != BITINT_TYPE
2189 || bitint_precision_kind (type
) == bitint_prec_small
)
2191 if (TYPE_PRECISION (type
) <= limb_prec
)
2194 /* This case is for targets which e.g. have 64-bit
2195 limb but categorize up to 128-bits _BitInts as
2196 small. We could use type of m_limb_type[2] and
2197 similar instead to save space. */
2198 type
= build_bitint_type (mid_min_prec
, 1);
2202 if (tree_int_cst_sgn (op
) >= 0)
2203 *prec_stored
= MAX (TYPE_PRECISION (type
), 1);
2205 *prec_stored
= MIN ((int) -TYPE_PRECISION (type
), -2);
2207 op
= tree_output_constant_def (fold_convert (type
, op
));
2208 return build_fold_addr_expr (op
);
2214 /* Helper function, create a loop before the current location,
2215 start with sizetype INIT value from the preheader edge. Return
2216 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2217 from the latch edge. */
2220 bitint_large_huge::create_loop (tree init
, tree
*idx_next
)
2222 if (!gsi_end_p (m_gsi
))
2225 m_gsi
= gsi_last_bb (gsi_bb (m_gsi
));
2226 edge e1
= split_block (gsi_bb (m_gsi
), gsi_stmt (m_gsi
));
2227 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
2228 edge e3
= make_edge (e1
->dest
, e1
->dest
, EDGE_TRUE_VALUE
);
2229 e3
->probability
= profile_probability::very_unlikely ();
2230 e2
->flags
= EDGE_FALSE_VALUE
;
2231 e2
->probability
= e3
->probability
.invert ();
2232 tree idx
= make_ssa_name (sizetype
);
2233 gphi
*phi
= create_phi_node (idx
, e1
->dest
);
2234 add_phi_arg (phi
, init
, e1
, UNKNOWN_LOCATION
);
2235 *idx_next
= make_ssa_name (sizetype
);
2236 add_phi_arg (phi
, *idx_next
, e3
, UNKNOWN_LOCATION
);
2237 m_gsi
= gsi_after_labels (e1
->dest
);
2239 m_preheader_bb
= e1
->src
;
2240 class loop
*loop
= alloc_loop ();
2241 loop
->header
= e1
->dest
;
2242 add_loop (loop
, e1
->src
->loop_father
);
2246 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2247 lowered using iteration from the least significant limb up to the most
2248 significant limb. For large _BitInt it is emitted as straight line code
2249 before current location, for huge _BitInt as a loop handling two limbs
2250 at once, followed by handling up to limbs in straight line code (at most
2251 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2252 comparisons, in that case CMP_CODE should be the comparison code and
2253 CMP_OP1/CMP_OP2 the comparison operands. */
2256 bitint_large_huge::lower_mergeable_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2257 tree cmp_op1
, tree cmp_op2
)
2259 bool eq_p
= cmp_code
!= ERROR_MARK
;
2262 type
= TREE_TYPE (cmp_op1
);
2264 type
= TREE_TYPE (gimple_assign_lhs (stmt
));
2265 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2266 bitint_prec_kind kind
= bitint_precision_kind (type
);
2267 gcc_assert (kind
>= bitint_prec_large
);
2269 tree lhs
= gimple_get_lhs (stmt
);
2270 tree rhs1
, lhs_type
= lhs
? TREE_TYPE (lhs
) : NULL_TREE
;
2272 && TREE_CODE (lhs
) == SSA_NAME
2273 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
2274 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
2276 int p
= var_to_partition (m_map
, lhs
);
2277 gcc_assert (m_vars
[p
] != NULL_TREE
);
2278 m_lhs
= lhs
= m_vars
[p
];
2280 unsigned cnt
, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
2282 tree ext
= NULL_TREE
, store_operand
= NULL_TREE
;
2284 basic_block eh_pad
= NULL
;
2285 tree nlhs
= NULL_TREE
;
2286 unsigned HOST_WIDE_INT bo_idx
= 0;
2287 unsigned HOST_WIDE_INT bo_bit
= 0;
2288 tree bf_cur
= NULL_TREE
, bf_next
= NULL_TREE
;
2289 if (gimple_store_p (stmt
))
2291 store_operand
= gimple_assign_rhs1 (stmt
);
2292 eh
= stmt_ends_bb_p (stmt
);
2297 basic_block bb
= gimple_bb (stmt
);
2299 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2300 if (e
->flags
& EDGE_EH
)
2306 if (TREE_CODE (lhs
) == COMPONENT_REF
2307 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
2309 tree fld
= TREE_OPERAND (lhs
, 1);
2310 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
2311 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
2312 poly_int64 bitoffset
;
2313 poly_uint64 field_offset
, repr_offset
;
2314 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
2318 bool var_field_off
= false;
2319 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
2320 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
2321 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
2325 var_field_off
= true;
2327 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
2328 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
2329 nlhs
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
2330 TREE_OPERAND (lhs
, 0), repr
,
2332 ? TREE_OPERAND (lhs
, 2) : NULL_TREE
);
2333 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
2334 bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
2335 bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
2340 && TREE_CODE (store_operand
) == SSA_NAME
2342 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (store_operand
)))
2343 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand
)))
2344 || gimple_assign_cast_p (stmt
))
2346 rhs1
= gimple_assign_rhs1 (store_operand
2347 ? SSA_NAME_DEF_STMT (store_operand
)
2349 /* Optimize mergeable ops ending with widening cast to _BitInt
2350 (or followed by store). We can lower just the limbs of the
2351 cast operand and widen afterwards. */
2352 if (TREE_CODE (rhs1
) == SSA_NAME
2354 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
2355 && TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
2356 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
2357 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1
)),
2358 limb_prec
) < CEIL (prec
, limb_prec
)
2359 || (kind
== bitint_prec_huge
2360 && TYPE_PRECISION (TREE_TYPE (rhs1
)) < prec
)))
2362 store_operand
= rhs1
;
2363 prec
= TYPE_PRECISION (TREE_TYPE (rhs1
));
2364 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2365 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2369 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
2370 if (kind
== bitint_prec_large
)
2371 cnt
= CEIL (prec
, limb_prec
);
2374 rem
= (prec
% (2 * limb_prec
));
2375 end
= (prec
- rem
) / limb_prec
;
2376 cnt
= 2 + CEIL (rem
, limb_prec
);
2377 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
2380 basic_block edge_bb
= NULL
;
2383 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2385 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2387 if (kind
== bitint_prec_large
)
2388 m_gsi
= gsi_end_bb (edge_bb
);
2391 m_after_stmt
= stmt
;
2392 if (kind
!= bitint_prec_large
)
2393 m_upwards_2limb
= end
;
2397 = (prec
!= (unsigned) TYPE_PRECISION (type
)
2398 && (CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
)
2399 > CEIL (prec
, limb_prec
)));
2401 for (unsigned i
= 0; i
< cnt
; i
++)
2404 if (kind
== bitint_prec_large
)
2407 idx
= size_int (end
+ (i
> 2));
2410 rhs1
= handle_operand (cmp_op1
, idx
);
2411 tree rhs2
= handle_operand (cmp_op2
, idx
);
2412 g
= gimple_build_cond (NE_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2414 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2415 e1
->flags
= EDGE_FALSE_VALUE
;
2416 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2417 e1
->probability
= profile_probability::unlikely ();
2418 e2
->probability
= e1
->probability
.invert ();
2420 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2421 m_gsi
= gsi_after_labels (e1
->dest
);
2426 rhs1
= handle_operand (store_operand
, idx
);
2428 rhs1
= handle_stmt (stmt
, idx
);
2429 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
2430 rhs1
= add_cast (m_limb_type
, rhs1
);
2431 if (sext
&& i
== cnt
- 1)
2436 if (tree_fits_uhwi_p (idx
))
2437 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2440 nidx
= make_ssa_name (sizetype
);
2441 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2447 basic_block new_bb
= NULL
;
2448 /* Handle stores into bit-fields. */
2454 if (kind
!= bitint_prec_large
)
2456 prepare_data_in_out (build_zero_cst (m_limb_type
),
2458 bf_next
= m_data
.pop ();
2459 bf_cur
= m_data
.pop ();
2460 g
= gimple_build_cond (EQ_EXPR
, idx
, size_zero_node
,
2461 NULL_TREE
, NULL_TREE
);
2463 if_then_else (g
, profile_probability::unlikely (),
2468 = build_nonstandard_integer_type (limb_prec
- bo_bit
, 1);
2469 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2470 bitsize_int (limb_prec
- bo_bit
),
2471 bitsize_int (bo_idx
* limb_prec
+ bo_bit
));
2472 tree t
= add_cast (ftype
, rhs1
);
2473 g
= gimple_build_assign (bfr
, t
);
2477 maybe_duplicate_eh_stmt (g
, stmt
);
2480 edge e
= split_block (gsi_bb (m_gsi
), g
);
2481 m_gsi
= gsi_after_labels (e
->dest
);
2482 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2483 = profile_probability::very_unlikely ();
2486 if (kind
== bitint_prec_large
)
2492 m_gsi
= gsi_after_labels (e2
->src
);
2496 tree t1
= make_ssa_name (m_limb_type
);
2497 tree t2
= make_ssa_name (m_limb_type
);
2498 tree t3
= make_ssa_name (m_limb_type
);
2499 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2500 build_int_cst (unsigned_type_node
,
2501 limb_prec
- bo_bit
));
2503 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, rhs1
,
2504 build_int_cst (unsigned_type_node
,
2508 g
= gimple_build_assign (t3
, BIT_IOR_EXPR
, t1
, t2
);
2511 if (bf_next
&& i
== 1)
2513 g
= gimple_build_assign (bf_next
, bf_cur
);
2520 /* Handle bit-field access to partial last limb if needed. */
2524 && tree_fits_uhwi_p (idx
))
2526 unsigned int tprec
= TYPE_PRECISION (type
);
2527 unsigned int rprec
= tprec
% limb_prec
;
2528 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2531 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2532 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2533 unshare_expr (nlhs
),
2534 bitsize_int (rprec
+ bo_bit
),
2535 bitsize_int ((bo_idx
2536 + tprec
/ limb_prec
)
2538 tree t
= add_cast (ftype
, rhs1
);
2539 g
= gimple_build_assign (bfr
, t
);
2543 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2546 /* Otherwise, stores to any other lhs. */
2549 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
,
2551 g
= gimple_build_assign (l
, rhs1
);
2556 maybe_duplicate_eh_stmt (g
, stmt
);
2559 edge e
= split_block (gsi_bb (m_gsi
), g
);
2560 m_gsi
= gsi_after_labels (e
->dest
);
2561 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2562 = profile_probability::very_unlikely ();
2566 m_gsi
= gsi_after_labels (new_bb
);
2570 if (kind
== bitint_prec_huge
&& i
<= 1)
2574 idx
= make_ssa_name (sizetype
);
2575 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
2581 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
2584 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2585 NULL_TREE
, NULL_TREE
);
2588 m_gsi
= gsi_after_labels (edge_bb
);
2590 m_gsi
= gsi_for_stmt (stmt
);
2599 ext
= add_cast (signed_type_for (m_limb_type
), ext
);
2600 tree lpm1
= build_int_cst (unsigned_type_node
,
2602 tree n
= make_ssa_name (TREE_TYPE (ext
));
2603 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
2605 ext
= add_cast (m_limb_type
, n
);
2608 ext
= build_zero_cst (m_limb_type
);
2609 kind
= bitint_precision_kind (type
);
2610 unsigned start
= CEIL (prec
, limb_prec
);
2611 prec
= TYPE_PRECISION (type
);
2612 idx
= idx_first
= idx_next
= NULL_TREE
;
2613 if (prec
<= (start
+ 2 + (bo_bit
!= 0)) * limb_prec
)
2614 kind
= bitint_prec_large
;
2615 if (kind
== bitint_prec_large
)
2616 cnt
= CEIL (prec
, limb_prec
) - start
;
2619 rem
= prec
% limb_prec
;
2620 end
= (prec
- rem
) / limb_prec
;
2621 cnt
= (bo_bit
!= 0) + 1 + (rem
!= 0);
2623 for (unsigned i
= 0; i
< cnt
; i
++)
2625 if (kind
== bitint_prec_large
|| (i
== 0 && bo_bit
!= 0))
2626 idx
= size_int (start
+ i
);
2627 else if (i
== cnt
- 1 && (rem
!= 0))
2628 idx
= size_int (end
);
2629 else if (i
== (bo_bit
!= 0))
2630 idx
= create_loop (size_int (start
+ i
), &idx_next
);
2632 if (bf_cur
!= NULL_TREE
&& bf_cur
!= ext
)
2634 tree t1
= make_ssa_name (m_limb_type
);
2635 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2636 build_int_cst (unsigned_type_node
,
2637 limb_prec
- bo_bit
));
2639 if (integer_zerop (ext
))
2643 tree t2
= make_ssa_name (m_limb_type
);
2644 rhs1
= make_ssa_name (m_limb_type
);
2645 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, ext
,
2646 build_int_cst (unsigned_type_node
,
2649 g
= gimple_build_assign (rhs1
, BIT_IOR_EXPR
, t1
, t2
);
2657 if (tree_fits_uhwi_p (idx
))
2658 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2661 nidx
= make_ssa_name (sizetype
);
2662 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2668 /* Handle bit-field access to partial last limb if needed. */
2669 if (nlhs
&& i
== cnt
- 1)
2671 unsigned int tprec
= TYPE_PRECISION (type
);
2672 unsigned int rprec
= tprec
% limb_prec
;
2673 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2676 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2677 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2678 unshare_expr (nlhs
),
2679 bitsize_int (rprec
+ bo_bit
),
2680 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2682 tree t
= add_cast (ftype
, rhs1
);
2683 g
= gimple_build_assign (bfr
, t
);
2687 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2690 /* Otherwise, stores to any other lhs. */
2693 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
, nidx
, true);
2694 g
= gimple_build_assign (l
, rhs1
);
2699 maybe_duplicate_eh_stmt (g
, stmt
);
2702 edge e
= split_block (gsi_bb (m_gsi
), g
);
2703 m_gsi
= gsi_after_labels (e
->dest
);
2704 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2705 = profile_probability::very_unlikely ();
2708 if (kind
== bitint_prec_huge
&& i
== (bo_bit
!= 0))
2710 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
2713 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2714 NULL_TREE
, NULL_TREE
);
2716 m_gsi
= gsi_for_stmt (stmt
);
2720 if (bf_cur
!= NULL_TREE
)
2722 unsigned int tprec
= TYPE_PRECISION (type
);
2723 unsigned int rprec
= tprec
% limb_prec
;
2724 tree ftype
= build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2725 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2726 bitsize_int (rprec
+ bo_bit
),
2727 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2732 rhs1
= make_ssa_name (TREE_TYPE (rhs1
));
2733 g
= gimple_build_assign (rhs1
, RSHIFT_EXPR
, bf_cur
,
2734 build_int_cst (unsigned_type_node
,
2735 limb_prec
- bo_bit
));
2738 rhs1
= add_cast (ftype
, rhs1
);
2739 g
= gimple_build_assign (bfr
, rhs1
);
2743 maybe_duplicate_eh_stmt (g
, stmt
);
2746 edge e
= split_block (gsi_bb (m_gsi
), g
);
2747 m_gsi
= gsi_after_labels (e
->dest
);
2748 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2749 = profile_probability::very_unlikely ();
2754 if (gimple_store_p (stmt
))
2756 unlink_stmt_vdef (stmt
);
2757 release_ssa_name (gimple_vdef (stmt
));
2758 gsi_remove (&m_gsi
, true);
2762 lhs
= make_ssa_name (boolean_type_node
);
2763 basic_block bb
= gimple_bb (stmt
);
2764 gphi
*phi
= create_phi_node (lhs
, bb
);
2765 edge e
= find_edge (gsi_bb (m_gsi
), bb
);
2766 unsigned int n
= EDGE_COUNT (bb
->preds
);
2767 for (unsigned int i
= 0; i
< n
; i
++)
2769 edge e2
= EDGE_PRED (bb
, i
);
2770 add_phi_arg (phi
, e
== e2
? boolean_true_node
: boolean_false_node
,
2771 e2
, UNKNOWN_LOCATION
);
2773 cmp_code
= cmp_code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
2780 /* Handle a large/huge _BitInt comparison statement STMT other than
2781 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2782 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2783 lowered by iteration from the most significant limb downwards to
2784 the least significant one, for large _BitInt in straight line code,
2785 otherwise with most significant limb handled in
2786 straight line code followed by a loop handling one limb at a time.
2787 Comparisons with unsigned huge _BitInt with precisions which are
2788 multiples of limb precision can use just the loop and don't need to
2789 handle most significant limb before the loop. The loop or straight
2790 line code jumps to final basic block if a particular pair of limbs
2794 bitint_large_huge::lower_comparison_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2795 tree cmp_op1
, tree cmp_op2
)
2797 tree type
= TREE_TYPE (cmp_op1
);
2798 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2799 bitint_prec_kind kind
= bitint_precision_kind (type
);
2800 gcc_assert (kind
>= bitint_prec_large
);
2802 if (!TYPE_UNSIGNED (type
)
2803 && integer_zerop (cmp_op2
)
2804 && (cmp_code
== GE_EXPR
|| cmp_code
== LT_EXPR
))
2806 unsigned end
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
) - 1;
2807 tree idx
= size_int (end
);
2809 tree rhs1
= handle_operand (cmp_op1
, idx
);
2810 if (TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2812 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2813 rhs1
= add_cast (stype
, rhs1
);
2815 tree lhs
= make_ssa_name (boolean_type_node
);
2816 g
= gimple_build_assign (lhs
, cmp_code
, rhs1
,
2817 build_zero_cst (TREE_TYPE (rhs1
)));
2823 unsigned cnt
, rem
= 0, end
= 0;
2824 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
;
2825 if (kind
== bitint_prec_large
)
2826 cnt
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
);
2829 rem
= ((unsigned) TYPE_PRECISION (type
) % limb_prec
);
2830 if (rem
== 0 && !TYPE_UNSIGNED (type
))
2832 end
= ((unsigned) TYPE_PRECISION (type
) - rem
) / limb_prec
;
2833 cnt
= 1 + (rem
!= 0);
2836 basic_block edge_bb
= NULL
;
2837 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2839 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2841 m_gsi
= gsi_end_bb (edge_bb
);
2843 edge
*edges
= XALLOCAVEC (edge
, cnt
* 2);
2844 for (unsigned i
= 0; i
< cnt
; i
++)
2847 if (kind
== bitint_prec_large
)
2848 idx
= size_int (cnt
- i
- 1);
2849 else if (i
== cnt
- 1)
2850 idx
= create_loop (size_int (end
- 1), &idx_next
);
2852 idx
= size_int (end
);
2853 tree rhs1
= handle_operand (cmp_op1
, idx
);
2854 tree rhs2
= handle_operand (cmp_op2
, idx
);
2856 && !TYPE_UNSIGNED (type
)
2857 && TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2859 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2860 rhs1
= add_cast (stype
, rhs1
);
2861 rhs2
= add_cast (stype
, rhs2
);
2863 g
= gimple_build_cond (GT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2865 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2866 e1
->flags
= EDGE_FALSE_VALUE
;
2867 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2868 e1
->probability
= profile_probability::likely ();
2869 e2
->probability
= e1
->probability
.invert ();
2871 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2872 m_gsi
= gsi_after_labels (e1
->dest
);
2874 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2876 e1
= split_block (gsi_bb (m_gsi
), g
);
2877 e1
->flags
= EDGE_FALSE_VALUE
;
2878 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2879 e1
->probability
= profile_probability::unlikely ();
2880 e2
->probability
= e1
->probability
.invert ();
2881 m_gsi
= gsi_after_labels (e1
->dest
);
2882 edges
[2 * i
+ 1] = e2
;
2884 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
2886 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
2888 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
2889 NULL_TREE
, NULL_TREE
);
2891 edge true_edge
, false_edge
;
2892 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
2893 &true_edge
, &false_edge
);
2894 m_gsi
= gsi_after_labels (false_edge
->dest
);
2898 tree lhs
= make_ssa_name (boolean_type_node
);
2899 basic_block bb
= gimple_bb (stmt
);
2900 gphi
*phi
= create_phi_node (lhs
, bb
);
2901 for (unsigned int i
= 0; i
< cnt
* 2; i
++)
2903 tree val
= ((cmp_code
== GT_EXPR
|| cmp_code
== GE_EXPR
)
2904 ^ (i
& 1)) ? boolean_true_node
: boolean_false_node
;
2905 add_phi_arg (phi
, val
, edges
[i
], UNKNOWN_LOCATION
);
2907 add_phi_arg (phi
, (cmp_code
== GE_EXPR
|| cmp_code
== LE_EXPR
)
2908 ? boolean_true_node
: boolean_false_node
,
2909 find_edge (gsi_bb (m_gsi
), bb
), UNKNOWN_LOCATION
);
2914 /* Lower large/huge _BitInt left and right shift except for left
2915 shift by < limb_prec constant. */
2918 bitint_large_huge::lower_shift_stmt (tree obj
, gimple
*stmt
)
2920 tree rhs1
= gimple_assign_rhs1 (stmt
);
2921 tree lhs
= gimple_assign_lhs (stmt
);
2922 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
2923 tree type
= TREE_TYPE (rhs1
);
2924 gimple
*final_stmt
= gsi_stmt (m_gsi
);
2925 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
2926 && bitint_precision_kind (type
) >= bitint_prec_large
);
2927 int prec
= TYPE_PRECISION (type
);
2928 tree n
= gimple_assign_rhs2 (stmt
), n1
, n2
, n3
, n4
;
2930 if (obj
== NULL_TREE
)
2932 int part
= var_to_partition (m_map
, lhs
);
2933 gcc_assert (m_vars
[part
] != NULL_TREE
);
2936 /* Preparation code common for both left and right shifts.
2937 unsigned n1 = n % limb_prec;
2938 size_t n2 = n / limb_prec;
2939 size_t n3 = n1 != 0;
2940 unsigned n4 = (limb_prec - n1) % limb_prec;
2941 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
2942 if (TREE_CODE (n
) == INTEGER_CST
)
2944 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
2945 n1
= int_const_binop (TRUNC_MOD_EXPR
, n
, lp
);
2946 n2
= fold_convert (sizetype
, int_const_binop (TRUNC_DIV_EXPR
, n
, lp
));
2947 n3
= size_int (!integer_zerop (n1
));
2948 n4
= int_const_binop (TRUNC_MOD_EXPR
,
2949 int_const_binop (MINUS_EXPR
, lp
, n1
), lp
);
2953 n1
= make_ssa_name (TREE_TYPE (n
));
2954 n2
= make_ssa_name (sizetype
);
2955 n3
= make_ssa_name (sizetype
);
2956 n4
= make_ssa_name (TREE_TYPE (n
));
2957 if (pow2p_hwi (limb_prec
))
2959 tree lpm1
= build_int_cst (TREE_TYPE (n
), limb_prec
- 1);
2960 g
= gimple_build_assign (n1
, BIT_AND_EXPR
, n
, lpm1
);
2962 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
2964 ? n2
: make_ssa_name (TREE_TYPE (n
)),
2966 build_int_cst (TREE_TYPE (n
),
2967 exact_log2 (limb_prec
)));
2969 if (gimple_assign_lhs (g
) != n2
)
2971 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
2974 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
2977 g
= gimple_build_assign (n4
, BIT_AND_EXPR
, gimple_assign_lhs (g
),
2983 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
2984 g
= gimple_build_assign (n1
, TRUNC_MOD_EXPR
, n
, lp
);
2986 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
2988 ? n2
: make_ssa_name (TREE_TYPE (n
)),
2989 TRUNC_DIV_EXPR
, n
, lp
);
2991 if (gimple_assign_lhs (g
) != n2
)
2993 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
2996 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
2997 MINUS_EXPR
, lp
, n1
);
2999 g
= gimple_build_assign (n4
, TRUNC_MOD_EXPR
, gimple_assign_lhs (g
),
3003 g
= gimple_build_assign (make_ssa_name (boolean_type_node
), NE_EXPR
, n1
,
3004 build_zero_cst (TREE_TYPE (n
)));
3006 g
= gimple_build_assign (n3
, NOP_EXPR
, gimple_assign_lhs (g
));
3009 tree p
= build_int_cst (sizetype
,
3010 prec
/ limb_prec
- (prec
% limb_prec
== 0));
3011 if (rhs_code
== RSHIFT_EXPR
)
3016 unsigned n1 = n % limb_prec;
3017 size_t n2 = n / limb_prec;
3018 size_t n3 = n1 != 0;
3019 unsigned n4 = (limb_prec - n1) % limb_prec;
3021 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3022 int signed_p = (typeof (src) -1) < 0;
3023 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3024 ? p : p - n3); ++idx)
3025 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3027 if (prec % limb_prec == 0)
3030 ext = ((signed limb_type) (src[p] << (limb_prec
3031 - (prec % limb_prec))))
3032 >> (limb_prec - (prec % limb_prec));
3034 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3035 if (!signed_p && (prec % limb_prec == 0))
3037 else if (idx < prec / 64)
3039 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3045 dst[idx] = ((signed limb_type) ext) >> n1;
3046 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3050 dst[idx] = ext >> n1;
3053 for (++idx; idx <= p; ++idx)
3056 if (TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3058 else if (TREE_CODE (n3
) == INTEGER_CST
)
3059 pmn3
= int_const_binop (MINUS_EXPR
, p
, n3
);
3062 pmn3
= make_ssa_name (sizetype
);
3063 g
= gimple_build_assign (pmn3
, MINUS_EXPR
, p
, n3
);
3066 g
= gimple_build_cond (LT_EXPR
, n2
, pmn3
, NULL_TREE
, NULL_TREE
);
3067 edge edge_true
, edge_false
;
3068 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3070 tree idx
= create_loop (n2
, &idx_next
);
3071 tree idxmn2
= make_ssa_name (sizetype
);
3072 tree idxpn3
= make_ssa_name (sizetype
);
3073 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3075 g
= gimple_build_assign (idxpn3
, PLUS_EXPR
, idx
, n3
);
3078 tree t1
= handle_operand (rhs1
, idx
);
3080 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3081 RSHIFT_EXPR
, t1
, n1
);
3083 t1
= gimple_assign_lhs (g
);
3084 if (!integer_zerop (n3
))
3087 tree t2
= handle_operand (rhs1
, idxpn3
);
3088 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3089 LSHIFT_EXPR
, t2
, n4
);
3091 t2
= gimple_assign_lhs (g
);
3092 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3093 BIT_IOR_EXPR
, t1
, t2
);
3095 t1
= gimple_assign_lhs (g
);
3097 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3098 g
= gimple_build_assign (l
, t1
);
3100 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3102 g
= gimple_build_cond (LT_EXPR
, idx_next
, pmn3
, NULL_TREE
, NULL_TREE
);
3104 idx
= make_ssa_name (sizetype
);
3105 m_gsi
= gsi_for_stmt (final_stmt
);
3106 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3107 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3108 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3109 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3110 add_phi_arg (phi
, n2
, edge_false
, UNKNOWN_LOCATION
);
3111 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3113 tree ms
= handle_operand (rhs1
, p
);
3115 if (!types_compatible_p (TREE_TYPE (ms
), m_limb_type
))
3116 ext
= add_cast (m_limb_type
, ms
);
3117 if (!(TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3118 && !integer_zerop (n3
))
3120 g
= gimple_build_cond (LT_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3121 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3123 t1
= handle_operand (rhs1
, idx
);
3124 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3125 RSHIFT_EXPR
, t1
, n1
);
3127 t1
= gimple_assign_lhs (g
);
3128 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3129 LSHIFT_EXPR
, ext
, n4
);
3131 tree t2
= gimple_assign_lhs (g
);
3132 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3133 BIT_IOR_EXPR
, t1
, t2
);
3135 t1
= gimple_assign_lhs (g
);
3136 idxmn2
= make_ssa_name (sizetype
);
3137 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3139 l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3140 g
= gimple_build_assign (l
, t1
);
3142 idx_next
= make_ssa_name (sizetype
);
3143 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3145 m_gsi
= gsi_for_stmt (final_stmt
);
3146 tree nidx
= make_ssa_name (sizetype
);
3147 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3148 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3149 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3150 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3151 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3152 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3155 g
= gimple_build_assign (make_ssa_name (sizetype
), MINUS_EXPR
, idx
, n2
);
3157 idx
= gimple_assign_lhs (g
);
3159 if (!TYPE_UNSIGNED (type
))
3160 sext
= add_cast (signed_type_for (m_limb_type
), ext
);
3161 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3162 RSHIFT_EXPR
, sext
, n1
);
3164 t1
= gimple_assign_lhs (g
);
3165 if (!TYPE_UNSIGNED (type
))
3167 t1
= add_cast (m_limb_type
, t1
);
3168 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3170 build_int_cst (TREE_TYPE (n
),
3173 ext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3176 ext
= build_zero_cst (m_limb_type
);
3177 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3178 g
= gimple_build_assign (l
, t1
);
3180 g
= gimple_build_assign (make_ssa_name (sizetype
), PLUS_EXPR
, idx
,
3183 idx
= gimple_assign_lhs (g
);
3184 g
= gimple_build_cond (LE_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3185 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3186 idx
= create_loop (idx
, &idx_next
);
3187 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3188 g
= gimple_build_assign (l
, ext
);
3190 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3192 g
= gimple_build_cond (LE_EXPR
, idx_next
, p
, NULL_TREE
, NULL_TREE
);
3200 unsigned n1 = n % limb_prec;
3201 size_t n2 = n / limb_prec;
3202 size_t n3 = n1 != 0;
3203 unsigned n4 = (limb_prec - n1) % limb_prec;
3205 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3206 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3207 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3210 dst[idx] = src[idx - n2] << n1;
3213 for (; (ssize_t) idx >= 0; --idx)
3216 if (TREE_CODE (n2
) == INTEGER_CST
&& TREE_CODE (n3
) == INTEGER_CST
)
3217 n2pn3
= int_const_binop (PLUS_EXPR
, n2
, n3
);
3220 n2pn3
= make_ssa_name (sizetype
);
3221 g
= gimple_build_assign (n2pn3
, PLUS_EXPR
, n2
, n3
);
3224 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3225 idx even to access the most significant partial limb. */
3227 if (integer_zerop (n3
))
3228 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3229 counts. Emit if (true) condition that can be optimized later. */
3230 g
= gimple_build_cond (NE_EXPR
, boolean_true_node
, boolean_false_node
,
3231 NULL_TREE
, NULL_TREE
);
3233 g
= gimple_build_cond (LE_EXPR
, n2pn3
, p
, NULL_TREE
, NULL_TREE
);
3234 edge edge_true
, edge_false
;
3235 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3237 tree idx
= create_loop (p
, &idx_next
);
3238 tree idxmn2
= make_ssa_name (sizetype
);
3239 tree idxmn2mn3
= make_ssa_name (sizetype
);
3240 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3242 g
= gimple_build_assign (idxmn2mn3
, MINUS_EXPR
, idxmn2
, n3
);
3245 tree t1
= handle_operand (rhs1
, idxmn2
);
3247 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3248 LSHIFT_EXPR
, t1
, n1
);
3250 t1
= gimple_assign_lhs (g
);
3251 if (!integer_zerop (n3
))
3254 tree t2
= handle_operand (rhs1
, idxmn2mn3
);
3255 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3256 RSHIFT_EXPR
, t2
, n4
);
3258 t2
= gimple_assign_lhs (g
);
3259 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3260 BIT_IOR_EXPR
, t1
, t2
);
3262 t1
= gimple_assign_lhs (g
);
3264 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3265 g
= gimple_build_assign (l
, t1
);
3267 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3269 tree sn2pn3
= add_cast (ssizetype
, n2pn3
);
3270 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
), sn2pn3
,
3271 NULL_TREE
, NULL_TREE
);
3273 idx
= make_ssa_name (sizetype
);
3274 m_gsi
= gsi_for_stmt (final_stmt
);
3275 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3276 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3277 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3278 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3279 add_phi_arg (phi
, p
, edge_false
, UNKNOWN_LOCATION
);
3280 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3282 if (!integer_zerop (n3
))
3284 g
= gimple_build_cond (NE_EXPR
, n3
, size_zero_node
,
3285 NULL_TREE
, NULL_TREE
);
3286 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3287 idxmn2
= make_ssa_name (sizetype
);
3288 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3291 t1
= handle_operand (rhs1
, idxmn2
);
3292 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3293 LSHIFT_EXPR
, t1
, n1
);
3295 t1
= gimple_assign_lhs (g
);
3296 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3297 g
= gimple_build_assign (l
, t1
);
3299 idx_next
= make_ssa_name (sizetype
);
3300 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3302 m_gsi
= gsi_for_stmt (final_stmt
);
3303 tree nidx
= make_ssa_name (sizetype
);
3304 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3305 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3306 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3307 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3308 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3309 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3312 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx
),
3313 ssize_int (0), NULL_TREE
, NULL_TREE
);
3314 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3315 idx
= create_loop (idx
, &idx_next
);
3316 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3317 g
= gimple_build_assign (l
, build_zero_cst (m_limb_type
));
3319 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3321 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
),
3322 ssize_int (0), NULL_TREE
, NULL_TREE
);
3327 /* Lower large/huge _BitInt multiplication or division. */
3330 bitint_large_huge::lower_muldiv_stmt (tree obj
, gimple
*stmt
)
3332 tree rhs1
= gimple_assign_rhs1 (stmt
);
3333 tree rhs2
= gimple_assign_rhs2 (stmt
);
3334 tree lhs
= gimple_assign_lhs (stmt
);
3335 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3336 tree type
= TREE_TYPE (rhs1
);
3337 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3338 && bitint_precision_kind (type
) >= bitint_prec_large
);
3339 int prec
= TYPE_PRECISION (type
), prec1
, prec2
;
3340 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec1
);
3341 rhs2
= handle_operand_addr (rhs2
, stmt
, NULL
, &prec2
);
3342 if (obj
== NULL_TREE
)
3344 int part
= var_to_partition (m_map
, lhs
);
3345 gcc_assert (m_vars
[part
] != NULL_TREE
);
3347 lhs
= build_fold_addr_expr (obj
);
3351 lhs
= build_fold_addr_expr (obj
);
3352 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3353 NULL_TREE
, true, GSI_SAME_STMT
);
3355 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
3360 g
= gimple_build_call_internal (IFN_MULBITINT
, 6,
3361 lhs
, build_int_cst (sitype
, prec
),
3362 rhs1
, build_int_cst (sitype
, prec1
),
3363 rhs2
, build_int_cst (sitype
, prec2
));
3366 case TRUNC_DIV_EXPR
:
3367 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8,
3368 lhs
, build_int_cst (sitype
, prec
),
3370 build_int_cst (sitype
, 0),
3371 rhs1
, build_int_cst (sitype
, prec1
),
3372 rhs2
, build_int_cst (sitype
, prec2
));
3373 if (!stmt_ends_bb_p (stmt
))
3374 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3377 case TRUNC_MOD_EXPR
:
3378 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8, null_pointer_node
,
3379 build_int_cst (sitype
, 0),
3380 lhs
, build_int_cst (sitype
, prec
),
3381 rhs1
, build_int_cst (sitype
, prec1
),
3382 rhs2
, build_int_cst (sitype
, prec2
));
3383 if (!stmt_ends_bb_p (stmt
))
3384 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3390 if (stmt_ends_bb_p (stmt
))
3392 maybe_duplicate_eh_stmt (g
, stmt
);
3395 basic_block bb
= gimple_bb (stmt
);
3397 FOR_EACH_EDGE (e1
, ei
, bb
->succs
)
3398 if (e1
->flags
& EDGE_EH
)
3402 edge e2
= split_block (gsi_bb (m_gsi
), g
);
3403 m_gsi
= gsi_after_labels (e2
->dest
);
3404 make_edge (e2
->src
, e1
->dest
, EDGE_EH
)->probability
3405 = profile_probability::very_unlikely ();
3410 /* Lower large/huge _BitInt conversion to/from floating point. */
/* NOTE(review): this chunk is a corrupted extraction -- statements are split
   across lines, original line numbers are fused into the text, and some lines
   were dropped.  Code is left byte-identical; comments describe only what is
   visible.  */
3413 bitint_large_huge::lower_float_conv_stmt (tree obj
, gimple
*stmt
)
3415 tree rhs1
= gimple_assign_rhs1 (stmt
);
3416 tree lhs
= gimple_assign_lhs (stmt
);
3417 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* SImode integer type used for the bit-precision arguments of the
   IFN_* library-call replacements built below.  */
3418 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
/* FIX_TRUNC_EXPR: floating point -> _BitInt, lowered to IFN_FLOATTOBITINT;
   the other direction below uses IFN_BITINTTOFLOAT.  */
3420 if (rhs_code
== FIX_TRUNC_EXPR
)
3422 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
3423 if (!TYPE_UNSIGNED (TREE_TYPE (lhs
)))
3425 if (obj
== NULL_TREE
)
3427 int part
= var_to_partition (m_map
, lhs
);
3428 gcc_assert (m_vars
[part
] != NULL_TREE
);
/* LHS is replaced by the address of the object the result is stored into.  */
3430 lhs
= build_fold_addr_expr (obj
);
3434 lhs
= build_fold_addr_expr (obj
);
3435 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3436 NULL_TREE
, true, GSI_SAME_STMT
);
3438 scalar_mode from_mode
3439 = as_a
<scalar_mode
> (TYPE_MODE (TREE_TYPE (rhs1
)));
3441 /* IEEE single is a full superset of both IEEE half and
3442 bfloat formats, convert to float first and then to _BitInt
3443 to avoid the need of another 2 library routines. */
3444 if ((REAL_MODE_FORMAT (from_mode
) == &arm_bfloat_half_format
3445 || REAL_MODE_FORMAT (from_mode
) == &ieee_half_format
)
3446 && REAL_MODE_FORMAT (SFmode
) == &ieee_single_format
)
3448 tree type
= lang_hooks
.types
.type_for_mode (SFmode
, 0);
3450 rhs1
= add_cast (type
, rhs1
);
3453 g
= gimple_build_call_internal (IFN_FLOATTOBITINT
, 3,
3454 lhs
, build_int_cst (sitype
, prec
),
/* _BitInt -> floating point path: pass the operand by address plus its
   bit precision to IFN_BITINTTOFLOAT.  */
3461 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec
);
3462 g
= gimple_build_call_internal (IFN_BITINTTOFLOAT
, 2,
3463 rhs1
, build_int_cst (sitype
, prec
));
3464 gimple_call_set_lhs (g
, lhs
);
/* Mark the call nothrow unless the original statement could end a BB
   (e.g. because of EH).  */
3465 if (!stmt_ends_bb_p (stmt
))
3466 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3467 gsi_replace (&m_gsi
, g
, true);
3471 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3472 If check_zero is true, caller wants to check if all bits in [start, end)
3473 are zero, otherwise if bits in [start, end) are either all zero or
3474 all ones. L is the limb with index LIMB, START and END are measured
/* NOTE(review): corrupted extraction -- statements split across lines and
   some dropped; code left byte-identical.  The function returns a limb-typed
   value that is non-zero iff the selected bits of L overflowed.  */
3478 bitint_large_huge::arith_overflow_extract_bits (unsigned int start
,
3479 unsigned int end
, tree l
,
3483 unsigned startlimb
= start
/ limb_prec
;
3484 unsigned endlimb
= (end
- 1) / limb_prec
;
/* Case 1: [start, end) is limb-aligned on both sides.  */
3487 if ((start
% limb_prec
) == 0 && (end
% limb_prec
) == 0)
/* Single-limb range fully inside LIMB: mask out bits [start%limb_prec,
   start%limb_prec + (end-start)) with a shifted mask.  */
3489 if (startlimb
== endlimb
&& limb
== startlimb
)
3493 wide_int w
= wi::shifted_mask (start
% limb_prec
,
3494 end
- start
, false, limb_prec
);
3495 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3497 wide_int_to_tree (m_limb_type
, w
));
3499 return gimple_assign_lhs (g
);
3501 unsigned int shift
= start
% limb_prec
;
3502 if ((end
% limb_prec
) != 0)
3504 unsigned int lshift
= (-end
) % limb_prec
;
3506 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3508 build_int_cst (unsigned_type_node
,
3511 l
= gimple_assign_lhs (g
);
/* Use an arithmetic (signed) right shift so high bits replicate the
   sign bit, then cast back to the unsigned limb type.  */
3513 l
= add_cast (signed_type_for (m_limb_type
), l
);
3514 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3516 build_int_cst (unsigned_type_node
, shift
));
3518 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
/* LIMB is the first limb of the range: shift the partial low bits away.  */
3520 else if (limb
== startlimb
)
3522 if ((start
% limb_prec
) == 0)
3525 l
= add_cast (signed_type_for (m_limb_type
), l
);
3526 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3528 build_int_cst (unsigned_type_node
,
3529 start
% limb_prec
));
3531 l
= gimple_assign_lhs (g
);
3533 l
= add_cast (m_limb_type
, l
);
/* LIMB is the last limb of the range: mask or shift off bits above END.  */
3536 else if (limb
== endlimb
)
3538 if ((end
% limb_prec
) == 0)
3542 wide_int w
= wi::mask (end
% limb_prec
, false, limb_prec
);
3543 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3545 wide_int_to_tree (m_limb_type
, w
));
3547 return gimple_assign_lhs (g
);
3549 unsigned int shift
= (-end
) % limb_prec
;
3550 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3552 build_int_cst (unsigned_type_node
, shift
));
/* Shift left then arithmetic shift right by the same amount to
   sign-extend the in-range bits.  */
3554 l
= add_cast (signed_type_for (m_limb_type
), gimple_assign_lhs (g
));
3555 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3557 build_int_cst (unsigned_type_node
, shift
));
3559 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
3564 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3565 result including overflow flag into the right locations. */
/* NOTE(review): corrupted extraction -- statements split across lines and
   some lines dropped; code left byte-identical.  Comments only describe
   what is visible here.  */
3568 bitint_large_huge::finish_arith_overflow (tree var
, tree obj
, tree type
,
3569 tree ovf
, tree lhs
, tree orig_obj
,
3570 gimple
*stmt
, tree_code code
)
/* Case 1: no OBJ and the result type fits a small/middle _BitInt or a
   normal integral type -- reassemble the value from at most two limbs
   and replace STMT with a COMPLEX_EXPR of (result, overflow flag).  */
3574 if (obj
== NULL_TREE
3575 && (TREE_CODE (type
) != BITINT_TYPE
3576 || bitint_precision_kind (type
) < bitint_prec_large
))
3578 /* Add support for 3 or more limbs filled in from normal integral
3579 type if this assert fails. If no target chooses limb mode smaller
3580 than half of largest supported normal integral type, this will not
3582 gcc_assert (TYPE_PRECISION (type
) <= 2 * limb_prec
);
3583 tree lhs_type
= type
;
3584 if (TREE_CODE (type
) == BITINT_TYPE
3585 && bitint_precision_kind (type
) == bitint_prec_middle
)
3586 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (type
),
3587 TYPE_UNSIGNED (type
));
/* Low limb of the result.  */
3588 tree r1
= limb_access (NULL_TREE
, var
, size_int (0), true);
3589 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r1
);
3591 r1
= gimple_assign_lhs (g
);
3592 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
3593 r1
= add_cast (lhs_type
, r1
);
/* If the result needs two limbs, OR in the second limb shifted left.  */
3594 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
3596 tree r2
= limb_access (NULL_TREE
, var
, size_int (1), true);
3597 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r2
);
3599 r2
= gimple_assign_lhs (g
);
3600 r2
= add_cast (lhs_type
, r2
);
3601 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
3602 build_int_cst (unsigned_type_node
,
3605 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
3606 gimple_assign_lhs (g
));
3608 r1
= gimple_assign_lhs (g
);
3610 if (lhs_type
!= type
)
3611 r1
= add_cast (type
, r1
);
3612 ovf
= add_cast (lhs_type
, ovf
);
3613 if (lhs_type
!= type
)
3614 ovf
= add_cast (type
, ovf
);
3615 g
= gimple_build_assign (lhs
, COMPLEX_EXPR
, r1
, ovf
);
3616 m_gsi
= gsi_for_stmt (stmt
);
3617 gsi_replace (&m_gsi
, g
, true);
/* Case 2 (visible below): result lives in OBJ -- copy VAR's limbs into
   OBJ via a MEM_REF/VIEW_CONVERT_EXPR array assignment.  */
3621 unsigned HOST_WIDE_INT nelts
= 0;
3622 tree atype
= NULL_TREE
;
3625 nelts
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
3626 if (orig_obj
== NULL_TREE
)
3628 atype
= build_array_type_nelts (m_limb_type
, nelts
);
3634 if (orig_obj
== NULL_TREE
)
3636 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (obj
)));
3637 v1
= build2 (MEM_REF
, atype
,
3638 build_fold_addr_expr (unshare_expr (obj
)), zero
);
3640 else if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
3641 v1
= build1 (VIEW_CONVERT_EXPR
, atype
, unshare_expr (obj
));
3643 v1
= unshare_expr (obj
);
3644 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (var
)));
3645 v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), zero
);
3646 g
= gimple_build_assign (v1
, v2
);
/* When OBJ was supplied by the caller only for scratch (orig_obj is
   NULL), also store the overflow flag as an extra limb past the result
   and zero the remaining tail limbs.  */
3649 if (orig_obj
== NULL_TREE
&& obj
)
3651 ovf
= add_cast (m_limb_type
, ovf
);
3652 tree l
= limb_access (NULL_TREE
, obj
, size_int (nelts
), true);
3653 g
= gimple_build_assign (l
, ovf
);
3657 atype
= build_array_type_nelts (m_limb_type
, nelts
- 1);
3658 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (obj
)),
3659 (nelts
+ 1) * m_limb_size
);
3660 tree v1
= build2 (MEM_REF
, atype
,
3661 build_fold_addr_expr (unshare_expr (obj
)),
3663 g
= gimple_build_assign (v1
, build_zero_cst (atype
));
/* Case 3: LHS has complex type -- rewrite each IMAGPART_EXPR use of LHS
   to assign the overflow flag directly.  */
3667 else if (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
)
3669 imm_use_iterator ui
;
3670 use_operand_p use_p
;
3671 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
3673 g
= USE_STMT (use_p
);
3674 if (!is_gimple_assign (g
)
3675 || gimple_assign_rhs_code (g
) != IMAGPART_EXPR
)
3677 tree lhs2
= gimple_assign_lhs (g
);
3679 single_imm_use (lhs2
, &use_p
, &use_stmt
);
3680 lhs2
= gimple_assign_lhs (use_stmt
);
3681 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
3682 if (useless_type_conversion_p (TREE_TYPE (lhs2
), TREE_TYPE (ovf
)))
3683 g
= gimple_build_assign (lhs2
, ovf
);
3685 g
= gimple_build_assign (lhs2
, NOP_EXPR
, ovf
);
3686 gsi_replace (&gsi
, g
, true);
3687 if (gsi_stmt (m_gsi
) == use_stmt
)
3688 m_gsi
= gsi_for_stmt (g
);
/* Case 4: UBSAN-style check -- on overflow (expected very unlikely)
   call the ubsan overflow builtin for CODE.  */
3692 else if (ovf
!= boolean_false_node
)
3694 g
= gimple_build_cond (NE_EXPR
, ovf
, boolean_false_node
,
3695 NULL_TREE
, NULL_TREE
);
3696 edge edge_true
, edge_false
;
3697 if_then (g
, profile_probability::very_unlikely (),
3698 edge_true
, edge_false
);
3699 tree zero
= build_zero_cst (TREE_TYPE (lhs
));
3700 tree fn
= ubsan_build_overflow_builtin (code
, m_loc
,
3703 force_gimple_operand_gsi (&m_gsi
, fn
, true, NULL_TREE
,
3704 true, GSI_SAME_STMT
);
3705 m_gsi
= gsi_after_labels (edge_true
->dest
);
/* Clobber the temporary VAR so later passes know its lifetime ended.  */
3710 tree clobber
= build_clobber (TREE_TYPE (var
), CLOBBER_EOL
);
3711 g
= gimple_build_assign (var
, clobber
);
3712 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
3716 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3717 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3718 argument 1 precision PREC1 and minimum precision for the result
3719 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
/* NOTE(review): corrupted extraction -- statements split across lines and
   some lines dropped (several branches here are comment-only in this view);
   code left byte-identical.  Returns boolean_false_node when overflow is
   provably impossible, otherwise (per the header comment) fills in the
   [start, end) bit range the callers must test.  */
3722 arith_overflow (tree_code code
, tree type
, int prec
, int prec0
, int prec1
,
3723 int prec2
, unsigned *start
, unsigned *end
, bool *check_zero
)
3728 /* Ignore this special rule for subtraction, even if both
3729 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3730 in infinite precision. */
3731 if (code
!= MINUS_EXPR
&& prec0
>= 0 && prec1
>= 0)
3733 /* Result in [0, prec2) is unsigned, if prec > prec2,
3734 all bits above it will be zero. */
3735 if ((prec
- !TYPE_UNSIGNED (type
)) >= prec2
)
3736 return boolean_false_node
;
3739 /* ovf if any of bits in [start, end) is non-zero. */
3740 *start
= prec
- !TYPE_UNSIGNED (type
);
3744 else if (TYPE_UNSIGNED (type
))
3746 /* If result in [0, prec2) is signed and if prec > prec2,
3747 all bits above it will be sign bit copies. */
3750 /* ovf if bit prec - 1 is non-zero. */
3756 /* ovf if any of bits in [start, end) is non-zero. */
3761 else if (prec
>= prec2
)
3762 return boolean_false_node
;
3765 /* ovf if [start, end) bits aren't all zeros or all ones. */
3768 *check_zero
= false;
3773 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3774 argument or return type _Complex large/huge _BitInt. */
/* NOTE(review): corrupted extraction -- statements split across lines,
   original line numbers fused in, and a number of lines dropped; code is
   left byte-identical.  Comments below describe only what is visible.  */
3777 bitint_large_huge::lower_addsub_overflow (tree obj
, gimple
*stmt
)
3779 tree arg0
= gimple_call_arg (stmt
, 0);
3780 tree arg1
= gimple_call_arg (stmt
, 1);
3781 tree lhs
= gimple_call_lhs (stmt
);
/* If the call has no LHS, just remove it.  */
3786 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3787 gsi_remove (&gsi
, true);
3790 gimple
*final_stmt
= gsi_stmt (m_gsi
);
3791 tree type
= TREE_TYPE (lhs
);
3792 if (TREE_CODE (type
) == COMPLEX_TYPE
)
3793 type
= TREE_TYPE (type
);
3794 int prec
= TYPE_PRECISION (type
);
/* Minimum signed/unsigned precisions derived from value ranges of the
   arguments (see range_to_prec).  */
3795 int prec0
= range_to_prec (arg0
, stmt
);
3796 int prec1
= range_to_prec (arg1
, stmt
);
3797 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3798 the be minimum unsigned precision of any possible operation's
3799 result, otherwise it is minimum signed precision.
3801 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3802 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3803 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3804 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3805 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3806 8 + 8 [0, 0x1fe] 9 UNSIGNED
3807 8 + 10 [0, 0x4fe] 11 UNSIGNED
3808 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3809 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3810 8 + -8 [-0x80, 0x17e] 10 SIGNED
3811 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3812 10 + -8 [-0x80, 0x47e] 12 SIGNED
3813 8 - 8 [-0xff, 0xff] 9 SIGNED
3814 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3815 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3816 -8 - -8 [-0xff, 0xff] 9 SIGNED
3817 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3818 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3819 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3820 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3821 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3822 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3823 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3824 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3825 int prec2
= MAX (prec0
< 0 ? -prec0
: prec0
,
3826 prec1
< 0 ? -prec1
: prec1
);
3827 /* If operands are either both signed or both unsigned,
3828 we need just one additional bit. */
3829 prec2
= (((prec0
< 0) == (prec1
< 0)
3830 /* If one operand is signed and one unsigned and
3831 the signed one has larger precision, we need
3832 just one extra bit, otherwise two. */
3833 || (prec0
< 0 ? (prec2
== -prec0
&& prec2
!= prec1
)
3834 : (prec2
== -prec1
&& prec2
!= prec0
)))
3835 ? prec2
+ 1 : prec2
+ 2);
3836 int prec3
= MAX (prec0
< 0 ? -prec0
: prec0
,
3837 prec1
< 0 ? -prec1
: prec1
);
3838 prec3
= MAX (prec3
, prec
);
3839 tree var
= NULL_TREE
;
3840 tree orig_obj
= obj
;
/* If no destination object was given but the (large/huge _BitInt) LHS
   has a partition variable, use that as the destination.  */
3841 if (obj
== NULL_TREE
3842 && TREE_CODE (type
) == BITINT_TYPE
3843 && bitint_precision_kind (type
) >= bitint_prec_large
3845 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
3847 int part
= var_to_partition (m_map
, lhs
);
3848 gcc_assert (m_vars
[part
] != NULL_TREE
);
3850 if (TREE_TYPE (lhs
) == type
)
/* Otherwise compute into a temporary array of limbs VAR.  */
3853 if (TREE_CODE (type
) != BITINT_TYPE
3854 || bitint_precision_kind (type
) < bitint_prec_large
)
3856 unsigned HOST_WIDE_INT nelts
= CEIL (prec
, limb_prec
);
3857 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
3858 var
= create_tmp_var (atype
);
/* Map the internal function to PLUS_EXPR/MINUS_EXPR.  */
3861 enum tree_code code
;
3862 switch (gimple_call_internal_fn (stmt
))
3864 case IFN_ADD_OVERFLOW
:
3865 case IFN_UBSAN_CHECK_ADD
:
3868 case IFN_SUB_OVERFLOW
:
3869 case IFN_UBSAN_CHECK_SUB
:
3875 unsigned start
, end
;
3877 tree ovf
= arith_overflow (code
, type
, prec
, prec0
, prec1
, prec2
,
3878 &start
, &end
, &check_zero
);
3880 unsigned startlimb
, endlimb
;
3888 startlimb
= start
/ limb_prec
;
3889 endlimb
= (end
- 1) / limb_prec
;
3892 int prec4
= ovf
!= NULL_TREE
? prec
: prec3
;
3893 bitint_prec_kind kind
= bitint_precision_kind (prec4
);
3894 unsigned cnt
, rem
= 0, fin
= 0;
3895 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
3896 bool last_ovf
= (ovf
== NULL_TREE
3897 && CEIL (prec2
, limb_prec
) > CEIL (prec3
, limb_prec
));
/* Large: straight-line code over all limbs.  Huge: a loop handling two
   limbs per iteration (fin iterations) plus the remainder.  */
3898 if (kind
!= bitint_prec_huge
)
3899 cnt
= CEIL (prec4
, limb_prec
) + last_ovf
;
3902 rem
= (prec4
% (2 * limb_prec
));
3903 fin
= (prec4
- rem
) / limb_prec
;
3904 cnt
= 2 + CEIL (rem
, limb_prec
) + last_ovf
;
3905 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
3908 if (kind
== bitint_prec_huge
)
3909 m_upwards_2limb
= fin
;
3912 tree type0
= TREE_TYPE (arg0
);
3913 tree type1
= TREE_TYPE (arg1
);
/* Widen arguments to a common _BitInt precision prec5 if needed.  */
3915 if (bitint_precision_kind (prec5
) < bitint_prec_large
)
3916 prec5
= MAX (TYPE_PRECISION (type0
), TYPE_PRECISION (type1
));
3917 if (TYPE_PRECISION (type0
) < prec5
)
3919 type0
= build_bitint_type (prec5
, TYPE_UNSIGNED (type0
));
3920 if (TREE_CODE (arg0
) == INTEGER_CST
)
3921 arg0
= fold_convert (type0
, arg0
);
3923 if (TYPE_PRECISION (type1
) < prec5
)
3925 type1
= build_bitint_type (prec5
, TYPE_UNSIGNED (type1
));
3926 if (TREE_CODE (arg1
) == INTEGER_CST
)
3927 arg1
= fold_convert (type1
, arg1
);
3929 unsigned int data_cnt
= 0;
3930 tree last_rhs1
= NULL_TREE
, last_rhs2
= NULL_TREE
;
3931 tree cmp
= build_zero_cst (m_limb_type
);
3932 unsigned prec_limbs
= CEIL ((unsigned) prec
, limb_prec
);
3933 tree ovf_out
= NULL_TREE
, cmp_out
= NULL_TREE
;
/* Main per-limb loop: fetch limbs of both operands, add/subtract with
   carry via handle_plus_minus, detect overflow on the relevant limbs,
   and store result limbs.  */
3934 for (unsigned i
= 0; i
< cnt
; i
++)
3938 if (kind
!= bitint_prec_huge
)
3941 idx
= size_int (fin
+ (i
> 2));
3942 if (!last_ovf
|| i
< cnt
- 1)
3944 if (type0
!= TREE_TYPE (arg0
))
3945 rhs1
= handle_cast (type0
, arg0
, idx
);
3947 rhs1
= handle_operand (arg0
, idx
);
3948 if (type1
!= TREE_TYPE (arg1
))
3949 rhs2
= handle_cast (type1
, arg1
, idx
);
3951 rhs2
= handle_operand (arg1
, idx
);
3953 data_cnt
= m_data_cnt
;
3954 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
3955 rhs1
= add_cast (m_limb_type
, rhs1
);
3956 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs2
)))
3957 rhs2
= add_cast (m_limb_type
, rhs2
);
/* Extra iteration for LAST_OVF: operands past their precision are
   zero (unsigned) or sign-bit copies (signed).  */
3963 m_data_cnt
= data_cnt
;
3964 if (TYPE_UNSIGNED (type0
))
3965 rhs1
= build_zero_cst (m_limb_type
);
3968 rhs1
= add_cast (signed_type_for (m_limb_type
), last_rhs1
);
3969 if (TREE_CODE (rhs1
) == INTEGER_CST
)
3970 rhs1
= build_int_cst (m_limb_type
,
3971 tree_int_cst_sgn (rhs1
) < 0 ? -1 : 0);
3974 tree lpm1
= build_int_cst (unsigned_type_node
,
3976 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
3977 RSHIFT_EXPR
, rhs1
, lpm1
);
3979 rhs1
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3982 if (TYPE_UNSIGNED (type1
))
3983 rhs2
= build_zero_cst (m_limb_type
);
3986 rhs2
= add_cast (signed_type_for (m_limb_type
), last_rhs2
);
3987 if (TREE_CODE (rhs2
) == INTEGER_CST
)
3988 rhs2
= build_int_cst (m_limb_type
,
3989 tree_int_cst_sgn (rhs2
) < 0 ? -1 : 0);
3992 tree lpm1
= build_int_cst (unsigned_type_node
,
3994 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2
)),
3995 RSHIFT_EXPR
, rhs2
, lpm1
);
3997 rhs2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
4001 tree rhs
= handle_plus_minus (code
, rhs1
, rhs2
, idx
);
/* Overflow detection on limbs intersecting [start, end).  */
4002 if (ovf
!= boolean_false_node
)
4004 if (tree_fits_uhwi_p (idx
))
4006 unsigned limb
= tree_to_uhwi (idx
);
4007 if (limb
>= startlimb
&& limb
<= endlimb
)
4009 tree l
= arith_overflow_extract_bits (start
, end
, rhs
,
4011 tree this_ovf
= make_ssa_name (boolean_type_node
);
4012 if (ovf
== NULL_TREE
&& !check_zero
)
/* "All zeros or all ones" check: extracted bits + 1 must be > 1
   iff they are neither all zeros nor all ones.  */
4015 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4017 build_int_cst (m_limb_type
, 1));
4019 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4020 gimple_assign_lhs (g
),
4021 build_int_cst (m_limb_type
, 1));
4024 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4026 if (ovf
== NULL_TREE
)
4030 tree b
= make_ssa_name (boolean_type_node
);
4031 g
= gimple_build_assign (b
, BIT_IOR_EXPR
, ovf
, this_ovf
);
/* Non-constant IDX (inside the huge-prec loop): carry OVF/CMP through
   loop PHIs prepared by prepare_data_in_out.  */
4037 else if (startlimb
< fin
)
4039 if (m_first
&& startlimb
+ 2 < fin
)
4042 ovf
= prepare_data_in_out (boolean_false_node
, idx
, &data_out
);
4043 ovf_out
= m_data
.pop ();
4047 cmp
= prepare_data_in_out (cmp
, idx
, &data_out
);
4048 cmp_out
= m_data
.pop ();
4052 if (i
!= 0 || startlimb
!= fin
- 1)
4055 bool single_comparison
4056 = (startlimb
+ 2 >= fin
|| (startlimb
& 1) != (i
& 1));
4057 if (!single_comparison
)
4060 if (!check_zero
&& (start
% limb_prec
) == 0)
4061 single_comparison
= true;
4063 else if ((startlimb
& 1) == (i
& 1))
4067 g
= gimple_build_cond (cmp_code
, idx
, size_int (startlimb
),
4068 NULL_TREE
, NULL_TREE
);
4069 edge edge_true_true
, edge_true_false
, edge_false
;
4071 if (!single_comparison
)
4072 g2
= gimple_build_cond (NE_EXPR
, idx
,
4073 size_int (startlimb
), NULL_TREE
,
4075 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4076 profile_probability::likely (),
4077 edge_true_true
, edge_true_false
,
4079 unsigned tidx
= startlimb
+ (cmp_code
== GT_EXPR
);
4080 tree l
= arith_overflow_extract_bits (start
, end
, rhs
, tidx
,
4082 tree this_ovf
= make_ssa_name (boolean_type_node
);
4083 if (cmp_code
!= GT_EXPR
&& !check_zero
)
4085 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4087 build_int_cst (m_limb_type
, 1));
4089 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4090 gimple_assign_lhs (g
),
4091 build_int_cst (m_limb_type
, 1));
4094 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4096 if (cmp_code
== GT_EXPR
)
4098 tree t
= make_ssa_name (boolean_type_node
);
4099 g
= gimple_build_assign (t
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4103 tree this_ovf2
= NULL_TREE
;
4104 if (!single_comparison
)
4106 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4107 tree t
= make_ssa_name (boolean_type_node
);
4108 g
= gimple_build_assign (t
, NE_EXPR
, rhs
, cmp
);
4110 this_ovf2
= make_ssa_name (boolean_type_node
);
4111 g
= gimple_build_assign (this_ovf2
, BIT_IOR_EXPR
,
4115 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
/* Merge the OVF and CMP values computed on the different paths with
   PHIs at the join block.  */
4117 if (i
== 1 && ovf_out
)
4120 t
= make_ssa_name (boolean_type_node
);
4121 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
4122 add_phi_arg (phi
, this_ovf
, edge_true_false
,
4124 add_phi_arg (phi
, ovf
? ovf
4125 : boolean_false_node
, edge_false
,
4128 add_phi_arg (phi
, this_ovf2
, edge_true_true
,
4131 if (!check_zero
&& cmp_code
!= GT_EXPR
)
4133 t
= cmp_out
? cmp_out
: make_ssa_name (m_limb_type
);
4134 phi
= create_phi_node (t
, edge_true_false
->dest
);
4135 add_phi_arg (phi
, l
, edge_true_false
, UNKNOWN_LOCATION
);
4136 add_phi_arg (phi
, cmp
, edge_false
, UNKNOWN_LOCATION
);
4138 add_phi_arg (phi
, cmp
, edge_true_true
,
/* Store the result limb, but only for indexes below prec_limbs; inside
   the loop this needs runtime comparisons against prec_limbs.  */
4148 if (tree_fits_uhwi_p (idx
) && tree_to_uhwi (idx
) >= prec_limbs
)
4150 else if (!tree_fits_uhwi_p (idx
)
4151 && (unsigned) prec
< (fin
- (i
== 0)) * limb_prec
)
4153 bool single_comparison
4154 = (((unsigned) prec
% limb_prec
) == 0
4155 || prec_limbs
+ 1 >= fin
4156 || (prec_limbs
& 1) == (i
& 1));
4157 g
= gimple_build_cond (LE_EXPR
, idx
, size_int (prec_limbs
- 1),
4158 NULL_TREE
, NULL_TREE
);
4160 if (!single_comparison
)
4161 g2
= gimple_build_cond (LT_EXPR
, idx
,
4162 size_int (prec_limbs
- 1),
4163 NULL_TREE
, NULL_TREE
);
4164 edge edge_true_true
, edge_true_false
, edge_false
;
4165 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4166 profile_probability::likely (),
4167 edge_true_true
, edge_true_false
,
4169 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4170 g
= gimple_build_assign (l
, rhs
);
4172 if (!single_comparison
)
4174 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4175 l
= limb_access (type
, var
? var
: obj
,
4176 size_int (prec_limbs
- 1), true);
4177 if (!useless_type_conversion_p (TREE_TYPE (l
),
4179 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4180 g
= gimple_build_assign (l
, rhs
);
4183 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4187 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4188 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4189 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4190 g
= gimple_build_assign (l
, rhs
);
/* Huge-prec loop bookkeeping: advance IDX by 2 each iteration and loop
   back while idx_next != fin.  */
4195 if (kind
== bitint_prec_huge
&& i
<= 1)
4199 idx
= make_ssa_name (sizetype
);
4200 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4206 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4209 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4210 NULL_TREE
, NULL_TREE
);
4212 m_gsi
= gsi_for_stmt (final_stmt
);
/* Store result and overflow flag into their final locations.  */
4217 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
4220 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4221 argument or return type _Complex large/huge _BitInt. */
/* NOTE(review): corrupted extraction -- statements split across lines and
   some lines dropped; code left byte-identical.  Comments describe only
   what is visible.  */
4224 bitint_large_huge::lower_mul_overflow (tree obj
, gimple
*stmt
)
4226 tree arg0
= gimple_call_arg (stmt
, 0);
4227 tree arg1
= gimple_call_arg (stmt
, 1);
4228 tree lhs
= gimple_call_lhs (stmt
);
4231 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4232 gsi_remove (&gsi
, true);
4235 gimple
*final_stmt
= gsi_stmt (m_gsi
);
4236 tree type
= TREE_TYPE (lhs
);
4237 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4238 type
= TREE_TYPE (type
);
/* Pass both operands by address; prec0/prec1 receive their minimum
   precisions (negative when signed).  */
4239 int prec
= TYPE_PRECISION (type
), prec0
, prec1
;
4240 arg0
= handle_operand_addr (arg0
, stmt
, NULL
, &prec0
);
4241 arg1
= handle_operand_addr (arg1
, stmt
, NULL
, &prec1
);
/* Maximum precision the infinite-precision product can need.  */
4242 int prec2
= ((prec0
< 0 ? -prec0
: prec0
)
4243 + (prec1
< 0 ? -prec1
: prec1
))
4244 if (prec0
== 1 || prec1
== 1)
4246 tree var
= NULL_TREE
;
4247 tree orig_obj
= obj
;
4248 bool force_var
= false;
/* Reuse the LHS's partition variable as the destination when possible.  */
4249 if (obj
== NULL_TREE
4250 && TREE_CODE (type
) == BITINT_TYPE
4251 && bitint_precision_kind (type
) >= bitint_prec_large
4253 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4255 int part
= var_to_partition (m_map
, lhs
);
4256 gcc_assert (m_vars
[part
] != NULL_TREE
);
4258 if (TREE_TYPE (lhs
) == type
)
/* If OBJ is a decl that is also the base of one of the (by-address)
   arguments, the destination would alias an operand.  */
4261 else if (obj
!= NULL_TREE
&& DECL_P (obj
))
4263 for (int i
= 0; i
< 2; ++i
)
4265 tree arg
= i
? arg1
: arg0
;
4266 if (TREE_CODE (arg
) == ADDR_EXPR
)
4267 arg
= TREE_OPERAND (arg
, 0);
4268 if (get_base_address (arg
) == obj
)
/* Otherwise multiply into a temporary limb array VAR sized for the
   larger of prec and prec2.  */
4275 if (obj
== NULL_TREE
4277 || TREE_CODE (type
) != BITINT_TYPE
4278 || bitint_precision_kind (type
) < bitint_prec_large
4279 || prec2
> (CEIL (prec
, limb_prec
) * limb_prec
* (orig_obj
? 1 : 2)))
4281 unsigned HOST_WIDE_INT nelts
= CEIL (MAX (prec
, prec2
), limb_prec
);
4282 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4283 var
= create_tmp_var (atype
);
4285 tree addr
= build_fold_addr_expr (var
? var
: obj
);
4286 addr
= force_gimple_operand_gsi (&m_gsi
, addr
, true,
4287 NULL_TREE
, true, GSI_SAME_STMT
);
4288 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
/* The actual multiplication is done by the IFN_MULBITINT libcall:
   destination address/precision, then each operand with its precision.  */
4290 = gimple_build_call_internal (IFN_MULBITINT
, 6,
4291 addr
, build_int_cst (sitype
,
4293 arg0
, build_int_cst (sitype
, prec0
),
4294 arg1
, build_int_cst (sitype
, prec1
));
4297 unsigned start
, end
;
4299 tree ovf
= arith_overflow (MULT_EXPR
, type
, prec
, prec0
, prec1
, prec2
,
4300 &start
, &end
, &check_zero
);
/* If overflow isn't statically decided, inspect the bits in
   [start, end) of the product.  */
4301 if (ovf
== NULL_TREE
)
4303 unsigned startlimb
= start
/ limb_prec
;
4304 unsigned endlimb
= (end
- 1) / limb_prec
;
4306 bool use_loop
= false;
4307 if (startlimb
== endlimb
)
4309 else if (startlimb
+ 1 == endlimb
)
4311 else if ((end
% limb_prec
) == 0)
4319 use_loop
= startlimb
+ 2 < endlimb
;
/* Single-limb check: extract the relevant bits and compare.  */
4323 tree l
= limb_access (NULL_TREE
, var
? var
: obj
,
4324 size_int (startlimb
), true);
4325 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4327 l
= arith_overflow_extract_bits (start
, end
, gimple_assign_lhs (g
),
4328 startlimb
, check_zero
);
4329 ovf
= make_ssa_name (boolean_type_node
);
4331 g
= gimple_build_assign (ovf
, NE_EXPR
, l
,
4332 build_zero_cst (m_limb_type
));
4335 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4337 build_int_cst (m_limb_type
, 1));
4339 g
= gimple_build_assign (ovf
, GT_EXPR
, gimple_assign_lhs (g
),
4340 build_int_cst (m_limb_type
, 1));
/* Multi-limb check: chain of early-exit blocks (optionally a loop over
   the middle limbs), each jumping straight to FINAL_STMT's block on
   detected overflow.  */
4346 basic_block edge_bb
= NULL
;
4347 gimple_stmt_iterator gsi
= m_gsi
;
4349 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4351 m_gsi
= gsi_end_bb (edge_bb
);
4353 tree cmp
= build_zero_cst (m_limb_type
);
4354 for (unsigned i
= 0; i
< cnt
; i
++)
4356 tree idx
, idx_next
= NULL_TREE
;
4358 idx
= size_int (startlimb
);
4360 idx
= size_int (endlimb
);
4362 idx
= create_loop (size_int (startlimb
+ 1), &idx_next
);
4364 idx
= size_int (startlimb
+ 1);
4365 tree l
= limb_access (NULL_TREE
, var
? var
: obj
, idx
, true);
4366 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4368 l
= gimple_assign_lhs (g
);
/* First and last iterations look at partial limbs.  */
4369 if (i
== 0 || i
== 2)
4370 l
= arith_overflow_extract_bits (start
, end
, l
,
4373 if (i
== 0 && !check_zero
)
4376 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4378 build_int_cst (m_limb_type
, 1));
4380 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4381 build_int_cst (m_limb_type
, 1),
4382 NULL_TREE
, NULL_TREE
);
4385 g
= gimple_build_cond (NE_EXPR
, l
, cmp
, NULL_TREE
, NULL_TREE
);
/* On the true (overflow) edge branch directly to FINAL_STMT's block.  */
4387 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4388 e1
->flags
= EDGE_FALSE_VALUE
;
4389 edge e2
= make_edge (e1
->src
, gimple_bb (final_stmt
),
4391 e1
->probability
= profile_probability::likely ();
4392 e2
->probability
= e1
->probability
.invert ();
4394 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4395 m_gsi
= gsi_after_labels (e1
->dest
);
4396 if (i
== 1 && use_loop
)
4398 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4401 g
= gimple_build_cond (NE_EXPR
, idx_next
,
4402 size_int (endlimb
+ (cnt
== 1)),
4403 NULL_TREE
, NULL_TREE
);
4405 edge true_edge
, false_edge
;
4406 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4409 m_gsi
= gsi_after_labels (false_edge
->dest
);
/* OVF becomes a PHI at FINAL_STMT's block: false on the fall-through
   edge, true on every early-exit (overflow) edge.  */
4413 ovf
= make_ssa_name (boolean_type_node
);
4414 basic_block bb
= gimple_bb (final_stmt
);
4415 gphi
*phi
= create_phi_node (ovf
, bb
);
4416 edge e1
= find_edge (gsi_bb (m_gsi
), bb
);
4418 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4420 tree val
= e
== e1
? boolean_false_node
: boolean_true_node
;
4421 add_phi_arg (phi
, val
, e
, UNKNOWN_LOCATION
);
4423 m_gsi
= gsi_for_stmt (final_stmt
);
4427 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, MULT_EXPR
);
4430 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4431 .{ADD,SUB,MUL}_OVERFLOW call. */
/* NOTE(review): corrupted extraction -- statements split across lines and
   some lines dropped; code left byte-identical.  Copies the real part
   (offset 0) or imaginary part (offset nelts * m_limb_size) of the
   lowered overflow-call result VAR into OBJ via a MEM_REF assignment.  */
4434 bitint_large_huge::lower_cplxpart_stmt (tree obj
, gimple
*stmt
)
4436 tree rhs1
= gimple_assign_rhs1 (stmt
);
/* Operand of the REALPART_EXPR/IMAGPART_EXPR.  */
4437 rhs1
= TREE_OPERAND (rhs1
, 0);
4438 if (obj
== NULL_TREE
)
4440 int part
= var_to_partition (m_map
, gimple_assign_lhs (stmt
));
4441 gcc_assert (m_vars
[part
] != NULL_TREE
);
/* If the defining overflow call hasn't been lowered yet, lower it now.  */
4444 if (TREE_CODE (rhs1
) == SSA_NAME
4446 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
4448 lower_call (obj
, SSA_NAME_DEF_STMT (rhs1
));
4451 int part
= var_to_partition (m_map
, rhs1
);
4452 gcc_assert (m_vars
[part
] != NULL_TREE
);
4453 tree var
= m_vars
[part
];
4454 unsigned HOST_WIDE_INT nelts
4455 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
4456 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4457 if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
4458 obj
= build1 (VIEW_CONVERT_EXPR
, atype
, obj
);
4459 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (var
)),
4460 gimple_assign_rhs_code (stmt
) == REALPART_EXPR
4461 ? 0 : nelts
* m_limb_size
);
4462 tree v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), off
);
4463 gimple
*g
= gimple_build_assign (obj
, v2
);
4467 /* Lower COMPLEX_EXPR stmt. */
/* NOTE(review): corrupted extraction -- statements split across lines and
   some lines dropped; code left byte-identical.  Stores the real operand
   RHS1 into the first half of LHS's partition storage and the imaginary
   operand RHS2 into the second half, each as an array-of-limbs copy.  */
4470 bitint_large_huge::lower_complexexpr_stmt (gimple
*stmt
)
4472 tree lhs
= gimple_assign_lhs (stmt
);
4473 tree rhs1
= gimple_assign_rhs1 (stmt
);
4474 tree rhs2
= gimple_assign_rhs2 (stmt
);
4475 int part
= var_to_partition (m_map
, lhs
);
4476 gcc_assert (m_vars
[part
] != NULL_TREE
);
4478 unsigned HOST_WIDE_INT nelts
4479 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1
))) / limb_prec
;
4480 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
/* Real part: MEM_REF at offset 0 of LHS's storage.  */
4481 tree zero
= build_zero_cst (build_pointer_type (TREE_TYPE (lhs
)));
4482 tree v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), zero
);
4484 if (TREE_CODE (rhs1
) == SSA_NAME
)
4486 part
= var_to_partition (m_map
, rhs1
);
4487 gcc_assert (m_vars
[part
] != NULL_TREE
);
4490 else if (integer_zerop (rhs1
))
4491 v2
= build_zero_cst (atype
);
/* Non-zero constant operand: emit it as a constant-pool entry.  */
4493 v2
= tree_output_constant_def (rhs1
);
4494 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4495 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4496 gimple
*g
= gimple_build_assign (v1
, v2
);
/* Imaginary part: MEM_REF at offset sizeof (atype) of LHS's storage.  */
4498 tree off
= fold_convert (build_pointer_type (TREE_TYPE (lhs
)),
4499 TYPE_SIZE_UNIT (atype
));
4500 v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), off
);
4501 if (TREE_CODE (rhs2
) == SSA_NAME
)
4503 part
= var_to_partition (m_map
, rhs2
);
4504 gcc_assert (m_vars
[part
] != NULL_TREE
);
4507 else if (integer_zerop (rhs2
))
4508 v2
= build_zero_cst (atype
);
4510 v2
= tree_output_constant_def (rhs2
);
4511 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4512 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4513 g
= gimple_build_assign (v1
, v2
);
4517 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4521 bitint_large_huge::lower_bit_query (gimple
*stmt
)
4523 tree arg0
= gimple_call_arg (stmt
, 0);
4524 tree arg1
= (gimple_call_num_args (stmt
) == 2
4525 ? gimple_call_arg (stmt
, 1) : NULL_TREE
);
4526 tree lhs
= gimple_call_lhs (stmt
);
4531 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4532 gsi_remove (&gsi
, true);
4535 tree type
= TREE_TYPE (arg0
);
4536 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
4537 bitint_prec_kind kind
= bitint_precision_kind (type
);
4538 gcc_assert (kind
>= bitint_prec_large
);
4539 enum internal_fn ifn
= gimple_call_internal_fn (stmt
);
4540 enum built_in_function fcode
= END_BUILTINS
;
4541 gcc_assert (TYPE_PRECISION (unsigned_type_node
) == limb_prec
4542 || TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
4543 || TYPE_PRECISION (long_long_unsigned_type_node
) == limb_prec
);
4547 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4548 fcode
= BUILT_IN_CLZ
;
4549 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4550 fcode
= BUILT_IN_CLZL
;
4552 fcode
= BUILT_IN_CLZLL
;
4555 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4556 we don't add the addend at the end. */
4557 arg1
= integer_zero_node
;
4560 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4561 fcode
= BUILT_IN_CTZ
;
4562 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4563 fcode
= BUILT_IN_CTZL
;
4565 fcode
= BUILT_IN_CTZLL
;
4569 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4570 fcode
= BUILT_IN_CLRSB
;
4571 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4572 fcode
= BUILT_IN_CLRSBL
;
4574 fcode
= BUILT_IN_CLRSBLL
;
4577 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4578 fcode
= BUILT_IN_PARITY
;
4579 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4580 fcode
= BUILT_IN_PARITYL
;
4582 fcode
= BUILT_IN_PARITYLL
;
4586 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4587 fcode
= BUILT_IN_POPCOUNT
;
4588 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4589 fcode
= BUILT_IN_POPCOUNTL
;
4591 fcode
= BUILT_IN_POPCOUNTLL
;
4597 tree fndecl
= builtin_decl_explicit (fcode
), res
= NULL_TREE
;
4598 unsigned cnt
= 0, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
4599 struct bq_details
{ edge e
; tree val
, addend
; } *bqp
= NULL
;
4600 basic_block edge_bb
= NULL
;
4603 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
4604 if (kind
== bitint_prec_large
)
4605 cnt
= CEIL (prec
, limb_prec
);
4608 rem
= (prec
% (2 * limb_prec
));
4609 end
= (prec
- rem
) / limb_prec
;
4610 cnt
= 2 + CEIL (rem
, limb_prec
);
4611 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4614 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4616 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4618 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4620 if (kind
== bitint_prec_large
)
4621 m_gsi
= gsi_end_bb (edge_bb
);
4622 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4625 m_after_stmt
= stmt
;
4626 if (kind
!= bitint_prec_large
)
4627 m_upwards_2limb
= end
;
4629 for (unsigned i
= 0; i
< cnt
; i
++)
4632 if (kind
== bitint_prec_large
)
4635 idx
= size_int (end
+ (i
> 2));
4637 tree rhs1
= handle_operand (arg0
, idx
);
4638 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4640 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4641 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4642 rhs1
= add_cast (m_limb_type
, rhs1
);
4646 if (ifn
== IFN_PARITY
)
4647 in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
4648 else if (ifn
== IFN_FFS
)
4649 in
= prepare_data_in_out (integer_one_node
, idx
, &out
);
4651 in
= prepare_data_in_out (integer_zero_node
, idx
, &out
);
4657 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4658 build_zero_cst (m_limb_type
),
4659 NULL_TREE
, NULL_TREE
);
4662 e1
= split_block (gsi_bb (m_gsi
), g
);
4663 e1
->flags
= EDGE_FALSE_VALUE
;
4664 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4665 e1
->probability
= profile_probability::unlikely ();
4666 e2
->probability
= e1
->probability
.invert ();
4668 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4669 m_gsi
= gsi_after_labels (e1
->dest
);
4672 if (tree_fits_uhwi_p (idx
))
4674 = build_int_cst (integer_type_node
,
4675 tree_to_uhwi (idx
) * limb_prec
4676 + (ifn
== IFN_FFS
));
4683 res
= make_ssa_name (integer_type_node
);
4684 g
= gimple_build_assign (res
, PLUS_EXPR
, in
,
4685 build_int_cst (integer_type_node
,
4688 m_data
[m_data_cnt
] = res
;
4692 if (!integer_zerop (in
))
4694 if (kind
== bitint_prec_huge
&& i
== 1)
4697 res
= make_ssa_name (m_limb_type
);
4698 g
= gimple_build_assign (res
, BIT_XOR_EXPR
, in
, rhs1
);
4703 m_data
[m_data_cnt
] = res
;
4706 g
= gimple_build_call (fndecl
, 1, rhs1
);
4707 tem
= make_ssa_name (integer_type_node
);
4708 gimple_call_set_lhs (g
, tem
);
4710 if (!integer_zerop (in
))
4712 if (kind
== bitint_prec_huge
&& i
== 1)
4715 res
= make_ssa_name (integer_type_node
);
4716 g
= gimple_build_assign (res
, PLUS_EXPR
, in
, tem
);
4721 m_data
[m_data_cnt
] = res
;
4728 if (kind
== bitint_prec_huge
&& i
<= 1)
4732 idx
= make_ssa_name (sizetype
);
4733 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4739 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4742 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
4743 NULL_TREE
, NULL_TREE
);
4745 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4746 m_gsi
= gsi_after_labels (edge_bb
);
4748 m_gsi
= gsi_for_stmt (stmt
);
4755 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
, first
= NULL_TREE
;
4757 if (kind
== bitint_prec_large
)
4758 cnt
= CEIL (prec
, limb_prec
);
4761 rem
= prec
% limb_prec
;
4762 if (rem
== 0 && (!TYPE_UNSIGNED (type
) || ifn
== IFN_CLRSB
))
4764 end
= (prec
- rem
) / limb_prec
;
4765 cnt
= 1 + (rem
!= 0);
4766 if (ifn
== IFN_CLRSB
)
4770 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4772 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4774 m_gsi
= gsi_end_bb (edge_bb
);
4777 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4780 gsi
= gsi_for_stmt (stmt
);
4782 e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4784 bqp
= XALLOCAVEC (struct bq_details
, 2 * cnt
);
4787 for (unsigned i
= 0; i
< cnt
; i
++)
4790 if (kind
== bitint_prec_large
)
4791 idx
= size_int (cnt
- i
- 1);
4792 else if (i
== cnt
- 1)
4793 idx
= create_loop (size_int (end
- 1), &idx_next
);
4795 idx
= size_int (end
);
4797 tree rhs1
= handle_operand (arg0
, idx
);
4798 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4800 if (ifn
== IFN_CLZ
&& !TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4801 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4802 else if (ifn
== IFN_CLRSB
&& TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4803 rhs1
= add_cast (signed_type_for (TREE_TYPE (rhs1
)), rhs1
);
4804 rhs1
= add_cast (m_limb_type
, rhs1
);
4809 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4810 build_zero_cst (m_limb_type
),
4811 NULL_TREE
, NULL_TREE
);
4813 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4814 e1
->flags
= EDGE_FALSE_VALUE
;
4815 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4816 e1
->probability
= profile_probability::unlikely ();
4817 e2
->probability
= e1
->probability
.invert ();
4819 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4820 m_gsi
= gsi_after_labels (e1
->dest
);
4829 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4831 build_int_cst (m_limb_type
, 1));
4833 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4834 build_int_cst (m_limb_type
, 1),
4835 NULL_TREE
, NULL_TREE
);
4840 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4841 BIT_XOR_EXPR
, rhs1
, first
);
4843 tree stype
= signed_type_for (m_limb_type
);
4844 g
= gimple_build_cond (LT_EXPR
,
4846 gimple_assign_lhs (g
)),
4847 build_zero_cst (stype
),
4848 NULL_TREE
, NULL_TREE
);
4850 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4851 e1
->flags
= EDGE_FALSE_VALUE
;
4852 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
),
4854 e1
->probability
= profile_probability::unlikely ();
4855 e2
->probability
= e1
->probability
.invert ();
4857 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
,
4859 m_gsi
= gsi_after_labels (e1
->dest
);
4861 g
= gimple_build_cond (NE_EXPR
, rhs1
, first
,
4862 NULL_TREE
, NULL_TREE
);
4865 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4866 e1
->flags
= EDGE_FALSE_VALUE
;
4867 edge e2
= make_edge (e1
->src
, edge_bb
, EDGE_TRUE_VALUE
);
4868 e1
->probability
= profile_probability::unlikely ();
4869 e2
->probability
= e1
->probability
.invert ();
4871 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4872 m_gsi
= gsi_after_labels (e1
->dest
);
4873 bqp
[2 * i
+ 1].e
= e2
;
4876 if (tree_fits_uhwi_p (idx
))
4878 = build_int_cst (integer_type_node
,
4880 - (((int) tree_to_uhwi (idx
) + 1)
4881 * limb_prec
) - sub_one
);
4885 in
= build_int_cst (integer_type_node
, rem
- sub_one
);
4887 in
= prepare_data_in_out (in
, idx
, &out
);
4888 out
= m_data
[m_data_cnt
+ 1];
4890 g
= gimple_build_assign (out
, PLUS_EXPR
, in
,
4891 build_int_cst (integer_type_node
,
4894 m_data
[m_data_cnt
] = out
;
4898 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
4900 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4903 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
4904 NULL_TREE
, NULL_TREE
);
4906 edge true_edge
, false_edge
;
4907 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4908 &true_edge
, &false_edge
);
4909 m_gsi
= gsi_after_labels (false_edge
->dest
);
4918 gphi
*phi1
, *phi2
, *phi3
;
4920 bb
= gsi_bb (m_gsi
);
4921 remove_edge (find_edge (bb
, gimple_bb (stmt
)));
4922 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
4924 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
4926 for (unsigned i
= 0; i
< cnt
; i
++)
4928 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[i
].e
, UNKNOWN_LOCATION
);
4929 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[i
].e
, UNKNOWN_LOCATION
);
4931 if (arg1
== NULL_TREE
)
4933 g
= gimple_build_builtin_unreachable (m_loc
);
4936 m_gsi
= gsi_for_stmt (stmt
);
4937 g
= gimple_build_call (fndecl
, 1, gimple_phi_result (phi1
));
4938 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
4940 if (arg1
== NULL_TREE
)
4941 g
= gimple_build_assign (lhs
, PLUS_EXPR
,
4942 gimple_phi_result (phi2
),
4943 gimple_call_lhs (g
));
4946 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
4947 PLUS_EXPR
, gimple_phi_result (phi2
),
4948 gimple_call_lhs (g
));
4950 edge e1
= split_block (gimple_bb (stmt
), g
);
4951 edge e2
= make_edge (bb
, e1
->dest
, EDGE_FALLTHRU
);
4952 e2
->probability
= profile_probability::always ();
4953 set_immediate_dominator (CDI_DOMINATORS
, e1
->dest
,
4954 get_immediate_dominator (CDI_DOMINATORS
,
4956 phi3
= create_phi_node (make_ssa_name (integer_type_node
), e1
->dest
);
4957 add_phi_arg (phi3
, gimple_assign_lhs (g
), e1
, UNKNOWN_LOCATION
);
4958 add_phi_arg (phi3
, arg1
, e2
, UNKNOWN_LOCATION
);
4959 m_gsi
= gsi_for_stmt (stmt
);
4960 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
4962 gsi_replace (&m_gsi
, g
, true);
4965 bb
= gsi_bb (m_gsi
);
4966 remove_edge (find_edge (bb
, edge_bb
));
4968 e
= make_edge (bb
, gimple_bb (stmt
), EDGE_FALLTHRU
);
4969 e
->probability
= profile_probability::always ();
4970 set_immediate_dominator (CDI_DOMINATORS
, gimple_bb (stmt
),
4971 get_immediate_dominator (CDI_DOMINATORS
,
4973 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
4975 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
4977 phi3
= create_phi_node (make_ssa_name (integer_type_node
),
4979 for (unsigned i
= 0; i
< cnt
; i
++)
4981 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[2 * i
+ 1].e
, UNKNOWN_LOCATION
);
4982 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[2 * i
+ 1].e
,
4984 tree a
= bqp
[i
].addend
;
4985 if (i
&& kind
== bitint_prec_large
)
4986 a
= int_const_binop (PLUS_EXPR
, a
, integer_minus_one_node
);
4988 add_phi_arg (phi3
, a
, bqp
[2 * i
].e
, UNKNOWN_LOCATION
);
4990 add_phi_arg (phi3
, build_int_cst (integer_type_node
, prec
- 1), e
,
4992 m_gsi
= gsi_after_labels (edge_bb
);
4993 g
= gimple_build_call (fndecl
, 1,
4994 add_cast (signed_type_for (m_limb_type
),
4995 gimple_phi_result (phi1
)));
4996 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
4998 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
4999 PLUS_EXPR
, gimple_call_lhs (g
),
5000 gimple_phi_result (phi2
));
5002 if (kind
!= bitint_prec_large
)
5004 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5005 PLUS_EXPR
, gimple_assign_lhs (g
),
5009 add_phi_arg (phi3
, gimple_assign_lhs (g
),
5010 find_edge (edge_bb
, gimple_bb (stmt
)), UNKNOWN_LOCATION
);
5011 m_gsi
= gsi_for_stmt (stmt
);
5012 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
5013 gsi_replace (&m_gsi
, g
, true);
5016 g
= gimple_build_call (fndecl
, 1, res
);
5017 gimple_call_set_lhs (g
, lhs
);
5018 gsi_replace (&m_gsi
, g
, true);
5021 g
= gimple_build_assign (lhs
, res
);
5022 gsi_replace (&m_gsi
, g
, true);
5029 /* Lower a call statement with one or more large/huge _BitInt
5030 arguments or large/huge _BitInt return value. */
5033 bitint_large_huge::lower_call (tree obj
, gimple
*stmt
)
5035 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
5036 unsigned int nargs
= gimple_call_num_args (stmt
);
5037 if (gimple_call_internal_p (stmt
))
5038 switch (gimple_call_internal_fn (stmt
))
5040 case IFN_ADD_OVERFLOW
:
5041 case IFN_SUB_OVERFLOW
:
5042 case IFN_UBSAN_CHECK_ADD
:
5043 case IFN_UBSAN_CHECK_SUB
:
5044 lower_addsub_overflow (obj
, stmt
);
5046 case IFN_MUL_OVERFLOW
:
5047 case IFN_UBSAN_CHECK_MUL
:
5048 lower_mul_overflow (obj
, stmt
);
5056 lower_bit_query (stmt
);
5061 for (unsigned int i
= 0; i
< nargs
; ++i
)
5063 tree arg
= gimple_call_arg (stmt
, i
);
5064 if (TREE_CODE (arg
) != SSA_NAME
5065 || TREE_CODE (TREE_TYPE (arg
)) != BITINT_TYPE
5066 || bitint_precision_kind (TREE_TYPE (arg
)) <= bitint_prec_middle
)
5068 int p
= var_to_partition (m_map
, arg
);
5070 gcc_assert (v
!= NULL_TREE
);
5071 if (!types_compatible_p (TREE_TYPE (arg
), TREE_TYPE (v
)))
5072 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (arg
), v
);
5073 arg
= make_ssa_name (TREE_TYPE (arg
));
5074 gimple
*g
= gimple_build_assign (arg
, v
);
5075 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5076 gimple_call_set_arg (stmt
, i
, arg
);
5077 if (m_preserved
== NULL
)
5078 m_preserved
= BITMAP_ALLOC (NULL
);
5079 bitmap_set_bit (m_preserved
, SSA_NAME_VERSION (arg
));
5081 tree lhs
= gimple_call_lhs (stmt
);
5083 && TREE_CODE (lhs
) == SSA_NAME
5084 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5085 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5087 int p
= var_to_partition (m_map
, lhs
);
5089 gcc_assert (v
!= NULL_TREE
);
5090 if (!types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (v
)))
5091 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
), v
);
5092 gimple_call_set_lhs (stmt
, v
);
5093 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5098 /* Lower __asm STMT which involves large/huge _BitInt values. */
5101 bitint_large_huge::lower_asm (gimple
*stmt
)
5103 gasm
*g
= as_a
<gasm
*> (stmt
);
5104 unsigned noutputs
= gimple_asm_noutputs (g
);
5105 unsigned ninputs
= gimple_asm_ninputs (g
);
5107 for (unsigned i
= 0; i
< noutputs
; ++i
)
5109 tree t
= gimple_asm_output_op (g
, i
);
5110 tree s
= TREE_VALUE (t
);
5111 if (TREE_CODE (s
) == SSA_NAME
5112 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5113 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5115 int part
= var_to_partition (m_map
, s
);
5116 gcc_assert (m_vars
[part
] != NULL_TREE
);
5117 TREE_VALUE (t
) = m_vars
[part
];
5120 for (unsigned i
= 0; i
< ninputs
; ++i
)
5122 tree t
= gimple_asm_input_op (g
, i
);
5123 tree s
= TREE_VALUE (t
);
5124 if (TREE_CODE (s
) == SSA_NAME
5125 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5126 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5128 int part
= var_to_partition (m_map
, s
);
5129 gcc_assert (m_vars
[part
] != NULL_TREE
);
5130 TREE_VALUE (t
) = m_vars
[part
];
5136 /* Lower statement STMT which involves large/huge _BitInt values
5137 into code accessing individual limbs. */
5140 bitint_large_huge::lower_stmt (gimple
*stmt
)
5144 m_data
.truncate (0);
5146 m_gsi
= gsi_for_stmt (stmt
);
5147 m_after_stmt
= NULL
;
5150 gsi_prev (&m_init_gsi
);
5151 m_preheader_bb
= NULL
;
5152 m_upwards_2limb
= 0;
5155 m_cast_conditional
= false;
5157 m_loc
= gimple_location (stmt
);
5158 if (is_gimple_call (stmt
))
5160 lower_call (NULL_TREE
, stmt
);
5163 if (gimple_code (stmt
) == GIMPLE_ASM
)
5168 tree lhs
= NULL_TREE
, cmp_op1
= NULL_TREE
, cmp_op2
= NULL_TREE
;
5169 tree_code cmp_code
= comparison_op (stmt
, &cmp_op1
, &cmp_op2
);
5170 bool eq_p
= (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
);
5171 bool mergeable_cast_p
= false;
5172 bool final_cast_p
= false;
5173 if (gimple_assign_cast_p (stmt
))
5175 lhs
= gimple_assign_lhs (stmt
);
5176 tree rhs1
= gimple_assign_rhs1 (stmt
);
5177 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5178 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5179 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)))
5180 mergeable_cast_p
= true;
5181 else if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
5182 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
5183 && INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
5185 final_cast_p
= true;
5186 if (TREE_CODE (rhs1
) == SSA_NAME
5188 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5190 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5191 if (is_gimple_assign (g
)
5192 && gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
5194 tree rhs2
= TREE_OPERAND (gimple_assign_rhs1 (g
), 0);
5195 if (TREE_CODE (rhs2
) == SSA_NAME
5197 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs2
))))
5199 g
= SSA_NAME_DEF_STMT (rhs2
);
5200 int ovf
= optimizable_arith_overflow (g
);
5202 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5203 and IMAGPART_EXPR uses, where the latter is cast to
5204 non-_BitInt, it will be optimized when handling
5205 the REALPART_EXPR. */
5209 lower_call (NULL_TREE
, g
);
5217 if (gimple_store_p (stmt
))
5219 tree rhs1
= gimple_assign_rhs1 (stmt
);
5220 if (TREE_CODE (rhs1
) == SSA_NAME
5222 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5224 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5225 m_loc
= gimple_location (g
);
5226 lhs
= gimple_assign_lhs (stmt
);
5227 if (is_gimple_assign (g
) && !mergeable_op (g
))
5228 switch (gimple_assign_rhs_code (g
))
5232 lower_shift_stmt (lhs
, g
);
5234 m_gsi
= gsi_for_stmt (stmt
);
5235 unlink_stmt_vdef (stmt
);
5236 release_ssa_name (gimple_vdef (stmt
));
5237 gsi_remove (&m_gsi
, true);
5240 case TRUNC_DIV_EXPR
:
5241 case TRUNC_MOD_EXPR
:
5242 lower_muldiv_stmt (lhs
, g
);
5244 case FIX_TRUNC_EXPR
:
5245 lower_float_conv_stmt (lhs
, g
);
5249 lower_cplxpart_stmt (lhs
, g
);
5254 else if (optimizable_arith_overflow (g
) == 3)
5256 lower_call (lhs
, g
);
5259 m_loc
= gimple_location (stmt
);
5262 if (mergeable_op (stmt
)
5263 || gimple_store_p (stmt
)
5264 || gimple_assign_load_p (stmt
)
5266 || mergeable_cast_p
)
5268 lhs
= lower_mergeable_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5272 else if (cmp_code
!= ERROR_MARK
)
5273 lhs
= lower_comparison_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5274 if (cmp_code
!= ERROR_MARK
)
5276 if (gimple_code (stmt
) == GIMPLE_COND
)
5278 gcond
*cstmt
= as_a
<gcond
*> (stmt
);
5279 gimple_cond_set_lhs (cstmt
, lhs
);
5280 gimple_cond_set_rhs (cstmt
, boolean_false_node
);
5281 gimple_cond_set_code (cstmt
, cmp_code
);
5285 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
5287 tree cond
= build2 (cmp_code
, boolean_type_node
, lhs
,
5288 boolean_false_node
);
5289 gimple_assign_set_rhs1 (stmt
, cond
);
5290 lhs
= gimple_assign_lhs (stmt
);
5291 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5292 || (bitint_precision_kind (TREE_TYPE (lhs
))
5293 <= bitint_prec_middle
));
5297 gimple_assign_set_rhs1 (stmt
, lhs
);
5298 gimple_assign_set_rhs2 (stmt
, boolean_false_node
);
5299 gimple_assign_set_rhs_code (stmt
, cmp_code
);
5305 tree lhs_type
= TREE_TYPE (lhs
);
5306 /* Add support for 3 or more limbs filled in from normal integral
5307 type if this assert fails. If no target chooses limb mode smaller
5308 than half of largest supported normal integral type, this will not
5310 gcc_assert (TYPE_PRECISION (lhs_type
) <= 2 * limb_prec
);
5312 if (TREE_CODE (lhs_type
) == BITINT_TYPE
5313 && bitint_precision_kind (lhs_type
) == bitint_prec_middle
)
5314 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (lhs_type
),
5315 TYPE_UNSIGNED (lhs_type
));
5317 tree rhs1
= gimple_assign_rhs1 (stmt
);
5318 tree r1
= handle_operand (rhs1
, size_int (0));
5319 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
5320 r1
= add_cast (lhs_type
, r1
);
5321 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
5325 tree r2
= handle_operand (rhs1
, size_int (1));
5326 r2
= add_cast (lhs_type
, r2
);
5327 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
5328 build_int_cst (unsigned_type_node
,
5331 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
5332 gimple_assign_lhs (g
));
5334 r1
= gimple_assign_lhs (g
);
5336 if (lhs_type
!= TREE_TYPE (lhs
))
5337 g
= gimple_build_assign (lhs
, NOP_EXPR
, r1
);
5339 g
= gimple_build_assign (lhs
, r1
);
5340 gsi_replace (&m_gsi
, g
, true);
5343 if (is_gimple_assign (stmt
))
5344 switch (gimple_assign_rhs_code (stmt
))
5348 lower_shift_stmt (NULL_TREE
, stmt
);
5351 case TRUNC_DIV_EXPR
:
5352 case TRUNC_MOD_EXPR
:
5353 lower_muldiv_stmt (NULL_TREE
, stmt
);
5355 case FIX_TRUNC_EXPR
:
5357 lower_float_conv_stmt (NULL_TREE
, stmt
);
5361 lower_cplxpart_stmt (NULL_TREE
, stmt
);
5364 lower_complexexpr_stmt (stmt
);
5372 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5373 the desired memory state. */
5376 vuse_eq (ao_ref
*, tree vuse1
, void *data
)
5378 tree vuse2
= (tree
) data
;
5385 /* Return true if STMT uses a library function and needs to take
5386 address of its inputs. We need to avoid bit-fields in those
5390 stmt_needs_operand_addr (gimple
*stmt
)
5392 if (is_gimple_assign (stmt
))
5393 switch (gimple_assign_rhs_code (stmt
))
5396 case TRUNC_DIV_EXPR
:
5397 case TRUNC_MOD_EXPR
:
5403 else if (gimple_call_internal_p (stmt
, IFN_MUL_OVERFLOW
)
5404 || gimple_call_internal_p (stmt
, IFN_UBSAN_CHECK_MUL
))
5409 /* Dominator walker used to discover which large/huge _BitInt
5410 loads could be sunk into all their uses. */
5412 class bitint_dom_walker
: public dom_walker
5415 bitint_dom_walker (bitmap names
, bitmap loads
)
5416 : dom_walker (CDI_DOMINATORS
), m_names (names
), m_loads (loads
) {}
5418 edge
before_dom_children (basic_block
) final override
;
5421 bitmap m_names
, m_loads
;
5425 bitint_dom_walker::before_dom_children (basic_block bb
)
5427 gphi
*phi
= get_virtual_phi (bb
);
5430 vop
= gimple_phi_result (phi
);
5431 else if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5434 vop
= (tree
) get_immediate_dominator (CDI_DOMINATORS
, bb
)->aux
;
5436 auto_vec
<tree
, 16> worklist
;
5437 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5438 !gsi_end_p (gsi
); gsi_next (&gsi
))
5440 gimple
*stmt
= gsi_stmt (gsi
);
5441 if (is_gimple_debug (stmt
))
5444 if (!vop
&& gimple_vuse (stmt
))
5445 vop
= gimple_vuse (stmt
);
5448 if (gimple_vdef (stmt
))
5449 vop
= gimple_vdef (stmt
);
5451 tree lhs
= gimple_get_lhs (stmt
);
5453 && TREE_CODE (lhs
) == SSA_NAME
5454 && TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5455 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5456 && !bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
5457 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5458 it means it will be handled in a loop or straight line code
5459 at the location of its (ultimate) immediate use, so for
5460 vop checking purposes check these only at the ultimate
5465 use_operand_p use_p
;
5466 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, oi
, SSA_OP_USE
)
5468 tree s
= USE_FROM_PTR (use_p
);
5469 if (TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5470 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5471 worklist
.safe_push (s
);
5474 bool needs_operand_addr
= stmt_needs_operand_addr (stmt
);
5475 while (worklist
.length () > 0)
5477 tree s
= worklist
.pop ();
5479 if (!bitmap_bit_p (m_names
, SSA_NAME_VERSION (s
)))
5481 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5482 needs_operand_addr
|= stmt_needs_operand_addr (g
);
5483 FOR_EACH_SSA_USE_OPERAND (use_p
, g
, oi
, SSA_OP_USE
)
5485 tree s2
= USE_FROM_PTR (use_p
);
5486 if (TREE_CODE (TREE_TYPE (s2
)) == BITINT_TYPE
5487 && (bitint_precision_kind (TREE_TYPE (s2
))
5488 >= bitint_prec_large
))
5489 worklist
.safe_push (s2
);
5493 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5494 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5496 tree rhs
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5497 if (TREE_CODE (rhs
) == SSA_NAME
5498 && bitmap_bit_p (m_loads
, SSA_NAME_VERSION (rhs
)))
5503 else if (!bitmap_bit_p (m_loads
, SSA_NAME_VERSION (s
)))
5506 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5507 if (needs_operand_addr
5508 && TREE_CODE (rhs1
) == COMPONENT_REF
5509 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
5511 tree fld
= TREE_OPERAND (rhs1
, 1);
5512 /* For little-endian, we can allow as inputs bit-fields
5513 which start at a limb boundary. */
5514 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
5515 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
))
5516 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
5521 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5527 ao_ref_init (&ref
, rhs1
);
5528 tree lvop
= gimple_vuse (SSA_NAME_DEF_STMT (s
));
5529 unsigned limit
= 64;
5532 && is_gimple_assign (stmt
)
5533 && gimple_store_p (stmt
)
5534 && !operand_equal_p (lhs
,
5535 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
)),
5539 && walk_non_aliased_vuses (&ref
, vuse
, false, vuse_eq
,
5540 NULL
, NULL
, limit
, lvop
) == NULL
)
5541 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5545 bb
->aux
= (void *) vop
;
5551 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5552 build_ssa_conflict_graph.
5553 The differences are:
5554 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5555 2) for large/huge _BitInt multiplication/division/modulo process def
5556 only after processing uses rather than before to make uses conflict
5558 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5559 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5560 the final statement. */
5563 build_bitint_stmt_ssa_conflicts (gimple
*stmt
, live_track
*live
,
5564 ssa_conflicts
*graph
, bitmap names
,
5565 void (*def
) (live_track
*, tree
,
5567 void (*use
) (live_track
*, tree
))
5569 bool muldiv_p
= false;
5570 tree lhs
= NULL_TREE
;
5571 if (is_gimple_assign (stmt
))
5573 lhs
= gimple_assign_lhs (stmt
);
5574 if (TREE_CODE (lhs
) == SSA_NAME
5575 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5576 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5578 if (!bitmap_bit_p (names
, SSA_NAME_VERSION (lhs
)))
5580 switch (gimple_assign_rhs_code (stmt
))
5583 case TRUNC_DIV_EXPR
:
5584 case TRUNC_MOD_EXPR
:
5596 /* For stmts with more than one SSA_NAME definition pretend all the
5597 SSA_NAME outputs but the first one are live at this point, so
5598 that conflicts are added in between all those even when they are
5599 actually not really live after the asm, because expansion might
5600 copy those into pseudos after the asm and if multiple outputs
5601 share the same partition, it might overwrite those that should
5603 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5607 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5613 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5614 def (live
, var
, graph
);
5617 auto_vec
<tree
, 16> worklist
;
5618 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_USE
)
5619 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5620 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5622 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5625 worklist
.safe_push (var
);
5628 while (worklist
.length () > 0)
5630 tree s
= worklist
.pop ();
5631 FOR_EACH_SSA_TREE_OPERAND (var
, SSA_NAME_DEF_STMT (s
), iter
, SSA_OP_USE
)
5632 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5633 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5635 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5638 worklist
.safe_push (var
);
5643 def (live
, lhs
, graph
);
5646 /* Entry point for _BitInt(N) operation lowering during optimization. */
5649 gimple_lower_bitint (void)
5651 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
5655 for (i
= 0; i
< num_ssa_names
; ++i
)
5657 tree s
= ssa_name (i
);
5660 tree type
= TREE_TYPE (s
);
5661 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5662 type
= TREE_TYPE (type
);
5663 if (TREE_CODE (type
) == BITINT_TYPE
5664 && bitint_precision_kind (type
) != bitint_prec_small
)
5666 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5667 into memory. Such functions could have no large/huge SSA_NAMEs. */
5668 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5670 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5671 if (is_gimple_assign (g
) && gimple_store_p (g
))
5673 tree t
= gimple_assign_rhs1 (g
);
5674 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5675 && (bitint_precision_kind (TREE_TYPE (t
))
5676 >= bitint_prec_large
))
5680 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5681 to floating point types need to be rewritten. */
5682 else if (SCALAR_FLOAT_TYPE_P (type
))
5684 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5685 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5687 tree t
= gimple_assign_rhs1 (g
);
5688 if (TREE_CODE (t
) == INTEGER_CST
5689 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5690 && (bitint_precision_kind (TREE_TYPE (t
))
5691 != bitint_prec_small
))
5696 if (i
== num_ssa_names
)
5700 auto_vec
<gimple
*, 4> switch_statements
;
5701 FOR_EACH_BB_FN (bb
, cfun
)
5703 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
5705 tree idx
= gimple_switch_index (swtch
);
5706 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
5707 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
5711 group_case_labels_stmt (swtch
);
5712 switch_statements
.safe_push (swtch
);
5716 if (!switch_statements
.is_empty ())
5718 bool expanded
= false;
5722 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
5724 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
5725 tree_switch_conversion::switch_decision_tree
dt (swtch
);
5726 expanded
|= dt
.analyze_switch_statement ();
5731 free_dominance_info (CDI_DOMINATORS
);
5732 free_dominance_info (CDI_POST_DOMINATORS
);
5733 mark_virtual_operands_for_renaming (cfun
);
5734 cleanup_tree_cfg (TODO_update_ssa
);
5738 struct bitint_large_huge large_huge
;
5739 bool has_large_huge_parm_result
= false;
5740 bool has_large_huge
= false;
5741 unsigned int ret
= 0, first_large_huge
= ~0U;
5742 bool edge_insertions
= false;
5743 for (; i
< num_ssa_names
; ++i
)
5745 tree s
= ssa_name (i
);
5748 tree type
= TREE_TYPE (s
);
5749 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5750 type
= TREE_TYPE (type
);
5751 if (TREE_CODE (type
) == BITINT_TYPE
5752 && bitint_precision_kind (type
) >= bitint_prec_large
)
5754 if (first_large_huge
== ~0U)
5755 first_large_huge
= i
;
5756 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
5757 gimple_stmt_iterator gsi
;
5759 /* Unoptimize certain constructs to simpler alternatives to
5760 avoid having to lower all of them. */
5761 if (is_gimple_assign (stmt
))
5762 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
5769 first_large_huge
= 0;
5770 location_t loc
= gimple_location (stmt
);
5771 gsi
= gsi_for_stmt (stmt
);
5772 tree rhs1
= gimple_assign_rhs1 (stmt
);
5773 tree type
= TREE_TYPE (rhs1
);
5774 tree n
= gimple_assign_rhs2 (stmt
), m
;
5775 tree p
= build_int_cst (TREE_TYPE (n
),
5776 TYPE_PRECISION (type
));
5777 if (TREE_CODE (n
) == INTEGER_CST
)
5778 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
5781 m
= make_ssa_name (TREE_TYPE (n
));
5782 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
5783 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5784 gimple_set_location (g
, loc
);
5786 if (!TYPE_UNSIGNED (type
))
5788 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
5790 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5791 rhs1
= fold_convert (utype
, rhs1
);
5794 tree t
= make_ssa_name (type
);
5795 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
5796 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5797 gimple_set_location (g
, loc
);
5800 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5801 rhs_code
== LROTATE_EXPR
5802 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
5804 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5805 gimple_set_location (g
, loc
);
5806 tree op1
= gimple_assign_lhs (g
);
5807 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5808 rhs_code
== LROTATE_EXPR
5809 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
5811 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5812 gimple_set_location (g
, loc
);
5813 tree op2
= gimple_assign_lhs (g
);
5814 tree lhs
= gimple_assign_lhs (stmt
);
5815 if (!TYPE_UNSIGNED (type
))
5817 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
5818 BIT_IOR_EXPR
, op1
, op2
);
5819 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5820 gimple_set_location (g
, loc
);
5821 g
= gimple_build_assign (lhs
, NOP_EXPR
,
5822 gimple_assign_lhs (g
));
5825 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
5826 gsi_replace (&gsi
, g
, true);
5827 gimple_set_location (g
, loc
);
5835 first_large_huge
= 0;
5836 gsi
= gsi_for_stmt (stmt
);
5837 tree lhs
= gimple_assign_lhs (stmt
);
5838 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
5839 location_t loc
= gimple_location (stmt
);
5840 if (rhs_code
== ABS_EXPR
)
5841 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5842 build_zero_cst (TREE_TYPE (rhs1
)),
5843 NULL_TREE
, NULL_TREE
);
5844 else if (rhs_code
== ABSU_EXPR
)
5846 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
5847 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
5848 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5849 gimple_set_location (g
, loc
);
5850 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5851 build_zero_cst (TREE_TYPE (rhs1
)),
5852 NULL_TREE
, NULL_TREE
);
5855 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
5857 rhs2
= gimple_assign_rhs2 (stmt
);
5858 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5859 std::swap (rhs1
, rhs2
);
5860 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
5861 NULL_TREE
, NULL_TREE
);
5862 if (rhs_code
== MAX_EXPR
)
5863 std::swap (rhs1
, rhs2
);
5867 g
= gimple_build_cond (NE_EXPR
, rhs1
,
5868 build_zero_cst (TREE_TYPE (rhs1
)),
5869 NULL_TREE
, NULL_TREE
);
5870 rhs1
= gimple_assign_rhs2 (stmt
);
5871 rhs2
= gimple_assign_rhs3 (stmt
);
5873 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5874 gimple_set_location (g
, loc
);
5875 edge e1
= split_block (gsi_bb (gsi
), g
);
5876 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
5877 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
5878 e3
->probability
= profile_probability::even ();
5879 e1
->flags
= EDGE_TRUE_VALUE
;
5880 e1
->probability
= e3
->probability
.invert ();
5881 if (dom_info_available_p (CDI_DOMINATORS
))
5882 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
5883 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
5885 gsi
= gsi_after_labels (e1
->dest
);
5886 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5888 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5889 gimple_set_location (g
, loc
);
5890 rhs2
= gimple_assign_lhs (g
);
5891 std::swap (rhs1
, rhs2
);
5893 gsi
= gsi_for_stmt (stmt
);
5894 gsi_remove (&gsi
, true);
5895 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
5896 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
5897 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
5901 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5902 into memory. Such functions could have no large/huge SSA_NAMEs. */
5903 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5905 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5906 if (is_gimple_assign (g
) && gimple_store_p (g
))
5908 tree t
= gimple_assign_rhs1 (g
);
5909 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5910 && (bitint_precision_kind (TREE_TYPE (t
))
5911 >= bitint_prec_large
))
5912 has_large_huge
= true;
5915 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5916 to floating point types need to be rewritten. */
5917 else if (SCALAR_FLOAT_TYPE_P (type
))
5919 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5920 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5922 tree t
= gimple_assign_rhs1 (g
);
5923 if (TREE_CODE (t
) == INTEGER_CST
5924 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5925 && (bitint_precision_kind (TREE_TYPE (t
))
5926 >= bitint_prec_large
))
5927 has_large_huge
= true;
5931 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
5933 tree s
= ssa_name (i
);
5936 tree type
= TREE_TYPE (s
);
5937 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5938 type
= TREE_TYPE (type
);
5939 if (TREE_CODE (type
) == BITINT_TYPE
5940 && bitint_precision_kind (type
) >= bitint_prec_large
)
5942 use_operand_p use_p
;
5944 has_large_huge
= true;
5946 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
5948 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5949 the same bb and could be handled in the same loop with the
5952 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5953 && single_imm_use (s
, &use_p
, &use_stmt
)
5954 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
5956 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
5958 if (mergeable_op (use_stmt
))
5960 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
5961 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
5963 if (gimple_assign_cast_p (use_stmt
))
5965 tree lhs
= gimple_assign_lhs (use_stmt
);
5966 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
5969 else if (gimple_store_p (use_stmt
)
5970 && is_gimple_assign (use_stmt
)
5971 && !gimple_has_volatile_ops (use_stmt
)
5972 && !stmt_ends_bb_p (use_stmt
))
5975 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5977 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5978 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
5979 && ((is_gimple_assign (use_stmt
)
5980 && (gimple_assign_rhs_code (use_stmt
)
5982 || gimple_code (use_stmt
) == GIMPLE_COND
)
5983 && (!gimple_store_p (use_stmt
)
5984 || (is_gimple_assign (use_stmt
)
5985 && !gimple_has_volatile_ops (use_stmt
)
5986 && !stmt_ends_bb_p (use_stmt
)))
5987 && (TREE_CODE (rhs1
) != SSA_NAME
5988 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
5990 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
5991 || (bitint_precision_kind (TREE_TYPE (rhs1
))
5992 < bitint_prec_large
))
5994 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
5995 >= TYPE_PRECISION (TREE_TYPE (s
)))
5996 && mergeable_op (use_stmt
))
5998 /* Prevent merging a widening non-mergeable cast
5999 on result of some narrower mergeable op
6000 together with later mergeable operations. E.g.
6001 result of _BitInt(223) addition shouldn't be
6002 sign-extended to _BitInt(513) and have another
6003 _BitInt(513) added to it, as handle_plus_minus
6004 with its PHI node handling inside of handle_cast
6005 will not work correctly. An exception is if
6006 use_stmt is a store, this is handled directly
6007 in lower_mergeable_stmt. */
6008 if (TREE_CODE (rhs1
) != SSA_NAME
6009 || !has_single_use (rhs1
)
6010 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6011 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6012 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
6013 || gimple_store_p (use_stmt
))
6015 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6016 < TYPE_PRECISION (TREE_TYPE (s
)))
6017 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
6019 /* Another exception is if the widening cast is
6020 from mergeable same precision cast from something
6023 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
6024 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
6025 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
6026 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
6028 if (TREE_CODE (rhs2
) != SSA_NAME
6029 || !has_single_use (rhs2
)
6030 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
6031 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6032 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
6038 if (is_gimple_assign (SSA_NAME_DEF_STMT (s
)))
6039 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s
)))
6043 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6044 rhs1
= TREE_OPERAND (rhs1
, 0);
6045 if (TREE_CODE (rhs1
) == SSA_NAME
)
6047 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
6048 if (optimizable_arith_overflow (g
))
6056 case TRUNC_DIV_EXPR
:
6057 case TRUNC_MOD_EXPR
:
6058 case FIX_TRUNC_EXPR
:
6060 if (gimple_store_p (use_stmt
)
6061 && is_gimple_assign (use_stmt
)
6062 && !gimple_has_volatile_ops (use_stmt
)
6063 && !stmt_ends_bb_p (use_stmt
))
6065 tree lhs
= gimple_assign_lhs (use_stmt
);
6066 /* As multiply/division passes address of the lhs
6067 to library function and that assumes it can extend
6068 it to whole number of limbs, avoid merging those
6069 with bit-field stores. Don't allow it for
6070 shifts etc. either, so that the bit-field store
6071 handling doesn't have to be done everywhere. */
6072 if (TREE_CODE (lhs
) == COMPONENT_REF
6073 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6083 /* Also ignore uninitialized uses. */
6084 if (SSA_NAME_IS_DEFAULT_DEF (s
)
6085 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
6088 if (!large_huge
.m_names
)
6089 large_huge
.m_names
= BITMAP_ALLOC (NULL
);
6090 bitmap_set_bit (large_huge
.m_names
, SSA_NAME_VERSION (s
));
6091 if (has_single_use (s
))
6093 if (!large_huge
.m_single_use_names
)
6094 large_huge
.m_single_use_names
= BITMAP_ALLOC (NULL
);
6095 bitmap_set_bit (large_huge
.m_single_use_names
,
6096 SSA_NAME_VERSION (s
));
6098 if (SSA_NAME_VAR (s
)
6099 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6100 && SSA_NAME_IS_DEFAULT_DEF (s
))
6101 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6102 has_large_huge_parm_result
= true;
6104 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6105 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s
))
6106 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s
))
6107 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6109 use_operand_p use_p
;
6110 imm_use_iterator iter
;
6111 bool optimizable_load
= true;
6112 FOR_EACH_IMM_USE_FAST (use_p
, iter
, s
)
6114 gimple
*use_stmt
= USE_STMT (use_p
);
6115 if (is_gimple_debug (use_stmt
))
6117 if (gimple_code (use_stmt
) == GIMPLE_PHI
6118 || is_gimple_call (use_stmt
))
6120 optimizable_load
= false;
6126 FOR_EACH_SSA_USE_OPERAND (use_p
, SSA_NAME_DEF_STMT (s
),
6129 tree s2
= USE_FROM_PTR (use_p
);
6130 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2
))
6132 optimizable_load
= false;
6137 if (optimizable_load
&& !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6139 if (!large_huge
.m_loads
)
6140 large_huge
.m_loads
= BITMAP_ALLOC (NULL
);
6141 bitmap_set_bit (large_huge
.m_loads
, SSA_NAME_VERSION (s
));
6145 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6146 into memory. Such functions could have no large/huge SSA_NAMEs. */
6147 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6149 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6150 if (is_gimple_assign (g
) && gimple_store_p (g
))
6152 tree t
= gimple_assign_rhs1 (g
);
6153 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6154 && bitint_precision_kind (TREE_TYPE (t
)) >= bitint_prec_large
)
6155 has_large_huge
= true;
6160 if (large_huge
.m_names
|| has_large_huge
)
6162 ret
= TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
6163 calculate_dominance_info (CDI_DOMINATORS
);
6165 enable_ranger (cfun
);
6166 if (large_huge
.m_loads
)
6168 basic_block entry
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
6170 bitint_dom_walker (large_huge
.m_names
,
6171 large_huge
.m_loads
).walk (entry
);
6172 bitmap_and_compl_into (large_huge
.m_names
, large_huge
.m_loads
);
6173 clear_aux_for_blocks ();
6174 BITMAP_FREE (large_huge
.m_loads
);
6176 large_huge
.m_limb_type
= build_nonstandard_integer_type (limb_prec
, 1);
6177 large_huge
.m_limb_size
6178 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge
.m_limb_type
));
6180 if (large_huge
.m_names
)
6183 = init_var_map (num_ssa_names
, NULL
, large_huge
.m_names
);
6184 coalesce_ssa_name (large_huge
.m_map
);
6185 partition_view_normal (large_huge
.m_map
);
6186 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6188 fprintf (dump_file
, "After Coalescing:\n");
6189 dump_var_map (dump_file
, large_huge
.m_map
);
6192 = XCNEWVEC (tree
, num_var_partitions (large_huge
.m_map
));
6194 if (has_large_huge_parm_result
)
6195 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6197 tree s
= ssa_name (i
);
6198 if (SSA_NAME_VAR (s
)
6199 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6200 && SSA_NAME_IS_DEFAULT_DEF (s
))
6201 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6203 int p
= var_to_partition (large_huge
.m_map
, s
);
6204 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6206 large_huge
.m_vars
[p
] = SSA_NAME_VAR (s
);
6207 mark_addressable (SSA_NAME_VAR (s
));
6211 tree atype
= NULL_TREE
;
6212 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6214 tree s
= ssa_name (i
);
6215 int p
= var_to_partition (large_huge
.m_map
, s
);
6216 if (large_huge
.m_vars
[p
] != NULL_TREE
)
6218 if (atype
== NULL_TREE
6219 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6220 TYPE_SIZE (TREE_TYPE (s
))))
6222 unsigned HOST_WIDE_INT nelts
6223 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s
))) / limb_prec
;
6224 atype
= build_array_type_nelts (large_huge
.m_limb_type
, nelts
);
6226 large_huge
.m_vars
[p
] = create_tmp_var (atype
, "bitint");
6227 mark_addressable (large_huge
.m_vars
[p
]);
6231 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
6233 gimple_stmt_iterator prev
;
6234 for (gimple_stmt_iterator gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);
6240 gimple
*stmt
= gsi_stmt (gsi
);
6241 if (is_gimple_debug (stmt
))
6243 bitint_prec_kind kind
= bitint_prec_small
;
6245 FOR_EACH_SSA_TREE_OPERAND (t
, stmt
, iter
, SSA_OP_ALL_OPERANDS
)
6246 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6248 bitint_prec_kind this_kind
6249 = bitint_precision_kind (TREE_TYPE (t
));
6250 if (this_kind
> kind
)
6253 if (is_gimple_assign (stmt
) && gimple_store_p (stmt
))
6255 t
= gimple_assign_rhs1 (stmt
);
6256 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6258 bitint_prec_kind this_kind
6259 = bitint_precision_kind (TREE_TYPE (t
));
6260 if (this_kind
> kind
)
6264 if (is_gimple_assign (stmt
)
6265 && gimple_assign_rhs_code (stmt
) == FLOAT_EXPR
)
6267 t
= gimple_assign_rhs1 (stmt
);
6268 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6269 && TREE_CODE (t
) == INTEGER_CST
)
6271 bitint_prec_kind this_kind
6272 = bitint_precision_kind (TREE_TYPE (t
));
6273 if (this_kind
> kind
)
6277 if (is_gimple_call (stmt
))
6279 t
= gimple_call_lhs (stmt
);
6281 && TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
6282 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == BITINT_TYPE
)
6284 bitint_prec_kind this_kind
6285 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t
)));
6286 if (this_kind
> kind
)
6290 if (kind
== bitint_prec_small
)
6292 switch (gimple_code (stmt
))
6295 /* For now. We'll need to handle some internal functions and
6296 perhaps some builtins. */
6297 if (kind
== bitint_prec_middle
)
6301 if (kind
== bitint_prec_middle
)
6307 if (gimple_clobber_p (stmt
))
6309 if (kind
>= bitint_prec_large
)
6311 if (gimple_assign_single_p (stmt
))
6312 /* No need to lower copies, loads or stores. */
6314 if (gimple_assign_cast_p (stmt
))
6316 tree lhs
= gimple_assign_lhs (stmt
);
6317 tree rhs
= gimple_assign_rhs1 (stmt
);
6318 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6319 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6320 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6321 == TYPE_PRECISION (TREE_TYPE (rhs
))))
6322 /* No need to lower casts to same precision. */
6330 if (kind
== bitint_prec_middle
)
6332 tree type
= NULL_TREE
;
6333 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6334 with the same precision and back. */
6335 unsigned int nops
= gimple_num_ops (stmt
);
6336 for (unsigned int i
= is_gimple_assign (stmt
) ? 1 : 0;
6338 if (tree op
= gimple_op (stmt
, i
))
6340 tree nop
= maybe_cast_middle_bitint (&gsi
, op
, type
);
6342 gimple_set_op (stmt
, i
, nop
);
6343 else if (COMPARISON_CLASS_P (op
))
6345 TREE_OPERAND (op
, 0)
6346 = maybe_cast_middle_bitint (&gsi
,
6347 TREE_OPERAND (op
, 0),
6349 TREE_OPERAND (op
, 1)
6350 = maybe_cast_middle_bitint (&gsi
,
6351 TREE_OPERAND (op
, 1),
6354 else if (TREE_CODE (op
) == CASE_LABEL_EXPR
)
6357 = maybe_cast_middle_bitint (&gsi
, CASE_LOW (op
),
6360 = maybe_cast_middle_bitint (&gsi
, CASE_HIGH (op
),
6364 if (tree lhs
= gimple_get_lhs (stmt
))
6365 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
6366 && (bitint_precision_kind (TREE_TYPE (lhs
))
6367 == bitint_prec_middle
))
6369 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
6370 int uns
= TYPE_UNSIGNED (TREE_TYPE (lhs
));
6371 type
= build_nonstandard_integer_type (prec
, uns
);
6372 tree lhs2
= make_ssa_name (type
);
6373 gimple_set_lhs (stmt
, lhs2
);
6374 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, lhs2
);
6375 if (stmt_ends_bb_p (stmt
))
6377 edge e
= find_fallthru_edge (gsi_bb (gsi
)->succs
);
6378 gsi_insert_on_edge_immediate (e
, g
);
6381 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
6387 if (tree lhs
= gimple_get_lhs (stmt
))
6388 if (TREE_CODE (lhs
) == SSA_NAME
)
6390 tree type
= TREE_TYPE (lhs
);
6391 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6392 type
= TREE_TYPE (type
);
6393 if (TREE_CODE (type
) == BITINT_TYPE
6394 && bitint_precision_kind (type
) >= bitint_prec_large
6395 && (large_huge
.m_names
== NULL
6396 || !bitmap_bit_p (large_huge
.m_names
,
6397 SSA_NAME_VERSION (lhs
))))
6401 large_huge
.lower_stmt (stmt
);
6404 tree atype
= NULL_TREE
;
6405 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6408 gphi
*phi
= gsi
.phi ();
6409 tree lhs
= gimple_phi_result (phi
);
6410 if (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
6411 || bitint_precision_kind (TREE_TYPE (lhs
)) < bitint_prec_large
)
6413 int p1
= var_to_partition (large_huge
.m_map
, lhs
);
6414 gcc_assert (large_huge
.m_vars
[p1
] != NULL_TREE
);
6415 tree v1
= large_huge
.m_vars
[p1
];
6416 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
6418 tree arg
= gimple_phi_arg_def (phi
, i
);
6419 edge e
= gimple_phi_arg_edge (phi
, i
);
6421 switch (TREE_CODE (arg
))
6424 if (integer_zerop (arg
) && VAR_P (v1
))
6426 tree zero
= build_zero_cst (TREE_TYPE (v1
));
6427 g
= gimple_build_assign (v1
, zero
);
6428 gsi_insert_on_edge (e
, g
);
6429 edge_insertions
= true;
6433 unsigned int min_prec
, prec
, rem
;
6435 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
6436 rem
= prec
% (2 * limb_prec
);
6437 min_prec
= bitint_min_cst_precision (arg
, ext
);
6438 if (min_prec
> prec
- rem
- 2 * limb_prec
6439 && min_prec
> (unsigned) limb_prec
)
6440 /* Constant which has enough significant bits that it
6441 isn't worth trying to save .rodata space by extending
6442 from smaller number. */
6445 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
6448 else if (min_prec
== prec
)
6449 c
= tree_output_constant_def (arg
);
6450 else if (min_prec
== (unsigned) limb_prec
)
6451 c
= fold_convert (large_huge
.m_limb_type
, arg
);
6454 tree ctype
= build_bitint_type (min_prec
, 1);
6455 c
= tree_output_constant_def (fold_convert (ctype
, arg
));
6459 if (VAR_P (v1
) && min_prec
== prec
)
6461 tree v2
= build1 (VIEW_CONVERT_EXPR
,
6463 g
= gimple_build_assign (v1
, v2
);
6464 gsi_insert_on_edge (e
, g
);
6465 edge_insertions
= true;
6468 if (TREE_CODE (TREE_TYPE (c
)) == INTEGER_TYPE
)
6469 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6474 unsigned HOST_WIDE_INT nelts
6475 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c
)))
6478 = build_array_type_nelts (large_huge
.m_limb_type
,
6480 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6482 build1 (VIEW_CONVERT_EXPR
,
6485 gsi_insert_on_edge (e
, g
);
6489 unsigned HOST_WIDE_INT nelts
6490 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1
)))
6491 - min_prec
) / limb_prec
;
6493 = build_array_type_nelts (large_huge
.m_limb_type
,
6495 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6496 tree off
= fold_convert (ptype
,
6497 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6498 tree vd
= build2 (MEM_REF
, vtype
,
6499 build_fold_addr_expr (v1
), off
);
6500 g
= gimple_build_assign (vd
, build_zero_cst (vtype
));
6507 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6509 = fold_convert (ptype
,
6510 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6511 vd
= build2 (MEM_REF
, large_huge
.m_limb_type
,
6512 build_fold_addr_expr (v1
), off
);
6514 vd
= build_fold_addr_expr (vd
);
6515 unsigned HOST_WIDE_INT nbytes
6516 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1
)));
6519 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6520 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
6521 g
= gimple_build_call (fn
, 3, vd
,
6522 integer_minus_one_node
,
6523 build_int_cst (sizetype
,
6526 gsi_insert_on_edge (e
, g
);
6527 edge_insertions
= true;
6532 if (gimple_code (SSA_NAME_DEF_STMT (arg
)) == GIMPLE_NOP
)
6534 if (large_huge
.m_names
== NULL
6535 || !bitmap_bit_p (large_huge
.m_names
,
6536 SSA_NAME_VERSION (arg
)))
6539 int p2
= var_to_partition (large_huge
.m_map
, arg
);
6542 gcc_assert (large_huge
.m_vars
[p2
] != NULL_TREE
);
6543 tree v2
= large_huge
.m_vars
[p2
];
6544 if (VAR_P (v1
) && VAR_P (v2
))
6545 g
= gimple_build_assign (v1
, v2
);
6546 else if (VAR_P (v1
))
6547 g
= gimple_build_assign (v1
, build1 (VIEW_CONVERT_EXPR
,
6548 TREE_TYPE (v1
), v2
));
6549 else if (VAR_P (v2
))
6550 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6551 TREE_TYPE (v2
), v1
), v2
);
6554 if (atype
== NULL_TREE
6555 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6556 TYPE_SIZE (TREE_TYPE (lhs
))))
6558 unsigned HOST_WIDE_INT nelts
6559 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))
6562 = build_array_type_nelts (large_huge
.m_limb_type
,
6565 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6567 build1 (VIEW_CONVERT_EXPR
,
6570 gsi_insert_on_edge (e
, g
);
6571 edge_insertions
= true;
6578 if (large_huge
.m_names
|| has_large_huge
)
6581 for (i
= 0; i
< num_ssa_names
; ++i
)
6583 tree s
= ssa_name (i
);
6586 tree type
= TREE_TYPE (s
);
6587 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6588 type
= TREE_TYPE (type
);
6589 if (TREE_CODE (type
) == BITINT_TYPE
6590 && bitint_precision_kind (type
) >= bitint_prec_large
)
6592 if (large_huge
.m_preserved
6593 && bitmap_bit_p (large_huge
.m_preserved
,
6594 SSA_NAME_VERSION (s
)))
6596 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6597 if (gimple_code (g
) == GIMPLE_NOP
)
6599 if (SSA_NAME_VAR (s
))
6600 set_ssa_default_def (cfun
, SSA_NAME_VAR (s
), NULL_TREE
);
6601 release_ssa_name (s
);
6604 if (gimple_code (g
) != GIMPLE_ASM
)
6606 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
6607 bool save_vta
= flag_var_tracking_assignments
;
6608 flag_var_tracking_assignments
= false;
6609 gsi_remove (&gsi
, true);
6610 flag_var_tracking_assignments
= save_vta
;
6613 nop
= gimple_build_nop ();
6614 SSA_NAME_DEF_STMT (s
) = nop
;
6615 release_ssa_name (s
);
6619 disable_ranger (cfun
);
6622 if (edge_insertions
)
6623 gsi_commit_edge_inserts ();
6630 const pass_data pass_data_lower_bitint
=
6632 GIMPLE_PASS
, /* type */
6633 "bitintlower", /* name */
6634 OPTGROUP_NONE
, /* optinfo_flags */
6635 TV_NONE
, /* tv_id */
6636 PROP_ssa
, /* properties_required */
6637 PROP_gimple_lbitint
, /* properties_provided */
6638 0, /* properties_destroyed */
6639 0, /* todo_flags_start */
6640 0, /* todo_flags_finish */
6643 class pass_lower_bitint
: public gimple_opt_pass
6646 pass_lower_bitint (gcc::context
*ctxt
)
6647 : gimple_opt_pass (pass_data_lower_bitint
, ctxt
)
6650 /* opt_pass methods: */
6651 opt_pass
* clone () final override
{ return new pass_lower_bitint (m_ctxt
); }
6652 unsigned int execute (function
*) final override
6654 return gimple_lower_bitint ();
6657 }; // class pass_lower_bitint
6662 make_pass_lower_bitint (gcc::context
*ctxt
)
6664 return new pass_lower_bitint (ctxt
);
6670 const pass_data pass_data_lower_bitint_O0
=
6672 GIMPLE_PASS
, /* type */
6673 "bitintlower0", /* name */
6674 OPTGROUP_NONE
, /* optinfo_flags */
6675 TV_NONE
, /* tv_id */
6676 PROP_cfg
, /* properties_required */
6677 PROP_gimple_lbitint
, /* properties_provided */
6678 0, /* properties_destroyed */
6679 0, /* todo_flags_start */
6680 0, /* todo_flags_finish */
6683 class pass_lower_bitint_O0
: public gimple_opt_pass
6686 pass_lower_bitint_O0 (gcc::context
*ctxt
)
6687 : gimple_opt_pass (pass_data_lower_bitint_O0
, ctxt
)
6690 /* opt_pass methods: */
6691 bool gate (function
*fun
) final override
6693 /* With errors, normal optimization passes are not run. If we don't
6694 lower bitint operations at all, rtl expansion will abort. */
6695 return !(fun
->curr_properties
& PROP_gimple_lbitint
);
6698 unsigned int execute (function
*) final override
6700 return gimple_lower_bitint ();
6703 }; // class pass_lower_bitint_O0
6708 make_pass_lower_bitint_O0 (gcc::context
*ctxt
)
6710 return new pass_lower_bitint_O0 (ctxt
);