/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions into 4 categories.  Small _BitInt, where the
   target hook says it is a single limb; middle _BitInt, which per ABI is
   not, but there is some INTEGER_TYPE in which arithmetic can be performed
   (operations on such _BitInt are lowered to casts to that arithmetic type
   and cast back; e.g. on x86_64 the limb is DImode, but the target supports
   TImode, so _BitInt(65) to _BitInt(128) are middle ones); large _BitInt,
   which should be handled by straight line code; and finally huge _BitInt,
   which should be handled by loops over the limbs.  */

enum bitint_prec_kind
{
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
        huge_min_prec = 4 * limb_prec;
      else
        huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
        mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
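
/* For instance, on a target with a 64-bit limb and 128-bit
   MAX_FIXED_MODE_SIZE (an x86_64-like configuration, used here purely as
   an illustration), the above classifies _BitInt(64) and narrower as
   small, _BitInt(65) to _BitInt(128) as middle, _BitInt(129) to
   _BitInt(255) as large and _BitInt(256) and wider as huge.  */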
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}
/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
         in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
        min_prec = min_prec2;
    }
  return min_prec;
}
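
/* As an illustration, for unsigned _BitInt(256) constant 5 this returns 3
   with EXT 0 (all bits above bit 2 are zero), while for signed
   _BitInt(256) constant -2 it returns 1 with EXT -1 (all bits above the
   lowest bit are copies of the sign bit), so a single limb of .rodata
   can describe either constant.  */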
/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

static tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
        return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
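
/* E.g. on a target where the limb is 64-bit but TImode arithmetic is
   available (the x86_64 example from the comment at the top of the file),
   a middle _BitInt(100) operand is converted here to a 100-bit
   INTEGER_TYPE, so its arithmetic can be expanded directly instead of
   going through limb loops.  */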
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

static bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
        tree cnt = gimple_assign_rhs2 (stmt);
        if (tree_fits_uhwi_p (cnt)
            && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
          return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
        tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
        tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
            && TREE_CODE (lhs_type) == BITINT_TYPE
            && TREE_CODE (rhs_type) == BITINT_TYPE
            && bitint_precision_kind (lhs_type) >= bitint_prec_large
            && bitint_precision_kind (rhs_type) >= bitint_prec_large
            && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
          {
            if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
              return true;
            if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
              return true;
            if (bitint_precision_kind (lhs_type) == bitint_prec_large)
              return true;
          }
        break;
      }
    default:
      break;
    }
  return false;
}
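
/* So e.g. a chain like x = (a + b) ^ c on huge _BitInt operands is
   mergeable and can be evaluated in a single loop over the limbs, while
   a multiplication, division or a shift by a non-constant amount is
   not.  */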
/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

static int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
          || gimple_bb (use_stmt) != gimple_bb (stmt)
          || !gimple_store_p (use_stmt)
          || !is_gimple_assign (use_stmt)
          || gimple_has_volatile_ops (use_stmt)
          || stmt_ends_bb_p (use_stmt))
        return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
        continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
        return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
        {
          if ((seen & 1) != 0)
            return 0;
          seen |= 1;
        }
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
        {
          if ((seen & 2) != 0)
            return 0;
          seen |= 2;

          use_operand_p use2_p;
          gimple *use_stmt;
          tree lhs2 = gimple_assign_lhs (g);
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
            return 0;
          if (!single_imm_use (lhs2, &use2_p, &use_stmt)
              || gimple_bb (use_stmt) != gimple_bb (stmt)
              || !gimple_assign_cast_p (use_stmt))
            return 0;

          lhs2 = gimple_assign_lhs (use_stmt);
          if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
              || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
            return 0;
        }
      else
        return 0;
    }
  return seen == 3 ? 2 : 1;
}
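
/* The typical match here is the expansion of e.g.
     if (__builtin_add_overflow (a, b, &r)) ...
   where the REALPART_EXPR of the IFN_ADD_OVERFLOW result is stored to R
   and the IMAGPART_EXPR is cast to a small integral type for the
   condition, so neither part needs a stack-backed variable.  */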
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

static tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
          || TREE_CODE_CLASS (code) == tcc_binary)
        op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
                             profile_probability, profile_probability,
                             edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
                                    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
                              tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     it just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     the m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
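
/* As a sketch of the m_data protocol described above: lowering of an
   INTEGER_CST operand pushes two trees on the first call and bumps
   m_data_cnt by 2; every non-first call for another limb of the same
   operand reads m_data[m_data_cnt] and m_data[m_data_cnt + 1] and bumps
   m_data_cnt by the same 2, keeping producers and consumers of the
   per-operand state in sync.  */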
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
                                           TYPE_UNSIGNED (type));
}
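
/* E.g. for _BitInt(255) with a 64-bit limb (illustrative numbers), limbs
   0 to 2 are accessed in m_limb_type while limb 3 uses a 63-bit
   nonstandard integer type built here.  */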
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
                ? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
                    build_fold_addr_expr (var),
                    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
        = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
                  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
                              build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
                                             tree_to_uhwi (idx)
                                             * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
          || !useless_type_conversion_p (m_limb_type,
                                         TREE_TYPE (TREE_TYPE (var))))
        {
          unsigned HOST_WIDE_INT nelts
            = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
          tree atype = build_array_type_nelts (m_limb_type, nelts);
          var = build1 (VIEW_CONVERT_EXPR, atype, var);
        }
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
/* Emit a half diamond, i.e. if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
                            edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond, i.e. if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
                                 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with a full diamond in it, i.e.
   if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
                                         profile_probability prob1,
                                         profile_probability prob2,
                                         edge &edge_true_true,
                                         edge &edge_true_false,
                                         edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
          || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
        {
          if (SSA_NAME_IS_DEFAULT_DEF (op))
            {
              if (m_first)
                {
                  tree v = create_tmp_reg (m_limb_type);
                  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
                    {
                      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
                      DECL_SOURCE_LOCATION (v)
                        = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
                    }
                  v = get_or_create_ssa_default_def (cfun, v);
                  m_data.safe_push (v);
                }
              tree ret = m_data[m_data_cnt];
              m_data_cnt++;
              if (tree_fits_uhwi_p (idx))
                {
                  tree type = limb_access_type (TREE_TYPE (op), idx);
                  ret = add_cast (type, ret);
                }
              return ret;
            }
          location_t loc_save = m_loc;
          m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
          tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
          m_loc = loc_save;
          return ret;
        }
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
          && m_single_use_names
          && m_vars[p] != m_lhs
          && m_after_stmt
          && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
        {
          tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
                                        CLOBBER_STORAGE_END);
          g = gimple_build_assign (m_vars[p], clobber);
          gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
          gsi_insert_after (&gsi, g, GSI_SAME_STMT);
        }
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
        {
          tree c, type = limb_access_type (TREE_TYPE (op), idx);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (limb_prec != HOST_BITS_PER_WIDE_INT)
            {
              wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
                                       TYPE_SIGN (TREE_TYPE (op)));
              c = wide_int_to_tree (type,
                                    wide_int::from (w, TYPE_PRECISION (type),
                                                    UNSIGNED));
            }
          else if (i >= TREE_INT_CST_EXT_NUNITS (op))
            c = build_int_cst (type,
                               tree_int_cst_sgn (op) < 0 ? -1 : 0);
          else
            c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
          m_data_cnt += 2;
          return c;
        }
      if (m_first
          || (m_data[m_data_cnt] == NULL_TREE
              && m_data[m_data_cnt + 1] == NULL_TREE))
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned int rem = prec % (2 * limb_prec);
          int ext;
          unsigned min_prec = bitint_min_cst_precision (op, ext);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (integer_zerop (op))
            {
              tree c = build_zero_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (integer_all_onesp (op))
            {
              tree c = build_all_ones_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
            {
              /* Single limb constant.  Use a phi with that limb from
                 the preheader edge and 0 or -1 constant from the other edge
                 and for the second limb in the loop.  */
              tree out;
              gcc_assert (m_first);
              m_data.pop ();
              m_data.pop ();
              prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
                                   build_int_cst (m_limb_type, ext));
            }
          else if (min_prec > prec - rem - 2 * limb_prec)
            {
              /* Constant which has enough significant bits that it isn't
                 worth trying to save .rodata space by extending from smaller
                 number.  */
              tree type;
              if (m_var_msb)
                type = TREE_TYPE (op);
              else
                /* If we have a guarantee the most significant partial limb
                   (if any) will be only accessed through handle_operand
                   with INTEGER_CST idx, we don't need to include the partial
                   limb in .rodata.  */
                type = build_bitint_type (prec - rem, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = NULL_TREE;
            }
          else if (m_upwards_2limb)
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.  */
              min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
              tree type = build_bitint_type (min_prec, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              tree idx2 = make_ssa_name (sizetype);
              g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
              insert_before (g);
              g = gimple_build_cond (LT_EXPR, idx,
                                     size_int (min_prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              edge edge_true, edge_false;
              if_then (g, (min_prec >= (prec - rem) / 2
                           ? profile_probability::likely ()
                           : profile_probability::unlikely ()),
                       edge_true, edge_false);
              tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
              insert_before (g);
              c1 = gimple_assign_lhs (g);
              tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
              insert_before (g);
              c2 = gimple_assign_lhs (g);
              tree c3 = build_int_cst (m_limb_type, ext);
              m_gsi = gsi_after_labels (edge_true->dest);
              m_data[m_data_cnt] = make_ssa_name (m_limb_type);
              m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
              gphi *phi = create_phi_node (m_data[m_data_cnt],
                                           edge_true->dest);
              add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
              phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
              add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
            }
          else
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.
                 Version for loops with random access to the limbs or
                 downwards.  */
              min_prec = CEIL (min_prec, limb_prec) * limb_prec;
              tree c;
              if (min_prec <= (unsigned) limb_prec)
                c = fold_convert (m_limb_type, op);
              else
                {
                  tree type = build_bitint_type (min_prec, 1);
                  c = tree_output_constant_def (fold_convert (type, op));
                }
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = integer_type_node;
            }
          t = m_data[m_data_cnt];
          if (m_data[m_data_cnt + 1] == NULL_TREE)
            {
              t = limb_access (TREE_TYPE (op), t, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
              insert_before (g);
              t = gimple_assign_lhs (g);
            }
        }
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
        {
          t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
          g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
          insert_before (g);
          t = gimple_assign_lhs (g);
        }
      else
        {
          t = m_data[m_data_cnt + 1];
          if (m_data[m_data_cnt + 1] == integer_type_node)
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
              unsigned rem = prec % (2 * limb_prec);
              int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
              tree c = m_data[m_data_cnt];
              unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
              g = gimple_build_cond (LT_EXPR, idx,
                                     size_int (min_prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              edge edge_true, edge_false;
              if_then (g, (min_prec >= (prec - rem) / 2
                           ? profile_probability::likely ()
                           : profile_probability::unlikely ()),
                       edge_true, edge_false);
              if (min_prec > (unsigned) limb_prec)
                {
                  c = limb_access (TREE_TYPE (op), c, idx, false);
                  g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
                  insert_before (g);
                  c = gimple_assign_lhs (g);
                }
              tree c2 = build_int_cst (m_limb_type, ext);
              m_gsi = gsi_after_labels (edge_true->dest);
              t = make_ssa_name (m_limb_type);
              gphi *phi = create_phi_node (t, edge_true->dest);
              add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
            }
        }
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
                                        tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
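
/* E.g. the carry of a lowered addition uses this helper: the PHI merges
   a zero coming from the loop preheader with the carry SSA_NAME computed
   in the previous iteration arriving over the latch edge.  */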
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
                                      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
                     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        {
          if (!TYPE_UNSIGNED (rhs1_type))
            {
              tree type = unsigned_type_for (rhs1_type);
              rhs1 = add_cast (type, rhs1);
              rhs2 = add_cast (type, rhs2);
            }
          rhs1 = add_cast (m_limb_type, rhs1);
          rhs2 = add_cast (m_limb_type, rhs2);
        }
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_UADDC : IFN_USUBC,
                                      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
                                      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
        {
          rhs1 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs1, REALPART_EXPR,
                                   build1 (REALPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          rhs2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs2, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          lhs = make_ssa_name (ctype);
          g = gimple_build_call_internal (code == PLUS_EXPR
                                          ? IFN_ADD_OVERFLOW
                                          : IFN_SUB_OVERFLOW,
                                          2, rhs1, data_in);
          gimple_call_set_lhs (g, lhs);
          insert_before (g);
          data_in = make_ssa_name (m_limb_type);
          g = gimple_build_assign (data_in, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
          insert_before (g);
        }
      else
        {
          g = gimple_build_assign (data_out, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
        }
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
                           build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
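
/* So for a huge addition each limb is in effect lowered to roughly
     _Complex limb tmp = .UADDC (a[i], b[i], carry_in);
     res[i] = __real__ tmp; carry_out = __imag__ tmp;
   (or to the IFN_ADD_OVERFLOW fallback above when the target has no
   uaddc5 pattern), with carry_out fed back through m_data.  */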
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
                               build_int_cst (unsigned_type_node,
                                              limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
        {
          rhs1 = lhs;
          lhs = make_ssa_name (rhs1_type);
          g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
          insert_before (g);
        }
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
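
/* E.g. for x << 3 with 64-bit limbs each destination limb becomes
   (rhs1[i] << 3) | (rhs1[i - 1] >> 61), with the previous limb provided
   through data_in/data_out across the calls for consecutive limbs.  */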
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
          /* If lhs has bigger precision than rhs, we can use
             the simple case only if there is a guarantee that
             the most significant limb is handled in straight
             line code.  If m_var_msb (on left shifts) or
             if m_upwards_2limb * limb_prec is equal to
             lhs precision that is not the case.  */
          || (!m_var_msb
              && tree_int_cst_equal (TYPE_SIZE (rhs_type),
                                     TYPE_SIZE (lhs_type))
              && (!m_upwards_2limb
                  || (m_upwards_2limb * limb_prec
                      < TYPE_PRECISION (lhs_type)))))
        {
          rhs1 = handle_operand (rhs1, idx);
          if (tree_fits_uhwi_p (idx))
            {
              tree type = limb_access_type (lhs_type, idx);
              if (!types_compatible_p (type, TREE_TYPE (rhs1)))
                rhs1 = add_cast (type, rhs1);
            }
          return rhs1;
        }
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
        {
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          if (TYPE_UNSIGNED (rhs_type))
            /* No need to keep state between iterations.  */
            ;
          else if (m_upwards && !m_upwards_2limb)
            /* We need to keep state between iterations, but
               not within any loop, everything is straight line
               code with only increasing indexes.  */
            ;
          else if (!m_upwards_2limb)
            {
              unsigned save_data_cnt = m_data_cnt;
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              m_data_cnt = save_data_cnt + 3;
              t = handle_operand (rhs1, size_int (low));
              m_first = false;
              m_data[save_data_cnt + 2]
                = build_int_cst (NULL_TREE, m_data_cnt);
              m_data_cnt = save_data_cnt;
              t = add_cast (signed_type_for (m_limb_type), t);
              tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
              tree n = make_ssa_name (TREE_TYPE (t));
              g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
              insert_before (g);
              m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
              m_init_gsi = m_gsi;
              if (gsi_end_p (m_init_gsi))
                m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
              else
                gsi_prev (&m_init_gsi);
              m_gsi = save_gsi;
            }
          else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
            /* We need to keep state between iterations, but
               fortunately not within the loop, only afterwards.  */
            ;
          else
            {
              tree out;
              m_data.truncate (m_data_cnt);
              prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
              m_data.safe_push (NULL_TREE);
            }
        }

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
        {
          if (m_upwards_2limb
              && (m_upwards_2limb * limb_prec
                  <= ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type))))
            {
              rhs1 = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              m_first = save_first;
              return rhs1;
            }
          bool single_comparison
            = low == high || (m_upwards_2limb && (low & 1) == m_first);
          g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
                                 idx, size_int (low), NULL_TREE, NULL_TREE);
          edge edge_true_true, edge_true_false, edge_false;
          if_then_if_then_else (g, (single_comparison ? NULL
                                    : gimple_build_cond (EQ_EXPR, idx,
                                                         size_int (low),
                                                         NULL_TREE,
                                                         NULL_TREE)),
                                profile_probability::likely (),
                                profile_probability::unlikely (),
                                edge_true_true, edge_true_false, edge_false);
          bool save_cast_conditional = m_cast_conditional;
          m_cast_conditional = true;
          m_bitfld_load = 0;
          tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
          if (m_first)
            m_data[save_data_cnt + 2]
              = build_int_cst (NULL_TREE, m_data_cnt);
          tree ext = NULL_TREE;
          tree bitfld = NULL_TREE;
          if (!single_comparison)
            {
              m_gsi = gsi_after_labels (edge_true_true->src);
              m_first = false;
              m_data_cnt = save_data_cnt + 3;
              if (m_bitfld_load)
                {
                  bitfld = m_data[m_bitfld_load];
                  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
                  m_bitfld_load = 0;
                }
              t2 = handle_operand (rhs1, size_int (low));
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
                t2 = add_cast (m_limb_type, t2);
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t2);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                }
            }
          tree t3;
          if (TYPE_UNSIGNED (rhs_type))
            t3 = build_zero_cst (m_limb_type);
          else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
            t3 = m_data[save_data_cnt];
          else
            t3 = m_data[save_data_cnt + 1];
          m_gsi = gsi_after_labels (edge_true_false->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true_false->dest);
          add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
          add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
          if (edge_true_true)
            add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
          if (ext)
            {
              tree t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[save_data_cnt], edge_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
              g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
              insert_before (g);
            }
          if (m_bitfld_load)
            {
              tree t4;
              if (!m_first)
                t4 = m_data[m_bitfld_load + 1];
              else
                t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi,
                           edge_true_true ? bitfld : m_data[m_bitfld_load],
                           edge_true_false, UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[m_bitfld_load + 2],
                           edge_false, UNKNOWN_LOCATION);
              if (edge_true_true)
                add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
                             UNKNOWN_LOCATION);
              m_data[m_bitfld_load] = t4;
              m_data[m_bitfld_load + 2] = t4;
              m_bitfld_load = 0;
            }
          m_cast_conditional = save_cast_conditional;
          m_first = save_first;
          m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
          return t;
        }
      else
        {
          if (tree_to_uhwi (idx) < low)
            {
              t = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
            }
          else if (tree_to_uhwi (idx) < high)
            {
              t = handle_operand (rhs1, size_int (low));
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
                t = add_cast (m_limb_type, t);
              tree ext = NULL_TREE;
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                  m_data[save_data_cnt + 1] = ext;
                }
            }
          else
            {
              if (TYPE_UNSIGNED (rhs_type) && m_first)
                {
                  handle_operand (rhs1, size_zero_node);
                  m_data[save_data_cnt + 2]
                    = build_int_cst (NULL_TREE, m_data_cnt);
                }
              else
                m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
              if (TYPE_UNSIGNED (rhs_type))
                t = build_zero_cst (m_limb_type);
              else
                t = m_data[save_data_cnt + 1];
            }
          tree type = limb_access_type (lhs_type, idx);
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_first = save_first;
          return t;
        }
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
           && bitint_precision_kind (lhs_type) >= bitint_prec_large
           && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
         type if this assert fails.  If no target chooses limb mode smaller
         than half of largest supported normal integral type, this will not
         be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
        {
          gimple_stmt_iterator save_gsi = m_gsi;
          m_gsi = m_init_gsi;
          if (gsi_end_p (m_gsi))
            m_gsi = gsi_after_labels (gsi_bb (m_gsi));
          else
            gsi_next (&m_gsi);
          if (TREE_CODE (rhs_type) == BITINT_TYPE
              && bitint_precision_kind (rhs_type) == bitint_prec_middle)
            {
              tree type = NULL_TREE;
              rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
              rhs_type = TREE_TYPE (rhs1);
            }
          r1 = rhs1;
          if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
            r1 = add_cast (m_limb_type, rhs1);
          if (TYPE_PRECISION (rhs_type) > limb_prec)
            {
              g = gimple_build_assign (make_ssa_name (rhs_type),
                                       RSHIFT_EXPR, rhs1,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec));
              insert_before (g);
              r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          if (TYPE_UNSIGNED (rhs_type))
            rext = build_zero_cst (m_limb_type);
          else
            {
              rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
                                       RSHIFT_EXPR, rext,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec - 1));
              insert_before (g);
              rext = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          m_init_gsi = m_gsi;
          if (gsi_end_p (m_init_gsi))
            m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
          else
            gsi_prev (&m_init_gsi);
          m_gsi = save_gsi;
        }
      tree t;
      if (m_upwards_2limb)
        {
          if (m_first)
            {
              tree out1, out2;
              prepare_data_in_out (r1, idx, &out1, rext);
              if (TYPE_PRECISION (rhs_type) > limb_prec)
                {
                  prepare_data_in_out (r2, idx, &out2, rext);
                  m_data.pop ();
                  t = m_data.pop ();
                  m_data[m_data_cnt + 1] = t;
                }
              else
                m_data[m_data_cnt + 1] = rext;
              m_data.safe_push (rext);
              t = m_data[m_data_cnt];
            }
          else if (!tree_fits_uhwi_p (idx))
            t = m_data[m_data_cnt + 1];
          else
            {
              tree type = limb_access_type (lhs_type, idx);
              t = m_data[m_data_cnt + 2];
              if (!useless_type_conversion_p (type, m_limb_type))
                t = add_cast (type, t);
            }
          m_data_cnt += 3;
          return t;
        }
      else if (m_first)
        {
          m_data.safe_push (r1);
          m_data.safe_push (r2);
          m_data.safe_push (rext);
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree type = limb_access_type (lhs_type, idx);
          if (integer_zerop (idx))
            t = m_data[m_data_cnt];
          else if (TYPE_PRECISION (rhs_type) > limb_prec
                   && integer_onep (idx))
            t = m_data[m_data_cnt + 1];
          else
            t = m_data[m_data_cnt + 2];
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_data_cnt += 3;
          return t;
        }
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
                             NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
        {
          g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
                                 NULL_TREE, NULL_TREE);
          insert_before (g);
          edge e5 = split_block (gsi_bb (m_gsi), g);
          e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
          e2 = find_edge (e5->dest, e2->dest);
          e4->probability = profile_probability::unlikely ();
          e5->flags = EDGE_FALSE_VALUE;
          e5->probability = e4->probability.invert ();
        }
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
        add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
/* Helper function for handle_stmt method, handle a load from memory.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & EDGE_EH)
          break;
    }

  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
         which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
          && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
        goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
         handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
        goto normal_load;
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
          && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
        bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
        {
          bitoffset = 0;
          var_field_off = true;
        }
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
                    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
                           TREE_OPERAND (rhs1, 0), repr,
                           var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      if (m_first)
        {
          if (m_upwards)
            {
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
              tree iv = make_ssa_name (m_limb_type);
              g = gimple_build_assign (iv, t);
              insert_before (g);
              if (eh)
                {
                  maybe_duplicate_eh_stmt (g, stmt);
                  if (eh_edge)
                    {
                      edge e = split_block (gsi_bb (m_gsi), g);
                      make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
                        = profile_probability::very_unlikely ();
                      m_gsi = gsi_after_labels (e->dest);
                      if (gsi_bb (save_gsi) == e->src)
                        {
                          if (gsi_end_p (save_gsi))
                            save_gsi = gsi_end_bb (e->dest);
                          else
                            save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
                        }
                      if (m_preheader_bb == e->src)
                        m_preheader_bb = e->dest;
                    }
                }
              m_init_gsi = m_gsi;
              if (gsi_end_p (m_init_gsi))
                m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
              else
                gsi_prev (&m_init_gsi);
              m_gsi = save_gsi;
              tree out;
              prepare_data_in_out (iv, idx, &out);
              out = m_data[m_data_cnt];
              m_data.safe_push (out);
            }
          else
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
        }

      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
        {
          gcc_assert (!m_bitfld_load);
          m_bitfld_load = m_data_cnt;
        }
      if (tree_fits_uhwi_p (idx))
        {
          unsigned prec = TYPE_PRECISION (rhs_type);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          gcc_assert (i * limb_prec < prec);
          nidx1 = size_int (i + bo_idx + 1);
          if ((i + 1) * limb_prec > prec)
            {
              prec %= limb_prec;
              if (prec + bo_bit <= (unsigned) limb_prec)
                nidx1 = NULL_TREE;
            }
          if (!iv)
            nidx0 = size_int (i + bo_idx);
        }
      else
        {
          if (!iv)
            {
              if (bo_idx == 0)
                nidx0 = idx;
              else
                {
                  nidx0 = make_ssa_name (sizetype);
                  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
                                           size_int (bo_idx));
                  insert_before (g);
                }
            }
          nidx1 = make_ssa_name (sizetype);
          g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
                                   size_int (bo_idx + 1));
          insert_before (g);
        }

      tree iv2 = NULL_TREE;
      if (nidx0)
        {
          tree t = limb_access (rhs_type, nrhs1, nidx0, true);
          iv = make_ssa_name (m_limb_type);
          g = gimple_build_assign (iv, t);
          insert_before (g);
        }
      if (nidx1)
        {
          bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
          unsigned prec = TYPE_PRECISION (rhs_type);
          if (conditional)
            if ((prec % limb_prec) == 0
                || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
              conditional = false;
          edge edge_true = NULL, edge_false = NULL;
          if (conditional)
            {
              g = gimple_build_cond (NE_EXPR, idx,
                                     size_int (prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              if_then (g, profile_probability::likely (),
                       edge_true, edge_false);
            }
          tree t = limb_access (rhs_type, nrhs1, nidx1, true);
          if (m_upwards_2limb
              && !m_first
              && !m_bitfld_load
              && !tree_fits_uhwi_p (idx))
            iv2 = m_data[m_data_cnt + 1];
          else
            iv2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (iv2, t);
          insert_before (g);
          if (eh)
            {
              maybe_duplicate_eh_stmt (g, stmt);
              if (eh_edge)
                {
                  edge e = split_block (gsi_bb (m_gsi), g);
                  m_gsi = gsi_after_labels (e->dest);
                  make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
                    = profile_probability::very_unlikely ();
                }
            }
          if (conditional)
            {
              tree iv3 = make_ssa_name (m_limb_type);
              if (eh)
                edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
              gphi *phi = create_phi_node (iv3, edge_true->dest);
              add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, build_zero_cst (m_limb_type),
                           edge_false, UNKNOWN_LOCATION);
              m_gsi = gsi_after_labels (edge_true->dest);
              iv2 = iv3;
            }
        }
      g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
                               iv, build_int_cst (unsigned_type_node, bo_bit));
      insert_before (g);
      iv = gimple_assign_lhs (g);
      if (iv2)
        {
          g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
                                   iv2, build_int_cst (unsigned_type_node,
                                                       limb_prec - bo_bit));
          insert_before (g);
          g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
                                   gimple_assign_lhs (g), iv);
          insert_before (g);
          iv = gimple_assign_lhs (g);
          if (m_data[m_data_cnt])
            m_data[m_data_cnt] = iv2;
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree atype = limb_access_type (rhs_type, idx);
          if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
            iv = add_cast (atype, iv);
        }
      m_data_cnt += 3;
      return iv;
    }

 normal_load:
  /* Use write_p = true for loads with EH edges to make
     sure limb_access doesn't add a cast as separate
     statement after it.  */
  rhs1 = limb_access (rhs_type, rhs1, idx, eh);
  tree ret = make_ssa_name (TREE_TYPE (rhs1));
  g = gimple_build_assign (ret, rhs1);
  insert_before (g);
  if (eh)
    {
      maybe_duplicate_eh_stmt (g, stmt);
      if (eh_edge)
        {
          edge e = split_block (gsi_bb (m_gsi), g);
          m_gsi = gsi_after_labels (e->dest);
          make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
            = profile_probability::very_unlikely ();
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree atype = limb_access_type (rhs_type, idx);
          if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
            ret = add_cast (atype, ret);
        }
    }
  return ret;
}
/* Return a limb IDX from a mergeable statement STMT.  */

tree
bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
{
  tree lhs, rhs1, rhs2 = NULL_TREE;
  gimple *g;
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_load_p (stmt))
        return handle_load (stmt, idx);
      switch (gimple_assign_rhs_code (stmt))
        {
        case BIT_AND_EXPR:
        case BIT_IOR_EXPR:
        case BIT_XOR_EXPR:
          rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
          /* FALLTHRU */
        case BIT_NOT_EXPR:
          rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
          lhs = make_ssa_name (TREE_TYPE (rhs1));
          g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
                                   rhs1, rhs2);
          insert_before (g);
          return lhs;
        case PLUS_EXPR:
        case MINUS_EXPR:
          rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
          rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
          return handle_plus_minus (gimple_assign_rhs_code (stmt),
                                    rhs1, rhs2, idx);
        case NEGATE_EXPR:
          rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
          rhs1 = build_zero_cst (TREE_TYPE (rhs2));
          return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
        case LSHIFT_EXPR:
          return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
                                                idx),
                                gimple_assign_rhs2 (stmt), idx);
        case SSA_NAME:
        case INTEGER_CST:
          return handle_operand (gimple_assign_rhs1 (stmt), idx);
        CASE_CONVERT:
        case VIEW_CONVERT_EXPR:
          return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt), idx);
        default:
          break;
        }
      break;
    default:
      break;
    }
  gcc_unreachable ();
}
/* Return minimum precision of OP at STMT.
   Positive value is minimum precision above which all bits
   are zero, negative means all bits above negation of the
   value are copies of the sign bit.  */

static int
range_to_prec (tree op, gimple *stmt)
{
  int_range_max r;
  wide_int w;
  tree type = TREE_TYPE (op);
  unsigned int prec = TYPE_PRECISION (type);

  if (!optimize
      || !get_range_query (cfun)->range_of_expr (r, op, stmt)
      || r.undefined_p ())
    {
      if (TYPE_UNSIGNED (type))
        return prec;
      else
        return MIN ((int) -prec, -2);
    }

  if (!TYPE_UNSIGNED (TREE_TYPE (op)))
    {
      w = r.lower_bound ();
      if (wi::neg_p (w))
        {
          int min_prec1 = wi::min_precision (w, SIGNED);
          w = r.upper_bound ();
          int min_prec2 = wi::min_precision (w, SIGNED);
          int min_prec = MAX (min_prec1, min_prec2);
          return MIN (-min_prec, -2);
        }
    }

  w = r.upper_bound ();
  int min_prec = wi::min_precision (w, UNSIGNED);
  return MAX (min_prec, 1);
}
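
/* E.g. if the range query proves OP is within [0, 255], this returns 8;
   if OP is signed and within [-100, 50], it returns -8, since all bits
   above bit 7 are copies of the sign bit.  */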
1984 /* Return address of the first limb of OP and write into *PREC
1985 its precision. If positive, the operand is zero extended
1986 from that precision, if it is negative, the operand is sign-extended
1987 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
1988 otherwise *PREC_STORED is prec from the innermost call without
1989 range optimizations. */
tree
bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
					int *prec_stored, int *prec)
{
  wide_int w;
  location_t loc_save = m_loc;
  if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
       || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
      && TREE_CODE (op) != INTEGER_CST)
    {
    do_int:
      *prec = range_to_prec (op, stmt);
      bitint_prec_kind kind = bitint_prec_small;
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
      if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
	kind = bitint_precision_kind (TREE_TYPE (op));
      if (kind == bitint_prec_middle)
	{
	  tree type = NULL_TREE;
	  op = maybe_cast_middle_bitint (&m_gsi, op, type);
	}
      tree op_type = TREE_TYPE (op);
      unsigned HOST_WIDE_INT nelts
	= CEIL (TYPE_PRECISION (op_type), limb_prec);
      /* Add support for 3 or more limbs filled in from normal
	 integral type if this assert fails.  If no target chooses
	 limb mode smaller than half of largest supported normal
	 integral type, this will not be needed.  */
      gcc_assert (nelts <= 2);
      if (prec_stored)
	*prec_stored = (TYPE_UNSIGNED (op_type)
			? TYPE_PRECISION (op_type)
			: -TYPE_PRECISION (op_type));
      if (*prec <= limb_prec && *prec >= -limb_prec)
	{
	  nelts = 1;
	  if (prec_stored)
	    {
	      if (TYPE_UNSIGNED (op_type))
		{
		  if (*prec_stored > limb_prec)
		    *prec_stored = limb_prec;
		}
	      else if (*prec_stored < -limb_prec)
		*prec_stored = -limb_prec;
	    }
	}
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      tree var = create_tmp_var (atype);
      tree t1 = op;
      if (!useless_type_conversion_p (m_limb_type, op_type))
	t1 = add_cast (m_limb_type, t1);
      tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
		       NULL_TREE, NULL_TREE);
      gimple *g = gimple_build_assign (v, t1);
      insert_before (g);
      if (nelts > 1)
	{
	  tree lp = build_int_cst (unsigned_type_node, limb_prec);
	  g = gimple_build_assign (make_ssa_name (op_type),
				   RSHIFT_EXPR, op, lp);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  t2 = add_cast (m_limb_type, t2);
	  v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (v, t2);
	  insert_before (g);
	}
      tree ret = build_fold_addr_expr (var);
      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
	{
	  tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
	  g = gimple_build_assign (var, clobber);
	  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
	}
      m_loc = loc_save;
      return ret;
    }
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  gimple *g = SSA_NAME_DEF_STMT (op);
	  tree ret;
	  m_loc = gimple_location (g);
	  if (gimple_assign_load_p (g))
	    {
	      *prec = range_to_prec (op, NULL);
	      if (prec_stored)
		*prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
				? TYPE_PRECISION (TREE_TYPE (op))
				: -TYPE_PRECISION (TREE_TYPE (op)));
	      ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
	      ret = force_gimple_operand_gsi (&m_gsi, ret, true,
					      NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else if (gimple_code (g) == GIMPLE_NOP)
	    {
	      *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
	      if (prec_stored)
		*prec_stored = *prec;
	      tree var = create_tmp_var (m_limb_type);
	      TREE_ADDRESSABLE (var) = 1;
	      ret = build_fold_addr_expr (var);
	      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
		{
		  tree clobber = build_clobber (m_limb_type,
						CLOBBER_STORAGE_END);
		  g = gimple_build_assign (var, clobber);
		  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      gcc_assert (gimple_assign_cast_p (g));
	      tree rhs1 = gimple_assign_rhs1 (g);
	      bitint_prec_kind kind = bitint_prec_small;
	      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
	      if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
		kind = bitint_precision_kind (TREE_TYPE (rhs1));
	      if (kind >= bitint_prec_large)
		{
		  tree lhs_type = TREE_TYPE (op);
		  tree rhs_type = TREE_TYPE (rhs1);
		  int prec_stored_val = 0;
		  ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
		  if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
		    {
		      if (TYPE_UNSIGNED (lhs_type)
			  && !TYPE_UNSIGNED (rhs_type))
			gcc_assert (*prec >= 0 || prec_stored == NULL);
		    }
		  else
		    {
		      if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
			;
		      else if (TYPE_UNSIGNED (lhs_type))
			{
			  gcc_assert (*prec > 0
				      || prec_stored_val > 0
				      || (-prec_stored_val
					  >= TYPE_PRECISION (lhs_type)));
			  *prec = TYPE_PRECISION (lhs_type);
			}
		      else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
			;
		      else
			*prec = -TYPE_PRECISION (lhs_type);
		    }
		}
	      else
		{
		  op = rhs1;
		  stmt = g;
		  goto do_int;
		}
	    }
	  m_loc = loc_save;
	  return ret;
	}
      else
	{
	  int p = var_to_partition (m_map, op);
	  gcc_assert (m_vars[p] != NULL_TREE);
	  *prec = range_to_prec (op, stmt);
	  if (prec_stored)
	    *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
			    ? TYPE_PRECISION (TREE_TYPE (op))
			    : -TYPE_PRECISION (TREE_TYPE (op)));
	  return build_fold_addr_expr (m_vars[p]);
	}
    case INTEGER_CST:
      unsigned int min_prec, mp;
      tree type;
      w = wi::to_wide (op);
      if (tree_int_cst_sgn (op) >= 0)
	{
	  min_prec = wi::min_precision (w, UNSIGNED);
	  *prec = MAX (min_prec, 1);
	}
      else
	{
	  min_prec = wi::min_precision (w, SIGNED);
	  *prec = MIN ((int) -min_prec, -2);
	}
      mp = CEIL (min_prec, limb_prec) * limb_prec;
      if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
	type = TREE_TYPE (op);
      else
	type = build_bitint_type (mp, 1);
      if (TREE_CODE (type) != BITINT_TYPE
	  || bitint_precision_kind (type) == bitint_prec_small)
	{
	  if (TYPE_PRECISION (type) <= limb_prec)
	    type = m_limb_type;
	  else
	    /* This case is for targets which e.g. have 64-bit
	       limb but categorize up to 128-bits _BitInts as
	       small.  We could use type of m_limb_type[2] and
	       similar instead to save space.  */
	    type = build_bitint_type (mid_min_prec, 1);
	}
      if (prec_stored)
	{
	  if (tree_int_cst_sgn (op) >= 0)
	    *prec_stored = MAX (TYPE_PRECISION (type), 1);
	  else
	    *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
	}
      op = tree_output_constant_def (fold_convert (type, op));
      return build_fold_addr_expr (op);
    default:
      gcc_unreachable ();
    }
}
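/* Illustrative use of the returned convention: lower_muldiv_stmt below
   hands the returned address together with *PREC to the runtime helpers,
   so e.g. an unsigned operand known to fit in 37 bits is described as
   the pair (addr, 37) and only CEIL (37, limb_prec) limbs of it need to
   be read by the library routine.  */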
/* Helper function, create a loop before the current location,
   start with sizetype INIT value from the preheader edge.  Return
   a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
   from the latch edge.  */
tree
bitint_large_huge::create_loop (tree init, tree *idx_next)
{
  if (!gsi_end_p (m_gsi))
    gsi_prev (&m_gsi);
  else
    m_gsi = gsi_last_bb (gsi_bb (m_gsi));
  edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
  e3->probability = profile_probability::very_unlikely ();
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = e3->probability.invert ();
  tree idx = make_ssa_name (sizetype);
  gphi *phi = create_phi_node (idx, e1->dest);
  add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
  *idx_next = make_ssa_name (sizetype);
  add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
  m_gsi = gsi_after_labels (e1->dest);
  m_bb = e1->dest;
  m_preheader_bb = e1->src;
  class loop *loop = alloc_loop ();
  loop->header = e1->dest;
  add_loop (loop, e1->src->loop_father);
  return idx;
}
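/* CFG shape created above, with INIT flowing in from the preheader edge
   and *IDX_NEXT from the latch back edge (sketch):

     preheader (e1->src)
	 |
	 v
     header == latch (e1->dest):  idx = PHI <init (e1), idx_next (e3)>
	 |      \__ EDGE_TRUE_VALUE (e3, very unlikely) loops back
	 v  EDGE_FALSE_VALUE (e2)
     exit (e2->dest)

   Callers fill in the loop body at m_gsi and emit the real exit
   condition themselves; the edge probabilities are placeholders.  */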
/* Lower large/huge _BitInt statement mergeable or similar STMT which can be
   lowered using iteration from the least significant limb up to the most
   significant limb.  For large _BitInt it is emitted as straight line code
   before current location, for huge _BitInt as a loop handling two limbs
   at once, followed by handling up to two limbs in straight line code (at
   most one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
   comparisons, in that case CMP_CODE should be the comparison code and
   CMP_OP1/CMP_OP2 the comparison operands.  */
tree
bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
					 tree cmp_op1, tree cmp_op2)
{
  bool eq_p = cmp_code != ERROR_MARK;
  tree type;
  if (eq_p)
    type = TREE_TYPE (cmp_op1);
  else
    type = TREE_TYPE (gimple_assign_lhs (stmt));
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  tree lhs = gimple_get_lhs (stmt);
  tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[p] != NULL_TREE);
      m_lhs = lhs = m_vars[p];
    }
  unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  bool sext = false;
  tree ext = NULL_TREE, store_operand = NULL_TREE;
  bool eh = false;
  basic_block eh_pad = NULL;
  tree nlhs = NULL_TREE;
  unsigned HOST_WIDE_INT bo_idx = 0;
  unsigned HOST_WIDE_INT bo_bit = 0;
  tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
  if (gimple_store_p (stmt))
    {
      store_operand = gimple_assign_rhs1 (stmt);
      eh = stmt_ends_bb_p (stmt);
      if (eh)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		eh_pad = e->dest;
		break;
	      }
	}
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	{
	  tree fld = TREE_OPERAND (lhs, 1);
	  gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
	  poly_int64 bitoffset;
	  poly_uint64 field_offset, repr_offset;
	  if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	    nlhs = lhs;
	  else
	    {
	      bool var_field_off = false;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		{
		  bitoffset = 0;
		  var_field_off = true;
		}
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
			     TREE_OPERAND (lhs, 0), repr,
			     var_field_off
			     ? TREE_OPERAND (lhs, 2) : NULL_TREE);
	      HOST_WIDE_INT bo = bitoffset.to_constant ();
	      bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
	      bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
	    }
	}
    }
  if ((store_operand
       && TREE_CODE (store_operand) == SSA_NAME
       && (m_names == NULL
	   || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
       && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
      || gimple_assign_cast_p (stmt))
    {
      rhs1 = gimple_assign_rhs1 (store_operand
				 ? SSA_NAME_DEF_STMT (store_operand)
				 : stmt);
      /* Optimize mergeable ops ending with widening cast to _BitInt
	 (or followed by store).  We can lower just the limbs of the
	 cast operand and widen afterwards.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
	  && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	  && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
		    limb_prec) < CEIL (prec, limb_prec)
	      || (kind == bitint_prec_huge
		  && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
	{
	  store_operand = rhs1;
	  prec = TYPE_PRECISION (TREE_TYPE (rhs1));
	  kind = bitint_precision_kind (TREE_TYPE (rhs1));
	  if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	    sext = true;
	}
    }
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL (prec, limb_prec);
  else
    {
      rem = (prec % (2 * limb_prec));
      end = (prec - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec);
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  basic_block edge_bb = NULL;
  if (eq_p)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      if (kind == bitint_prec_large)
	m_gsi = gsi_end_bb (edge_bb);
    }
  else
    m_after_stmt = stmt;
  if (kind != bitint_prec_large)
    m_upwards_2limb = end;
  m_upwards = true;

  bool separate_ext
    = (prec != (unsigned) TYPE_PRECISION (type)
       && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
	   > CEIL (prec, limb_prec)));

  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (end + (i > 2));
      if (eq_p)
	{
	  rhs1 = handle_operand (cmp_op1, idx);
	  tree rhs2 = handle_operand (cmp_op2, idx);
	  g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e1 = split_block (gsi_bb (m_gsi), g);
	  e1->flags = EDGE_FALSE_VALUE;
	  edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	  e1->probability = profile_probability::unlikely ();
	  e2->probability = e1->probability.invert ();
	  if (i == 0)
	    set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	  m_gsi = gsi_after_labels (e1->dest);
	}
      else
	{
	  if (store_operand)
	    rhs1 = handle_operand (store_operand, idx);
	  else
	    rhs1 = handle_stmt (stmt, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (sext && i == cnt - 1)
	    ext = rhs1;
	  tree nidx = idx;
	  if (bo_idx)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  basic_block new_bb = NULL;
	  /* Handle stores into bit-fields.  */
	  if (bo_bit)
	    {
	      if (i == 0)
		{
		  edge e2 = NULL;
		  if (kind != bitint_prec_large)
		    {
		      prepare_data_in_out (build_zero_cst (m_limb_type),
					   idx, &bf_next);
		      bf_next = m_data.pop ();
		      bf_cur = m_data.pop ();
		      g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
					     NULL_TREE, NULL_TREE);
		      edge edge_true;
		      if_then_else (g, profile_probability::unlikely (),
				    edge_true, e2);
		      new_bb = e2->dest;
		    }
		  tree ftype
		    = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
		  tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
				     bitsize_int (limb_prec - bo_bit),
				     bitsize_int (bo_idx * limb_prec + bo_bit));
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  insert_before (g);
		  if (eh)
		    {
		      maybe_duplicate_eh_stmt (g, stmt);
		      if (eh_pad)
			{
			  edge e = split_block (gsi_bb (m_gsi), g);
			  m_gsi = gsi_after_labels (e->dest);
			  make_edge (e->src, eh_pad, EDGE_EH)->probability
			    = profile_probability::very_unlikely ();
			}
		    }
		  if (kind == bitint_prec_large)
		    {
		      bf_cur = rhs1;
		      done = true;
		    }
		  else if (e2)
		    m_gsi = gsi_after_labels (e2->src);
		}
	      if (!done)
		{
		  tree t1 = make_ssa_name (m_limb_type);
		  tree t2 = make_ssa_name (m_limb_type);
		  tree t3 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
					   build_int_cst (unsigned_type_node,
							  limb_prec - bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
					   build_int_cst (unsigned_type_node,
							  bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		  bf_cur = rhs1;
		  if (bf_next && i == 1)
		    {
		      g = gimple_build_assign (bf_next, bf_cur);
		      insert_before (g);
		    }
		  rhs1 = t3;
		}
	    }
	  if (!done)
	    {
	      /* Handle bit-field access to partial last limb if needed.  */
	      if (nlhs
		  && i == cnt - 1
		  && tree_fits_uhwi_p (idx))
		{
		  unsigned int tprec = TYPE_PRECISION (type);
		  unsigned int rprec = tprec % limb_prec;
		  if (rprec + bo_bit < (unsigned) limb_prec)
		    {
		      tree ftype
			= build_nonstandard_integer_type (rprec + bo_bit, 1);
		      tree bfr = build3 (BIT_FIELD_REF, ftype,
					 unshare_expr (nlhs),
					 bitsize_int (rprec + bo_bit),
					 bitsize_int ((bo_idx
						       + tprec / limb_prec)
						      * limb_prec));
		      tree t = add_cast (ftype, rhs1);
		      g = gimple_build_assign (bfr, t);
		      done = true;
		      bf_cur = NULL_TREE;
		    }
		  else if (rprec + bo_bit == (unsigned) limb_prec)
		    bf_cur = NULL_TREE;
		}
	      /* Otherwise, stores to any other lhs.  */
	      if (!done)
		{
		  tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
					nidx, true);
		  g = gimple_build_assign (l, rhs1);
		}
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_pad)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      m_gsi = gsi_after_labels (e->dest);
		      make_edge (e->src, eh_pad, EDGE_EH)->probability
			= profile_probability::very_unlikely ();
		    }
		}
	      if (new_bb)
		m_gsi = gsi_after_labels (new_bb);
	    }
	}
      m_first = false;
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (2));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      if (eq_p)
		m_gsi = gsi_after_labels (edge_bb);
	      else
		m_gsi = gsi_for_stmt (stmt);
	    }
	}
    }

  if (separate_ext)
    {
      if (sext)
	{
	  ext = add_cast (signed_type_for (m_limb_type), ext);
	  tree lpm1 = build_int_cst (unsigned_type_node,
				     limb_prec - 1);
	  tree n = make_ssa_name (TREE_TYPE (ext));
	  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	  insert_before (g);
	  ext = add_cast (m_limb_type, n);
	}
      else
	ext = build_zero_cst (m_limb_type);
      kind = bitint_precision_kind (type);
      unsigned start = CEIL (prec, limb_prec);
      prec = TYPE_PRECISION (type);
      idx = idx_first = idx_next = NULL_TREE;
      if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
	kind = bitint_prec_large;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec) - start;
      else
	{
	  rem = prec % limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = (bo_bit != 0) + 1 + (rem != 0);
	}
      for (unsigned i = 0; i < cnt; i++)
	{
	  if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
	    idx = size_int (start + i);
	  else if (i == cnt - 1 && (rem != 0))
	    idx = size_int (end);
	  else if (i == (bo_bit != 0))
	    idx = create_loop (size_int (start + i), &idx_next);
	  rhs1 = ext;
	  if (bf_cur != NULL_TREE && bf_cur != ext)
	    {
	      tree t1 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_bit));
	      insert_before (g);
	      if (integer_zerop (ext))
		rhs1 = t1;
	      else
		{
		  tree t2 = make_ssa_name (m_limb_type);
		  rhs1 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
					   build_int_cst (unsigned_type_node,
							  bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		}
	      bf_cur = ext;
	    }
	  tree nidx = idx;
	  if (bo_idx)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (nlhs && i == cnt - 1)
	    {
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = tprec % limb_prec;
	      if (rprec + bo_bit < (unsigned) limb_prec)
		{
		  tree ftype
		    = build_nonstandard_integer_type (rprec + bo_bit, 1);
		  tree bfr = build3 (BIT_FIELD_REF, ftype,
				     unshare_expr (nlhs),
				     bitsize_int (rprec + bo_bit),
				     bitsize_int ((bo_idx + tprec / limb_prec)
						  * limb_prec));
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  done = true;
		  bf_cur = NULL_TREE;
		}
	      else if (rprec + bo_bit == (unsigned) limb_prec)
		bf_cur = NULL_TREE;
	    }
	  /* Otherwise, stores to any other lhs.  */
	  if (!done)
	    {
	      tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
	      g = gimple_build_assign (l, rhs1);
	    }
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_pad)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  make_edge (e->src, eh_pad, EDGE_EH)->probability
		    = profile_probability::very_unlikely ();
		}
	    }
	  if (kind == bitint_prec_huge && i == (bo_bit != 0))
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (stmt);
	    }
	}
    }
  if (bf_cur != NULL_TREE)
    {
      unsigned int tprec = TYPE_PRECISION (type);
      unsigned int rprec = tprec % limb_prec;
      tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
      tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
			 bitsize_int (rprec + bo_bit),
			 bitsize_int ((bo_idx + tprec / limb_prec)
				      * limb_prec));
      rhs1 = bf_cur;
      if (bf_cur != ext)
	{
	  rhs1 = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
				   build_int_cst (unsigned_type_node,
						  limb_prec - bo_bit));
	  insert_before (g);
	}
      rhs1 = add_cast (ftype, rhs1);
      g = gimple_build_assign (bfr, rhs1);
      insert_before (g);
      if (eh)
	{
	  maybe_duplicate_eh_stmt (g, stmt);
	  if (eh_pad)
	    {
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      make_edge (e->src, eh_pad, EDGE_EH)->probability
		= profile_probability::very_unlikely ();
	    }
	}
    }

  if (gimple_store_p (stmt))
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (gimple_vdef (stmt));
      gsi_remove (&m_gsi, true);
    }
  if (eq_p)
    {
      lhs = make_ssa_name (boolean_type_node);
      basic_block bb = gimple_bb (stmt);
      gphi *phi = create_phi_node (lhs, bb);
      edge e = find_edge (gsi_bb (m_gsi), bb);
      unsigned int n = EDGE_COUNT (bb->preds);
      for (unsigned int i = 0; i < n; i++)
	{
	  edge e2 = EDGE_PRED (bb, i);
	  add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
		       e2, UNKNOWN_LOCATION);
	}
      cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      return lhs;
    }
  return NULL_TREE;
}
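/* Rough shape of the lowering for a huge mergeable statement, in C-like
   pseudocode (illustrative only; the partial tail limbs, sign extension
   and bit-field store paths above add more cases):
     for (idx = 0; idx != end; idx += 2)
       {
	 lhs[idx] = ...;	// limb idx
	 lhs[idx + 1] = ...;	// limb idx + 1
       }
     ... up to one full and one partial limb in straight line code ...
   For EQ_EXPR/NE_EXPR the per-limb values instead feed a conditional
   jump to the final block as soon as two limbs compare unequal.  */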
/* Handle a large/huge _BitInt comparison statement STMT other than
   EQ_EXPR/NE_EXPR.  CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
   lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR comparisons are
   lowered by iteration from the most significant limb downwards to
   the least significant one, for large _BitInt in straight line code,
   otherwise with most significant limb handled in
   straight line code followed by a loop handling one limb at a time.
   Comparisons with unsigned huge _BitInt with precisions which are
   multiples of limb precision can use just the loop and don't need to
   handle most significant limb before the loop.  The loop or straight
   line code jumps to final basic block if a particular pair of limbs
   is different.  */
tree
bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
					  tree cmp_op1, tree cmp_op2)
{
  tree type = TREE_TYPE (cmp_op1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  if (!TYPE_UNSIGNED (type)
      && integer_zerop (cmp_op2)
      && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
    {
      unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
      tree idx = size_int (end);
      m_data_cnt = 0;
      tree rhs1 = handle_operand (cmp_op1, idx);
      if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	}
      tree lhs = make_ssa_name (boolean_type_node);
      g = gimple_build_assign (lhs, cmp_code, rhs1,
			       build_zero_cst (TREE_TYPE (rhs1)));
      insert_before (g);
      cmp_code = NE_EXPR;
      return lhs;
    }

  unsigned cnt, rem = 0, end = 0;
  tree idx = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
  else
    {
      rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
      if (rem == 0 && !TYPE_UNSIGNED (type))
	rem = limb_prec;
      end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
      cnt = 1 + (rem != 0);
    }

  basic_block edge_bb = NULL;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_prev (&gsi);
  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
  edge_bb = e->src;
  m_gsi = gsi_end_bb (edge_bb);

  edge *edges = XALLOCAVEC (edge, cnt * 2);
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (cnt - i - 1);
      else if (i == cnt - 1)
	idx = create_loop (size_int (end - 1), &idx_next);
      else
	idx = size_int (end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      tree rhs2 = handle_operand (cmp_op2, idx);
      if (i == 0
	  && !TYPE_UNSIGNED (type)
	  && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	  rhs2 = add_cast (stype, rhs2);
	}
      g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      edge e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::likely ();
      e2->probability = e1->probability.invert ();
      if (i == 0)
	set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i] = e2;
      g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::unlikely ();
      e2->probability = e1->probability.invert ();
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i + 1] = e2;
      m_first = false;
      if (kind == bitint_prec_huge && i == cnt - 1)
	{
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  insert_before (g);
	  g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gsi_bb (m_gsi),
					       &true_edge, &false_edge);
	  m_gsi = gsi_after_labels (false_edge->dest);
	}
    }

  tree lhs = make_ssa_name (boolean_type_node);
  basic_block bb = gimple_bb (stmt);
  gphi *phi = create_phi_node (lhs, bb);
  for (unsigned int i = 0; i < cnt * 2; i++)
    {
      tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
		  ^ (i & 1)) ? boolean_true_node : boolean_false_node;
      add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
    }
  add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
		    ? boolean_true_node : boolean_false_node,
	       find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
  cmp_code = NE_EXPR;
  return lhs;
}
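/* C-like sketch of the emitted comparison (illustrative only):
     for (idx = high; ; --idx)
       {
	 if (op1[idx] > op2[idx]) goto done;	// answer known
	 if (op1[idx] < op2[idx]) goto done;	// answer known
	 if (idx == 0) break;			// all limbs equal
       }
   with the most significant limb compared signed for signed types and
   a final PHI selecting the boolean answer per incoming edge.  */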
/* Lower large/huge _BitInt left and right shift except for left
   shift by < limb_prec constant.  */
void
bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gimple *final_stmt = gsi_stmt (m_gsi);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type);
  tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
  gimple *g;
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  /* Preparation code common for both left and right shifts.
     unsigned n1 = n % limb_prec;
     size_t n2 = n / limb_prec;
     size_t n3 = n1 != 0;
     unsigned n4 = (limb_prec - n1) % limb_prec;
     (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)).  */
  if (TREE_CODE (n) == INTEGER_CST)
    {
      tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
      n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
      n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
      n3 = size_int (!integer_zerop (n1));
      n4 = int_const_binop (TRUNC_MOD_EXPR,
			    int_const_binop (MINUS_EXPR, lp, n1), lp);
    }
  else
    {
      n1 = make_ssa_name (TREE_TYPE (n));
      n2 = make_ssa_name (sizetype);
      n3 = make_ssa_name (sizetype);
      n4 = make_ssa_name (TREE_TYPE (n));
      if (pow2p_hwi (limb_prec))
	{
	  tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
	  g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   RSHIFT_EXPR, n,
				   build_int_cst (TREE_TYPE (n),
						  exact_log2 (limb_prec)));
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   NEGATE_EXPR, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
				   lpm1);
	  insert_before (g);
	}
      else
	{
	  tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
	  g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   TRUNC_DIV_EXPR, n, lp);
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   MINUS_EXPR, lp, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
				   lp);
	  insert_before (g);
	}
      g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
			       build_zero_cst (TREE_TYPE (n)));
      insert_before (g);
      g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
      insert_before (g);
    }
  tree p = build_int_cst (sizetype,
			  prec / limb_prec - (prec % limb_prec == 0));
  if (rhs_code == RSHIFT_EXPR)
    {
      /* Lower
	   dst = src >> n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t idx;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   int signed_p = (typeof (src) -1) < 0;
	   for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
				 ? p : p - n3); ++idx)
	     dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
	   limb_type ext;
	   if (prec % limb_prec == 0)
	     ext = src[p];
	   else if (signed_p)
	     ext = ((signed limb_type) (src[p] << (limb_prec
						   - (prec % limb_prec))))
		   >> (limb_prec - (prec % limb_prec));
	   else
	     ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
	   if (!signed_p && (prec % limb_prec == 0))
	     ;
	   else if (idx < prec / limb_prec)
	     {
	       dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
	       ++idx;
	     }
	   idx -= n2;
	   if (signed_p)
	     {
	       dst[idx] = ((signed limb_type) ext) >> n1;
	       ext = ((signed limb_type) ext) >> (limb_prec - 1);
	     }
	   else
	     {
	       dst[idx] = ext >> n1;
	       ext = 0;
	     }
	   for (++idx; idx <= p; ++idx)
	     dst[idx] = ext;  */
      tree pmn3;
      if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	pmn3 = p;
      else if (TREE_CODE (n3) == INTEGER_CST)
	pmn3 = int_const_binop (MINUS_EXPR, p, n3);
      else
	{
	  pmn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
	  insert_before (g);
	}
      g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (n2, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxpn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idx);
      m_first = false;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       RSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxpn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      insert_before (g);
      g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
      insert_before (g);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      m_data_cnt = 0;
      tree ms = handle_operand (rhs1, p);
      tree ext = ms;
      if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
	ext = add_cast (m_limb_type, ms);
      if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	  && !integer_zerop (n3))
	{
	  g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idx);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, ext, n4);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  insert_before (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      tree sext = ext;
      if (!TYPE_UNSIGNED (type))
	sext = add_cast (signed_type_for (m_limb_type), ext);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
			       RSHIFT_EXPR, sext, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!TYPE_UNSIGNED (type))
	{
	  t1 = add_cast (m_limb_type, t1);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
				   RSHIFT_EXPR, sext,
				   build_int_cst (TREE_TYPE (n),
						  limb_prec - 1));
	  insert_before (g);
	  ext = add_cast (m_limb_type, gimple_assign_lhs (g));
	}
      else
	ext = build_zero_cst (m_limb_type);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
			       size_one_node);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, ext);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      insert_before (g);
      g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
      insert_before (g);
    }
  else
    {
      /* Lower
	   dst = src << n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t idx;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
	     dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
	   if (n1)
	     {
	       dst[idx] = src[idx - n2] << n1;
	       --idx;
	     }
	   for (; (ssize_t) idx >= 0; --idx)
	     dst[idx] = 0;  */
      tree n2pn3;
      if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
	n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
      else
	{
	  n2pn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
	  insert_before (g);
	}
      /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
	 idx even to access the most significant partial limb.  */
      m_var_msb = true;
      if (integer_zerop (n3))
	/* For n3 == 0 p >= n2 + n3 is always true for all valid shift
	   counts.  Emit if (true) condition that can be optimized later.  */
	g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
			       NULL_TREE, NULL_TREE);
      else
	g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (p, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxmn2mn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idxmn2);
      m_first = false;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxmn2mn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      insert_before (g);
      tree sn2pn3 = add_cast (ssizetype, n2pn3);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
			     NULL_TREE, NULL_TREE);
      insert_before (g);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      if (!integer_zerop (n3))
	{
	  g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  insert_before (g);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idxmn2);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, build_zero_cst (m_limb_type));
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      insert_before (g);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      insert_before (g);
    }
}
/* Lower large/huge _BitInt multiplication or division.  */
void
bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type), prec1, prec2;
  rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
  rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      lhs = build_fold_addr_expr (obj);
    }
  else
    {
      lhs = build_fold_addr_expr (obj);
      lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
				      NULL_TREE, true, GSI_SAME_STMT);
    }
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  switch (rhs_code)
    {
    case MULT_EXPR:
      g = gimple_build_call_internal (IFN_MULBITINT, 6,
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      insert_before (g);
      break;
    case TRUNC_DIV_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
				      lhs, build_int_cst (sitype, prec),
				      null_pointer_node,
				      build_int_cst (sitype, 0),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      insert_before (g);
      break;
    case TRUNC_MOD_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
				      build_int_cst (sitype, 0),
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      insert_before (g);
      break;
    default:
      gcc_unreachable ();
    }
  if (stmt_ends_bb_p (stmt))
    {
      maybe_duplicate_eh_stmt (g, stmt);
      edge e1;
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (e1, ei, bb->succs)
	if (e1->flags & EDGE_EH)
	  break;
      if (e1)
	{
	  edge e2 = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e2->dest);
	  make_edge (e2->src, e1->dest, EDGE_EH)->probability
	    = profile_probability::very_unlikely ();
	}
    }
}
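/* Illustrative result of this lowering (a sketch, not the exact IL): a
   _BitInt(512) multiplication becomes a single call like
     .MULBITINT (&obj, 512, &op1, prec1, &op2, prec2);
   i.e. result pointer plus bit precision, then both operands by address
   with the minimized precisions computed by handle_operand_addr; the
   division/modulo variant additionally takes quotient and remainder
   pointers, either of which may be null.  */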
/* Lower large/huge _BitInt conversion to/from floating point.  */
void
bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  if (rhs_code == FIX_TRUNC_EXPR)
    {
      int prec = TYPE_PRECISION (TREE_TYPE (lhs));
      if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
	prec = -prec;
      if (obj == NULL_TREE)
	{
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  obj = m_vars[part];
	  lhs = build_fold_addr_expr (obj);
	}
      else
	{
	  lhs = build_fold_addr_expr (obj);
	  lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
					  NULL_TREE, true, GSI_SAME_STMT);
	}
      scalar_mode from_mode
	= as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
      /* IEEE single is a full superset of both IEEE half and
	 bfloat formats, convert to float first and then to _BitInt
	 to avoid the need of another 2 library routines.  */
      if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
	   || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
	  && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
	{
	  tree type = lang_hooks.types.type_for_mode (SFmode, 0);
	  if (type)
	    rhs1 = add_cast (type, rhs1);
	}
      g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
				      lhs, build_int_cst (sitype, prec),
				      rhs1);
      insert_before (g);
    }
  else
    {
      int prec;
      rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
      g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
				      rhs1, build_int_cst (sitype, prec));
      gimple_call_set_lhs (g, lhs);
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      gsi_replace (&m_gsi, g, true);
    }
}
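/* Example of the half/bfloat shortcut above (sketch; the exact IL
   spelling differs): a conversion like
     _BitInt(192) b = (_BitInt(192)) f16var;
   is lowered as a cast of f16var to float followed by
     .FLOATTOBITINT (&obj, 192, (float) f16var);
   so only the float/double/long double entry points are needed in the
   runtime library.  */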
/* Helper method for lower_addsub_overflow and lower_mul_overflow.
   If check_zero is true, caller wants to check if all bits in [start, end)
   are zero, otherwise if bits in [start, end) are either all zero or
   all ones.  L is the limb with index LIMB, START and END are measured
   in bits.  */
tree
bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
						unsigned int end, tree l,
						unsigned int limb,
						bool check_zero)
{
  unsigned startlimb = start / limb_prec;
  unsigned endlimb = (end - 1) / limb_prec;
  gimple *g;

  if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
    return l;
  if (startlimb == endlimb && limb == startlimb)
    {
      if (check_zero)
	{
	  wide_int w = wi::shifted_mask (start % limb_prec,
					 end - start, false, limb_prec);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_AND_EXPR, l,
				   wide_int_to_tree (m_limb_type, w));
	  insert_before (g);
	  return gimple_assign_lhs (g);
	}
      unsigned int shift = start % limb_prec;
      if ((end % limb_prec) != 0)
	{
	  unsigned int lshift = (-end) % limb_prec;
	  shift += lshift;
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, l,
				   build_int_cst (unsigned_type_node,
						  lshift));
	  insert_before (g);
	  l = gimple_assign_lhs (g);
	}
      l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      return add_cast (m_limb_type, gimple_assign_lhs (g));
    }
  else if (limb == startlimb)
    {
      if ((start % limb_prec) == 0)
	return l;
      if (!check_zero)
	l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node,
					      start % limb_prec));
      insert_before (g);
      l = gimple_assign_lhs (g);
      if (!check_zero)
	l = add_cast (m_limb_type, l);
      return l;
    }
  else if (limb == endlimb)
    {
      if ((end % limb_prec) == 0)
	return l;
      if (check_zero)
	{
	  wide_int w = wi::mask (end % limb_prec, false, limb_prec);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_AND_EXPR, l,
				   wide_int_to_tree (m_limb_type, w));
	  insert_before (g);
	  return gimple_assign_lhs (g);
	}
      unsigned int shift = (-end) % limb_prec;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      return add_cast (m_limb_type, gimple_assign_lhs (g));
    }
  return l;
}
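/* Worked example, assuming 64-bit limbs: for [start, end) = [31, 34)
   both end points fall into limb 0 (startlimb == endlimb == 0).  With
   check_zero the limb is simply masked with 0x7 << 31; otherwise it is
   shifted left by (-34) % 64 = 30, cast to signed and shifted right
   arithmetically by 30 + 31 = 61, so the three extracted bits can be
   compared against the all-zeros/all-ones patterns.  */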
/* Helper method for lower_addsub_overflow and lower_mul_overflow.  Store
   result including overflow flag into the right locations.  */
void
bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
					  tree ovf, tree lhs, tree orig_obj,
					  gimple *stmt, tree_code code)
{
  gimple *g;

  if (obj == NULL_TREE
      && (TREE_CODE (type) != BITINT_TYPE
	  || bitint_precision_kind (type) < bitint_prec_large))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
      tree lhs_type = type;
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) == bitint_prec_middle)
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
						   TYPE_UNSIGNED (type));
      tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
      g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
      insert_before (g);
      r1 = gimple_assign_lhs (g);
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
	  insert_before (g);
	  r2 = gimple_assign_lhs (g);
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != type)
	r1 = add_cast (type, r1);
      ovf = add_cast (lhs_type, ovf);
      if (lhs_type != type)
	ovf = add_cast (type, ovf);
      g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
      m_gsi = gsi_for_stmt (stmt);
      gsi_replace (&m_gsi, g, true);
    }
  else
    {
      unsigned HOST_WIDE_INT nelts = 0;
      tree atype = NULL_TREE;
      if (obj)
	{
	  nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
	  if (orig_obj == NULL_TREE)
	    nelts -= 2;
	  atype = build_array_type_nelts (m_limb_type, nelts);
	}
      if (var && obj)
	{
	  tree zero;
	  tree v1, v2;
	  if (orig_obj == NULL_TREE)
	    {
	      zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
	      v1 = build2 (MEM_REF, atype,
			   build_fold_addr_expr (unshare_expr (obj)), zero);
	    }
	  else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
	    v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
	  else
	    v1 = unshare_expr (obj);
	  zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
	  v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
	  g = gimple_build_assign (v1, v2);
	  insert_before (g);
	}
      if (orig_obj == NULL_TREE && obj)
	{
	  ovf = add_cast (m_limb_type, ovf);
	  tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
	  g = gimple_build_assign (l, ovf);
	  insert_before (g);
	  if (nelts > 1)
	    {
	      atype = build_array_type_nelts (m_limb_type, nelts - 1);
	      tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
					(nelts + 1) * m_limb_size);
	      tree v1 = build2 (MEM_REF, atype,
				build_fold_addr_expr (unshare_expr (obj)),
				off);
	      g = gimple_build_assign (v1, build_zero_cst (atype));
	      insert_before (g);
	    }
	}
      else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
	{
	  imm_use_iterator ui;
	  use_operand_p use_p;
	  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
	    {
	      g = USE_STMT (use_p);
	      if (!is_gimple_assign (g)
		  || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
		continue;
	      tree lhs2 = gimple_assign_lhs (g);
	      gimple *use_stmt;
	      single_imm_use (lhs2, &use_p, &use_stmt);
	      lhs2 = gimple_assign_lhs (use_stmt);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
		g = gimple_build_assign (lhs2, ovf);
	      else
		g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
	      gsi_replace (&gsi, g, true);
	      if (gsi_stmt (m_gsi) == use_stmt)
		m_gsi = gsi_for_stmt (g);
	      break;
	    }
	}
      else if (ovf != boolean_false_node)
	{
	  g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, profile_probability::very_unlikely (),
		   edge_true, edge_false);
	  tree zero = build_zero_cst (TREE_TYPE (lhs));
	  tree fn = ubsan_build_overflow_builtin (code, m_loc,
						  TREE_TYPE (lhs),
						  zero, zero, NULL);
	  force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
				    true, GSI_SAME_STMT);
	  m_gsi = gsi_after_labels (edge_true->dest);
	}
    }
  if (var)
    {
      tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
      g = gimple_build_assign (var, clobber);
      gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
    }
}
/* Helper function for lower_addsub_overflow and lower_mul_overflow.
   Given precisions of result TYPE (PREC), argument 0 precision PREC0,
   argument 1 precision PREC1 and minimum precision for the result
   PREC2, compute *START, *END, *CHECK_ZERO and return OVF.  */
static tree
arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
		int prec2, unsigned *start, unsigned *end, bool *check_zero)
{
  *start = 0;
  *end = 0;
  *check_zero = true;
  /* Ignore this special rule for subtraction, even if both
     prec0 >= 0 and prec1 >= 0, their subtraction can be negative
     in infinite precision.  */
  if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
    {
      /* Result in [0, prec2) is unsigned, if prec > prec2,
	 all bits above it will be zero.  */
      if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
	return boolean_false_node;
      else
	{
	  /* ovf if any of bits in [start, end) is non-zero.  */
	  *start = prec - !TYPE_UNSIGNED (type);
	  *end = prec2;
	}
    }
  else if (TYPE_UNSIGNED (type))
    {
      /* If result in [0, prec2) is signed and if prec > prec2,
	 all bits above it will be sign bit copies.  */
      if (prec >= prec2)
	{
	  /* ovf if bit prec - 1 is non-zero.  */
	  *start = prec - 1;
	  *end = prec;
	}
      else
	{
	  /* ovf if any of bits in [start, end) is non-zero.  */
	  *start = prec - 1;
	  *end = prec2;
	}
    }
  else if (prec >= prec2)
    return boolean_false_node;
  else
    {
      /* ovf if [start, end) bits aren't all zeros or all ones.  */
      *start = prec - 1;
      *end = prec2;
      *check_zero = false;
    }
  return NULL_TREE;
}
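/* Worked example: for a signed result type with prec == 32 and
   prec2 == 34 the last case applies, so *start = 31, *end = 34 and
   *check_zero = false: there is no overflow iff bits [31, 34) of the
   infinite precision result are all zeros or all ones, i.e. iff the
   value fits into the 32-bit signed result.  */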
/* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */
void
bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type);
  int prec0 = range_to_prec (arg0, stmt);
  int prec1 = range_to_prec (arg1, stmt);
  /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
     the minimum unsigned precision of any possible operation's
     result, otherwise it is minimum signed precision.

     If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
     if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
     if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
     if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
     PREC0  CODE  PREC1  RESULT           PREC2  SIGNED vs. UNSIGNED
	8    +	    8	 [0, 0x1fe]	    9	 UNSIGNED
	8    +	   10	 [0, 0x4fe]	   11	 UNSIGNED
       -8    +	   -8	 [-0x100, 0xfe]     9	 SIGNED
       -8    +	  -10	 [-0x280, 0x27e]   11	 SIGNED
	8    +	   -8	 [-0x80, 0x17e]    10	 SIGNED
	8    +	  -10	 [-0x200, 0x2fe]   11	 SIGNED
       10    +	   -8	 [-0x80, 0x47e]    12	 SIGNED
	8    -	    8	 [-0xff, 0xff]	    9	 SIGNED
	8    -	   10	 [-0x3ff, 0xff]    11	 SIGNED
       10    -	    8	 [-0xff, 0x3ff]    11	 SIGNED
       -8    -	   -8	 [-0xff, 0xff]	    9	 SIGNED
       -8    -	  -10	 [-0x27f, 0x27f]   11	 SIGNED
      -10    -	   -8	 [-0x27f, 0x27f]   11	 SIGNED
	8    -	   -8	 [-0x7f, 0x17f]    10	 SIGNED
	8    -	  -10	 [-0x1ff, 0x2ff]   11	 SIGNED
       10    -	   -8	 [-0x7f, 0x47f]    12	 SIGNED
       -8    -	    8	 [-0x17f, 0x7f]    10	 SIGNED
       -8    -	   10	 [-0x47f, 0x7f]    12	 SIGNED
      -10    -	    8	 [-0x2ff, 0x1ff]   11	 SIGNED  */
  int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  /* If operands are either both signed or both unsigned,
     we need just one additional bit.  */
  prec2 = (((prec0 < 0) == (prec1 < 0)
	    /* If one operand is signed and one unsigned and
	       the signed one has larger precision, we need
	       just one extra bit, otherwise two.  */
	    || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
		: (prec2 == -prec1 && prec2 != prec0)))
	   ? prec2 + 1 : prec2 + 2);
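  /* E.g. for PREC0 8 and PREC1 -10 the table above gives result range
     [-0x200, 0x2fe]: prec2 starts as MAX (8, 10) = 10, the operands have
     different signedness and the signed one is the wider, so a single
     extra bit suffices and prec2 becomes 11.  */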
  int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  prec3 = MAX (prec3, prec);
  tree var = NULL_TREE;
  tree orig_obj = obj;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    {
      unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }

  enum tree_code code;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_UBSAN_CHECK_ADD:
      code = PLUS_EXPR;
      break;
    case IFN_SUB_OVERFLOW:
    case IFN_UBSAN_CHECK_SUB:
      code = MINUS_EXPR;
      break;
    default:
      gcc_unreachable ();
    }
  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);

  unsigned startlimb, endlimb;
  if (ovf)
    {
      startlimb = ~0U;
      endlimb = ~0U;
    }
  else
    {
      startlimb = start / limb_prec;
      endlimb = (end - 1) / limb_prec;
    }

  int prec4 = ovf != NULL_TREE ? prec : prec3;
  bitint_prec_kind kind = bitint_precision_kind (prec4);
  unsigned cnt, rem = 0, fin = 0;
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  bool last_ovf = (ovf == NULL_TREE
		   && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
  if (kind != bitint_prec_huge)
    cnt = CEIL (prec4, limb_prec) + last_ovf;
  else
    {
      rem = (prec4 % (2 * limb_prec));
      fin = (prec4 - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  if (kind == bitint_prec_huge)
    m_upwards_2limb = fin;
  m_upwards = true;

  tree type0 = TREE_TYPE (arg0);
  tree type1 = TREE_TYPE (arg1);
  int prec5 = prec3;
  if (bitint_precision_kind (prec5) < bitint_prec_large)
    prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
  if (TYPE_PRECISION (type0) < prec5)
    {
      type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
      if (TREE_CODE (arg0) == INTEGER_CST)
	arg0 = fold_convert (type0, arg0);
    }
  if (TYPE_PRECISION (type1) < prec5)
    {
      type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
      if (TREE_CODE (arg1) == INTEGER_CST)
	arg1 = fold_convert (type1, arg1);
    }
  unsigned int data_cnt = 0;
  tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
  tree cmp = build_zero_cst (m_limb_type);
  unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
  tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      tree rhs1, rhs2;
      if (kind != bitint_prec_huge)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (fin + (i > 2));
      if (!last_ovf || i < cnt - 1)
	{
	  if (type0 != TREE_TYPE (arg0))
	    rhs1 = handle_cast (type0, arg0, idx);
	  else
	    rhs1 = handle_operand (arg0, idx);
	  if (type1 != TREE_TYPE (arg1))
	    rhs2 = handle_cast (type1, arg1, idx);
	  else
	    rhs2 = handle_operand (arg1, idx);
	  if (i == 0)
	    data_cnt = m_data_cnt;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
	    rhs2 = add_cast (m_limb_type, rhs2);
	  last_rhs1 = rhs1;
	  last_rhs2 = rhs2;
	}
      else
	{
	  m_data_cnt = data_cnt;
	  if (TYPE_UNSIGNED (type0))
	    rhs1 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
	      if (TREE_CODE (rhs1) == INTEGER_CST)
		rhs1 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   RSHIFT_EXPR, rhs1, lpm1);
		  insert_before (g);
		  rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	  if (TYPE_UNSIGNED (type1))
	    rhs2 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
	      if (TREE_CODE (rhs2) == INTEGER_CST)
		rhs2 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
					   RSHIFT_EXPR, rhs2, lpm1);
		  insert_before (g);
		  rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	}
      tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
      if (ovf != boolean_false_node)
	{
	  if (tree_fits_uhwi_p (idx))
	    {
	      unsigned limb = tree_to_uhwi (idx);
	      if (limb >= startlimb && limb <= endlimb)
		{
		  tree l = arith_overflow_extract_bits (start, end, rhs,
							limb, check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (ovf == NULL_TREE && !check_zero)
		    {
		      cmp = l;
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (ovf == NULL_TREE)
		    ovf = this_ovf;
		  else
		    {
		      tree b = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      ovf = b;
		    }
		}
	    }
	  else if (startlimb < fin)
	    {
	      if (m_first && startlimb + 2 < fin)
		{
		  tree data_out;
		  ovf = prepare_data_in_out (boolean_false_node, idx,
					     &data_out);
		  ovf_out = m_data.pop ();
		  m_data.pop ();
		  if (!check_zero)
		    {
		      cmp = prepare_data_in_out (cmp, idx, &data_out);
		      cmp_out = m_data.pop ();
		      m_data.pop ();
		    }
		}
	      if (i != 0 || startlimb != fin - 1)
		{
		  tree_code cmp_code;
		  bool single_comparison
		    = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
		  if (!single_comparison)
		    {
		      cmp_code = GE_EXPR;
		      if (!check_zero && (start % limb_prec) == 0)
			single_comparison = true;
		    }
		  else if ((startlimb & 1) == (i & 1))
		    cmp_code = EQ_EXPR;
		  else
		    cmp_code = GT_EXPR;
		  g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
					 NULL_TREE, NULL_TREE);
		  edge edge_true_true, edge_true_false, edge_false;
		  gimple *g2 = NULL;
		  if (!single_comparison)
		    g2 = gimple_build_cond (NE_EXPR, idx,
					    size_int (startlimb), NULL_TREE,
					    NULL_TREE);
		  if_then_if_then_else (g, g2, profile_probability::likely (),
					profile_probability::likely (),
					edge_true_true, edge_true_false,
					edge_false);
		  unsigned tidx = startlimb + (cmp_code == GT_EXPR);
		  tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
							check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (cmp_code != GT_EXPR && !check_zero)
		    {
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (cmp_code == GT_EXPR)
		    {
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      this_ovf = t;
		    }
		  tree this_ovf2 = NULL_TREE;
		  if (!single_comparison)
		    {
		      m_gsi = gsi_after_labels (edge_true_true->src);
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
		      insert_before (g);
		      this_ovf2 = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
					       ovf, t);
		      insert_before (g);
		    }
		  m_gsi = gsi_after_labels (edge_true_false->dest);
		  tree t;
		  if (i == 1 && ovf_out)
		    t = ovf_out;
		  else
		    t = make_ssa_name (boolean_type_node);
		  gphi *phi = create_phi_node (t, edge_true_false->dest);
		  add_phi_arg (phi, this_ovf, edge_true_false,
			       UNKNOWN_LOCATION);
		  add_phi_arg (phi, ovf ? ovf
				    : boolean_false_node, edge_false,
			       UNKNOWN_LOCATION);
		  if (edge_true_true)
		    add_phi_arg (phi, this_ovf2, edge_true_true,
				 UNKNOWN_LOCATION);
		  ovf = t;
		  if (!check_zero && cmp_code != GT_EXPR)
		    {
		      t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
		      phi = create_phi_node (t, edge_true_false->dest);
		      add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
		      add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
		      if (edge_true_true)
			add_phi_arg (phi, cmp, edge_true_true,
				     UNKNOWN_LOCATION);
		      cmp = t;
		    }
		}
	    }
	}

      if (var || obj)
	{
	  if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
	    ;
	  else if (!tree_fits_uhwi_p (idx)
		   && (unsigned) prec < (fin - (i == 0)) * limb_prec)
	    {
	      bool single_comparison
		= (((unsigned) prec % limb_prec) == 0
		   || prec_limbs + 1 >= fin
		   || (prec_limbs & 1) == (i & 1));
	      g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
				     NULL_TREE, NULL_TREE);
	      gimple *g2 = NULL;
	      if (!single_comparison)
		g2 = gimple_build_cond (LT_EXPR, idx,
					size_int (prec_limbs - 1),
					NULL_TREE, NULL_TREE);
	      edge edge_true_true, edge_true_false, edge_false;
	      if_then_if_then_else (g, g2, profile_probability::likely (),
				    profile_probability::likely (),
				    edge_true_true
, edge_true_false
,
4167 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4168 g
= gimple_build_assign (l
, rhs
);
4170 if (!single_comparison
)
4172 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4173 l
= limb_access (type
, var
? var
: obj
,
4174 size_int (prec_limbs
- 1), true);
4175 if (!useless_type_conversion_p (TREE_TYPE (l
),
4177 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4178 g
= gimple_build_assign (l
, rhs
);
4181 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4185 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4186 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4187 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4188 g
= gimple_build_assign (l
, rhs
);
4193 if (kind
== bitint_prec_huge
&& i
<= 1)
4197 idx
= make_ssa_name (sizetype
);
4198 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4204 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4207 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4208 NULL_TREE
, NULL_TREE
);
4210 m_gsi
= gsi_for_stmt (final_stmt
);
4215 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
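
/* Editorial note on the overflow test emitted above (an illustrative
   sketch, not part of the original sources): when !check_zero, the bits
   extracted from a limb by arith_overflow_extract_bits must be either
   all zeros or all ones for the result to fit.  The generated
     tmp = l + 1;  this_ovf = tmp > 1;
   sequence is equivalent to
     this_ovf = (l != 0 && l != ~(limb_type) 0);
   because in the unsigned limb type only l == 0 and l == all-ones
   produce tmp <= 1, while every other value yields tmp >= 2.  */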
/* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */
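/* Illustrative example (editorial, assuming a 64-bit limb target):
     _BitInt(256) a, b, r;
     bool ovf = __builtin_mul_overflow (a, b, &r);
   is lowered so that the wide product is computed into a temporary limb
   array by the IFN_MULBITINT internal call built below, and the overflow
   flag is then derived by checking that all bits above the first 256 are
   merely a (sign or zero) extension of the truncated result.  */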
void
bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type), prec0, prec1;
  arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
  arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
  int prec2 = ((prec0 < 0 ? -prec0 : prec0)
	       + (prec1 < 0 ? -prec1 : prec1));
  if (prec0 == 1 || prec1 == 1)
    --prec2;
  tree var = NULL_TREE;
  tree orig_obj = obj;
  bool force_var = false;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  else if (obj != NULL_TREE && DECL_P (obj))
    for (int i = 0; i < 2; ++i)
      {
	tree arg = i ? arg1 : arg0;
	if (TREE_CODE (arg) == ADDR_EXPR)
	  arg = TREE_OPERAND (arg, 0);
	if (get_base_address (arg) == obj)
	  {
	    force_var = true;
	    break;
	  }
      }
  if (obj == NULL_TREE
      || force_var
      || TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large
      || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
    {
      unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  tree addr = build_fold_addr_expr (var ? var : obj);
  addr = force_gimple_operand_gsi (&m_gsi, addr, true,
				   NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g
    = gimple_build_call_internal (IFN_MULBITINT, 6,
				  addr, build_int_cst (sitype,
						       MAX (prec2, prec)),
				  arg0, build_int_cst (sitype, prec0),
				  arg1, build_int_cst (sitype, prec1));
  insert_before (g);

  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);
  if (ovf == NULL_TREE)
    {
      unsigned startlimb = start / limb_prec;
      unsigned endlimb = (end - 1) / limb_prec;
      unsigned cnt;
      bool use_loop = false;
      if (startlimb == endlimb)
	cnt = 1;
      else if (startlimb + 1 == endlimb)
	cnt = 2;
      else if ((end % limb_prec) == 0)
	{
	  cnt = 2;
	  use_loop = startlimb + 2 < endlimb;
	}
      else
	{
	  cnt = 3;
	  use_loop = startlimb + 2 < endlimb;
	}
      if (cnt == 1)
	{
	  tree l = limb_access (NULL_TREE, var ? var : obj,
				size_int (startlimb), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	  insert_before (g);
	  l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
					   startlimb, check_zero);
	  ovf = make_ssa_name (boolean_type_node);
	  if (check_zero)
	    g = gimple_build_assign (ovf, NE_EXPR, l,
				     build_zero_cst (m_limb_type));
	  else
	    {
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       PLUS_EXPR, l,
				       build_int_cst (m_limb_type, 1));
	      insert_before (g);
	      g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
				       build_int_cst (m_limb_type, 1));
	    }
	  insert_before (g);
	}
      else
	{
	  basic_block edge_bb = NULL;
	  gimple_stmt_iterator gsi = m_gsi;
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);

	  tree cmp = build_zero_cst (m_limb_type);
	  for (unsigned i = 0; i < cnt; i++)
	    {
	      tree idx, idx_next = NULL_TREE;
	      if (i == 0)
		idx = size_int (startlimb);
	      else if (i == 2)
		idx = size_int (endlimb);
	      else if (use_loop)
		idx = create_loop (size_int (startlimb + 1), &idx_next);
	      else
		idx = size_int (startlimb + 1);
	      tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
	      g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	      insert_before (g);
	      l = gimple_assign_lhs (g);
	      if (i == 0 || i == 2)
		l = arith_overflow_extract_bits (start, end, l,
						 tree_to_uhwi (idx),
						 check_zero);
	      if (i == 0 && !check_zero)
		{
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, l,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::likely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      if (i == 1 && use_loop)
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
					   size_one_node);
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next,
					 size_int (endlimb + (cnt == 1)),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge true_edge, false_edge;
		  extract_true_false_edges_from_block (gsi_bb (m_gsi),
						       &true_edge,
						       &false_edge);
		  m_gsi = gsi_after_labels (false_edge->dest);
		}
	    }

	  ovf = make_ssa_name (boolean_type_node);
	  basic_block bb = gimple_bb (final_stmt);
	  gphi *phi = create_phi_node (ovf, bb);
	  edge e1 = find_edge (gsi_bb (m_gsi), bb);
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      tree val = e == e1 ? boolean_false_node : boolean_true_node;
	      add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
	    }
	  m_gsi = gsi_for_stmt (final_stmt);
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
}
/* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
   .{ADD,SUB,MUL}_OVERFLOW call.  */
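/* Illustrative sketch (editorial): for
     _Complex _BitInt(192) c = .ADD_OVERFLOW (a, b);
     _BitInt(192) r = REALPART_EXPR <c>;
   no complex value is ever materialized; the limbs of the requested half
   of the underlying array variable are simply copied through an aliasing
   MEM_REF, with IMAGPART_EXPR reading at an offset of one half.  */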
void
bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  rhs1 = TREE_OPERAND (rhs1, 0);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  if (TREE_CODE (rhs1) == SSA_NAME
      && (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
    {
      lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
      return;
    }
  int part = var_to_partition (m_map, rhs1);
  gcc_assert (m_vars[part] != NULL_TREE);
  tree var = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
    obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
  tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
			    gimple_assign_rhs_code (stmt) == REALPART_EXPR
			    ? 0 : nelts * m_limb_size);
  tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
  gimple *g = gimple_build_assign (obj, v2);
  insert_before (g);
}
/* Lower COMPLEX_EXPR stmt.  */
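/* A COMPLEX_EXPR here just concatenates the two halves, so the lowering
   below is (editorial sketch) two block copies: the limb array of the
   real operand is stored at offset 0 of the destination variable and the
   limb array of the imaginary operand right after it, at
   TYPE_SIZE_UNIT (atype).  */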
void
bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  int part = var_to_partition (m_map, lhs);
  gcc_assert (m_vars[part] != NULL_TREE);
  lhs = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
  tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
  tree v2;
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs1);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs1))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs1);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  gimple *g = gimple_build_assign (v1, v2);
  insert_before (g);

  tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
			   TYPE_SIZE_UNIT (atype));
  v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
  if (TREE_CODE (rhs2) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs2);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs2))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs2);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  g = gimple_build_assign (v1, v2);
  insert_before (g);
}
/* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge
   _BitInt argument.  */
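/* Illustrative example (editorial, assuming 64-bit limbs):
     _BitInt(512) x;
     int c = __builtin_popcountg (x);
   becomes a loop over the 8 limbs which calls __builtin_popcountll on
   each limb and accumulates the partial counts, while the .CLZ/.CTZ
   style queries instead branch out of the limb loop at the first
   non-zero limb and post-process just that limb with the corresponding
   single-limb builtin.  */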
void
bitint_large_huge::lower_bit_query (gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = (gimple_call_num_args (stmt) == 2
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  tree type = TREE_TYPE (arg0);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  enum internal_fn ifn = gimple_call_internal_fn (stmt);
  enum built_in_function fcode = END_BUILTINS;
  gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
  switch (ifn)
    {
    case IFN_CLZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZL;
      else
	fcode = BUILT_IN_CLZLL;
      break;
    case IFN_FFS:
      /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
	 we don't add the addend at the end.  */
      arg1 = integer_zero_node;
      /* FALLTHRU */
    case IFN_CTZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZL;
      else
	fcode = BUILT_IN_CTZLL;
      break;
    case IFN_CLRSB:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSB;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSBL;
      else
	fcode = BUILT_IN_CLRSBLL;
      break;
    case IFN_PARITY:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITY;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITYL;
      else
	fcode = BUILT_IN_PARITYLL;
      break;
    case IFN_POPCOUNT:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNT;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNTL;
      else
	fcode = BUILT_IN_POPCOUNTLL;
      break;
    default:
      gcc_unreachable ();
    }
  tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
  unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  struct bq_details { edge e; tree val, addend; } *bqp = NULL;
  basic_block edge_bb = NULL;
  if (ifn != IFN_CLZ && ifn != IFN_CLRSB)
    {
      tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = (prec % (2 * limb_prec));
	  end = (prec - rem) / limb_prec;
	  cnt = 2 + CEIL (rem, limb_prec);
	  idx = idx_first = create_loop (size_zero_node, &idx_next);
	}
      if (ifn == IFN_CTZ || ifn == IFN_FFS)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  if (kind == bitint_prec_large)
	    m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	m_after_stmt = stmt;
      if (kind != bitint_prec_large)
	m_upwards_2limb = end;

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (i);
	  else if (i >= 2)
	    idx = size_int (end + (i > 2));

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  tree in = NULL_TREE, out = NULL_TREE, tem = NULL_TREE;
	  if (ifn == IFN_PARITY)
	    in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	  else if (ifn == IFN_FFS)
	    in = prepare_data_in_out (integer_one_node, idx, &out);
	  else
	    in = prepare_data_in_out (integer_zero_node, idx, &out);

	  if (ifn == IFN_CTZ || ifn == IFN_FFS)
	    {
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1, e2;
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	      if (tree_fits_uhwi_p (idx))
		bqp[i].addend
		  = build_int_cst (integer_type_node,
				   tree_to_uhwi (idx) * limb_prec
				   + (ifn == IFN_FFS));
	      else
		{
		  bqp[i].addend = in;
		  res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in,
					   build_int_cst (integer_type_node,
							  2 * limb_prec));
		  insert_before (g);
		  m_data[m_data_cnt] = res;
		}
	    }
	  else if (ifn == IFN_PARITY)
	    {
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
		  insert_before (g);
		}
	      else
		res = rhs1;
	      m_data[m_data_cnt] = res;
	    }
	  else /* IFN_POPCOUNT */
	    {
	      g = gimple_build_call (fndecl, 1, rhs1);
	      tem = make_ssa_name (integer_type_node);
	      gimple_call_set_lhs (g, tem);
	      insert_before (g);
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in, tem);
		  insert_before (g);
		}
	      else
		res = tem;
	      m_data[m_data_cnt] = res;
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i <= 1)
	    {
	      if (i == 0)
		{
		  idx = make_ssa_name (sizetype);
		  g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
					   size_one_node);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
					   size_int (2));
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  if (ifn == IFN_CTZ || ifn == IFN_FFS)
		    m_gsi = gsi_after_labels (edge_bb);
		  else
		    m_gsi = gsi_for_stmt (stmt);
		}
	    }
	}
    }
  else
    {
      tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
      int sub_one = 0;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = prec % limb_prec;
	  if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
	    rem = limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = 1 + (rem != 0);
	}
      if (ifn == IFN_CLRSB)
	sub_one = 1;

      if (ifn == IFN_CLZ)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
	}

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (cnt - i - 1);
	  else if (i == cnt - 1)
	    idx = create_loop (size_int (end - 1), &idx_next);
	  else
	    idx = size_int (end);

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  if (ifn == IFN_CLZ)
	    {
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	    }
	  else
	    {
	      if (i == 0)
		{
		  first = rhs1;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, rhs1,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   BIT_XOR_EXPR, rhs1, first);
		  insert_before (g);
		  tree stype = signed_type_for (m_limb_type);
		  g = gimple_build_cond (LT_EXPR,
					 add_cast (stype,
						   gimple_assign_lhs (g)),
					 build_zero_cst (stype),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      {
		edge e1 = split_block (gsi_bb (m_gsi), g);
		e1->flags = EDGE_FALSE_VALUE;
		edge e2 = make_edge (e1->src, gimple_bb (stmt),
				     EDGE_TRUE_VALUE);
		e1->probability = profile_probability::unlikely ();
		e2->probability = e1->probability.invert ();
		set_immediate_dominator (CDI_DOMINATORS, e2->dest,
					 e2->src);
		m_gsi = gsi_after_labels (e1->dest);
		bqp[2 * i].e = e2;
	      }
	      g = gimple_build_cond (NE_EXPR, rhs1, first,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      {
		edge e1 = split_block (gsi_bb (m_gsi), g);
		e1->flags = EDGE_FALSE_VALUE;
		edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
		e1->probability = profile_probability::unlikely ();
		e2->probability = e1->probability.invert ();
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
		m_gsi = gsi_after_labels (e1->dest);
		bqp[2 * i + 1].e = e2;
	      }
	    }

	  if (tree_fits_uhwi_p (idx))
	    bqp[i].addend
	      = build_int_cst (integer_type_node,
			       prec
			       - (((int) tree_to_uhwi (idx) + 1)
				  * limb_prec) - sub_one);
	  else
	    {
	      tree in, out;
	      in = build_int_cst (integer_type_node, rem - sub_one);
	      if (m_first)
		in = prepare_data_in_out (in, idx, &out);
	      else
		out = m_data[m_data_cnt + 1];
	      bqp[i].addend = in;
	      g = gimple_build_assign (out, PLUS_EXPR, in,
				       build_int_cst (integer_type_node,
						      -limb_prec));
	      insert_before (g);
	      m_data[m_data_cnt] = out;
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i == cnt - 1)
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_int (-1));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge true_edge, false_edge;
	      extract_true_false_edges_from_block (gsi_bb (m_gsi),
						   &true_edge, &false_edge);
	      m_gsi = gsi_after_labels (false_edge->dest);
	    }
	}
    }

  switch (ifn)
    {
    case IFN_CLZ:
    case IFN_CTZ:
    case IFN_FFS:
      {
	gphi *phi1, *phi2, *phi3;
	basic_block bb;
	bb = gsi_bb (m_gsi);
	remove_edge (find_edge (bb, gimple_bb (stmt)));
	phi1 = create_phi_node (make_ssa_name (m_limb_type),
				gimple_bb (stmt));
	phi2 = create_phi_node (make_ssa_name (integer_type_node),
				gimple_bb (stmt));
	for (unsigned i = 0; i < cnt; i++)
	  {
	    add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
	    add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
	  }
	if (arg1 == NULL_TREE)
	  {
	    g = gimple_build_builtin_unreachable (m_loc);
	    insert_before (g);
	  }
	m_gsi = gsi_for_stmt (stmt);
	g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
	gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
	insert_before (g);
	if (arg1 == NULL_TREE)
	  g = gimple_build_assign (lhs, PLUS_EXPR,
				   gimple_phi_result (phi2),
				   gimple_call_lhs (g));
	else
	  {
	    g = gimple_build_assign (make_ssa_name (integer_type_node),
				     PLUS_EXPR, gimple_phi_result (phi2),
				     gimple_call_lhs (g));
	    insert_before (g);
	    edge e1 = split_block (gimple_bb (stmt), g);
	    edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
	    e2->probability = profile_probability::always ();
	    set_immediate_dominator (CDI_DOMINATORS, e1->dest,
				     get_immediate_dominator (CDI_DOMINATORS,
							      e1->src));
	    phi3 = create_phi_node (make_ssa_name (integer_type_node),
				    e1->dest);
	    add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
	    add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
	    m_gsi = gsi_for_stmt (stmt);
	    g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	  }
	gsi_replace (&m_gsi, g, true);
	break;
      }
    case IFN_CLRSB:
      {
	gphi *phi1, *phi2, *phi3;
	basic_block bb = gsi_bb (m_gsi);
	remove_edge (find_edge (bb, edge_bb));
	edge e;
	e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
	e->probability = profile_probability::always ();
	set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
				 get_immediate_dominator (CDI_DOMINATORS,
							  edge_bb));
	phi1 = create_phi_node (make_ssa_name (m_limb_type),
				edge_bb);
	phi2 = create_phi_node (make_ssa_name (integer_type_node),
				edge_bb);
	phi3 = create_phi_node (make_ssa_name (integer_type_node),
				gimple_bb (stmt));
	for (unsigned i = 0; i < cnt; i++)
	  {
	    add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
	    add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
			 UNKNOWN_LOCATION);
	    tree a = bqp[i].addend;
	    if (i && kind == bitint_prec_large)
	      a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
	    if (i != cnt - 1)
	      add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
	  }
	add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
		     UNKNOWN_LOCATION);
	m_gsi = gsi_after_labels (edge_bb);
	g = gimple_build_call (fndecl, 1,
			       add_cast (signed_type_for (m_limb_type),
					 gimple_phi_result (phi1)));
	gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
	insert_before (g);
	g = gimple_build_assign (make_ssa_name (integer_type_node),
				 PLUS_EXPR, gimple_call_lhs (g),
				 gimple_phi_result (phi2));
	insert_before (g);
	if (kind != bitint_prec_large)
	  {
	    g = gimple_build_assign (make_ssa_name (integer_type_node),
				     PLUS_EXPR, gimple_assign_lhs (g),
				     build_int_cst (integer_type_node, 1));
	    insert_before (g);
	  }
	add_phi_arg (phi3, gimple_assign_lhs (g),
		     find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
	m_gsi = gsi_for_stmt (stmt);
	g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	gsi_replace (&m_gsi, g, true);
	break;
      }
    case IFN_PARITY:
      g = gimple_build_call (fndecl, 1, res);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_POPCOUNT:
      g = gimple_build_assign (lhs, res);
      gsi_replace (&m_gsi, g, true);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Lower a call statement with one or more large/huge _BitInt
   arguments or large/huge _BitInt return value.  */
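/* Illustrative sketch (editorial): for a call such as foo (x) where x is
   a _BitInt(512) SSA_NAME, the argument is replaced by a fresh SSA_NAME
   loaded from the "bitint" limb-array variable of x's partition, wrapped
   in a VIEW_CONVERT_EXPR when the types differ, and similarly a
   large/huge _BitInt lhs is redirected to store into its partition
   variable.  */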
void
bitint_large_huge::lower_call (tree obj, gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  unsigned int nargs = gimple_call_num_args (stmt);
  if (gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
	lower_addsub_overflow (obj, stmt);
	return;
      case IFN_MUL_OVERFLOW:
      case IFN_UBSAN_CHECK_MUL:
	lower_mul_overflow (obj, stmt);
	return;
      case IFN_CLZ:
      case IFN_CTZ:
      case IFN_CLRSB:
      case IFN_FFS:
      case IFN_PARITY:
      case IFN_POPCOUNT:
	lower_bit_query (stmt);
	return;
      default:
	break;
      }
  for (unsigned int i = 0; i < nargs; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (TREE_CODE (arg) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
	  || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
	continue;
      int p = var_to_partition (m_map, arg);
      tree v = m_vars[p];
      gcc_assert (v != NULL_TREE);
      if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
	v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
      arg = make_ssa_name (TREE_TYPE (arg));
      gimple *g = gimple_build_assign (arg, v);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
      gimple_call_set_arg (stmt, i, arg);
      if (m_preserved == NULL)
	m_preserved = BITMAP_ALLOC (NULL);
      bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
    }
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      tree v = m_vars[p];
      gcc_assert (v != NULL_TREE);
      if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
	v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
      gimple_call_set_lhs (stmt, v);
      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
    }
  update_stmt (stmt);
}
/* Lower __asm STMT which involves large/huge _BitInt values.  */
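/* Illustrative example (editorial):
     asm ("..." : "=m" (y) : "m" (x));
   where x and y are large _BitInt SSA_NAMEs simply has each operand
   rewritten to the limb-array variable of the corresponding partition;
   no per-limb loop is needed because the operands stay in memory.  */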
void
bitint_large_huge::lower_asm (gimple *stmt)
{
  gasm *g = as_a <gasm *> (stmt);
  unsigned noutputs = gimple_asm_noutputs (g);
  unsigned ninputs = gimple_asm_ninputs (g);

  for (unsigned i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  int part = var_to_partition (m_map, s);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  TREE_VALUE (t) = m_vars[part];
	}
    }
  for (unsigned i = 0; i < ninputs; ++i)
    {
      tree t = gimple_asm_input_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  int part = var_to_partition (m_map, s);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  TREE_VALUE (t) = m_vars[part];
	}
    }
  update_stmt (stmt);
}
/* Lower statement STMT which involves large/huge _BitInt values
   into code accessing individual limbs.  */
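/* Illustrative example of the final-cast case handled below (editorial,
   assuming 64-bit limbs):
     _BitInt(135) x;
     unsigned long l = (unsigned long) x;
   only needs the least significant limb, so it lowers to a single limb
   access, while a result up to two limbs wide is reassembled with an
   LSHIFT_EXPR of the second limb and a BIT_IOR_EXPR.  */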
void
bitint_large_huge::lower_stmt (gimple *stmt)
{
  m_first = true;
  m_data.truncate (0);
  m_data_cnt = 0;
  m_gsi = gsi_for_stmt (stmt);
  m_after_stmt = NULL;
  m_init_gsi = m_gsi;
  gsi_prev (&m_init_gsi);
  m_preheader_bb = NULL;
  m_upwards_2limb = 0;
  m_cast_conditional = false;
  m_loc = gimple_location (stmt);
  if (is_gimple_call (stmt))
    {
      lower_call (NULL_TREE, stmt);
      return;
    }
  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      lower_asm (stmt);
      return;
    }
  tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
  tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
  bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
  bool mergeable_cast_p = false;
  bool final_cast_p = false;
  if (gimple_assign_cast_p (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	mergeable_cast_p = true;
      else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	       && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
	{
	  final_cast_p = true;
	  if (TREE_CODE (rhs1) == SSA_NAME
	      && (m_names == NULL
		  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (rhs1);
	      if (is_gimple_assign (g)
		  && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
		{
		  tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
		  if (TREE_CODE (rhs2) == SSA_NAME
		      && (m_names == NULL
			  || !bitmap_bit_p (m_names,
					    SSA_NAME_VERSION (rhs2))))
		    {
		      g = SSA_NAME_DEF_STMT (rhs2);
		      int ovf = optimizable_arith_overflow (g);
		      if (ovf == 2)
			/* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
			   and IMAGPART_EXPR uses, where the latter is cast to
			   non-_BitInt, it will be optimized when handling
			   the REALPART_EXPR.  */
			return;
		      if (ovf == 1)
			{
			  lower_call (NULL_TREE, g);
			  return;
			}
		    }
		}
	    }
	}
    }
  if (gimple_store_p (stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	{
	  gimple *g = SSA_NAME_DEF_STMT (rhs1);
	  m_loc = gimple_location (g);
	  lhs = gimple_assign_lhs (stmt);
	  if (is_gimple_assign (g) && !mergeable_op (g))
	    switch (gimple_assign_rhs_code (g))
	      {
	      case LSHIFT_EXPR:
	      case RSHIFT_EXPR:
		lower_shift_stmt (lhs, g);
	      handled:
		m_gsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		release_ssa_name (gimple_vdef (stmt));
		gsi_remove (&m_gsi, true);
		return;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		lower_muldiv_stmt (lhs, g);
		goto handled;
	      case FIX_TRUNC_EXPR:
		lower_float_conv_stmt (lhs, g);
		goto handled;
	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		lower_cplxpart_stmt (lhs, g);
		goto handled;
	      default:
		break;
	      }
	  else if (optimizable_arith_overflow (g) == 3)
	    {
	      lower_call (lhs, g);
	      return;
	    }
	  m_loc = gimple_location (stmt);
	}
    }
  if (mergeable_op (stmt)
      || gimple_store_p (stmt)
      || gimple_assign_load_p (stmt)
      || eq_p
      || mergeable_cast_p)
    {
      lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
      if (!eq_p)
	return;
    }
  else if (cmp_code != ERROR_MARK)
    lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
  if (cmp_code != ERROR_MARK)
    {
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cstmt = as_a <gcond *> (stmt);
	  gimple_cond_set_lhs (cstmt, lhs);
	  gimple_cond_set_rhs (cstmt, boolean_false_node);
	  gimple_cond_set_code (cstmt, cmp_code);
	  update_stmt (stmt);
	  return;
	}
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree cond = build2 (cmp_code, boolean_type_node, lhs,
			      boolean_false_node);
	  gimple_assign_set_rhs1 (stmt, cond);
	  lhs = gimple_assign_lhs (stmt);
	  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		      || (bitint_precision_kind (TREE_TYPE (lhs))
			  <= bitint_prec_middle));
	}
      else
	{
	  gimple_assign_set_rhs1 (stmt, lhs);
	  gimple_assign_set_rhs2 (stmt, boolean_false_node);
	  gimple_assign_set_rhs_code (stmt, cmp_code);
	}
      update_stmt (stmt);
      return;
    }
  if (final_cast_p)
    {
      tree lhs_type = TREE_TYPE (lhs);
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
      gimple *g;
      if (TREE_CODE (lhs_type) == BITINT_TYPE
	  && bitint_precision_kind (lhs_type) == bitint_prec_middle)
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
						   TYPE_UNSIGNED (lhs_type));
      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree r1 = handle_operand (rhs1, size_int (0));
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  tree r2 = handle_operand (rhs1, size_int (1));
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != TREE_TYPE (lhs))
	g = gimple_build_assign (lhs, NOP_EXPR, r1);
      else
	g = gimple_build_assign (lhs, r1);
      gsi_replace (&m_gsi, g, true);
      return;
    }
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case LSHIFT_EXPR:
      case RSHIFT_EXPR:
	lower_shift_stmt (NULL_TREE, stmt);
	return;
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
	lower_muldiv_stmt (NULL_TREE, stmt);
	return;
      case FIX_TRUNC_EXPR:
      case FLOAT_EXPR:
	lower_float_conv_stmt (NULL_TREE, stmt);
	return;
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	lower_cplxpart_stmt (NULL_TREE, stmt);
	return;
      case COMPLEX_EXPR:
	lower_complexexpr_stmt (stmt);
	return;
      default:
	break;
      }
  gcc_unreachable ();
}
/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;
  return NULL;
}
/* Return true if STMT uses a library function and needs to take
   address of its inputs.  We need to avoid bit-fields in those
   cases.  */

static bool
stmt_needs_operand_addr (gimple *stmt)
{
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
      case FLOAT_EXPR:
	return true;
      default:
	break;
      }
  else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
	   || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
    return true;
  return false;
}
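
/* Illustrative example (editorial): for
     struct S { _BitInt(135) b : 130; } *p;
     _BitInt(135) x = p->b * 3;
   the multiplication passes the addresses of its operands to a libgcc
   helper which may read whole limbs, so the bit-field load above may not
   be sunk into the multiplication and has to stay a separate copy.  */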
/* Dominator walker used to discover which large/huge _BitInt
   loads could be sunk into all their uses.  */

class bitint_dom_walker : public dom_walker
{
public:
  bitint_dom_walker (bitmap names, bitmap loads)
    : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}

  edge before_dom_children (basic_block) final override;

private:
  bitmap m_names, m_loads;
};

edge
bitint_dom_walker::before_dom_children (basic_block bb)
{
  gphi *phi = get_virtual_phi (bb);
  tree vop;
  if (phi)
    vop = gimple_phi_result (phi);
  else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    vop = NULL_TREE;
  else
    vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;

  auto_vec<tree, 16> worklist;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;

      if (!vop && gimple_vuse (stmt))
	vop = gimple_vuse (stmt);

      tree cvop = vop;
      if (gimple_vdef (stmt))
	vop = gimple_vdef (stmt);

      tree lhs = gimple_get_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
	/* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
	   it means it will be handled in a loop or straight line code
	   at the location of its (ultimate) immediate use, so for
	   vop checking purposes check these only at the ultimate
	   immediate use.  */
	continue;

      ssa_op_iter oi;
      use_operand_p use_p;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
	{
	  tree s = USE_FROM_PTR (use_p);
	  if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	      && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	    worklist.safe_push (s);
	}

      bool needs_operand_addr = stmt_needs_operand_addr (stmt);
      while (worklist.length () > 0)
	{
	  tree s = worklist.pop ();

	  if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      needs_operand_addr |= stmt_needs_operand_addr (g);
	      FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
		      && (bitint_precision_kind (TREE_TYPE (s2))
			  >= bitint_prec_large))
		    worklist.safe_push (s2);
		}
	      continue;
	    }
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
	    {
	      tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
		s = rhs;
	      else
		continue;
	    }
	  else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
	    continue;

	  tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
	  if (needs_operand_addr
	      && TREE_CODE (rhs1) == COMPONENT_REF
	      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
	    {
	      tree fld = TREE_OPERAND (rhs1, 1);
	      /* For little-endian, we can allow as inputs bit-fields
		 which start at a limb boundary.  */
	      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
		  && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
		  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		      % limb_prec) == 0)
		;
	      else
		{
		  bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
		  continue;
		}
	    }

	  ao_ref ref;
	  ao_ref_init (&ref, rhs1);
	  tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
	  unsigned limit = 64;
	  tree vuse = cvop;
	  if (vop != cvop
	      && is_gimple_assign (stmt)
	      && gimple_store_p (stmt)
	      && !operand_equal_p (lhs,
				   gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
				   0))
	    vuse = vop;
	  if (vuse != lvop
	      && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
					 NULL, NULL, limit, lvop) == NULL)
	    bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
	}
    }

  bb->aux = (void *) vop;
  return NULL;
}
/* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
   build_ssa_conflict_graph.
   The differences are:
   1) don't process assignments with large/huge _BitInt lhs not in NAMES
   2) for large/huge _BitInt multiplication/division/modulo process def
      only after processing uses rather than before to make uses conflict
      with the def
   3) for large/huge _BitInt uses not in NAMES mark the uses of their
      SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
      the final statement.  */

void
build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
				 ssa_conflicts *graph, bitmap names,
				 void (*def) (live_track *, tree,
					      ssa_conflicts *),
				 void (*use) (live_track *, tree))
{
  bool muldiv_p = false;
  tree lhs = NULL_TREE;
  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
	{
	  if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
	    return;
	  switch (gimple_assign_rhs_code (stmt))
	    {
	    case MULT_EXPR:
	    case TRUNC_DIV_EXPR:
	    case TRUNC_MOD_EXPR:
	      muldiv_p = true;
	    default:
	      break;
	    }
	}
    }

  /* For stmts with more than one SSA_NAME definition pretend all the
     SSA_NAME outputs but the first one are live at this point, so
     that conflicts are added in between all those even when they are
     actually not really live after the asm, because expansion might
     copy those into pseudos after the asm and if multiple outputs
     share the same partition, it might overwrite those that should
     be live.  E.g.
     asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
     return a;  */
  tree var;
  ssa_op_iter iter;
  bool first = true;
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
    {
      if (first)
	first = false;
      else
	use (live, var);
    }

  if (!muldiv_p)
    FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
      def (live, var, graph);

  auto_vec<tree, 16> worklist;
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
	&& bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
      {
	if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	  use (live, var);
	else
	  worklist.safe_push (var);
      }

  while (worklist.length () > 0)
    {
      tree s = worklist.pop ();
      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
	if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
	    && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
	  {
	    if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	      use (live, var);
	    else
	      worklist.safe_push (var);
	  }
    }

  if (muldiv_p)
    def (live, lhs, graph);
}
/* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
   return the largest bitint_prec_kind of them, otherwise return
   bitint_prec_small.  */

static bitint_prec_kind
arith_overflow_arg_kind (gimple *stmt)
{
  bitint_prec_kind ret = bitint_prec_small;
  if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_MUL_OVERFLOW:
	for (int i = 0; i < 2; ++i)
	  {
	    tree a = gimple_call_arg (stmt, i);
	    if (TREE_CODE (a) == INTEGER_CST
		&& TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
	      {
		bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
		ret = MAX (ret, kind);
	      }
	  }
	break;
      default:
	break;
      }
  return ret;
}
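
/* Illustrative example (editorial): in
     __builtin_add_overflow (x, 123456789012345678901234567890wb, &r)
   the second argument is a large _BitInt INTEGER_CST even when x and r
   themselves are small, so the statement still needs lowering; the
   classification above picks the largest kind among such constant
   arguments.  */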
/* Entry point for _BitInt(N) operation lowering during optimization.  */

static unsigned int
gimple_lower_bitint (void)
{
  small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
  limb_prec = 0;

  unsigned int i;
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
	continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
	      != bitint_prec_small)
	    break;
	  type = TREE_TYPE (type);
	}
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) != bitint_prec_small)
	break;
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      >= bitint_prec_large))
		break;
	    }
	}
      /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
	 to floating point types need to be rewritten.  */
      else if (SCALAR_FLOAT_TYPE_P (type))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (t) == INTEGER_CST
		  && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      != bitint_prec_small))
		break;
	    }
	}
    }
  if (i == num_ssa_names)
    return 0;

  basic_block bb;
  auto_vec<gimple *, 4> switch_statements;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
	{
	  tree idx = gimple_switch_index (swtch);
	  if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
	      || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
	    continue;
	  group_case_labels_stmt (swtch);
	  switch_statements.safe_push (swtch);
	}
    }
  if (!switch_statements.is_empty ())
    {
      bool expanded = false;
      gimple *stmt;
      unsigned int j;
      FOR_EACH_VEC_ELT (switch_statements, j, stmt)
	{
	  gswitch *swtch = as_a <gswitch *> (stmt);
	  tree_switch_conversion::switch_decision_tree dt (swtch);
	  expanded |= dt.analyze_switch_statement ();
	}
      if (expanded)
	{
	  free_dominance_info (CDI_DOMINATORS);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  mark_virtual_operands_for_renaming (cfun);
	  cleanup_tree_cfg (TODO_update_ssa);
	}
    }

  struct bitint_large_huge large_huge;
  bool has_large_huge_parm_result = false;
  bool has_large_huge = false;
  unsigned int ret = 0, first_large_huge = ~0U;
  bool edge_insertions = false;
  for (; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
	continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
	      >= bitint_prec_large)
	    has_large_huge = true;
	  type = TREE_TYPE (type);
	}
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  if (first_large_huge == ~0U)
	    first_large_huge = i;
	  gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
	  gimple_stmt_iterator gsi;
	  tree_code rhs_code;
	  /* Unoptimize certain constructs to simpler alternatives to
	     avoid having to lower all of them.  */
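	  /* Illustrative example (editorial): a rotate such as
	       r = x rrotate 17;
	     on a _BitInt(256) operand is rewritten below into the
	     equivalent shift pair
	       r = (x >> 17) | (x << (256 - 17));
	     (with the operand cast to an unsigned _BitInt first when
	     needed), so only plain shifts have to be handled by the
	     actual limb-wise lowering.  */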
5800 if (is_gimple_assign (stmt
))
5801 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
5808 first_large_huge
= 0;
5809 location_t loc
= gimple_location (stmt
);
5810 gsi
= gsi_for_stmt (stmt
);
5811 tree rhs1
= gimple_assign_rhs1 (stmt
);
5812 tree type
= TREE_TYPE (rhs1
);
5813 tree n
= gimple_assign_rhs2 (stmt
), m
;
5814 tree p
= build_int_cst (TREE_TYPE (n
),
5815 TYPE_PRECISION (type
));
5816 if (TREE_CODE (n
) == INTEGER_CST
)
5817 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
5820 m
= make_ssa_name (TREE_TYPE (n
));
5821 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
5822 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5823 gimple_set_location (g
, loc
);
5825 if (!TYPE_UNSIGNED (type
))
5827 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
5829 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5830 rhs1
= fold_convert (utype
, rhs1
);
5833 tree t
= make_ssa_name (type
);
5834 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
5835 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5836 gimple_set_location (g
, loc
);
5839 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5840 rhs_code
== LROTATE_EXPR
5841 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
5843 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5844 gimple_set_location (g
, loc
);
5845 tree op1
= gimple_assign_lhs (g
);
5846 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5847 rhs_code
== LROTATE_EXPR
5848 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
5850 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5851 gimple_set_location (g
, loc
);
5852 tree op2
= gimple_assign_lhs (g
);
5853 tree lhs
= gimple_assign_lhs (stmt
);
5854 if (!TYPE_UNSIGNED (type
))
5856 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
5857 BIT_IOR_EXPR
, op1
, op2
);
5858 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5859 gimple_set_location (g
, loc
);
5860 g
= gimple_build_assign (lhs
, NOP_EXPR
,
5861 gimple_assign_lhs (g
));
5864 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
5865 gsi_replace (&gsi
, g
, true);
5866 gimple_set_location (g
, loc
);
5874 first_large_huge
= 0;
5875 gsi
= gsi_for_stmt (stmt
);
5876 tree lhs
= gimple_assign_lhs (stmt
);
5877 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
5878 location_t loc
= gimple_location (stmt
);
5879 if (rhs_code
== ABS_EXPR
)
5880 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5881 build_zero_cst (TREE_TYPE (rhs1
)),
5882 NULL_TREE
, NULL_TREE
);
5883 else if (rhs_code
== ABSU_EXPR
)
5885 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
5886 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
5887 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5888 gimple_set_location (g
, loc
);
5889 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5890 build_zero_cst (TREE_TYPE (rhs1
)),
5891 NULL_TREE
, NULL_TREE
);
5894 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
5896 rhs2
= gimple_assign_rhs2 (stmt
);
5897 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5898 std::swap (rhs1
, rhs2
);
5899 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
5900 NULL_TREE
, NULL_TREE
);
5901 if (rhs_code
== MAX_EXPR
)
5902 std::swap (rhs1
, rhs2
);
5906 g
= gimple_build_cond (NE_EXPR
, rhs1
,
5907 build_zero_cst (TREE_TYPE (rhs1
)),
5908 NULL_TREE
, NULL_TREE
);
5909 rhs1
= gimple_assign_rhs2 (stmt
);
5910 rhs2
= gimple_assign_rhs3 (stmt
);
5912 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5913 gimple_set_location (g
, loc
);
5914 edge e1
= split_block (gsi_bb (gsi
), g
);
5915 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
5916 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
5917 e3
->probability
= profile_probability::even ();
5918 e1
->flags
= EDGE_TRUE_VALUE
;
5919 e1
->probability
= e3
->probability
.invert ();
5920 if (dom_info_available_p (CDI_DOMINATORS
))
5921 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
5922 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
5924 gsi
= gsi_after_labels (e1
->dest
);
5925 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5927 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5928 gimple_set_location (g
, loc
);
5929 rhs2
= gimple_assign_lhs (g
);
5930 std::swap (rhs1
, rhs2
);
5932 gsi
= gsi_for_stmt (stmt
);
5933 gsi_remove (&gsi
, true);
5934 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
5935 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
5936 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
5940 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5941 into memory. Such functions could have no large/huge SSA_NAMEs. */
5942 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5944 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5945 if (is_gimple_assign (g
) && gimple_store_p (g
))
5947 tree t
= gimple_assign_rhs1 (g
);
5948 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5949 && (bitint_precision_kind (TREE_TYPE (t
))
5950 >= bitint_prec_large
))
5951 has_large_huge
= true;
5954 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5955 to floating point types need to be rewritten. */
5956 else if (SCALAR_FLOAT_TYPE_P (type
))
5958 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5959 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5961 tree t
= gimple_assign_rhs1 (g
);
5962 if (TREE_CODE (t
) == INTEGER_CST
5963 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5964 && (bitint_precision_kind (TREE_TYPE (t
))
5965 >= bitint_prec_large
))
5966 has_large_huge
= true;
5970 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
5972 tree s
= ssa_name (i
);
5975 tree type
= TREE_TYPE (s
);
5976 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5977 type
= TREE_TYPE (type
);
5978 if (TREE_CODE (type
) == BITINT_TYPE
5979 && bitint_precision_kind (type
) >= bitint_prec_large
)
5981 use_operand_p use_p
;
5983 has_large_huge
= true;
5985 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
5987 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5988 the same bb and could be handled in the same loop with the
5991 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5992 && single_imm_use (s
, &use_p
, &use_stmt
)
5993 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
5995 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
5997 if (mergeable_op (use_stmt
))
5999 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
6000 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
6002 if (gimple_assign_cast_p (use_stmt
))
6004 tree lhs
= gimple_assign_lhs (use_stmt
);
6005 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
6008 else if (gimple_store_p (use_stmt
)
6009 && is_gimple_assign (use_stmt
)
6010 && !gimple_has_volatile_ops (use_stmt
)
6011 && !stmt_ends_bb_p (use_stmt
))
6014 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
6016 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6017 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6018 && ((is_gimple_assign (use_stmt
)
6019 && (gimple_assign_rhs_code (use_stmt
)
6021 || gimple_code (use_stmt
) == GIMPLE_COND
)
6022 && (!gimple_store_p (use_stmt
)
6023 || (is_gimple_assign (use_stmt
)
6024 && !gimple_has_volatile_ops (use_stmt
)
6025 && !stmt_ends_bb_p (use_stmt
)))
6026 && (TREE_CODE (rhs1
) != SSA_NAME
6027 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
6029 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6030 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6031 < bitint_prec_large
))
6033 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6034 >= TYPE_PRECISION (TREE_TYPE (s
)))
6035 && mergeable_op (use_stmt
))
6037 /* Prevent merging a widening non-mergeable cast
6038 on result of some narrower mergeable op
6039 together with later mergeable operations. E.g.
6040 result of _BitInt(223) addition shouldn't be
6041 sign-extended to _BitInt(513) and have another
6042 _BitInt(513) added to it, as handle_plus_minus
6043 with its PHI node handling inside of handle_cast
6044 will not work correctly. An exception is if
6045 use_stmt is a store, this is handled directly
6046 in lower_mergeable_stmt. */
6047 if (TREE_CODE (rhs1
) != SSA_NAME
6048 || !has_single_use (rhs1
)
6049 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6050 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6051 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
6052 || gimple_store_p (use_stmt
))
6054 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6055 < TYPE_PRECISION (TREE_TYPE (s
)))
6056 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
6058 /* Another exception is if the widening cast is
6059 from mergeable same precision cast from something
6062 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
6063 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
6064 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
6065 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
6067 if (TREE_CODE (rhs2
) != SSA_NAME
6068 || !has_single_use (rhs2
)
6069 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
6070 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6071 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
6077 if (is_gimple_assign (SSA_NAME_DEF_STMT (s
)))
6078 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s
)))
6082 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6083 rhs1
= TREE_OPERAND (rhs1
, 0);
6084 if (TREE_CODE (rhs1
) == SSA_NAME
)
6086 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
6087 if (optimizable_arith_overflow (g
))
6095 case TRUNC_DIV_EXPR
:
6096 case TRUNC_MOD_EXPR
:
6097 case FIX_TRUNC_EXPR
:
6099 if (gimple_store_p (use_stmt
)
6100 && is_gimple_assign (use_stmt
)
6101 && !gimple_has_volatile_ops (use_stmt
)
6102 && !stmt_ends_bb_p (use_stmt
))
6104 tree lhs
= gimple_assign_lhs (use_stmt
);
6105 /* As multiply/division passes address of the lhs
6106 to library function and that assumes it can extend
6107 it to whole number of limbs, avoid merging those
6108 with bit-field stores. Don't allow it for
6109 shifts etc. either, so that the bit-field store
6110 handling doesn't have to be done everywhere. */
6111 if (TREE_CODE (lhs
) == COMPONENT_REF
6112 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6122 /* Also ignore uninitialized uses. */
6123 if (SSA_NAME_IS_DEFAULT_DEF (s
)
6124 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
6127 if (!large_huge
.m_names
)
6128 large_huge
.m_names
= BITMAP_ALLOC (NULL
);
6129 bitmap_set_bit (large_huge
.m_names
, SSA_NAME_VERSION (s
));
6130 if (has_single_use (s
))
6132 if (!large_huge
.m_single_use_names
)
6133 large_huge
.m_single_use_names
= BITMAP_ALLOC (NULL
);
6134 bitmap_set_bit (large_huge
.m_single_use_names
,
6135 SSA_NAME_VERSION (s
));
6137 if (SSA_NAME_VAR (s
)
6138 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6139 && SSA_NAME_IS_DEFAULT_DEF (s
))
6140 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6141 has_large_huge_parm_result
= true;
          if (optimize
              && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
              && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
              && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
              && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
            {
              use_operand_p use_p;
              imm_use_iterator iter;
              bool optimizable_load = true;
              FOR_EACH_IMM_USE_FAST (use_p, iter, s)
                {
                  gimple *use_stmt = USE_STMT (use_p);
                  if (is_gimple_debug (use_stmt))
                    continue;
                  if (gimple_code (use_stmt) == GIMPLE_PHI
                      || is_gimple_call (use_stmt))
                    {
                      optimizable_load = false;
                      break;
                    }
                }
              ssa_op_iter oi;
              FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
                                        oi, SSA_OP_USE)
                {
                  tree s2 = USE_FROM_PTR (use_p);
                  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
                    {
                      optimizable_load = false;
                      break;
                    }
                }
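              /* Loads that survive these checks are collected in m_loads;
                 the dominator walk below prunes that set and the remaining
                 loads can then be handled at their use sites instead of
                 being given a backing variable of their own.  */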
              if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
                {
                  if (!large_huge.m_loads)
                    large_huge.m_loads = BITMAP_ALLOC (NULL);
                  bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
                }
            }
        }
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
         into memory.  Such functions could have no large/huge SSA_NAMEs.  */
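      /* E.g. (sketch)
           void foo (_BitInt(513) *p) { *p = 123wb; }
         only has a virtual definition for the store, but the INTEGER_CST
         on its rhs is still a huge _BitInt that must be lowered.  */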
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_store_p (g))
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
                has_large_huge = true;
            }
        }
    }
  if (large_huge.m_names || has_large_huge)
    {
      ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
      calculate_dominance_info (CDI_DOMINATORS);
      if (optimize)
        enable_ranger (cfun);
      if (large_huge.m_loads)
        {
          basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
          bitint_dom_walker (large_huge.m_names,
                             large_huge.m_loads).walk (entry);
          bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
          clear_aux_for_blocks ();
          BITMAP_FREE (large_huge.m_loads);
        }
      large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
      large_huge.m_limb_size
        = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
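      /* E.g. with 64-bit limbs m_limb_type is the unsigned 64-bit
         INTEGER_TYPE and m_limb_size is 8.  */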
      if (large_huge.m_names)
        {
          large_huge.m_map
            = init_var_map (num_ssa_names, NULL, large_huge.m_names);
          coalesce_ssa_name (large_huge.m_map);
          partition_view_normal (large_huge.m_map);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "After Coalescing:\n");
              dump_var_map (dump_file, large_huge.m_map);
            }
          large_huge.m_vars
            = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
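          /* m_vars maps each coalesced partition to the single variable
             that will back all SSA_NAMEs in the partition: PARM_DECLs and
             RESULT_DECLs are reused directly below, the rest get fresh
             "bitint" array temporaries.  */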
          if (has_large_huge_parm_result)
            EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
              {
                tree s = ssa_name (i);
                if (SSA_NAME_VAR (s)
                    && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
                         && SSA_NAME_IS_DEFAULT_DEF (s))
                        || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
                  {
                    int p = var_to_partition (large_huge.m_map, s);
                    if (large_huge.m_vars[p] == NULL_TREE)
                      {
                        large_huge.m_vars[p] = SSA_NAME_VAR (s);
                        mark_addressable (SSA_NAME_VAR (s));
                      }
                  }
              }
          tree atype = NULL_TREE;
          EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
            {
              tree s = ssa_name (i);
              int p = var_to_partition (large_huge.m_map, s);
              if (large_huge.m_vars[p] != NULL_TREE)
                continue;
              if (atype == NULL_TREE
                  || !tree_int_cst_equal (TYPE_SIZE (atype),
                                          TYPE_SIZE (TREE_TYPE (s))))
                {
                  unsigned HOST_WIDE_INT nelts
                    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
                  atype = build_array_type_nelts (large_huge.m_limb_type,
                                                  nelts);
                }
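              /* E.g. a partition of _BitInt(255) SSA_NAMEs with 64-bit
                 limbs has TYPE_SIZE 256, i.e. it is backed by an array
                 of 4 limbs.  */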
              large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
              mark_addressable (large_huge.m_vars[p]);
            }
        }
      FOR_EACH_BB_REVERSE_FN (bb, cfun)
        {
          gimple_stmt_iterator prev;
          for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
               gsi = prev)
            {
              prev = gsi;
              gsi_prev (&prev);
              gimple *stmt = gsi_stmt (gsi);
              if (is_gimple_debug (stmt))
                continue;
              bitint_prec_kind kind = bitint_prec_small;
              tree t;
              ssa_op_iter iter;
              FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
                if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
                  {
                    bitint_prec_kind this_kind
                      = bitint_precision_kind (TREE_TYPE (t));
                    kind = MAX (kind, this_kind);
                  }
              if (is_gimple_assign (stmt) && gimple_store_p (stmt))
                {
                  t = gimple_assign_rhs1 (stmt);
                  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
                    {
                      bitint_prec_kind this_kind
                        = bitint_precision_kind (TREE_TYPE (t));
                      kind = MAX (kind, this_kind);
                    }
                }
              if (is_gimple_assign (stmt)
                  && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
                {
                  t = gimple_assign_rhs1 (stmt);
                  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                      && TREE_CODE (t) == INTEGER_CST)
                    {
                      bitint_prec_kind this_kind
                        = bitint_precision_kind (TREE_TYPE (t));
                      kind = MAX (kind, this_kind);
                    }
                }
              if (is_gimple_call (stmt))
                {
                  t = gimple_call_lhs (stmt);
                  if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
                    {
                      bitint_prec_kind this_kind
                        = arith_overflow_arg_kind (stmt);
                      kind = MAX (kind, this_kind);
                      if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
                        {
                          this_kind
                            = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
                          kind = MAX (kind, this_kind);
                        }
                    }
                }
              if (kind == bitint_prec_small)
                continue;
              switch (gimple_code (stmt))
                {
                case GIMPLE_CALL:
                  /* For now.  We'll need to handle some internal functions
                     and perhaps some builtins.  */
                  if (kind == bitint_prec_middle)
                    continue;
                  break;
                case GIMPLE_ASM:
                  if (kind == bitint_prec_middle)
                    continue;
                  break;
                case GIMPLE_ASSIGN:
                  if (gimple_clobber_p (stmt))
                    continue;
                  if (kind >= bitint_prec_large)
                    break;
                  if (gimple_assign_single_p (stmt))
                    /* No need to lower copies, loads or stores.  */
                    continue;
                  if (gimple_assign_cast_p (stmt))
                    {
                      tree lhs = gimple_assign_lhs (stmt);
                      tree rhs = gimple_assign_rhs1 (stmt);
                      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                          && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
                          && (TYPE_PRECISION (TREE_TYPE (lhs))
                              == TYPE_PRECISION (TREE_TYPE (rhs))))
                        /* No need to lower casts to same precision.  */
                        continue;
                    }
                  break;
                default:
                  break;
                }
              if (kind == bitint_prec_middle)
                {
                  tree type = NULL_TREE;
                  /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
                     with the same precision and back.  */
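                  /* E.g. on x86_64 (sketch)
                       _BitInt(100) c = a + b;
                     becomes roughly
                       <i100> a2 = (<i100>) a;
                       <i100> b2 = (<i100>) b;
                       <i100> c2 = a2 + b2;
                       c = (_BitInt(100)) c2;
                     where <i100> is a 100-bit INTEGER_TYPE whose arithmetic
                     the rest of the compiler already handles via TImode.  */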
                  unsigned int nops = gimple_num_ops (stmt);
                  for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
                       i < nops; ++i)
                    if (tree op = gimple_op (stmt, i))
                      {
                        tree nop = maybe_cast_middle_bitint (&gsi, op, type);
                        if (nop != op)
                          gimple_set_op (stmt, i, nop);
                        else if (COMPARISON_CLASS_P (op))
                          {
                            TREE_OPERAND (op, 0)
                              = maybe_cast_middle_bitint (&gsi,
                                                          TREE_OPERAND (op, 0),
                                                          type);
                            TREE_OPERAND (op, 1)
                              = maybe_cast_middle_bitint (&gsi,
                                                          TREE_OPERAND (op, 1),
                                                          type);
                          }
                        else if (TREE_CODE (op) == CASE_LABEL_EXPR)
                          {
                            CASE_LOW (op)
                              = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
                                                          type);
                            CASE_HIGH (op)
                              = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
                                                          type);
                          }
                      }
                  if (tree lhs = gimple_get_lhs (stmt))
                    if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
                        && (bitint_precision_kind (TREE_TYPE (lhs))
                            == bitint_prec_middle))
                      {
                        int prec = TYPE_PRECISION (TREE_TYPE (lhs));
                        int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
                        type = build_nonstandard_integer_type (prec, uns);
                        tree lhs2 = make_ssa_name (type);
                        gimple_set_lhs (stmt, lhs2);
                        gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
                        if (stmt_ends_bb_p (stmt))
                          {
                            edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
                            gsi_insert_on_edge_immediate (e, g);
                          }
                        else
                          gsi_insert_after (&gsi, g, GSI_SAME_STMT);
                      }
                  update_stmt (stmt);
                  continue;
                }
              if (tree lhs = gimple_get_lhs (stmt))
                if (TREE_CODE (lhs) == SSA_NAME)
                  {
                    tree type = TREE_TYPE (lhs);
                    if (TREE_CODE (type) == COMPLEX_TYPE)
                      type = TREE_TYPE (type);
                    if (TREE_CODE (type) == BITINT_TYPE
                        && bitint_precision_kind (type) >= bitint_prec_large
                        && (large_huge.m_names == NULL
                            || !bitmap_bit_p (large_huge.m_names,
                                              SSA_NAME_VERSION (lhs))))
                      continue;
                  }

              large_huge.lower_stmt (stmt);
            }
          tree atype = NULL_TREE;
          for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gphi *phi = gsi.phi ();
              tree lhs = gimple_phi_result (phi);
              if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
                  || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
                continue;
              int p1 = var_to_partition (large_huge.m_map, lhs);
              gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
              tree v1 = large_huge.m_vars[p1];
              for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
                {
                  tree arg = gimple_phi_arg_def (phi, i);
                  edge e = gimple_phi_arg_edge (phi, i);
                  gimple *g;
                  switch (TREE_CODE (arg))
                    {
                    case INTEGER_CST:
                      if (integer_zerop (arg) && VAR_P (v1))
                        {
                          tree zero = build_zero_cst (TREE_TYPE (v1));
                          g = gimple_build_assign (v1, zero);
                          gsi_insert_on_edge (e, g);
                          edge_insertions = true;
                          break;
                        }
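                      /* Other constants are shrunk to the smallest number
                         of limbs that still carries the value; the
                         remaining limbs of the backing variable are filled
                         below with the extension (zeros or all ones).  */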
                      {
                        unsigned int min_prec, prec, rem;
                        tree c;
                        int ext;
                        prec = TYPE_PRECISION (TREE_TYPE (arg));
                        rem = prec % (2 * limb_prec);
                        min_prec = bitint_min_cst_precision (arg, ext);
                        if (min_prec > prec - rem - 2 * limb_prec
                            && min_prec > (unsigned) limb_prec)
                          /* Constant which has enough significant bits
                             that it isn't worth trying to save .rodata
                             space by extending from smaller number.  */
                          min_prec = prec;
                        else
                          min_prec = CEIL (min_prec, limb_prec) * limb_prec;
                        if (min_prec == 0)
                          c = NULL_TREE;
                        else if (min_prec == prec)
                          c = tree_output_constant_def (arg);
                        else if (min_prec == (unsigned) limb_prec)
                          c = fold_convert (large_huge.m_limb_type, arg);
                        else
                          {
                            tree ctype = build_bitint_type (min_prec, 1);
                            c = tree_output_constant_def
                                  (fold_convert (ctype, arg));
                          }
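                        /* E.g. (sketch) a positive _BitInt(513) argument
                           that fits into one limb shrinks to min_prec 64
                           with 64-bit limbs; c becomes a single-limb
                           constant and the rest of the backing variable
                           is zero-filled below.  */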
                        if (c)
                          {
                            if (VAR_P (v1) && min_prec == prec)
                              {
                                tree v2 = build1 (VIEW_CONVERT_EXPR,
                                                  TREE_TYPE (v1), c);
                                g = gimple_build_assign (v1, v2);
                                gsi_insert_on_edge (e, g);
                                edge_insertions = true;
                                break;
                              }
                            if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
                              g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                               TREE_TYPE (c),
                                                               v1),
                                                       c);
                            else
                              {
                                unsigned HOST_WIDE_INT nelts
                                  = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
                                    / limb_prec;
                                tree vtype
                                  = build_array_type_nelts
                                      (large_huge.m_limb_type, nelts);
                                g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                                 vtype, v1),
                                                         build1 (VIEW_CONVERT_EXPR,
                                                                 vtype, c));
                              }
                            gsi_insert_on_edge (e, g);
                          }
                        if (ext == 0)
                          {
                            unsigned HOST_WIDE_INT nelts
                              = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
                                 - min_prec) / limb_prec;
                            tree vtype
                              = build_array_type_nelts
                                  (large_huge.m_limb_type, nelts);
                            tree ptype = build_pointer_type (TREE_TYPE (v1));
                            tree off
                              = fold_convert (ptype,
                                              TYPE_SIZE_UNIT (TREE_TYPE (c)));
                            tree vd = build2 (MEM_REF, vtype,
                                              build_fold_addr_expr (v1), off);
                            g = gimple_build_assign (vd,
                                                     build_zero_cst (vtype));
                          }
                        else
                          {
                            tree vd = v1;
                            if (c)
                              {
                                tree ptype
                                  = build_pointer_type (TREE_TYPE (v1));
                                tree off
                                  = fold_convert (ptype,
                                                  TYPE_SIZE_UNIT
                                                    (TREE_TYPE (c)));
                                vd = build2 (MEM_REF,
                                             large_huge.m_limb_type,
                                             build_fold_addr_expr (v1), off);
                              }
                            vd = build_fold_addr_expr (vd);
                            unsigned HOST_WIDE_INT nbytes
                              = tree_to_uhwi (TYPE_SIZE_UNIT
                                                (TREE_TYPE (v1)));
                            if (c)
                              nbytes -= tree_to_uhwi (TYPE_SIZE_UNIT
                                                        (TREE_TYPE (c)));
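                            /* Setting the remaining bytes to all ones
                               sign-extends the negative constant across
                               the rest of the backing variable.  */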
                            tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
                            g = gimple_build_call (fn, 3, vd,
                                                   integer_minus_one_node,
                                                   build_int_cst (sizetype,
                                                                  nbytes));
                          }
                        gsi_insert_on_edge (e, g);
                        edge_insertions = true;
                        break;
                      }
                    case SSA_NAME:
                      if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
                        {
                          if (large_huge.m_names == NULL
                              || !bitmap_bit_p (large_huge.m_names,
                                                SSA_NAME_VERSION (arg)))
                            continue;
                        }
                      {
                        int p2 = var_to_partition (large_huge.m_map, arg);
                        if (p1 == p2)
                          continue;
                        gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
                        tree v2 = large_huge.m_vars[p2];
                        if (VAR_P (v1) && VAR_P (v2))
                          g = gimple_build_assign (v1, v2);
                        else if (VAR_P (v1))
                          g = gimple_build_assign (v1,
                                                   build1 (VIEW_CONVERT_EXPR,
                                                           TREE_TYPE (v1),
                                                           v2));
                        else if (VAR_P (v2))
                          g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                           TREE_TYPE (v2),
                                                           v1),
                                                   v2);
                        else
                          {
                            if (atype == NULL_TREE
                                || !tree_int_cst_equal (TYPE_SIZE (atype),
                                                        TYPE_SIZE
                                                          (TREE_TYPE (lhs))))
                              {
                                unsigned HOST_WIDE_INT nelts
                                  = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
                                    / limb_prec;
                                atype
                                  = build_array_type_nelts
                                      (large_huge.m_limb_type, nelts);
                              }
                            g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                             atype, v1),
                                                     build1 (VIEW_CONVERT_EXPR,
                                                             atype, v2));
                          }
                        gsi_insert_on_edge (e, g);
                        edge_insertions = true;
                        break;
                      }
                    default:
                      gcc_unreachable ();
                    }
                }
            }
        }
    }
  if (large_huge.m_names || has_large_huge)
    {
      gimple *nop = NULL;
      for (i = 0; i < num_ssa_names; ++i)
        {
          tree s = ssa_name (i);
          if (s == NULL_TREE)
            continue;
          tree type = TREE_TYPE (s);
          if (TREE_CODE (type) == COMPLEX_TYPE)
            type = TREE_TYPE (type);
          if (TREE_CODE (type) == BITINT_TYPE
              && bitint_precision_kind (type) >= bitint_prec_large)
            {
              if (large_huge.m_preserved
                  && bitmap_bit_p (large_huge.m_preserved,
                                   SSA_NAME_VERSION (s)))
                continue;
              gimple *g = SSA_NAME_DEF_STMT (s);
              if (gimple_code (g) == GIMPLE_NOP)
                {
                  if (SSA_NAME_VAR (s))
                    set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
                  release_ssa_name (s);
                  continue;
                }
              if (gimple_code (g) != GIMPLE_ASM)
                {
                  gimple_stmt_iterator gsi = gsi_for_stmt (g);
                  bool save_vta = flag_var_tracking_assignments;
                  flag_var_tracking_assignments = false;
                  gsi_remove (&gsi, true);
                  flag_var_tracking_assignments = save_vta;
                }
              if (!nop)
                nop = gimple_build_nop ();
              SSA_NAME_DEF_STMT (s) = nop;
              release_ssa_name (s);
            }
        }
      if (optimize)
        disable_ranger (cfun);
    }

  if (edge_insertions)
    gsi_commit_edge_inserts ();

  return ret;
}
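/* The lowering is exposed as two pass instances: "bitintlower", run as
   part of the normal optimization pipeline, and "bitintlower0", which
   only runs when the former has not (at -O0 or after errors), so that
   RTL expansion never sees unlowered large/huge _BitInt operations.  */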
namespace {

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}
namespace {

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}