1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
31 #include "fold-const.h"
33 #include "gimple-iterator.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
57 #include "gimple-lower-bitint.h"
59 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
60 target hook says it is a single limb, middle _BitInt which per ABI
61 does not, but there is some INTEGER_TYPE in which arithmetics can be
62 performed (operations on such _BitInt are lowered to casts to that
63 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
64 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
65 ones), large _BitInt which should be handled by straight line code and
66 finally huge _BitInt which should be handled by loops over the limbs. */
/* NOTE(review): this listing is a garbled extraction (embedded original line
   numbers, wrapped lines, missing lines); code kept byte-identical.
   The enumerator list (small/middle/large/huge per the comment above) is
   missing from this extraction.  */
68 enum bitint_prec_kind
{
75 /* Caches to speed up bitint_precision_kind. */
/* small_max_prec: largest precision classified small so far; the *_min_prec
   caches are lazily filled by bitint_precision_kind below (0 = not yet
   computed).  */
77 static int small_max_prec
, mid_min_prec
, large_min_prec
, huge_min_prec
;
80 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
/* Classify PREC into one of the four bitint_prec_kind categories.
   First consults the caches above; on a cache miss it queries the
   targetm.c.bitint_type_info hook for the limb mode and fills the
   caches.  NOTE(review): garbled listing — several interior lines are
   missing (visible from the jumps in the embedded numbering); code is
   kept verbatim, comments only added.  */
82 static bitint_prec_kind
83 bitint_precision_kind (int prec
)
/* Fast path: answer from the cached thresholds when available.  */
85 if (prec
<= small_max_prec
)
86 return bitint_prec_small
;
87 if (huge_min_prec
&& prec
>= huge_min_prec
)
88 return bitint_prec_huge
;
89 if (large_min_prec
&& prec
>= large_min_prec
)
90 return bitint_prec_large
;
91 if (mid_min_prec
&& prec
>= mid_min_prec
)
92 return bitint_prec_middle
;
/* Slow path: ask the target for the _BitInt ABI info.  */
94 struct bitint_info info
;
95 bool ok
= targetm
.c
.bitint_type_info (prec
, &info
);
97 scalar_int_mode limb_mode
= as_a
<scalar_int_mode
> (info
.limb_mode
);
98 if (prec
<= GET_MODE_PRECISION (limb_mode
))
/* Fits in a single limb: cache and classify as small.  */
100 small_max_prec
= prec
;
101 return bitint_prec_small
;
104 && GET_MODE_PRECISION (limb_mode
) < MAX_FIXED_MODE_SIZE
)
105 large_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
/* Record the limb precision for the rest of the pass.  */
107 limb_prec
= GET_MODE_PRECISION (limb_mode
);
110 if (4 * limb_prec
>= MAX_FIXED_MODE_SIZE
)
111 huge_min_prec
= 4 * limb_prec
;
113 huge_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
/* Middle: some INTEGER_TYPE can hold it even though it is more than
   one limb (see the file-head comment).  */
115 if (prec
<= MAX_FIXED_MODE_SIZE
)
117 if (!mid_min_prec
|| prec
< mid_min_prec
)
119 return bitint_prec_middle
;
121 if (large_min_prec
&& prec
<= large_min_prec
)
122 return bitint_prec_large
;
123 return bitint_prec_huge
;
126 /* Same for a TYPE. */
/* Overload: classify BITINT_TYPE TYPE by its precision.
   NOTE(review): braces missing in this garbled listing; kept verbatim.  */
128 static bitint_prec_kind
129 bitint_precision_kind (tree type
)
131 return bitint_precision_kind (TYPE_PRECISION (type
));
134 /* Return minimum precision needed to describe INTEGER_CST
135 CST. All bits above that precision up to precision of
136 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
137 if EXT is set to -1. */
/* See the comment above: returns the minimum precision describing CST and
   sets EXT to 0 or -1 depending on the sign of CST.  NOTE(review): the
   return-type line and the function tail are missing from this garbled
   extraction; code kept verbatim.  */
140 bitint_min_cst_precision (tree cst
, int &ext
)
/* EXT is -1 for negative constants, 0 otherwise.  */
142 ext
= tree_int_cst_sgn (cst
) < 0 ? -1 : 0;
143 wide_int w
= wi::to_wide (cst
);
144 unsigned min_prec
= wi::min_precision (w
, TYPE_SIGN (TREE_TYPE (cst
)));
145 /* For signed values, we don't need to count the sign bit,
146 we'll use constant 0 or -1 for the upper bits. */
147 if (!TYPE_UNSIGNED (TREE_TYPE (cst
)))
151 /* For unsigned values, also try signed min_precision
152 in case the constant has lots of most significant bits set. */
153 unsigned min_prec2
= wi::min_precision (w
, SIGNED
) - 1;
154 if (min_prec2
< min_prec
)
165 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
166 cached in TYPE and return it. */
/* Cast middle _BitInt OP to the INTEGER_TYPE cached in TYPE (see comment
   above).  Builds the integer type lazily and emits a NOP_EXPR assignment
   before *GSI when OP is an SSA_NAME.  NOTE(review): garbled listing with
   missing lines (e.g. the early-return condition head); kept verbatim.  */
169 maybe_cast_middle_bitint (gimple_stmt_iterator
*gsi
, tree op
, tree
&type
)
/* Bail out unless OP is a middle-kind BITINT_TYPE.  */
172 || TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
173 || bitint_precision_kind (TREE_TYPE (op
)) != bitint_prec_middle
)
/* (Re)build the cached arithmetic INTEGER_TYPE if precision or
   signedness do not match.  */
176 int prec
= TYPE_PRECISION (TREE_TYPE (op
));
177 int uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
178 if (type
== NULL_TREE
179 || TYPE_PRECISION (type
) != prec
180 || TYPE_UNSIGNED (type
) != uns
)
181 type
= build_nonstandard_integer_type (prec
, uns
);
183 if (TREE_CODE (op
) != SSA_NAME
)
/* Non-SSA operand: fold the conversion directly.  */
185 tree nop
= fold_convert (type
, op
);
186 if (is_gimple_val (nop
))
/* SSA operand: emit an explicit NOP_EXPR cast statement.  */
190 tree nop
= make_ssa_name (type
);
191 gimple
*g
= gimple_build_assign (nop
, NOP_EXPR
, op
);
192 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
196 /* Return true if STMT can be handled in a loop from least to most
197 significant limb together with its dependencies. */
/* Predicate described in the comment above: whether STMT can be lowered
   limb-by-limb from least to most significant limb.  NOTE(review): the
   switch's case labels before line 218 and several return statements are
   missing in this garbled listing; kept verbatim.  */
200 mergeable_op (gimple
*stmt
)
202 if (!is_gimple_assign (stmt
))
204 switch (gimple_assign_rhs_code (stmt
))
/* Shift-like case: only mergeable for counts below one limb.  */
218 tree cnt
= gimple_assign_rhs2 (stmt
);
219 if (tree_fits_uhwi_p (cnt
)
220 && tree_to_uhwi (cnt
) < (unsigned HOST_WIDE_INT
) limb_prec
)
225 case VIEW_CONVERT_EXPR
:
/* VCE between equal-size large/huge BITINT_TYPEs can be mergeable.  */
227 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
228 tree rhs_type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
229 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
230 && TREE_CODE (lhs_type
) == BITINT_TYPE
231 && TREE_CODE (rhs_type
) == BITINT_TYPE
232 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
233 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
234 && tree_int_cst_equal (TYPE_SIZE (lhs_type
), TYPE_SIZE (rhs_type
)))
236 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
))
238 if ((unsigned) TYPE_PRECISION (lhs_type
) % (2 * limb_prec
) != 0)
240 if (bitint_precision_kind (lhs_type
) == bitint_prec_large
)
251 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
252 _Complex large/huge _BitInt lhs which has at most two immediate uses,
253 at most one use in REALPART_EXPR stmt in the same bb and exactly one
254 IMAGPART_EXPR use in the same bb with a single use which casts it to
255 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
256 return 2. Such cases (most common uses of those builtins) can be
257 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
258 of REALPART_EXPR as not needed to be backed up by a stack variable.
259 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
/* Implements the contract in the comment above: 0 = not optimizable,
   1/2 = optimizable .{ADD,SUB,MUL}_OVERFLOW (2 when a REALPART_EXPR use
   exists), 3 = .UBSAN_CHECK_*.  NOTE(review): garbled listing — the
   early `return 0;` statements, the is_ubsan assignment and parts of the
   use-walk are missing; code kept verbatim.  */
262 optimizable_arith_overflow (gimple
*stmt
)
264 bool is_ubsan
= false;
265 if (!is_gimple_call (stmt
) || !gimple_call_internal_p (stmt
))
267 switch (gimple_call_internal_fn (stmt
))
269 case IFN_ADD_OVERFLOW
:
270 case IFN_SUB_OVERFLOW
:
271 case IFN_MUL_OVERFLOW
:
273 case IFN_UBSAN_CHECK_ADD
:
274 case IFN_UBSAN_CHECK_SUB
:
275 case IFN_UBSAN_CHECK_MUL
:
281 tree lhs
= gimple_call_lhs (stmt
);
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
/* For UBSAN checks the result is the _BitInt itself, otherwise the
   lhs is a _Complex whose element type is inspected.  */
286 tree type
= is_ubsan
? TREE_TYPE (lhs
) : TREE_TYPE (TREE_TYPE (lhs
));
287 if (TREE_CODE (type
) != BITINT_TYPE
288 || bitint_precision_kind (type
) < bitint_prec_large
)
/* UBSAN path: require a single same-bb non-volatile store use.  */
295 if (!single_imm_use (lhs
, &use_p
, &use_stmt
)
296 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
297 || !gimple_store_p (use_stmt
)
298 || !is_gimple_assign (use_stmt
)
299 || gimple_has_volatile_ops (use_stmt
)
300 || stmt_ends_bb_p (use_stmt
))
/* Overflow-builtin path: walk all immediate uses of the complex lhs.  */
308 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
310 gimple
*g
= USE_STMT (use_p
);
311 if (is_gimple_debug (g
))
313 if (!is_gimple_assign (g
) || gimple_bb (g
) != gimple_bb (stmt
))
315 if (gimple_assign_rhs_code (g
) == REALPART_EXPR
)
321 else if (gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
/* The IMAGPART use must itself have a single same-bb cast use to a
   non-_BitInt integral type.  */
327 use_operand_p use2_p
;
329 tree lhs2
= gimple_assign_lhs (g
);
330 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2
))
332 if (!single_imm_use (lhs2
, &use2_p
, &use_stmt
)
333 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
334 || !gimple_assign_cast_p (use_stmt
))
337 lhs2
= gimple_assign_lhs (use_stmt
);
338 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2
))
339 || TREE_CODE (TREE_TYPE (lhs2
)) == BITINT_TYPE
)
/* seen tracks which parts were used; 3 means both REAL and IMAG.  */
347 return seen
== 3 ? 2 : 1;
350 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
351 comparing large/huge _BitInt types, return the comparison code and if
352 non-NULL fill in the comparison operands to *POP1 and *POP2. */
/* Per the comment above: return the comparison tree_code of STMT
   (GIMPLE_COND or comparison assignment) on large/huge _BitInt operands,
   storing the operands through POP1/POP2 when non-NULL.  NOTE(review):
   garbled listing — ERROR_MARK returns and the *pop1/*pop2 stores are
   among the missing lines; code kept verbatim.  */
355 comparison_op (gimple
*stmt
, tree
*pop1
, tree
*pop2
)
357 tree op1
= NULL_TREE
, op2
= NULL_TREE
;
358 tree_code code
= ERROR_MARK
;
359 if (gimple_code (stmt
) == GIMPLE_COND
)
/* Condition statement: operands come from the cond itself.  */
361 code
= gimple_cond_code (stmt
);
362 op1
= gimple_cond_lhs (stmt
);
363 op2
= gimple_cond_rhs (stmt
);
365 else if (is_gimple_assign (stmt
))
/* Assignment: rhs code may be a comparison.  */
367 code
= gimple_assign_rhs_code (stmt
);
368 op1
= gimple_assign_rhs1 (stmt
);
369 if (TREE_CODE_CLASS (code
) == tcc_comparison
370 || TREE_CODE_CLASS (code
) == tcc_binary
)
371 op2
= gimple_assign_rhs2 (stmt
);
373 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
/* Only large/huge BITINT_TYPE comparisons are interesting here.  */
375 tree type
= TREE_TYPE (op1
);
376 if (TREE_CODE (type
) != BITINT_TYPE
377 || bitint_precision_kind (type
) < bitint_prec_large
)
387 /* Class used during large/huge _BitInt lowering containing all the
388 state for the methods. */
/* State-carrying class for large/huge _BitInt lowering (see comment
   above).  NOTE(review): this declaration is a garbled extraction —
   the constructor head, several member declarations (m_names, m_loads,
   m_preserved, m_map, m_vars, m_limb_type, m_loc, m_lhs, m_first,
   m_var_msb, m_upwards, m_data, closing brace, ...) and trailing
   parameters of some method declarations are missing; everything is
   kept byte-identical.  */
390 struct bitint_large_huge
/* Default-constructed with all caches/bitmaps/containers empty.  */
393 : m_names (NULL
), m_loads (NULL
), m_preserved (NULL
),
394 m_single_use_names (NULL
), m_map (NULL
), m_vars (NULL
),
395 m_limb_type (NULL_TREE
), m_data (vNULL
) {}
397 ~bitint_large_huge ();
/* Helper and lowering methods; see their definitions for contracts.  */
399 void insert_before (gimple
*);
400 tree
limb_access_type (tree
, tree
);
401 tree
limb_access (tree
, tree
, tree
, bool);
402 void if_then (gimple
*, profile_probability
, edge
&, edge
&);
403 void if_then_else (gimple
*, profile_probability
, edge
&, edge
&);
404 void if_then_if_then_else (gimple
*g
, gimple
*,
405 profile_probability
, profile_probability
,
406 edge
&, edge
&, edge
&);
407 tree
handle_operand (tree
, tree
);
408 tree
prepare_data_in_out (tree
, tree
, tree
*);
409 tree
add_cast (tree
, tree
);
410 tree
handle_plus_minus (tree_code
, tree
, tree
, tree
);
411 tree
handle_lshift (tree
, tree
, tree
);
412 tree
handle_cast (tree
, tree
, tree
);
413 tree
handle_load (gimple
*, tree
);
414 tree
handle_stmt (gimple
*, tree
);
415 tree
handle_operand_addr (tree
, gimple
*, int *, int *);
416 tree
create_loop (tree
, tree
*);
417 tree
lower_mergeable_stmt (gimple
*, tree_code
&, tree
, tree
);
418 tree
lower_comparison_stmt (gimple
*, tree_code
&, tree
, tree
);
419 void lower_shift_stmt (tree
, gimple
*);
420 void lower_muldiv_stmt (tree
, gimple
*);
421 void lower_float_conv_stmt (tree
, gimple
*);
422 tree
arith_overflow_extract_bits (unsigned int, unsigned int, tree
,
424 void finish_arith_overflow (tree
, tree
, tree
, tree
, tree
, tree
, gimple
*,
426 void lower_addsub_overflow (tree
, gimple
*);
427 void lower_mul_overflow (tree
, gimple
*);
428 void lower_cplxpart_stmt (tree
, gimple
*);
429 void lower_complexexpr_stmt (gimple
*);
430 void lower_bit_query (gimple
*);
431 void lower_call (tree
, gimple
*);
432 void lower_asm (gimple
*);
433 void lower_stmt (gimple
*);
435 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
436 merged with their uses. */
438 /* Subset of those for lhs of load statements. These will be
439 cleared in m_names if the loads will be mergeable with all
442 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
443 to later passes (arguments or return values of calls). */
445 /* Subset of m_names which have a single use. As the lowering
446 can replace various original statements with their lowered
447 form even before it is done iterating over all basic blocks,
448 testing has_single_use for the purpose of emitting clobbers
449 doesn't work properly. */
450 bitmap m_single_use_names
;
451 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
454 /* Mapping of the partitions to corresponding decls. */
456 /* Unsigned integer type with limb precision. */
458 /* Its TYPE_SIZE_UNIT. */
459 unsigned HOST_WIDE_INT m_limb_size
;
460 /* Location of a gimple stmt which is being currently lowered. */
462 /* Current stmt iterator where code is being lowered currently. */
463 gimple_stmt_iterator m_gsi
;
464 /* Statement after which any clobbers should be added if non-NULL. */
465 gimple
*m_after_stmt
;
466 /* Set when creating loops to the loop header bb and its preheader. */
467 basic_block m_bb
, m_preheader_bb
;
468 /* Stmt iterator after which initialization statements should be emitted. */
469 gimple_stmt_iterator m_init_gsi
;
470 /* Decl into which a mergeable statement stores result. */
472 /* handle_operand/handle_stmt can be invoked in various ways.
474 lower_mergeable_stmt for large _BitInt calls those with constant
475 idx only, expanding to straight line code, for huge _BitInt
476 emits a loop from least significant limb upwards, where each loop
477 iteration handles 2 limbs, plus there can be up to one full limb
478 and one partial limb processed after the loop, where handle_operand
479 and/or handle_stmt are called with constant idx. m_upwards_2limb
480 is set for this case, false otherwise. m_upwards is true if it
481 is either large or huge _BitInt handled by lower_mergeable_stmt,
482 i.e. indexes always increase.
484 Another way is used by lower_comparison_stmt, which walks limbs
485 from most significant to least significant, partial limb if any
486 processed first with constant idx and then loop processing a single
487 limb per iteration with non-constant idx.
489 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
490 destination limbs are processed from most significant to least
491 significant or for RSHIFT_EXPR the other way around, in loops or
492 straight line code, but idx usually is non-constant (so from
493 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
494 handling there can access even partial limbs using non-constant
495 idx (then m_var_msb should be true, for all the other cases
496 including lower_mergeable_stmt/lower_comparison_stmt that is
497 not the case and so m_var_msb should be false.
499 m_first should be set the first time handle_operand/handle_stmt
500 is called and clear when it is called for some other limb with
501 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
502 or statement (e.g. +/-/<< with < limb_prec constant) needs some
503 state between the different calls, when m_first is true it should
504 push some trees to m_data vector and also make sure m_data_cnt is
505 incremented by how many trees were pushed, and when m_first is
506 false, it can use the m_data[m_data_cnt] etc. data or update them,
507 just needs to bump m_data_cnt by the same amount as when it was
508 called with m_first set. The toplevel calls to
509 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
510 m_data vector when setting m_first to true.
512 m_cast_conditional and m_bitfld_load are used when handling a
513 bit-field load inside of a widening cast. handle_cast sometimes
514 needs to do runtime comparisons and handle_operand only conditionally
515 or even in two separate conditional blocks for one idx (once with
516 constant index after comparing the runtime one for equality with the
517 constant). In these cases, m_cast_conditional is set to true and
518 the bit-field load then communicates its m_data_cnt to handle_cast
519 using m_bitfld_load. */
522 unsigned m_upwards_2limb
;
524 bool m_cast_conditional
;
525 unsigned m_bitfld_load
;
527 unsigned int m_data_cnt
;
/* Destructor: release the bitmaps and the coalescing var map owned by
   the lowering state.  NOTE(review): braces and the m_data release
   (if any) are missing in this garbled listing; kept verbatim.  */
530 bitint_large_huge::~bitint_large_huge ()
532 BITMAP_FREE (m_names
);
533 BITMAP_FREE (m_loads
);
534 BITMAP_FREE (m_preserved
);
535 BITMAP_FREE (m_single_use_names
);
537 delete_var_map (m_map
);
542 /* Insert gimple statement G before current location
543 and set its gimple_location. */
/* Insert G before the current iterator m_gsi, tagging it with the
   location of the statement being lowered (m_loc).  */
546 bitint_large_huge::insert_before (gimple
*g
)
548 gimple_set_location (g
, m_loc
);
549 gsi_insert_before (&m_gsi
, g
, GSI_SAME_STMT
);
552 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
553 This is normally m_limb_type, except for a partial most
554 significant limb if any. */
/* Type used to access limb IDX of TYPE: m_limb_type normally, a
   narrower nonstandard integer type for a partial most significant
   limb.  NOTE(review): garbled listing — the NULL_TREE early return
   body and the `return m_limb_type;` line are missing; kept verbatim.  */
557 bitint_large_huge::limb_access_type (tree type
, tree idx
)
559 if (type
== NULL_TREE
)
561 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
562 unsigned int prec
= TYPE_PRECISION (type
);
/* IDX must address a limb inside the precision.  */
563 gcc_assert (i
* limb_prec
< prec
);
564 if ((i
+ 1) * limb_prec
<= prec
)
/* Partial most significant limb: use its exact precision.  */
567 return build_nonstandard_integer_type (prec
% limb_prec
,
568 TYPE_UNSIGNED (type
));
571 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
572 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
/* Build a tree accessing limb IDX of VAR of BITINT_TYPE TYPE; a store
   when WRITE_P, otherwise a read (see comment above).  NOTE(review):
   garbled listing — `tree ret;`, some MEM_REF offset arithmetic, the
   insert_before call and the final return are among the missing lines;
   code kept verbatim.  */
575 bitint_large_huge::limb_access (tree type
, tree var
, tree idx
, bool write_p
)
577 tree atype
= (tree_fits_uhwi_p (idx
)
578 ? limb_access_type (type
, idx
) : m_limb_type
);
/* Case 1: decl with a constant index — direct MEM_REF at a byte
   offset, propagating volatility/side effects from VAR.  */
580 if (DECL_P (var
) && tree_fits_uhwi_p (idx
))
582 tree ptype
= build_pointer_type (strip_array_types (TREE_TYPE (var
)));
583 unsigned HOST_WIDE_INT off
= tree_to_uhwi (idx
) * m_limb_size
;
584 ret
= build2 (MEM_REF
, m_limb_type
,
585 build_fold_addr_expr (var
),
586 build_int_cst (ptype
, off
));
587 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
588 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
/* Case 2: MEM_REF base with constant index — fold the offset into
   the existing MEM_REF offset.  */
590 else if (TREE_CODE (var
) == MEM_REF
&& tree_fits_uhwi_p (idx
))
593 = build2 (MEM_REF
, m_limb_type
, TREE_OPERAND (var
, 0),
594 size_binop (PLUS_EXPR
, TREE_OPERAND (var
, 1),
595 build_int_cst (TREE_TYPE (TREE_OPERAND (var
, 1)),
598 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
599 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
600 TREE_THIS_NOTRAP (ret
) = TREE_THIS_NOTRAP (var
);
/* Case 3: variable index — view-convert VAR to an array of limbs if
   needed and use an ARRAY_REF.  */
604 var
= unshare_expr (var
);
605 if (TREE_CODE (TREE_TYPE (var
)) != ARRAY_TYPE
606 || !useless_type_conversion_p (m_limb_type
,
607 TREE_TYPE (TREE_TYPE (var
))))
609 unsigned HOST_WIDE_INT nelts
610 = CEIL (tree_to_uhwi (TYPE_SIZE (type
)), limb_prec
);
611 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
612 var
= build1 (VIEW_CONVERT_EXPR
, atype
, var
);
614 ret
= build4 (ARRAY_REF
, m_limb_type
, var
, idx
, NULL_TREE
, NULL_TREE
);
/* For reads where the access type differs (partial msb limb), load
   into an SSA name and cast to ATYPE.  */
616 if (!write_p
&& !useless_type_conversion_p (atype
, m_limb_type
))
618 gimple
*g
= gimple_build_assign (make_ssa_name (m_limb_type
), ret
);
620 ret
= gimple_assign_lhs (g
);
621 ret
= build1 (NOP_EXPR
, atype
, ret
);
626 /* Emit a half diamond,
635 or if (COND) new_bb1;
636 PROB is the probability that the condition is true.
637 Updates m_gsi to start of new_bb1.
638 Sets EDGE_TRUE to edge from new_bb1 to successor and
639 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
/* Emit the half diamond described above: split at COND, thread the
   true edge through a fresh block, make the false edge skip it, fix
   probabilities and dominators, and point m_gsi at the new block.
   NOTE(review): garbled listing — the edge_true/edge_false stores are
   among the missing lines; code kept verbatim.  */
642 bitint_large_huge::if_then (gimple
*cond
, profile_probability prob
,
643 edge
&edge_true
, edge
&edge_false
)
645 insert_before (cond
);
/* e1: cond-bb -> new_bb1, e2: new_bb1 -> join, e3: cond-bb -> join.  */
646 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
647 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
648 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
649 e1
->flags
= EDGE_TRUE_VALUE
;
650 e1
->probability
= prob
;
651 e3
->probability
= prob
.invert ();
652 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
655 m_gsi
= gsi_after_labels (e1
->dest
);
658 /* Emit a full diamond,
667 or if (COND) new_bb2; else new_bb1;
668 PROB is the probability that the condition is true.
669 Updates m_gsi to start of new_bb2.
670 Sets EDGE_TRUE to edge from new_bb1 to successor and
671 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
/* Emit the full diamond described above: COND selects between a fresh
   true block (new_bb2) and the fall-through block; m_gsi ends up in
   the fresh true block.  NOTE(review): garbled listing — the
   edge_false assignment and count updates around line 690 are missing;
   code kept verbatim.  */
674 bitint_large_huge::if_then_else (gimple
*cond
, profile_probability prob
,
675 edge
&edge_true
, edge
&edge_false
)
677 insert_before (cond
);
678 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
679 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
/* BB is the true arm; e1 becomes the false arm.  */
680 basic_block bb
= create_empty_bb (e1
->dest
);
681 add_bb_to_loop (bb
, e1
->dest
->loop_father
);
682 edge e3
= make_edge (e1
->src
, bb
, EDGE_TRUE_VALUE
);
683 e1
->flags
= EDGE_FALSE_VALUE
;
684 e3
->probability
= prob
;
685 e1
->probability
= prob
.invert ();
686 bb
->count
= e1
->src
->count
.apply_probability (prob
);
687 set_immediate_dominator (CDI_DOMINATORS
, bb
, e1
->src
);
688 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
689 edge_true
= make_single_succ_edge (bb
, e2
->dest
, EDGE_FALLTHRU
);
691 m_gsi
= gsi_after_labels (bb
);
694 /* Emit a half diamond with full diamond in it
708 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
709 PROB1 is the probability that the condition 1 is true.
710 PROB2 is the probability that the condition 2 is true.
711 Updates m_gsi to start of new_bb1.
712 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
713 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
714 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
715 If COND2 is NULL, this is equivalent to
716 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
717 EDGE_TRUE_TRUE = NULL; */
/* Emit the nested half-diamond/full-diamond shape described above.
   NOTE(review): garbled listing — the final `edge &edge_false`
   parameter, the COND2 == NULL guard, the edge_false/edge_true_true
   stores and some count updates are missing; code kept verbatim.  */
720 bitint_large_huge::if_then_if_then_else (gimple
*cond1
, gimple
*cond2
,
721 profile_probability prob1
,
722 profile_probability prob2
,
723 edge
&edge_true_true
,
724 edge
&edge_true_false
,
727 edge e2
, e3
, e4
= NULL
;
/* Outer half diamond for COND1.  */
728 if_then (cond1
, prob1
, e2
, e3
);
/* COND2 == NULL degenerates into plain if_then (see comment).  */
731 edge_true_true
= NULL
;
732 edge_true_false
= e2
;
/* Inner full diamond for COND2 inside the COND1-true block.  */
736 insert_before (cond2
);
737 e2
= split_block (gsi_bb (m_gsi
), cond2
);
738 basic_block bb
= create_empty_bb (e2
->dest
);
739 add_bb_to_loop (bb
, e2
->dest
->loop_father
);
740 e4
= make_edge (e2
->src
, bb
, EDGE_TRUE_VALUE
);
741 set_immediate_dominator (CDI_DOMINATORS
, bb
, e2
->src
);
742 e4
->probability
= prob2
;
743 e2
->flags
= EDGE_FALSE_VALUE
;
744 e2
->probability
= prob2
.invert ();
745 bb
->count
= e2
->src
->count
.apply_probability (prob2
);
746 e4
= make_single_succ_edge (bb
, e3
->dest
, EDGE_FALLTHRU
);
747 e2
= find_edge (e2
->dest
, e3
->dest
);
749 edge_true_false
= e2
;
751 m_gsi
= gsi_after_labels (e2
->src
);
754 /* Emit code to access limb IDX from OP. */
/* Emit code to access limb IDX of operand OP; the workhorse dispatch
   for SSA_NAMEs (default defs, mergeable defs, partitioned variables)
   and INTEGER_CSTs (several .rodata/phi strategies depending on
   m_upwards_2limb and the constant's minimum precision).
   NOTE(review): this is the most heavily garbled block in the listing —
   many case labels, braces, returns, insert_before calls and m_data_cnt
   bookkeeping lines are missing (large jumps in the embedded numbering);
   everything visible is kept byte-identical, comments only added.  */
757 bitint_large_huge::handle_operand (tree op
, tree idx
)
759 switch (TREE_CODE (op
))
/* SSA_NAME case: names not tracked in m_names are handled through
   their defining statement or a default-def temporary.  */
763 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
765 if (SSA_NAME_IS_DEFAULT_DEF (op
))
/* Default def: materialize an (uninitialized) default-def limb
   register once, cached in m_data.  */
769 tree v
= create_tmp_reg (m_limb_type
);
770 if (SSA_NAME_VAR (op
) && VAR_P (SSA_NAME_VAR (op
)))
772 DECL_NAME (v
) = DECL_NAME (SSA_NAME_VAR (op
));
773 DECL_SOURCE_LOCATION (v
)
774 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op
));
776 v
= get_or_create_ssa_default_def (cfun
, v
);
777 m_data
.safe_push (v
);
779 tree ret
= m_data
[m_data_cnt
];
781 if (tree_fits_uhwi_p (idx
))
783 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
784 ret
= add_cast (type
, ret
);
/* Mergeable def: recurse into the defining statement, temporarily
   switching m_loc to its location.  */
788 location_t loc_save
= m_loc
;
789 m_loc
= gimple_location (SSA_NAME_DEF_STMT (op
));
790 tree ret
= handle_stmt (SSA_NAME_DEF_STMT (op
), idx
);
/* Partitioned name: load the limb from the backing variable.  */
797 p
= var_to_partition (m_map
, op
);
798 gcc_assert (m_vars
[p
] != NULL_TREE
);
799 t
= limb_access (TREE_TYPE (op
), m_vars
[p
], idx
, false);
800 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
802 t
= gimple_assign_lhs (g
);
/* Single-use names get their backing variable clobbered right after
   the last (current) use.  */
804 && m_single_use_names
805 && m_vars
[p
] != m_lhs
807 && bitmap_bit_p (m_single_use_names
, SSA_NAME_VERSION (op
)))
809 tree clobber
= build_clobber (TREE_TYPE (m_vars
[p
]), CLOBBER_EOL
);
810 g
= gimple_build_assign (m_vars
[p
], clobber
);
811 gimple_stmt_iterator gsi
= gsi_for_stmt (m_after_stmt
);
812 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
/* INTEGER_CST case, constant index: extract the limb's bits
   directly from the wide-int representation.  */
816 if (tree_fits_uhwi_p (idx
))
818 tree c
, type
= limb_access_type (TREE_TYPE (op
), idx
);
819 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
822 m_data
.safe_push (NULL_TREE
);
823 m_data
.safe_push (NULL_TREE
);
825 if (limb_prec
!= HOST_BITS_PER_WIDE_INT
)
827 wide_int w
= wi::rshift (wi::to_wide (op
), i
* limb_prec
,
828 TYPE_SIGN (TREE_TYPE (op
)));
829 c
= wide_int_to_tree (type
,
830 wide_int::from (w
, TYPE_PRECISION (type
),
/* Past the constant's represented elements: sign-/zero-fill.  */
833 else if (i
>= TREE_INT_CST_EXT_NUNITS (op
))
834 c
= build_int_cst (type
,
835 tree_int_cst_sgn (op
) < 0 ? -1 : 0);
837 c
= build_int_cst (type
, TREE_INT_CST_ELT (op
, i
));
/* INTEGER_CST, non-constant index: choose a strategy and cache it
   in a pair of m_data slots (see the class comment on m_data).  */
842 || (m_data
[m_data_cnt
] == NULL_TREE
843 && m_data
[m_data_cnt
+ 1] == NULL_TREE
))
845 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
846 unsigned int rem
= prec
% (2 * limb_prec
);
848 unsigned min_prec
= bitint_min_cst_precision (op
, ext
);
851 m_data
.safe_push (NULL_TREE
);
852 m_data
.safe_push (NULL_TREE
);
/* All-zeros / all-ones constants need a single shared limb.  */
854 if (integer_zerop (op
))
856 tree c
= build_zero_cst (m_limb_type
);
857 m_data
[m_data_cnt
] = c
;
858 m_data
[m_data_cnt
+ 1] = c
;
860 else if (integer_all_onesp (op
))
862 tree c
= build_all_ones_cst (m_limb_type
);
863 m_data
[m_data_cnt
] = c
;
864 m_data
[m_data_cnt
+ 1] = c
;
866 else if (m_upwards_2limb
&& min_prec
<= (unsigned) limb_prec
)
868 /* Single limb constant. Use a phi with that limb from
869 the preheader edge and 0 or -1 constant from the other edge
870 and for the second limb in the loop. */
872 gcc_assert (m_first
);
875 prepare_data_in_out (fold_convert (m_limb_type
, op
), idx
, &out
);
876 g
= gimple_build_assign (m_data
[m_data_cnt
+ 1],
877 build_int_cst (m_limb_type
, ext
));
879 m_data
[m_data_cnt
+ 1] = gimple_assign_rhs1 (g
);
881 else if (min_prec
> prec
- rem
- 2 * limb_prec
)
883 /* Constant which has enough significant bits that it isn't
884 worth trying to save .rodata space by extending from smaller
888 type
= TREE_TYPE (op
);
890 /* If we have a guarantee the most significant partial limb
891 (if any) will be only accessed through handle_operand
892 with INTEGER_CST idx, we don't need to include the partial
894 type
= build_bitint_type (prec
- rem
, 1);
895 tree c
= tree_output_constant_def (fold_convert (type
, op
));
896 m_data
[m_data_cnt
] = c
;
897 m_data
[m_data_cnt
+ 1] = NULL_TREE
;
899 else if (m_upwards_2limb
)
901 /* Constant with smaller number of bits. Trade conditional
902 code for .rodata space by extending from smaller number. */
903 min_prec
= CEIL (min_prec
, 2 * limb_prec
) * (2 * limb_prec
);
904 tree type
= build_bitint_type (min_prec
, 1);
905 tree c
= tree_output_constant_def (fold_convert (type
, op
));
906 tree idx2
= make_ssa_name (sizetype
);
907 g
= gimple_build_assign (idx2
, PLUS_EXPR
, idx
, size_one_node
);
/* Runtime test: below min_prec read the .rodata constant,
   otherwise use the sign/zero extension limb.  */
909 g
= gimple_build_cond (LT_EXPR
, idx
,
910 size_int (min_prec
/ limb_prec
),
911 NULL_TREE
, NULL_TREE
);
912 edge edge_true
, edge_false
;
913 if_then (g
, (min_prec
>= (prec
- rem
) / 2
914 ? profile_probability::likely ()
915 : profile_probability::unlikely ()),
916 edge_true
, edge_false
);
917 tree c1
= limb_access (TREE_TYPE (op
), c
, idx
, false);
918 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c1
)), c1
);
920 c1
= gimple_assign_lhs (g
);
921 tree c2
= limb_access (TREE_TYPE (op
), c
, idx2
, false);
922 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c2
)), c2
);
924 c2
= gimple_assign_lhs (g
);
925 tree c3
= build_int_cst (m_limb_type
, ext
);
926 m_gsi
= gsi_after_labels (edge_true
->dest
);
/* Merge both limbs with PHIs at the join point.  */
927 m_data
[m_data_cnt
] = make_ssa_name (m_limb_type
);
928 m_data
[m_data_cnt
+ 1] = make_ssa_name (m_limb_type
);
929 gphi
*phi
= create_phi_node (m_data
[m_data_cnt
],
931 add_phi_arg (phi
, c1
, edge_true
, UNKNOWN_LOCATION
);
932 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
933 phi
= create_phi_node (m_data
[m_data_cnt
+ 1], edge_true
->dest
);
934 add_phi_arg (phi
, c2
, edge_true
, UNKNOWN_LOCATION
);
935 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
939 /* Constant with smaller number of bits. Trade conditional
940 code for .rodata space by extending from smaller number.
941 Version for loops with random access to the limbs or
943 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
945 if (min_prec
<= (unsigned) limb_prec
)
946 c
= fold_convert (m_limb_type
, op
);
949 tree type
= build_bitint_type (min_prec
, 1);
950 c
= tree_output_constant_def (fold_convert (type
, op
));
/* integer_type_node in slot +1 marks the random-access
   strategy for later calls (see below).  */
952 m_data
[m_data_cnt
] = c
;
953 m_data
[m_data_cnt
+ 1] = integer_type_node
;
/* Subsequent (non-first) calls: re-read the cached strategy.  */
955 t
= m_data
[m_data_cnt
];
956 if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
958 t
= limb_access (TREE_TYPE (op
), t
, idx
, false);
959 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
961 t
= gimple_assign_lhs (g
);
964 else if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
966 t
= limb_access (TREE_TYPE (op
), m_data
[m_data_cnt
], idx
, false);
967 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
969 t
= gimple_assign_lhs (g
);
972 t
= m_data
[m_data_cnt
+ 1];
973 if (m_data
[m_data_cnt
+ 1] == integer_type_node
)
/* Random-access strategy: recompute min_prec/ext and emit the
   conditional .rodata read vs. extension limb.  */
975 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
976 unsigned rem
= prec
% (2 * limb_prec
);
977 int ext
= tree_int_cst_sgn (op
) < 0 ? -1 : 0;
978 tree c
= m_data
[m_data_cnt
];
979 unsigned min_prec
= TYPE_PRECISION (TREE_TYPE (c
));
980 g
= gimple_build_cond (LT_EXPR
, idx
,
981 size_int (min_prec
/ limb_prec
),
982 NULL_TREE
, NULL_TREE
);
983 edge edge_true
, edge_false
;
984 if_then (g
, (min_prec
>= (prec
- rem
) / 2
985 ? profile_probability::likely ()
986 : profile_probability::unlikely ()),
987 edge_true
, edge_false
);
988 if (min_prec
> (unsigned) limb_prec
)
990 c
= limb_access (TREE_TYPE (op
), c
, idx
, false);
991 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c
)), c
);
993 c
= gimple_assign_lhs (g
);
995 tree c2
= build_int_cst (m_limb_type
, ext
);
996 m_gsi
= gsi_after_labels (edge_true
->dest
);
997 t
= make_ssa_name (m_limb_type
);
998 gphi
*phi
= create_phi_node (t
, edge_true
->dest
);
999 add_phi_arg (phi
, c
, edge_true
, UNKNOWN_LOCATION
);
1000 add_phi_arg (phi
, c2
, edge_false
, UNKNOWN_LOCATION
);
1009 /* Helper method, add a PHI node with VAL from preheader edge if
1010 inside of a loop and m_first. Keep state in a pair of m_data
/* See the comment above: when inside a loop and m_first, create a PHI
   in the loop header carrying VAL from the preheader and a fresh OUT
   name from the latch; state lives in an (in, out) pair of m_data
   slots.  NOTE(review): garbled listing — the m_first branch head, the
   `if (e2 == e1)` guard and the final return are missing; kept
   verbatim.  */
1014 bitint_large_huge::prepare_data_in_out (tree val
, tree idx
, tree
*data_out
)
/* Non-first call: hand back the cached pair.  */
1018 *data_out
= tree_fits_uhwi_p (idx
) ? NULL_TREE
: m_data
[m_data_cnt
+ 1];
1019 return m_data
[m_data_cnt
];
1022 *data_out
= NULL_TREE
;
/* Constant idx: straight line code, no PHI needed.  */
1023 if (tree_fits_uhwi_p (idx
))
1025 m_data
.safe_push (val
);
1026 m_data
.safe_push (NULL_TREE
);
/* Loop case: PHI in the header (m_bb), VAL on the preheader edge,
   OUT on the back edge.  */
1030 tree in
= make_ssa_name (TREE_TYPE (val
));
1031 gphi
*phi
= create_phi_node (in
, m_bb
);
1032 edge e1
= find_edge (m_preheader_bb
, m_bb
);
1033 edge e2
= EDGE_PRED (m_bb
, 0);
1035 e2
= EDGE_PRED (m_bb
, 1);
1036 add_phi_arg (phi
, val
, e1
, UNKNOWN_LOCATION
);
1037 tree out
= make_ssa_name (TREE_TYPE (val
));
1038 add_phi_arg (phi
, out
, e2
, UNKNOWN_LOCATION
);
1039 m_data
.safe_push (in
);
1040 m_data
.safe_push (out
);
1044 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1045 convert it without emitting any code, otherwise emit
1046 the conversion statement before the current location. */
/* Convert VAL to TYPE: fold constants directly, otherwise emit a
   NOP_EXPR assignment (see comment above).  NOTE(review): the
   insert_before call and `return lhs;` tail are missing in this
   garbled listing; kept verbatim.  */
1049 bitint_large_huge::add_cast (tree type
, tree val
)
1051 if (TREE_CODE (val
) == INTEGER_CST
)
1052 return fold_convert (type
, val
);
1054 tree lhs
= make_ssa_name (type
);
1055 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, val
);
1060 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
/* Handle one limb of a PLUS_EXPR/MINUS_EXPR: prefer the target's
   .UADDC/.USUBC pattern, fall back to .ADD/SUB_OVERFLOW chaining, and
   finally to a plain add/sub with a carry variable; the carry travels
   between limbs through prepare_data_in_out/m_data.  NOTE(review):
   garbled listing — the `tree idx` parameter, many insert_before
   calls, the carry-extraction branches and the final return are among
   the missing lines; code kept verbatim, comments only added.  */
1063 bitint_large_huge::handle_plus_minus (tree_code code
, tree rhs1
, tree rhs2
,
1066 tree lhs
, data_out
, ctype
;
1067 tree rhs1_type
= TREE_TYPE (rhs1
);
/* data_in/data_out is the carry in/out for this limb.  */
1069 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
/* Strategy 1: target has uaddc5/usubc5 — emit IFN_UADDC/IFN_USUBC.  */
1072 if (optab_handler (code
== PLUS_EXPR
? uaddc5_optab
: usubc5_optab
,
1073 TYPE_MODE (m_limb_type
)) != CODE_FOR_nothing
)
1075 ctype
= build_complex_type (m_limb_type
);
1076 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1078 if (!TYPE_UNSIGNED (rhs1_type
))
/* Normalize operands to the unsigned limb type first.  */
1080 tree type
= unsigned_type_for (rhs1_type
);
1081 rhs1
= add_cast (type
, rhs1
);
1082 rhs2
= add_cast (type
, rhs2
);
1084 rhs1
= add_cast (m_limb_type
, rhs1
);
1085 rhs2
= add_cast (m_limb_type
, rhs2
);
1087 lhs
= make_ssa_name (ctype
);
1088 g
= gimple_build_call_internal (code
== PLUS_EXPR
1089 ? IFN_UADDC
: IFN_USUBC
,
1090 3, rhs1
, rhs2
, data_in
);
1091 gimple_call_set_lhs (g
, lhs
);
/* Carry out = IMAGPART of the complex result.  */
1093 if (data_out
== NULL_TREE
)
1094 data_out
= make_ssa_name (m_limb_type
);
1095 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1096 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
/* Strategy 2: limb-typed operands — chain two .ADD/SUB_OVERFLOWs,
   the second one adding the incoming carry.  */
1099 else if (types_compatible_p (rhs1_type
, m_limb_type
))
1101 ctype
= build_complex_type (m_limb_type
);
1102 lhs
= make_ssa_name (ctype
);
1103 g
= gimple_build_call_internal (code
== PLUS_EXPR
1104 ? IFN_ADD_OVERFLOW
: IFN_SUB_OVERFLOW
,
1106 gimple_call_set_lhs (g
, lhs
);
1108 if (data_out
== NULL_TREE
)
1109 data_out
= make_ssa_name (m_limb_type
);
1110 if (!integer_zerop (data_in
))
/* Non-zero incoming carry: apply it with a second overflow call
   and sum both overflow flags into the outgoing carry.  */
1112 rhs1
= make_ssa_name (m_limb_type
);
1113 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1114 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1116 rhs2
= make_ssa_name (m_limb_type
);
1117 g
= gimple_build_assign (rhs2
, IMAGPART_EXPR
,
1118 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1120 lhs
= make_ssa_name (ctype
);
1121 g
= gimple_build_call_internal (code
== PLUS_EXPR
1125 gimple_call_set_lhs (g
, lhs
);
1127 data_in
= make_ssa_name (m_limb_type
);
1128 g
= gimple_build_assign (data_in
, IMAGPART_EXPR
,
1129 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1131 g
= gimple_build_assign (data_out
, PLUS_EXPR
, rhs2
, data_in
);
/* Zero incoming carry: the overflow flag is the carry out.  */
1136 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1137 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
/* Strategy 3 (partial most significant limb): plain arithmetic with
   the carry added in; no carry out is produced.  */
1143 tree in
= add_cast (rhs1_type
, data_in
);
1144 lhs
= make_ssa_name (rhs1_type
);
1145 g
= gimple_build_assign (lhs
, code
, rhs1
, rhs2
);
1147 rhs1
= make_ssa_name (rhs1_type
);
1148 g
= gimple_build_assign (rhs1
, code
, lhs
, in
);
1150 m_data
[m_data_cnt
] = NULL_TREE
;
/* Common tail: the limb result is the REALPART, cast back to the
   operand type if needed; stash the carry in m_data.  */
1154 rhs1
= make_ssa_name (m_limb_type
);
1155 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1156 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1158 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1159 rhs1
= add_cast (rhs1_type
, rhs1
);
1160 m_data
[m_data_cnt
] = data_out
;
1165 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1166 count in [0, limb_prec - 1] range. */
1169 bitint_large_huge::handle_lshift (tree rhs1
, tree rhs2
, tree idx
)
1171 unsigned HOST_WIDE_INT cnt
= tree_to_uhwi (rhs2
);
1172 gcc_checking_assert (cnt
< (unsigned) limb_prec
);
1176 tree lhs
, data_out
, rhs1_type
= TREE_TYPE (rhs1
);
1178 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1181 if (!integer_zerop (data_in
))
1183 lhs
= make_ssa_name (m_limb_type
);
1184 g
= gimple_build_assign (lhs
, RSHIFT_EXPR
, data_in
,
1185 build_int_cst (unsigned_type_node
,
1188 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1189 lhs
= add_cast (rhs1_type
, lhs
);
1192 if (types_compatible_p (rhs1_type
, m_limb_type
))
1194 if (data_out
== NULL_TREE
)
1195 data_out
= make_ssa_name (m_limb_type
);
1196 g
= gimple_build_assign (data_out
, rhs1
);
1199 if (cnt
< (unsigned) TYPE_PRECISION (rhs1_type
))
1201 lhs
= make_ssa_name (rhs1_type
);
1202 g
= gimple_build_assign (lhs
, LSHIFT_EXPR
, rhs1
, rhs2
);
1204 if (!integer_zerop (data_in
))
1207 lhs
= make_ssa_name (rhs1_type
);
1208 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, rhs1
, data_in
);
1214 m_data
[m_data_cnt
] = data_out
;
1219 /* Helper function for handle_stmt method, handle an integral
1220 to integral conversion. */
1223 bitint_large_huge::handle_cast (tree lhs_type
, tree rhs1
, tree idx
)
1225 tree rhs_type
= TREE_TYPE (rhs1
);
1227 if (TREE_CODE (rhs1
) == SSA_NAME
1228 && TREE_CODE (lhs_type
) == BITINT_TYPE
1229 && TREE_CODE (rhs_type
) == BITINT_TYPE
1230 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1231 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
)
1233 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
)
1234 /* If lhs has bigger precision than rhs, we can use
1235 the simple case only if there is a guarantee that
1236 the most significant limb is handled in straight
1237 line code. If m_var_msb (on left shifts) or
1238 if m_upwards_2limb * limb_prec is equal to
1239 lhs precision that is not the case. */
1241 && tree_int_cst_equal (TYPE_SIZE (rhs_type
),
1242 TYPE_SIZE (lhs_type
))
1243 && (!m_upwards_2limb
1244 || (m_upwards_2limb
* limb_prec
1245 < TYPE_PRECISION (lhs_type
)))))
1247 rhs1
= handle_operand (rhs1
, idx
);
1248 if (tree_fits_uhwi_p (idx
))
1250 tree type
= limb_access_type (lhs_type
, idx
);
1251 if (!types_compatible_p (type
, TREE_TYPE (rhs1
)))
1252 rhs1
= add_cast (type
, rhs1
);
1257 /* Indexes lower than this don't need any special processing. */
1258 unsigned low
= ((unsigned) TYPE_PRECISION (rhs_type
)
1259 - !TYPE_UNSIGNED (rhs_type
)) / limb_prec
;
1260 /* Indexes >= than this always contain an extension. */
1261 unsigned high
= CEIL ((unsigned) TYPE_PRECISION (rhs_type
), limb_prec
);
1262 bool save_first
= m_first
;
1265 m_data
.safe_push (NULL_TREE
);
1266 m_data
.safe_push (NULL_TREE
);
1267 m_data
.safe_push (NULL_TREE
);
1268 if (TYPE_UNSIGNED (rhs_type
))
1269 /* No need to keep state between iterations. */
1271 else if (m_upwards
&& !m_upwards_2limb
)
1272 /* We need to keep state between iterations, but
1273 not within any loop, everything is straight line
1274 code with only increasing indexes. */
1276 else if (!m_upwards_2limb
)
1278 unsigned save_data_cnt
= m_data_cnt
;
1279 gimple_stmt_iterator save_gsi
= m_gsi
;
1281 if (gsi_end_p (m_gsi
))
1282 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1285 m_data_cnt
= save_data_cnt
+ 3;
1286 t
= handle_operand (rhs1
, size_int (low
));
1288 m_data
[save_data_cnt
+ 2]
1289 = build_int_cst (NULL_TREE
, m_data_cnt
);
1290 m_data_cnt
= save_data_cnt
;
1291 t
= add_cast (signed_type_for (m_limb_type
), t
);
1292 tree lpm1
= build_int_cst (unsigned_type_node
, limb_prec
- 1);
1293 tree n
= make_ssa_name (TREE_TYPE (t
));
1294 g
= gimple_build_assign (n
, RSHIFT_EXPR
, t
, lpm1
);
1296 m_data
[save_data_cnt
+ 1] = add_cast (m_limb_type
, n
);
1298 if (gsi_end_p (m_init_gsi
))
1299 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1301 gsi_prev (&m_init_gsi
);
1304 else if (m_upwards_2limb
* limb_prec
< TYPE_PRECISION (rhs_type
))
1305 /* We need to keep state between iterations, but
1306 fortunately not within the loop, only afterwards. */
1311 m_data
.truncate (m_data_cnt
);
1312 prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
1313 m_data
.safe_push (NULL_TREE
);
1317 unsigned save_data_cnt
= m_data_cnt
;
1319 if (!tree_fits_uhwi_p (idx
))
1322 && (m_upwards_2limb
* limb_prec
1323 <= ((unsigned) TYPE_PRECISION (rhs_type
)
1324 - !TYPE_UNSIGNED (rhs_type
))))
1326 rhs1
= handle_operand (rhs1
, idx
);
1328 m_data
[save_data_cnt
+ 2]
1329 = build_int_cst (NULL_TREE
, m_data_cnt
);
1330 m_first
= save_first
;
1333 bool single_comparison
1334 = low
== high
|| (m_upwards_2limb
&& (low
& 1) == m_first
);
1335 g
= gimple_build_cond (single_comparison
? LT_EXPR
: LE_EXPR
,
1336 idx
, size_int (low
), NULL_TREE
, NULL_TREE
);
1337 edge edge_true_true
, edge_true_false
, edge_false
;
1338 if_then_if_then_else (g
, (single_comparison
? NULL
1339 : gimple_build_cond (EQ_EXPR
, idx
,
1343 profile_probability::likely (),
1344 profile_probability::unlikely (),
1345 edge_true_true
, edge_true_false
, edge_false
);
1346 bool save_cast_conditional
= m_cast_conditional
;
1347 m_cast_conditional
= true;
1349 tree t1
= handle_operand (rhs1
, idx
), t2
= NULL_TREE
;
1351 m_data
[save_data_cnt
+ 2]
1352 = build_int_cst (NULL_TREE
, m_data_cnt
);
1353 tree ext
= NULL_TREE
;
1354 tree bitfld
= NULL_TREE
;
1355 if (!single_comparison
)
1357 m_gsi
= gsi_after_labels (edge_true_true
->src
);
1359 m_data_cnt
= save_data_cnt
+ 3;
1362 bitfld
= m_data
[m_bitfld_load
];
1363 m_data
[m_bitfld_load
] = m_data
[m_bitfld_load
+ 2];
1366 t2
= handle_operand (rhs1
, size_int (low
));
1367 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t2
)))
1368 t2
= add_cast (m_limb_type
, t2
);
1369 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards_2limb
)
1371 ext
= add_cast (signed_type_for (m_limb_type
), t2
);
1372 tree lpm1
= build_int_cst (unsigned_type_node
,
1374 tree n
= make_ssa_name (TREE_TYPE (ext
));
1375 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1377 ext
= add_cast (m_limb_type
, n
);
1381 if (TYPE_UNSIGNED (rhs_type
))
1382 t3
= build_zero_cst (m_limb_type
);
1383 else if (m_upwards_2limb
&& (save_first
|| ext
!= NULL_TREE
))
1384 t3
= m_data
[save_data_cnt
];
1386 t3
= m_data
[save_data_cnt
+ 1];
1387 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
1388 t
= make_ssa_name (m_limb_type
);
1389 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
1390 add_phi_arg (phi
, t1
, edge_true_false
, UNKNOWN_LOCATION
);
1391 add_phi_arg (phi
, t3
, edge_false
, UNKNOWN_LOCATION
);
1393 add_phi_arg (phi
, t2
, edge_true_true
, UNKNOWN_LOCATION
);
1396 tree t4
= make_ssa_name (m_limb_type
);
1397 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1398 add_phi_arg (phi
, build_zero_cst (m_limb_type
), edge_true_false
,
1400 add_phi_arg (phi
, m_data
[save_data_cnt
], edge_false
,
1402 add_phi_arg (phi
, ext
, edge_true_true
, UNKNOWN_LOCATION
);
1403 g
= gimple_build_assign (m_data
[save_data_cnt
+ 1], t4
);
1410 t4
= m_data
[m_bitfld_load
+ 1];
1412 t4
= make_ssa_name (m_limb_type
);
1413 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1415 edge_true_true
? bitfld
: m_data
[m_bitfld_load
],
1416 edge_true_false
, UNKNOWN_LOCATION
);
1417 add_phi_arg (phi
, m_data
[m_bitfld_load
+ 2],
1418 edge_false
, UNKNOWN_LOCATION
);
1420 add_phi_arg (phi
, m_data
[m_bitfld_load
], edge_true_true
,
1422 m_data
[m_bitfld_load
] = t4
;
1423 m_data
[m_bitfld_load
+ 2] = t4
;
1426 m_cast_conditional
= save_cast_conditional
;
1427 m_first
= save_first
;
1432 if (tree_to_uhwi (idx
) < low
)
1434 t
= handle_operand (rhs1
, idx
);
1436 m_data
[save_data_cnt
+ 2]
1437 = build_int_cst (NULL_TREE
, m_data_cnt
);
1439 else if (tree_to_uhwi (idx
) < high
)
1441 t
= handle_operand (rhs1
, size_int (low
));
1443 m_data
[save_data_cnt
+ 2]
1444 = build_int_cst (NULL_TREE
, m_data_cnt
);
1445 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t
)))
1446 t
= add_cast (m_limb_type
, t
);
1447 tree ext
= NULL_TREE
;
1448 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards
)
1450 ext
= add_cast (signed_type_for (m_limb_type
), t
);
1451 tree lpm1
= build_int_cst (unsigned_type_node
,
1453 tree n
= make_ssa_name (TREE_TYPE (ext
));
1454 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1456 ext
= add_cast (m_limb_type
, n
);
1457 m_data
[save_data_cnt
+ 1] = ext
;
1462 if (TYPE_UNSIGNED (rhs_type
) && m_first
)
1464 handle_operand (rhs1
, size_zero_node
);
1465 m_data
[save_data_cnt
+ 2]
1466 = build_int_cst (NULL_TREE
, m_data_cnt
);
1469 m_data_cnt
= tree_to_uhwi (m_data
[save_data_cnt
+ 2]);
1470 if (TYPE_UNSIGNED (rhs_type
))
1471 t
= build_zero_cst (m_limb_type
);
1473 t
= m_data
[save_data_cnt
+ 1];
1475 tree type
= limb_access_type (lhs_type
, idx
);
1476 if (!useless_type_conversion_p (type
, m_limb_type
))
1477 t
= add_cast (type
, t
);
1478 m_first
= save_first
;
1482 else if (TREE_CODE (lhs_type
) == BITINT_TYPE
1483 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1484 && INTEGRAL_TYPE_P (rhs_type
))
1486 /* Add support for 3 or more limbs filled in from normal integral
1487 type if this assert fails. If no target chooses limb mode smaller
1488 than half of largest supported normal integral type, this will not
1490 gcc_assert (TYPE_PRECISION (rhs_type
) <= 2 * limb_prec
);
1491 tree r1
= NULL_TREE
, r2
= NULL_TREE
, rext
= NULL_TREE
;
1494 gimple_stmt_iterator save_gsi
= m_gsi
;
1496 if (gsi_end_p (m_gsi
))
1497 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1500 if (TREE_CODE (rhs_type
) == BITINT_TYPE
1501 && bitint_precision_kind (rhs_type
) == bitint_prec_middle
)
1503 tree type
= NULL_TREE
;
1504 rhs1
= maybe_cast_middle_bitint (&m_gsi
, rhs1
, type
);
1505 rhs_type
= TREE_TYPE (rhs1
);
1508 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
1509 r1
= add_cast (m_limb_type
, rhs1
);
1510 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1512 g
= gimple_build_assign (make_ssa_name (rhs_type
),
1514 build_int_cst (unsigned_type_node
,
1517 r2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1519 if (TYPE_UNSIGNED (rhs_type
))
1520 rext
= build_zero_cst (m_limb_type
);
1523 rext
= add_cast (signed_type_for (m_limb_type
), r2
? r2
: r1
);
1524 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rext
)),
1526 build_int_cst (unsigned_type_node
,
1529 rext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1532 if (gsi_end_p (m_init_gsi
))
1533 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1535 gsi_prev (&m_init_gsi
);
1539 if (m_upwards_2limb
)
1544 prepare_data_in_out (r1
, idx
, &out1
);
1545 g
= gimple_build_assign (m_data
[m_data_cnt
+ 1], rext
);
1547 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1549 prepare_data_in_out (r2
, idx
, &out2
);
1550 g
= gimple_build_assign (m_data
[m_data_cnt
+ 3], rext
);
1554 m_data
[m_data_cnt
+ 1] = t
;
1557 m_data
[m_data_cnt
+ 1] = rext
;
1558 m_data
.safe_push (rext
);
1559 t
= m_data
[m_data_cnt
];
1561 else if (!tree_fits_uhwi_p (idx
))
1562 t
= m_data
[m_data_cnt
+ 1];
1565 tree type
= limb_access_type (lhs_type
, idx
);
1566 t
= m_data
[m_data_cnt
+ 2];
1567 if (!useless_type_conversion_p (type
, m_limb_type
))
1568 t
= add_cast (type
, t
);
1575 m_data
.safe_push (r1
);
1576 m_data
.safe_push (r2
);
1577 m_data
.safe_push (rext
);
1579 if (tree_fits_uhwi_p (idx
))
1581 tree type
= limb_access_type (lhs_type
, idx
);
1582 if (integer_zerop (idx
))
1583 t
= m_data
[m_data_cnt
];
1584 else if (TYPE_PRECISION (rhs_type
) > limb_prec
1585 && integer_onep (idx
))
1586 t
= m_data
[m_data_cnt
+ 1];
1588 t
= m_data
[m_data_cnt
+ 2];
1589 if (!useless_type_conversion_p (type
, m_limb_type
))
1590 t
= add_cast (type
, t
);
1594 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
1595 NULL_TREE
, NULL_TREE
);
1596 edge e2
, e3
, e4
= NULL
;
1597 if_then (g
, profile_probability::likely (), e2
, e3
);
1598 if (m_data
[m_data_cnt
+ 1])
1600 g
= gimple_build_cond (EQ_EXPR
, idx
, size_one_node
,
1601 NULL_TREE
, NULL_TREE
);
1603 edge e5
= split_block (gsi_bb (m_gsi
), g
);
1604 e4
= make_edge (e5
->src
, e2
->dest
, EDGE_TRUE_VALUE
);
1605 e2
= find_edge (e5
->dest
, e2
->dest
);
1606 e4
->probability
= profile_probability::unlikely ();
1607 e5
->flags
= EDGE_FALSE_VALUE
;
1608 e5
->probability
= e4
->probability
.invert ();
1610 m_gsi
= gsi_after_labels (e2
->dest
);
1611 t
= make_ssa_name (m_limb_type
);
1612 gphi
*phi
= create_phi_node (t
, e2
->dest
);
1613 add_phi_arg (phi
, m_data
[m_data_cnt
+ 2], e2
, UNKNOWN_LOCATION
);
1614 add_phi_arg (phi
, m_data
[m_data_cnt
], e3
, UNKNOWN_LOCATION
);
1616 add_phi_arg (phi
, m_data
[m_data_cnt
+ 1], e4
, UNKNOWN_LOCATION
);
1623 /* Helper function for handle_stmt method, handle a load from memory. */
1626 bitint_large_huge::handle_load (gimple
*stmt
, tree idx
)
1628 tree rhs1
= gimple_assign_rhs1 (stmt
);
1629 tree rhs_type
= TREE_TYPE (rhs1
);
1630 bool eh
= stmt_ends_bb_p (stmt
);
1631 edge eh_edge
= NULL
;
1637 basic_block bb
= gimple_bb (stmt
);
1639 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
1640 if (eh_edge
->flags
& EDGE_EH
)
1644 if (TREE_CODE (rhs1
) == COMPONENT_REF
1645 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
1647 tree fld
= TREE_OPERAND (rhs1
, 1);
1648 /* For little-endian, we can allow as inputs bit-fields
1649 which start at a limb boundary. */
1650 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
1651 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
1652 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % limb_prec
) == 0)
1654 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1655 handle it normally for now. */
1656 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
1658 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
1659 poly_int64 bitoffset
;
1660 poly_uint64 field_offset
, repr_offset
;
1661 bool var_field_off
= false;
1662 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
1663 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
1664 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
1668 var_field_off
= true;
1670 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
1671 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
1672 tree nrhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1673 TREE_OPERAND (rhs1
, 0), repr
,
1674 var_field_off
? TREE_OPERAND (rhs1
, 2) : NULL_TREE
);
1675 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
1676 unsigned bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
1677 unsigned bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
1682 gimple_stmt_iterator save_gsi
= m_gsi
;
1684 if (gsi_end_p (m_gsi
))
1685 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1688 tree t
= limb_access (rhs_type
, nrhs1
, size_int (bo_idx
), true);
1689 tree iv
= make_ssa_name (m_limb_type
);
1690 g
= gimple_build_assign (iv
, t
);
1694 maybe_duplicate_eh_stmt (g
, stmt
);
1697 edge e
= split_block (gsi_bb (m_gsi
), g
);
1698 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1699 = profile_probability::very_unlikely ();
1700 m_gsi
= gsi_after_labels (e
->dest
);
1701 if (gsi_bb (save_gsi
) == e
->src
)
1703 if (gsi_end_p (save_gsi
))
1704 save_gsi
= gsi_end_bb (e
->dest
);
1706 save_gsi
= gsi_for_stmt (gsi_stmt (save_gsi
));
1708 if (m_preheader_bb
== e
->src
)
1709 m_preheader_bb
= e
->dest
;
1713 if (gsi_end_p (m_init_gsi
))
1714 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1716 gsi_prev (&m_init_gsi
);
1719 prepare_data_in_out (iv
, idx
, &out
);
1720 out
= m_data
[m_data_cnt
];
1721 m_data
.safe_push (out
);
1725 m_data
.safe_push (NULL_TREE
);
1726 m_data
.safe_push (NULL_TREE
);
1727 m_data
.safe_push (NULL_TREE
);
1731 tree nidx0
= NULL_TREE
, nidx1
;
1732 tree iv
= m_data
[m_data_cnt
];
1733 if (m_cast_conditional
&& iv
)
1735 gcc_assert (!m_bitfld_load
);
1736 m_bitfld_load
= m_data_cnt
;
1738 if (tree_fits_uhwi_p (idx
))
1740 unsigned prec
= TYPE_PRECISION (rhs_type
);
1741 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
1742 gcc_assert (i
* limb_prec
< prec
);
1743 nidx1
= size_int (i
+ bo_idx
+ 1);
1744 if ((i
+ 1) * limb_prec
> prec
)
1747 if (prec
+ bo_bit
<= (unsigned) limb_prec
)
1751 nidx0
= size_int (i
+ bo_idx
);
1761 nidx0
= make_ssa_name (sizetype
);
1762 g
= gimple_build_assign (nidx0
, PLUS_EXPR
, idx
,
1767 nidx1
= make_ssa_name (sizetype
);
1768 g
= gimple_build_assign (nidx1
, PLUS_EXPR
, idx
,
1769 size_int (bo_idx
+ 1));
1773 tree iv2
= NULL_TREE
;
1776 tree t
= limb_access (rhs_type
, nrhs1
, nidx0
, true);
1777 iv
= make_ssa_name (m_limb_type
);
1778 g
= gimple_build_assign (iv
, t
);
1784 bool conditional
= m_var_msb
&& !tree_fits_uhwi_p (idx
);
1785 unsigned prec
= TYPE_PRECISION (rhs_type
);
1788 if ((prec
% limb_prec
) == 0
1789 || ((prec
% limb_prec
) + bo_bit
> (unsigned) limb_prec
))
1790 conditional
= false;
1792 edge edge_true
= NULL
, edge_false
= NULL
;
1795 g
= gimple_build_cond (NE_EXPR
, idx
,
1796 size_int (prec
/ limb_prec
),
1797 NULL_TREE
, NULL_TREE
);
1798 if_then (g
, profile_probability::likely (),
1799 edge_true
, edge_false
);
1801 tree t
= limb_access (rhs_type
, nrhs1
, nidx1
, true);
1805 && !tree_fits_uhwi_p (idx
))
1806 iv2
= m_data
[m_data_cnt
+ 1];
1808 iv2
= make_ssa_name (m_limb_type
);
1809 g
= gimple_build_assign (iv2
, t
);
1813 maybe_duplicate_eh_stmt (g
, stmt
);
1816 edge e
= split_block (gsi_bb (m_gsi
), g
);
1817 m_gsi
= gsi_after_labels (e
->dest
);
1818 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1819 = profile_probability::very_unlikely ();
1824 tree iv3
= make_ssa_name (m_limb_type
);
1826 edge_true
= find_edge (gsi_bb (m_gsi
), edge_false
->dest
);
1827 gphi
*phi
= create_phi_node (iv3
, edge_true
->dest
);
1828 add_phi_arg (phi
, iv2
, edge_true
, UNKNOWN_LOCATION
);
1829 add_phi_arg (phi
, build_zero_cst (m_limb_type
),
1830 edge_false
, UNKNOWN_LOCATION
);
1831 m_gsi
= gsi_after_labels (edge_true
->dest
);
1834 g
= gimple_build_assign (make_ssa_name (m_limb_type
), RSHIFT_EXPR
,
1835 iv
, build_int_cst (unsigned_type_node
, bo_bit
));
1837 iv
= gimple_assign_lhs (g
);
1840 g
= gimple_build_assign (make_ssa_name (m_limb_type
), LSHIFT_EXPR
,
1841 iv2
, build_int_cst (unsigned_type_node
,
1842 limb_prec
- bo_bit
));
1844 g
= gimple_build_assign (make_ssa_name (m_limb_type
), BIT_IOR_EXPR
,
1845 gimple_assign_lhs (g
), iv
);
1847 iv
= gimple_assign_lhs (g
);
1848 if (m_data
[m_data_cnt
])
1849 m_data
[m_data_cnt
] = iv2
;
1851 if (tree_fits_uhwi_p (idx
))
1853 tree atype
= limb_access_type (rhs_type
, idx
);
1854 if (!useless_type_conversion_p (atype
, TREE_TYPE (iv
)))
1855 iv
= add_cast (atype
, iv
);
1862 /* Use write_p = true for loads with EH edges to make
1863 sure limb_access doesn't add a cast as separate
1864 statement after it. */
1865 rhs1
= limb_access (rhs_type
, rhs1
, idx
, eh
);
1866 tree ret
= make_ssa_name (TREE_TYPE (rhs1
));
1867 g
= gimple_build_assign (ret
, rhs1
);
1871 maybe_duplicate_eh_stmt (g
, stmt
);
1874 edge e
= split_block (gsi_bb (m_gsi
), g
);
1875 m_gsi
= gsi_after_labels (e
->dest
);
1876 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1877 = profile_probability::very_unlikely ();
1879 if (tree_fits_uhwi_p (idx
))
1881 tree atype
= limb_access_type (rhs_type
, idx
);
1882 if (!useless_type_conversion_p (atype
, TREE_TYPE (rhs1
)))
1883 ret
= add_cast (atype
, ret
);
1889 /* Return a limb IDX from a mergeable statement STMT. */
1892 bitint_large_huge::handle_stmt (gimple
*stmt
, tree idx
)
1894 tree lhs
, rhs1
, rhs2
= NULL_TREE
;
1896 switch (gimple_code (stmt
))
1899 if (gimple_assign_load_p (stmt
))
1900 return handle_load (stmt
, idx
);
1901 switch (gimple_assign_rhs_code (stmt
))
1906 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
1909 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1910 lhs
= make_ssa_name (TREE_TYPE (rhs1
));
1911 g
= gimple_build_assign (lhs
, gimple_assign_rhs_code (stmt
),
1917 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1918 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
1919 return handle_plus_minus (gimple_assign_rhs_code (stmt
),
1922 rhs2
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1923 rhs1
= build_zero_cst (TREE_TYPE (rhs2
));
1924 return handle_plus_minus (MINUS_EXPR
, rhs1
, rhs2
, idx
);
1926 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt
),
1928 gimple_assign_rhs2 (stmt
), idx
);
1931 return handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1933 case VIEW_CONVERT_EXPR
:
1934 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
1935 gimple_assign_rhs1 (stmt
), idx
);
1946 /* Return minimum precision of OP at STMT.
1947 Positive value is minimum precision above which all bits
1948 are zero, negative means all bits above negation of the
1949 value are copies of the sign bit. */
1952 range_to_prec (tree op
, gimple
*stmt
)
1956 tree type
= TREE_TYPE (op
);
1957 unsigned int prec
= TYPE_PRECISION (type
);
1960 || !get_range_query (cfun
)->range_of_expr (r
, op
, stmt
)
1961 || r
.undefined_p ())
1963 if (TYPE_UNSIGNED (type
))
1969 if (!TYPE_UNSIGNED (TREE_TYPE (op
)))
1971 w
= r
.lower_bound ();
1974 int min_prec1
= wi::min_precision (w
, SIGNED
);
1975 w
= r
.upper_bound ();
1976 int min_prec2
= wi::min_precision (w
, SIGNED
);
1977 int min_prec
= MAX (min_prec1
, min_prec2
);
1978 return MIN (-min_prec
, -2);
1982 w
= r
.upper_bound ();
1983 int min_prec
= wi::min_precision (w
, UNSIGNED
);
1984 return MAX (min_prec
, 1);
1987 /* Return address of the first limb of OP and write into *PREC
1988 its precision. If positive, the operand is zero extended
1989 from that precision, if it is negative, the operand is sign-extended
1990 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
1991 otherwise *PREC_STORED is prec from the innermost call without
1992 range optimizations. */
1995 bitint_large_huge::handle_operand_addr (tree op
, gimple
*stmt
,
1996 int *prec_stored
, int *prec
)
1999 location_t loc_save
= m_loc
;
2000 if ((TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
2001 || bitint_precision_kind (TREE_TYPE (op
)) < bitint_prec_large
)
2002 && TREE_CODE (op
) != INTEGER_CST
)
2005 *prec
= range_to_prec (op
, stmt
);
2006 bitint_prec_kind kind
= bitint_prec_small
;
2007 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op
)));
2008 if (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
)
2009 kind
= bitint_precision_kind (TREE_TYPE (op
));
2010 if (kind
== bitint_prec_middle
)
2012 tree type
= NULL_TREE
;
2013 op
= maybe_cast_middle_bitint (&m_gsi
, op
, type
);
2015 tree op_type
= TREE_TYPE (op
);
2016 unsigned HOST_WIDE_INT nelts
2017 = CEIL (TYPE_PRECISION (op_type
), limb_prec
);
2018 /* Add support for 3 or more limbs filled in from normal
2019 integral type if this assert fails. If no target chooses
2020 limb mode smaller than half of largest supported normal
2021 integral type, this will not be needed. */
2022 gcc_assert (nelts
<= 2);
2024 *prec_stored
= (TYPE_UNSIGNED (op_type
)
2025 ? TYPE_PRECISION (op_type
)
2026 : -TYPE_PRECISION (op_type
));
2027 if (*prec
<= limb_prec
&& *prec
>= -limb_prec
)
2032 if (TYPE_UNSIGNED (op_type
))
2034 if (*prec_stored
> limb_prec
)
2035 *prec_stored
= limb_prec
;
2037 else if (*prec_stored
< -limb_prec
)
2038 *prec_stored
= -limb_prec
;
2041 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
2042 tree var
= create_tmp_var (atype
);
2044 if (!useless_type_conversion_p (m_limb_type
, op_type
))
2045 t1
= add_cast (m_limb_type
, t1
);
2046 tree v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_zero_node
,
2047 NULL_TREE
, NULL_TREE
);
2048 gimple
*g
= gimple_build_assign (v
, t1
);
2052 tree lp
= build_int_cst (unsigned_type_node
, limb_prec
);
2053 g
= gimple_build_assign (make_ssa_name (op_type
),
2054 RSHIFT_EXPR
, op
, lp
);
2056 tree t2
= gimple_assign_lhs (g
);
2057 t2
= add_cast (m_limb_type
, t2
);
2058 v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_one_node
,
2059 NULL_TREE
, NULL_TREE
);
2060 g
= gimple_build_assign (v
, t2
);
2063 tree ret
= build_fold_addr_expr (var
);
2064 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2066 tree clobber
= build_clobber (atype
, CLOBBER_EOL
);
2067 g
= gimple_build_assign (var
, clobber
);
2068 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2073 switch (TREE_CODE (op
))
2077 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
2079 gimple
*g
= SSA_NAME_DEF_STMT (op
);
2081 m_loc
= gimple_location (g
);
2082 if (gimple_assign_load_p (g
))
2084 *prec
= range_to_prec (op
, NULL
);
2086 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2087 ? TYPE_PRECISION (TREE_TYPE (op
))
2088 : -TYPE_PRECISION (TREE_TYPE (op
)));
2089 ret
= build_fold_addr_expr (gimple_assign_rhs1 (g
));
2090 ret
= force_gimple_operand_gsi (&m_gsi
, ret
, true,
2091 NULL_TREE
, true, GSI_SAME_STMT
);
2093 else if (gimple_code (g
) == GIMPLE_NOP
)
2095 *prec
= TYPE_UNSIGNED (TREE_TYPE (op
)) ? limb_prec
: -limb_prec
;
2097 *prec_stored
= *prec
;
2098 tree var
= create_tmp_var (m_limb_type
);
2099 TREE_ADDRESSABLE (var
) = 1;
2100 ret
= build_fold_addr_expr (var
);
2101 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2103 tree clobber
= build_clobber (m_limb_type
, CLOBBER_EOL
);
2104 g
= gimple_build_assign (var
, clobber
);
2105 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2110 gcc_assert (gimple_assign_cast_p (g
));
2111 tree rhs1
= gimple_assign_rhs1 (g
);
2112 bitint_prec_kind kind
= bitint_prec_small
;
2113 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)));
2114 if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
)
2115 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2116 if (kind
>= bitint_prec_large
)
2118 tree lhs_type
= TREE_TYPE (op
);
2119 tree rhs_type
= TREE_TYPE (rhs1
);
2120 int prec_stored_val
= 0;
2121 ret
= handle_operand_addr (rhs1
, g
, &prec_stored_val
, prec
);
2122 if (TYPE_PRECISION (lhs_type
) > TYPE_PRECISION (rhs_type
))
2124 if (TYPE_UNSIGNED (lhs_type
)
2125 && !TYPE_UNSIGNED (rhs_type
))
2126 gcc_assert (*prec
>= 0 || prec_stored
== NULL
);
2130 if (*prec
> 0 && *prec
< TYPE_PRECISION (lhs_type
))
2132 else if (TYPE_UNSIGNED (lhs_type
))
2134 gcc_assert (*prec
> 0
2135 || prec_stored_val
> 0
2136 || (-prec_stored_val
2137 >= TYPE_PRECISION (lhs_type
)));
2138 *prec
= TYPE_PRECISION (lhs_type
);
2140 else if (*prec
< 0 && -*prec
< TYPE_PRECISION (lhs_type
))
2143 *prec
= -TYPE_PRECISION (lhs_type
);
2158 int p
= var_to_partition (m_map
, op
);
2159 gcc_assert (m_vars
[p
] != NULL_TREE
);
2160 *prec
= range_to_prec (op
, stmt
);
2162 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2163 ? TYPE_PRECISION (TREE_TYPE (op
))
2164 : -TYPE_PRECISION (TREE_TYPE (op
)));
2165 return build_fold_addr_expr (m_vars
[p
]);
2168 unsigned int min_prec
, mp
;
2170 w
= wi::to_wide (op
);
2171 if (tree_int_cst_sgn (op
) >= 0)
2173 min_prec
= wi::min_precision (w
, UNSIGNED
);
2174 *prec
= MAX (min_prec
, 1);
2178 min_prec
= wi::min_precision (w
, SIGNED
);
2179 *prec
= MIN ((int) -min_prec
, -2);
2181 mp
= CEIL (min_prec
, limb_prec
) * limb_prec
;
2182 if (mp
>= (unsigned) TYPE_PRECISION (TREE_TYPE (op
)))
2183 type
= TREE_TYPE (op
);
2185 type
= build_bitint_type (mp
, 1);
2186 if (TREE_CODE (type
) != BITINT_TYPE
2187 || bitint_precision_kind (type
) == bitint_prec_small
)
2189 if (TYPE_PRECISION (type
) <= limb_prec
)
2192 /* This case is for targets which e.g. have 64-bit
2193 limb but categorize up to 128-bits _BitInts as
2194 small. We could use type of m_limb_type[2] and
2195 similar instead to save space. */
2196 type
= build_bitint_type (mid_min_prec
, 1);
2200 if (tree_int_cst_sgn (op
) >= 0)
2201 *prec_stored
= MAX (TYPE_PRECISION (type
), 1);
2203 *prec_stored
= MIN ((int) -TYPE_PRECISION (type
), -2);
2205 op
= tree_output_constant_def (fold_convert (type
, op
));
2206 return build_fold_addr_expr (op
);
2212 /* Helper function, create a loop before the current location,
2213 start with sizetype INIT value from the preheader edge. Return
2214 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2215 from the latch edge. */
2218 bitint_large_huge::create_loop (tree init
, tree
*idx_next
)
2220 if (!gsi_end_p (m_gsi
))
2223 m_gsi
= gsi_last_bb (gsi_bb (m_gsi
));
2224 edge e1
= split_block (gsi_bb (m_gsi
), gsi_stmt (m_gsi
));
2225 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
2226 edge e3
= make_edge (e1
->dest
, e1
->dest
, EDGE_TRUE_VALUE
);
2227 e3
->probability
= profile_probability::very_unlikely ();
2228 e2
->flags
= EDGE_FALSE_VALUE
;
2229 e2
->probability
= e3
->probability
.invert ();
2230 tree idx
= make_ssa_name (sizetype
);
2231 gphi
*phi
= create_phi_node (idx
, e1
->dest
);
2232 add_phi_arg (phi
, init
, e1
, UNKNOWN_LOCATION
);
2233 *idx_next
= make_ssa_name (sizetype
);
2234 add_phi_arg (phi
, *idx_next
, e3
, UNKNOWN_LOCATION
);
2235 m_gsi
= gsi_after_labels (e1
->dest
);
2237 m_preheader_bb
= e1
->src
;
2238 class loop
*loop
= alloc_loop ();
2239 loop
->header
= e1
->dest
;
2240 add_loop (loop
, e1
->src
->loop_father
);
2244 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2245 lowered using iteration from the least significant limb up to the most
2246 significant limb. For large _BitInt it is emitted as straight line code
2247 before current location, for huge _BitInt as a loop handling two limbs
2248 at once, followed by handling up to limbs in straight line code (at most
2249 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2250 comparisons, in that case CMP_CODE should be the comparison code and
2251 CMP_OP1/CMP_OP2 the comparison operands. */
2254 bitint_large_huge::lower_mergeable_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2255 tree cmp_op1
, tree cmp_op2
)
2257 bool eq_p
= cmp_code
!= ERROR_MARK
;
2260 type
= TREE_TYPE (cmp_op1
);
2262 type
= TREE_TYPE (gimple_assign_lhs (stmt
));
2263 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2264 bitint_prec_kind kind
= bitint_precision_kind (type
);
2265 gcc_assert (kind
>= bitint_prec_large
);
2267 tree lhs
= gimple_get_lhs (stmt
);
2268 tree rhs1
, lhs_type
= lhs
? TREE_TYPE (lhs
) : NULL_TREE
;
2270 && TREE_CODE (lhs
) == SSA_NAME
2271 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
2272 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
2274 int p
= var_to_partition (m_map
, lhs
);
2275 gcc_assert (m_vars
[p
] != NULL_TREE
);
2276 m_lhs
= lhs
= m_vars
[p
];
2278 unsigned cnt
, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
2280 tree ext
= NULL_TREE
, store_operand
= NULL_TREE
;
2282 basic_block eh_pad
= NULL
;
2283 tree nlhs
= NULL_TREE
;
2284 unsigned HOST_WIDE_INT bo_idx
= 0;
2285 unsigned HOST_WIDE_INT bo_bit
= 0;
2286 tree bf_cur
= NULL_TREE
, bf_next
= NULL_TREE
;
2287 if (gimple_store_p (stmt
))
2289 store_operand
= gimple_assign_rhs1 (stmt
);
2290 eh
= stmt_ends_bb_p (stmt
);
2295 basic_block bb
= gimple_bb (stmt
);
2297 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2298 if (e
->flags
& EDGE_EH
)
2304 if (TREE_CODE (lhs
) == COMPONENT_REF
2305 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
2307 tree fld
= TREE_OPERAND (lhs
, 1);
2308 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
2309 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
2310 poly_int64 bitoffset
;
2311 poly_uint64 field_offset
, repr_offset
;
2312 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
2316 bool var_field_off
= false;
2317 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
2318 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
2319 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
2323 var_field_off
= true;
2325 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
2326 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
2327 nlhs
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
2328 TREE_OPERAND (lhs
, 0), repr
,
2330 ? TREE_OPERAND (lhs
, 2) : NULL_TREE
);
2331 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
2332 bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
2333 bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
2338 && TREE_CODE (store_operand
) == SSA_NAME
2340 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (store_operand
)))
2341 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand
)))
2342 || gimple_assign_cast_p (stmt
))
2344 rhs1
= gimple_assign_rhs1 (store_operand
2345 ? SSA_NAME_DEF_STMT (store_operand
)
2347 /* Optimize mergeable ops ending with widening cast to _BitInt
2348 (or followed by store). We can lower just the limbs of the
2349 cast operand and widen afterwards. */
2350 if (TREE_CODE (rhs1
) == SSA_NAME
2352 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
2353 && TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
2354 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
2355 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1
)),
2356 limb_prec
) < CEIL (prec
, limb_prec
)
2357 || (kind
== bitint_prec_huge
2358 && TYPE_PRECISION (TREE_TYPE (rhs1
)) < prec
)))
2360 store_operand
= rhs1
;
2361 prec
= TYPE_PRECISION (TREE_TYPE (rhs1
));
2362 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2363 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2367 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
2368 if (kind
== bitint_prec_large
)
2369 cnt
= CEIL (prec
, limb_prec
);
2372 rem
= (prec
% (2 * limb_prec
));
2373 end
= (prec
- rem
) / limb_prec
;
2374 cnt
= 2 + CEIL (rem
, limb_prec
);
2375 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
2378 basic_block edge_bb
= NULL
;
2381 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2383 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2385 if (kind
== bitint_prec_large
)
2386 m_gsi
= gsi_end_bb (edge_bb
);
2389 m_after_stmt
= stmt
;
2390 if (kind
!= bitint_prec_large
)
2391 m_upwards_2limb
= end
;
2395 = (prec
!= (unsigned) TYPE_PRECISION (type
)
2396 && (CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
)
2397 > CEIL (prec
, limb_prec
)));
2399 for (unsigned i
= 0; i
< cnt
; i
++)
2402 if (kind
== bitint_prec_large
)
2405 idx
= size_int (end
+ (i
> 2));
2408 rhs1
= handle_operand (cmp_op1
, idx
);
2409 tree rhs2
= handle_operand (cmp_op2
, idx
);
2410 g
= gimple_build_cond (NE_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2412 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2413 e1
->flags
= EDGE_FALSE_VALUE
;
2414 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2415 e1
->probability
= profile_probability::unlikely ();
2416 e2
->probability
= e1
->probability
.invert ();
2418 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2419 m_gsi
= gsi_after_labels (e1
->dest
);
2424 rhs1
= handle_operand (store_operand
, idx
);
2426 rhs1
= handle_stmt (stmt
, idx
);
2427 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
2428 rhs1
= add_cast (m_limb_type
, rhs1
);
2429 if (sext
&& i
== cnt
- 1)
2434 if (tree_fits_uhwi_p (idx
))
2435 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2438 nidx
= make_ssa_name (sizetype
);
2439 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2445 basic_block new_bb
= NULL
;
2446 /* Handle stores into bit-fields. */
2452 if (kind
!= bitint_prec_large
)
2454 prepare_data_in_out (build_zero_cst (m_limb_type
),
2456 bf_next
= m_data
.pop ();
2457 bf_cur
= m_data
.pop ();
2458 g
= gimple_build_cond (EQ_EXPR
, idx
, size_zero_node
,
2459 NULL_TREE
, NULL_TREE
);
2461 if_then_else (g
, profile_probability::unlikely (),
2466 = build_nonstandard_integer_type (limb_prec
- bo_bit
, 1);
2467 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2468 bitsize_int (limb_prec
- bo_bit
),
2469 bitsize_int (bo_idx
* limb_prec
+ bo_bit
));
2470 tree t
= add_cast (ftype
, rhs1
);
2471 g
= gimple_build_assign (bfr
, t
);
2475 maybe_duplicate_eh_stmt (g
, stmt
);
2478 edge e
= split_block (gsi_bb (m_gsi
), g
);
2479 m_gsi
= gsi_after_labels (e
->dest
);
2480 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2481 = profile_probability::very_unlikely ();
2484 if (kind
== bitint_prec_large
)
2490 m_gsi
= gsi_after_labels (e2
->src
);
2494 tree t1
= make_ssa_name (m_limb_type
);
2495 tree t2
= make_ssa_name (m_limb_type
);
2496 tree t3
= make_ssa_name (m_limb_type
);
2497 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2498 build_int_cst (unsigned_type_node
,
2499 limb_prec
- bo_bit
));
2501 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, rhs1
,
2502 build_int_cst (unsigned_type_node
,
2506 g
= gimple_build_assign (t3
, BIT_IOR_EXPR
, t1
, t2
);
2509 if (bf_next
&& i
== 1)
2511 g
= gimple_build_assign (bf_next
, bf_cur
);
2518 /* Handle bit-field access to partial last limb if needed. */
2522 && tree_fits_uhwi_p (idx
))
2524 unsigned int tprec
= TYPE_PRECISION (type
);
2525 unsigned int rprec
= tprec
% limb_prec
;
2526 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2529 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2530 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2531 unshare_expr (nlhs
),
2532 bitsize_int (rprec
+ bo_bit
),
2533 bitsize_int ((bo_idx
2534 + tprec
/ limb_prec
)
2536 tree t
= add_cast (ftype
, rhs1
);
2537 g
= gimple_build_assign (bfr
, t
);
2541 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2544 /* Otherwise, stores to any other lhs. */
2547 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
,
2549 g
= gimple_build_assign (l
, rhs1
);
2554 maybe_duplicate_eh_stmt (g
, stmt
);
2557 edge e
= split_block (gsi_bb (m_gsi
), g
);
2558 m_gsi
= gsi_after_labels (e
->dest
);
2559 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2560 = profile_probability::very_unlikely ();
2564 m_gsi
= gsi_after_labels (new_bb
);
2568 if (kind
== bitint_prec_huge
&& i
<= 1)
2572 idx
= make_ssa_name (sizetype
);
2573 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
2579 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
2582 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2583 NULL_TREE
, NULL_TREE
);
2586 m_gsi
= gsi_after_labels (edge_bb
);
2588 m_gsi
= gsi_for_stmt (stmt
);
2597 ext
= add_cast (signed_type_for (m_limb_type
), ext
);
2598 tree lpm1
= build_int_cst (unsigned_type_node
,
2600 tree n
= make_ssa_name (TREE_TYPE (ext
));
2601 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
2603 ext
= add_cast (m_limb_type
, n
);
2606 ext
= build_zero_cst (m_limb_type
);
2607 kind
= bitint_precision_kind (type
);
2608 unsigned start
= CEIL (prec
, limb_prec
);
2609 prec
= TYPE_PRECISION (type
);
2610 idx
= idx_first
= idx_next
= NULL_TREE
;
2611 if (prec
<= (start
+ 2 + (bo_bit
!= 0)) * limb_prec
)
2612 kind
= bitint_prec_large
;
2613 if (kind
== bitint_prec_large
)
2614 cnt
= CEIL (prec
, limb_prec
) - start
;
2617 rem
= prec
% limb_prec
;
2618 end
= (prec
- rem
) / limb_prec
;
2619 cnt
= (bo_bit
!= 0) + 1 + (rem
!= 0);
2621 for (unsigned i
= 0; i
< cnt
; i
++)
2623 if (kind
== bitint_prec_large
|| (i
== 0 && bo_bit
!= 0))
2624 idx
= size_int (start
+ i
);
2625 else if (i
== cnt
- 1)
2626 idx
= size_int (end
);
2627 else if (i
== (bo_bit
!= 0))
2628 idx
= create_loop (size_int (start
+ i
), &idx_next
);
2630 if (bf_cur
!= NULL_TREE
&& bf_cur
!= ext
)
2632 tree t1
= make_ssa_name (m_limb_type
);
2633 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2634 build_int_cst (unsigned_type_node
,
2635 limb_prec
- bo_bit
));
2637 if (integer_zerop (ext
))
2641 tree t2
= make_ssa_name (m_limb_type
);
2642 rhs1
= make_ssa_name (m_limb_type
);
2643 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, ext
,
2644 build_int_cst (unsigned_type_node
,
2647 g
= gimple_build_assign (rhs1
, BIT_IOR_EXPR
, t1
, t2
);
2655 if (tree_fits_uhwi_p (idx
))
2656 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2659 nidx
= make_ssa_name (sizetype
);
2660 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2666 /* Handle bit-field access to partial last limb if needed. */
2667 if (nlhs
&& i
== cnt
- 1)
2669 unsigned int tprec
= TYPE_PRECISION (type
);
2670 unsigned int rprec
= tprec
% limb_prec
;
2671 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2674 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2675 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2676 unshare_expr (nlhs
),
2677 bitsize_int (rprec
+ bo_bit
),
2678 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2680 tree t
= add_cast (ftype
, rhs1
);
2681 g
= gimple_build_assign (bfr
, t
);
2685 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2688 /* Otherwise, stores to any other lhs. */
2691 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
, nidx
, true);
2692 g
= gimple_build_assign (l
, rhs1
);
2697 maybe_duplicate_eh_stmt (g
, stmt
);
2700 edge e
= split_block (gsi_bb (m_gsi
), g
);
2701 m_gsi
= gsi_after_labels (e
->dest
);
2702 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2703 = profile_probability::very_unlikely ();
2706 if (kind
== bitint_prec_huge
&& i
== (bo_bit
!= 0))
2708 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
2711 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2712 NULL_TREE
, NULL_TREE
);
2714 m_gsi
= gsi_for_stmt (stmt
);
2718 if (bf_cur
!= NULL_TREE
)
2720 unsigned int tprec
= TYPE_PRECISION (type
);
2721 unsigned int rprec
= tprec
% limb_prec
;
2722 tree ftype
= build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2723 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2724 bitsize_int (rprec
+ bo_bit
),
2725 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2730 rhs1
= make_ssa_name (TREE_TYPE (rhs1
));
2731 g
= gimple_build_assign (rhs1
, RSHIFT_EXPR
, bf_cur
,
2732 build_int_cst (unsigned_type_node
,
2733 limb_prec
- bo_bit
));
2736 rhs1
= add_cast (ftype
, rhs1
);
2737 g
= gimple_build_assign (bfr
, rhs1
);
2741 maybe_duplicate_eh_stmt (g
, stmt
);
2744 edge e
= split_block (gsi_bb (m_gsi
), g
);
2745 m_gsi
= gsi_after_labels (e
->dest
);
2746 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2747 = profile_probability::very_unlikely ();
2752 if (gimple_store_p (stmt
))
2754 unlink_stmt_vdef (stmt
);
2755 release_ssa_name (gimple_vdef (stmt
));
2756 gsi_remove (&m_gsi
, true);
2760 lhs
= make_ssa_name (boolean_type_node
);
2761 basic_block bb
= gimple_bb (stmt
);
2762 gphi
*phi
= create_phi_node (lhs
, bb
);
2763 edge e
= find_edge (gsi_bb (m_gsi
), bb
);
2764 unsigned int n
= EDGE_COUNT (bb
->preds
);
2765 for (unsigned int i
= 0; i
< n
; i
++)
2767 edge e2
= EDGE_PRED (bb
, i
);
2768 add_phi_arg (phi
, e
== e2
? boolean_true_node
: boolean_false_node
,
2769 e2
, UNKNOWN_LOCATION
);
2771 cmp_code
= cmp_code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
2778 /* Handle a large/huge _BitInt comparison statement STMT other than
2779 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2780 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2781 lowered by iteration from the most significant limb downwards to
2782 the least significant one, for large _BitInt in straight line code,
2783 otherwise with most significant limb handled in
2784 straight line code followed by a loop handling one limb at a time.
2785 Comparisons with unsigned huge _BitInt with precisions which are
2786 multiples of limb precision can use just the loop and don't need to
2787 handle most significant limb before the loop. The loop or straight
2788 line code jumps to final basic block if a particular pair of limbs
2792 bitint_large_huge::lower_comparison_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2793 tree cmp_op1
, tree cmp_op2
)
2795 tree type
= TREE_TYPE (cmp_op1
);
2796 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2797 bitint_prec_kind kind
= bitint_precision_kind (type
);
2798 gcc_assert (kind
>= bitint_prec_large
);
2800 if (!TYPE_UNSIGNED (type
)
2801 && integer_zerop (cmp_op2
)
2802 && (cmp_code
== GE_EXPR
|| cmp_code
== LT_EXPR
))
2804 unsigned end
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
) - 1;
2805 tree idx
= size_int (end
);
2807 tree rhs1
= handle_operand (cmp_op1
, idx
);
2808 if (TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2810 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2811 rhs1
= add_cast (stype
, rhs1
);
2813 tree lhs
= make_ssa_name (boolean_type_node
);
2814 g
= gimple_build_assign (lhs
, cmp_code
, rhs1
,
2815 build_zero_cst (TREE_TYPE (rhs1
)));
2821 unsigned cnt
, rem
= 0, end
= 0;
2822 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
;
2823 if (kind
== bitint_prec_large
)
2824 cnt
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
);
2827 rem
= ((unsigned) TYPE_PRECISION (type
) % limb_prec
);
2828 if (rem
== 0 && !TYPE_UNSIGNED (type
))
2830 end
= ((unsigned) TYPE_PRECISION (type
) - rem
) / limb_prec
;
2831 cnt
= 1 + (rem
!= 0);
2834 basic_block edge_bb
= NULL
;
2835 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2837 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2839 m_gsi
= gsi_end_bb (edge_bb
);
2841 edge
*edges
= XALLOCAVEC (edge
, cnt
* 2);
2842 for (unsigned i
= 0; i
< cnt
; i
++)
2845 if (kind
== bitint_prec_large
)
2846 idx
= size_int (cnt
- i
- 1);
2847 else if (i
== cnt
- 1)
2848 idx
= create_loop (size_int (end
- 1), &idx_next
);
2850 idx
= size_int (end
);
2851 tree rhs1
= handle_operand (cmp_op1
, idx
);
2852 tree rhs2
= handle_operand (cmp_op2
, idx
);
2854 && !TYPE_UNSIGNED (type
)
2855 && TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2857 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2858 rhs1
= add_cast (stype
, rhs1
);
2859 rhs2
= add_cast (stype
, rhs2
);
2861 g
= gimple_build_cond (GT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2863 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2864 e1
->flags
= EDGE_FALSE_VALUE
;
2865 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2866 e1
->probability
= profile_probability::likely ();
2867 e2
->probability
= e1
->probability
.invert ();
2869 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2870 m_gsi
= gsi_after_labels (e1
->dest
);
2872 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2874 e1
= split_block (gsi_bb (m_gsi
), g
);
2875 e1
->flags
= EDGE_FALSE_VALUE
;
2876 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2877 e1
->probability
= profile_probability::unlikely ();
2878 e2
->probability
= e1
->probability
.invert ();
2879 m_gsi
= gsi_after_labels (e1
->dest
);
2880 edges
[2 * i
+ 1] = e2
;
2882 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
2884 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
2886 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
2887 NULL_TREE
, NULL_TREE
);
2889 edge true_edge
, false_edge
;
2890 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
2891 &true_edge
, &false_edge
);
2892 m_gsi
= gsi_after_labels (false_edge
->dest
);
2896 tree lhs
= make_ssa_name (boolean_type_node
);
2897 basic_block bb
= gimple_bb (stmt
);
2898 gphi
*phi
= create_phi_node (lhs
, bb
);
2899 for (unsigned int i
= 0; i
< cnt
* 2; i
++)
2901 tree val
= ((cmp_code
== GT_EXPR
|| cmp_code
== GE_EXPR
)
2902 ^ (i
& 1)) ? boolean_true_node
: boolean_false_node
;
2903 add_phi_arg (phi
, val
, edges
[i
], UNKNOWN_LOCATION
);
2905 add_phi_arg (phi
, (cmp_code
== GE_EXPR
|| cmp_code
== LE_EXPR
)
2906 ? boolean_true_node
: boolean_false_node
,
2907 find_edge (gsi_bb (m_gsi
), bb
), UNKNOWN_LOCATION
);
2912 /* Lower large/huge _BitInt left and right shift except for left
2913 shift by < limb_prec constant. */
2916 bitint_large_huge::lower_shift_stmt (tree obj
, gimple
*stmt
)
2918 tree rhs1
= gimple_assign_rhs1 (stmt
);
2919 tree lhs
= gimple_assign_lhs (stmt
);
2920 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
2921 tree type
= TREE_TYPE (rhs1
);
2922 gimple
*final_stmt
= gsi_stmt (m_gsi
);
2923 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
2924 && bitint_precision_kind (type
) >= bitint_prec_large
);
2925 int prec
= TYPE_PRECISION (type
);
2926 tree n
= gimple_assign_rhs2 (stmt
), n1
, n2
, n3
, n4
;
2928 if (obj
== NULL_TREE
)
2930 int part
= var_to_partition (m_map
, lhs
);
2931 gcc_assert (m_vars
[part
] != NULL_TREE
);
2934 /* Preparation code common for both left and right shifts.
2935 unsigned n1 = n % limb_prec;
2936 size_t n2 = n / limb_prec;
2937 size_t n3 = n1 != 0;
2938 unsigned n4 = (limb_prec - n1) % limb_prec;
2939 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
2940 if (TREE_CODE (n
) == INTEGER_CST
)
2942 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
2943 n1
= int_const_binop (TRUNC_MOD_EXPR
, n
, lp
);
2944 n2
= fold_convert (sizetype
, int_const_binop (TRUNC_DIV_EXPR
, n
, lp
));
2945 n3
= size_int (!integer_zerop (n1
));
2946 n4
= int_const_binop (TRUNC_MOD_EXPR
,
2947 int_const_binop (MINUS_EXPR
, lp
, n1
), lp
);
2951 n1
= make_ssa_name (TREE_TYPE (n
));
2952 n2
= make_ssa_name (sizetype
);
2953 n3
= make_ssa_name (sizetype
);
2954 n4
= make_ssa_name (TREE_TYPE (n
));
2955 if (pow2p_hwi (limb_prec
))
2957 tree lpm1
= build_int_cst (TREE_TYPE (n
), limb_prec
- 1);
2958 g
= gimple_build_assign (n1
, BIT_AND_EXPR
, n
, lpm1
);
2960 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
2962 ? n2
: make_ssa_name (TREE_TYPE (n
)),
2964 build_int_cst (TREE_TYPE (n
),
2965 exact_log2 (limb_prec
)));
2967 if (gimple_assign_lhs (g
) != n2
)
2969 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
2972 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
2975 g
= gimple_build_assign (n4
, BIT_AND_EXPR
, gimple_assign_lhs (g
),
2981 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
2982 g
= gimple_build_assign (n1
, TRUNC_MOD_EXPR
, n
, lp
);
2984 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
2986 ? n2
: make_ssa_name (TREE_TYPE (n
)),
2987 TRUNC_DIV_EXPR
, n
, lp
);
2989 if (gimple_assign_lhs (g
) != n2
)
2991 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
2994 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
2995 MINUS_EXPR
, lp
, n1
);
2997 g
= gimple_build_assign (n4
, TRUNC_MOD_EXPR
, gimple_assign_lhs (g
),
3001 g
= gimple_build_assign (make_ssa_name (boolean_type_node
), NE_EXPR
, n1
,
3002 build_zero_cst (TREE_TYPE (n
)));
3004 g
= gimple_build_assign (n3
, NOP_EXPR
, gimple_assign_lhs (g
));
3007 tree p
= build_int_cst (sizetype
,
3008 prec
/ limb_prec
- (prec
% limb_prec
== 0));
3009 if (rhs_code
== RSHIFT_EXPR
)
3014 unsigned n1 = n % limb_prec;
3015 size_t n2 = n / limb_prec;
3016 size_t n3 = n1 != 0;
3017 unsigned n4 = (limb_prec - n1) % limb_prec;
3019 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3020 int signed_p = (typeof (src) -1) < 0;
3021 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3022 ? p : p - n3); ++idx)
3023 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3025 if (prec % limb_prec == 0)
3028 ext = ((signed limb_type) (src[p] << (limb_prec
3029 - (prec % limb_prec))))
3030 >> (limb_prec - (prec % limb_prec));
3032 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3033 if (!signed_p && (prec % limb_prec == 0))
3035 else if (idx < prec / 64)
3037 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3043 dst[idx] = ((signed limb_type) ext) >> n1;
3044 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3048 dst[idx] = ext >> n1;
3051 for (++idx; idx <= p; ++idx)
3054 if (TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3056 else if (TREE_CODE (n3
) == INTEGER_CST
)
3057 pmn3
= int_const_binop (MINUS_EXPR
, p
, n3
);
3060 pmn3
= make_ssa_name (sizetype
);
3061 g
= gimple_build_assign (pmn3
, MINUS_EXPR
, p
, n3
);
3064 g
= gimple_build_cond (LT_EXPR
, n2
, pmn3
, NULL_TREE
, NULL_TREE
);
3065 edge edge_true
, edge_false
;
3066 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3068 tree idx
= create_loop (n2
, &idx_next
);
3069 tree idxmn2
= make_ssa_name (sizetype
);
3070 tree idxpn3
= make_ssa_name (sizetype
);
3071 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3073 g
= gimple_build_assign (idxpn3
, PLUS_EXPR
, idx
, n3
);
3076 tree t1
= handle_operand (rhs1
, idx
);
3078 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3079 RSHIFT_EXPR
, t1
, n1
);
3081 t1
= gimple_assign_lhs (g
);
3082 if (!integer_zerop (n3
))
3085 tree t2
= handle_operand (rhs1
, idxpn3
);
3086 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3087 LSHIFT_EXPR
, t2
, n4
);
3089 t2
= gimple_assign_lhs (g
);
3090 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3091 BIT_IOR_EXPR
, t1
, t2
);
3093 t1
= gimple_assign_lhs (g
);
3095 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3096 g
= gimple_build_assign (l
, t1
);
3098 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3100 g
= gimple_build_cond (LT_EXPR
, idx_next
, pmn3
, NULL_TREE
, NULL_TREE
);
3102 idx
= make_ssa_name (sizetype
);
3103 m_gsi
= gsi_for_stmt (final_stmt
);
3104 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3105 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3106 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3107 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3108 add_phi_arg (phi
, n2
, edge_false
, UNKNOWN_LOCATION
);
3109 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3111 tree ms
= handle_operand (rhs1
, p
);
3113 if (!types_compatible_p (TREE_TYPE (ms
), m_limb_type
))
3114 ext
= add_cast (m_limb_type
, ms
);
3115 if (!(TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3116 && !integer_zerop (n3
))
3118 g
= gimple_build_cond (LT_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3119 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3121 t1
= handle_operand (rhs1
, idx
);
3122 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3123 RSHIFT_EXPR
, t1
, n1
);
3125 t1
= gimple_assign_lhs (g
);
3126 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3127 LSHIFT_EXPR
, ext
, n4
);
3129 tree t2
= gimple_assign_lhs (g
);
3130 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3131 BIT_IOR_EXPR
, t1
, t2
);
3133 t1
= gimple_assign_lhs (g
);
3134 idxmn2
= make_ssa_name (sizetype
);
3135 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3137 l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3138 g
= gimple_build_assign (l
, t1
);
3140 idx_next
= make_ssa_name (sizetype
);
3141 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3143 m_gsi
= gsi_for_stmt (final_stmt
);
3144 tree nidx
= make_ssa_name (sizetype
);
3145 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3146 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3147 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3148 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3149 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3150 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3153 g
= gimple_build_assign (make_ssa_name (sizetype
), MINUS_EXPR
, idx
, n2
);
3155 idx
= gimple_assign_lhs (g
);
3157 if (!TYPE_UNSIGNED (type
))
3158 sext
= add_cast (signed_type_for (m_limb_type
), ext
);
3159 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3160 RSHIFT_EXPR
, sext
, n1
);
3162 t1
= gimple_assign_lhs (g
);
3163 if (!TYPE_UNSIGNED (type
))
3165 t1
= add_cast (m_limb_type
, t1
);
3166 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3168 build_int_cst (TREE_TYPE (n
),
3171 ext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3174 ext
= build_zero_cst (m_limb_type
);
3175 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3176 g
= gimple_build_assign (l
, t1
);
3178 g
= gimple_build_assign (make_ssa_name (sizetype
), PLUS_EXPR
, idx
,
3181 idx
= gimple_assign_lhs (g
);
3182 g
= gimple_build_cond (LE_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3183 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3184 idx
= create_loop (idx
, &idx_next
);
3185 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3186 g
= gimple_build_assign (l
, ext
);
3188 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3190 g
= gimple_build_cond (LE_EXPR
, idx_next
, p
, NULL_TREE
, NULL_TREE
);
3198 unsigned n1 = n % limb_prec;
3199 size_t n2 = n / limb_prec;
3200 size_t n3 = n1 != 0;
3201 unsigned n4 = (limb_prec - n1) % limb_prec;
3203 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3204 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3205 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3208 dst[idx] = src[idx - n2] << n1;
3211 for (; (ssize_t) idx >= 0; --idx)
3214 if (TREE_CODE (n2
) == INTEGER_CST
&& TREE_CODE (n3
) == INTEGER_CST
)
3215 n2pn3
= int_const_binop (PLUS_EXPR
, n2
, n3
);
3218 n2pn3
= make_ssa_name (sizetype
);
3219 g
= gimple_build_assign (n2pn3
, PLUS_EXPR
, n2
, n3
);
3222 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3223 idx even to access the most significant partial limb. */
3225 if (integer_zerop (n3
))
3226 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3227 counts. Emit if (true) condition that can be optimized later. */
3228 g
= gimple_build_cond (NE_EXPR
, boolean_true_node
, boolean_false_node
,
3229 NULL_TREE
, NULL_TREE
);
3231 g
= gimple_build_cond (LE_EXPR
, n2pn3
, p
, NULL_TREE
, NULL_TREE
);
3232 edge edge_true
, edge_false
;
3233 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3235 tree idx
= create_loop (p
, &idx_next
);
3236 tree idxmn2
= make_ssa_name (sizetype
);
3237 tree idxmn2mn3
= make_ssa_name (sizetype
);
3238 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3240 g
= gimple_build_assign (idxmn2mn3
, MINUS_EXPR
, idxmn2
, n3
);
3243 tree t1
= handle_operand (rhs1
, idxmn2
);
3245 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3246 LSHIFT_EXPR
, t1
, n1
);
3248 t1
= gimple_assign_lhs (g
);
3249 if (!integer_zerop (n3
))
3252 tree t2
= handle_operand (rhs1
, idxmn2mn3
);
3253 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3254 RSHIFT_EXPR
, t2
, n4
);
3256 t2
= gimple_assign_lhs (g
);
3257 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3258 BIT_IOR_EXPR
, t1
, t2
);
3260 t1
= gimple_assign_lhs (g
);
3262 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3263 g
= gimple_build_assign (l
, t1
);
3265 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3267 tree sn2pn3
= add_cast (ssizetype
, n2pn3
);
3268 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
), sn2pn3
,
3269 NULL_TREE
, NULL_TREE
);
3271 idx
= make_ssa_name (sizetype
);
3272 m_gsi
= gsi_for_stmt (final_stmt
);
3273 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3274 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3275 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3276 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3277 add_phi_arg (phi
, p
, edge_false
, UNKNOWN_LOCATION
);
3278 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3280 if (!integer_zerop (n3
))
3282 g
= gimple_build_cond (NE_EXPR
, n3
, size_zero_node
,
3283 NULL_TREE
, NULL_TREE
);
3284 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3285 idxmn2
= make_ssa_name (sizetype
);
3286 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3289 t1
= handle_operand (rhs1
, idxmn2
);
3290 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3291 LSHIFT_EXPR
, t1
, n1
);
3293 t1
= gimple_assign_lhs (g
);
3294 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3295 g
= gimple_build_assign (l
, t1
);
3297 idx_next
= make_ssa_name (sizetype
);
3298 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3300 m_gsi
= gsi_for_stmt (final_stmt
);
3301 tree nidx
= make_ssa_name (sizetype
);
3302 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3303 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3304 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3305 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3306 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3307 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3310 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx
),
3311 ssize_int (0), NULL_TREE
, NULL_TREE
);
3312 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3313 idx
= create_loop (idx
, &idx_next
);
3314 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3315 g
= gimple_build_assign (l
, build_zero_cst (m_limb_type
));
3317 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3319 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
),
3320 ssize_int (0), NULL_TREE
, NULL_TREE
);
3325 /* Lower large/huge _BitInt multiplication or division. */
3328 bitint_large_huge::lower_muldiv_stmt (tree obj
, gimple
*stmt
)
3330 tree rhs1
= gimple_assign_rhs1 (stmt
);
3331 tree rhs2
= gimple_assign_rhs2 (stmt
);
3332 tree lhs
= gimple_assign_lhs (stmt
);
3333 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3334 tree type
= TREE_TYPE (rhs1
);
3335 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3336 && bitint_precision_kind (type
) >= bitint_prec_large
);
3337 int prec
= TYPE_PRECISION (type
), prec1
, prec2
;
3338 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec1
);
3339 rhs2
= handle_operand_addr (rhs2
, stmt
, NULL
, &prec2
);
3340 if (obj
== NULL_TREE
)
3342 int part
= var_to_partition (m_map
, lhs
);
3343 gcc_assert (m_vars
[part
] != NULL_TREE
);
3345 lhs
= build_fold_addr_expr (obj
);
3349 lhs
= build_fold_addr_expr (obj
);
3350 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3351 NULL_TREE
, true, GSI_SAME_STMT
);
3353 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
3358 g
= gimple_build_call_internal (IFN_MULBITINT
, 6,
3359 lhs
, build_int_cst (sitype
, prec
),
3360 rhs1
, build_int_cst (sitype
, prec1
),
3361 rhs2
, build_int_cst (sitype
, prec2
));
3364 case TRUNC_DIV_EXPR
:
3365 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8,
3366 lhs
, build_int_cst (sitype
, prec
),
3368 build_int_cst (sitype
, 0),
3369 rhs1
, build_int_cst (sitype
, prec1
),
3370 rhs2
, build_int_cst (sitype
, prec2
));
3371 if (!stmt_ends_bb_p (stmt
))
3372 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3375 case TRUNC_MOD_EXPR
:
3376 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8, null_pointer_node
,
3377 build_int_cst (sitype
, 0),
3378 lhs
, build_int_cst (sitype
, prec
),
3379 rhs1
, build_int_cst (sitype
, prec1
),
3380 rhs2
, build_int_cst (sitype
, prec2
));
3381 if (!stmt_ends_bb_p (stmt
))
3382 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3388 if (stmt_ends_bb_p (stmt
))
3390 maybe_duplicate_eh_stmt (g
, stmt
);
3393 basic_block bb
= gimple_bb (stmt
);
3395 FOR_EACH_EDGE (e1
, ei
, bb
->succs
)
3396 if (e1
->flags
& EDGE_EH
)
3400 edge e2
= split_block (gsi_bb (m_gsi
), g
);
3401 m_gsi
= gsi_after_labels (e2
->dest
);
3402 make_edge (e2
->src
, e1
->dest
, EDGE_EH
)->probability
3403 = profile_probability::very_unlikely ();
/* NOTE(review): this region is a garbled extraction -- each statement is
   split across physical lines and the leading "NNNN" tokens are original
   source line numbers fused into the text; several original lines (braces,
   else clauses) are missing.  Code kept byte-identical; comments only.  */
3408 /* Lower large/huge _BitInt conversion to/from floating point. */
/* Method of bitint_large_huge.  OBJ, if non-NULL, is the location the
   _BitInt result should be stored into; STMT is the conversion assign.  */
3411 bitint_large_huge::lower_float_conv_stmt (tree obj
, gimple
*stmt
)
3413 tree rhs1
= gimple_assign_rhs1 (stmt
);
3414 tree lhs
= gimple_assign_lhs (stmt
);
3415 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* SImode integer type used for the precision arguments of the libgcc
   helper calls built below.  */
3416 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
/* FIX_TRUNC_EXPR: float -> _BitInt.  The result is written through a
   pointer (LHS is rebuilt as &obj below) by IFN_FLOATTOBITINT.  */
3418 if (rhs_code
== FIX_TRUNC_EXPR
)
3420 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
3421 if (!TYPE_UNSIGNED (TREE_TYPE (lhs
)))
/* If no OBJ was supplied, use the partition variable allocated for LHS
   (m_vars indexed by the coalesce partition).  */
3423 if (obj
== NULL_TREE
)
3425 int part
= var_to_partition (m_map
, lhs
)
3426 gcc_assert (m_vars
[part
] != NULL_TREE
);
3428 lhs
= build_fold_addr_expr (obj
);
3432 lhs
= build_fold_addr_expr (obj
);
3433 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3434 NULL_TREE
, true, GSI_SAME_STMT
);
3436 scalar_mode from_mode
3437 = as_a
<scalar_mode
> (TYPE_MODE (TREE_TYPE (rhs1
)));
3439 /* IEEE single is a full superset of both IEEE half and
3440 bfloat formats, convert to float first and then to _BitInt
3441 to avoid the need of another 2 library routines. */
3442 if ((REAL_MODE_FORMAT (from_mode
) == &arm_bfloat_half_format
3443 || REAL_MODE_FORMAT (from_mode
) == &ieee_half_format
)
3444 && REAL_MODE_FORMAT (SFmode
) == &ieee_single_format
)
3446 tree type
= lang_hooks
.types
.type_for_mode (SFmode
, 0);
3448 rhs1
= add_cast (type
, rhs1
);
3451 g
= gimple_build_call_internal (IFN_FLOATTOBITINT
, 3,
3452 lhs
, build_int_cst (sitype
, prec
),
/* Other direction: _BitInt -> float.  The operand is passed by address
   with its (signed-encoded) precision to IFN_BITINTTOFLOAT, and the call
   result is assigned directly to LHS.  */
3459 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec
);
3460 g
= gimple_build_call_internal (IFN_BITINTTOFLOAT
, 2,
3461 rhs1
, build_int_cst (sitype
, prec
));
3462 gimple_call_set_lhs (g
, lhs
);
/* The internal call can't throw unless the original stmt could.  */
3463 if (!stmt_ends_bb_p (stmt
))
3464 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3465 gsi_replace (&m_gsi
, g
, true);
/* NOTE(review): garbled extraction -- statements split across lines, the
   leading "NNNN" tokens are fused original line numbers, and part of the
   signature (the LIMB/CHECK_ZERO parameter line) is missing from this
   view.  Code kept byte-identical; comments only.  */
3469 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3470 If check_zero is true, caller wants to check if all bits in [start, end)
3471 are zero, otherwise if bits in [start, end) are either all zero or
3472 all ones. L is the limb with index LIMB, START and END are measured
3476 bitint_large_huge::arith_overflow_extract_bits (unsigned int start
,
3477 unsigned int end
, tree l
,
3481 unsigned startlimb
= start
/ limb_prec
;
3482 unsigned endlimb
= (end
- 1) / limb_prec
;
/* Fast path: the range covers whole limbs.  */
3485 if ((start
% limb_prec
) == 0 && (end
% limb_prec
) == 0)
/* Range lies within a single limb: mask out bits [start%limb_prec,
   end) of L with a shifted mask.  */
3487 if (startlimb
== endlimb
&& limb
== startlimb
)
3491 wide_int w
= wi::shifted_mask (start
% limb_prec
,
3492 end
- start
, false, limb_prec
);
3493 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3495 wide_int_to_tree (m_limb_type
, w
));
3497 return gimple_assign_lhs (g
);
3499 unsigned int shift
= start
% limb_prec
;
/* If END isn't limb-aligned, first shift the upper garbage away.  */
3500 if ((end
% limb_prec
) != 0)
3502 unsigned int lshift
= (-end
) % limb_prec
;
3504 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3506 build_int_cst (unsigned_type_node
,
3509 l
= gimple_assign_lhs (g
);
/* Arithmetic (signed) right shift so the result is sign-extended when
   the caller checks for all-zeros-or-all-ones.  */
3511 l
= add_cast (signed_type_for (m_limb_type
), l
);
3512 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3514 build_int_cst (unsigned_type_node
, shift
));
3516 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
/* LIMB is the first limb of the range: discard bits below START.  */
3518 else if (limb
== startlimb
)
3520 if ((start
% limb_prec
) == 0)
3523 l
= add_cast (signed_type_for (m_limb_type
), l
);
3524 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3526 build_int_cst (unsigned_type_node
,
3527 start
% limb_prec
));
3529 l
= gimple_assign_lhs (g
);
3531 l
= add_cast (m_limb_type
, l
);
/* LIMB is the last limb of the range: keep only bits below END.  */
3534 else if (limb
== endlimb
)
3536 if ((end
% limb_prec
) == 0)
3540 wide_int w
= wi::mask (end
% limb_prec
, false, limb_prec
);
3541 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3543 wide_int_to_tree (m_limb_type
, w
));
3545 return gimple_assign_lhs (g
);
3547 unsigned int shift
= (-end
) % limb_prec
;
/* Shift left then arithmetic-shift right by the same amount, which
   keeps [0, end%limb_prec) and sign-extends the top bit of the kept
   range (matching the all-zeros-or-all-ones check).  */
3548 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3550 build_int_cst (unsigned_type_node
, shift
));
3552 l
= add_cast (signed_type_for (m_limb_type
), gimple_assign_lhs (g
));
3553 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3555 build_int_cst (unsigned_type_node
, shift
));
3557 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
/* NOTE(review): garbled extraction -- statements split across lines, the
   leading "NNNN" tokens are fused original line numbers, and many original
   lines (braces, else branches, some call arguments) are elided.  Code kept
   byte-identical; comments only.  */
3562 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3563 result including overflow flag into the right locations. */
3566 bitint_large_huge::finish_arith_overflow (tree var
, tree obj
, tree type
,
3567 tree ovf
, tree lhs
, tree orig_obj
,
3568 gimple
*stmt
, tree_code code
)
/* Case 1: no OBJ and TYPE is small/middle _BitInt (or not _BitInt at
   all) -- assemble the scalar result from at most two limbs of VAR and
   replace STMT with a COMPLEX_EXPR of (result, ovf).  */
3572 if (obj
== NULL_TREE
3573 && (TREE_CODE (type
) != BITINT_TYPE
3574 || bitint_precision_kind (type
) < bitint_prec_large
))
3576 /* Add support for 3 or more limbs filled in from normal integral
3577 type if this assert fails. If no target chooses limb mode smaller
3578 than half of largest supported normal integral type, this will not
3580 gcc_assert (TYPE_PRECISION (type
) <= 2 * limb_prec
);
3581 tree lhs_type
= type
;
/* Middle _BitInt is represented by an ordinary integer type of the
   same precision/signedness for arithmetic.  */
3582 if (TREE_CODE (type
) == BITINT_TYPE
3583 && bitint_precision_kind (type
) == bitint_prec_middle
)
3584 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (type
),
3585 TYPE_UNSIGNED (type
));
3586 tree r1
= limb_access (NULL_TREE
, var
, size_int (0), true);
3587 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r1
);
3589 r1
= gimple_assign_lhs (g
);
3590 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
3591 r1
= add_cast (lhs_type
, r1
);
/* If the result needs two limbs, read limb 1 too and combine as
   r1 | (r2 << limb_prec).  */
3592 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
3594 tree r2
= limb_access (NULL_TREE
, var
, size_int (1), true);
3595 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r2
);
3597 r2
= gimple_assign_lhs (g
);
3598 r2
= add_cast (lhs_type
, r2
);
3599 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
3600 build_int_cst (unsigned_type_node
,
3603 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
3604 gimple_assign_lhs (g
));
3606 r1
= gimple_assign_lhs (g
);
3608 if (lhs_type
!= type
)
3609 r1
= add_cast (type
, r1
);
3610 ovf
= add_cast (lhs_type
, ovf
);
3611 if (lhs_type
!= type
)
3612 ovf
= add_cast (type
, ovf
);
3613 g
= gimple_build_assign (lhs
, COMPLEX_EXPR
, r1
, ovf
);
3614 m_gsi
= gsi_for_stmt (stmt
);
3615 gsi_replace (&m_gsi
, g
, true);
/* Case 2: an OBJ destination exists -- block-copy the limbs of VAR into
   OBJ through MEM_REFs of a matching array type.  */
3619 unsigned HOST_WIDE_INT nelts
= 0;
3620 tree atype
= NULL_TREE
;
3623 nelts
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
3624 if (orig_obj
== NULL_TREE
)
3626 atype
= build_array_type_nelts (m_limb_type
, nelts
);
3632 if (orig_obj
== NULL_TREE
)
3634 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (obj
)));
3635 v1
= build2 (MEM_REF
, atype
,
3636 build_fold_addr_expr (unshare_expr (obj
)), zero
);
3638 else if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
3639 v1
= build1 (VIEW_CONVERT_EXPR
, atype
, unshare_expr (obj
));
3641 v1
= unshare_expr (obj
);
3642 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (var
)));
3643 v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), zero
);
3644 g
= gimple_build_assign (v1
, v2
);
/* When the caller had no original object, the overflow flag is stored
   as an extra limb right after the result, and any remaining tail of
   OBJ is zeroed.  */
3647 if (orig_obj
== NULL_TREE
&& obj
)
3649 ovf
= add_cast (m_limb_type
, ovf
);
3650 tree l
= limb_access (NULL_TREE
, obj
, size_int (nelts
), true);
3651 g
= gimple_build_assign (l
, ovf
);
3655 atype
= build_array_type_nelts (m_limb_type
, nelts
- 1);
3656 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (obj
)),
3657 (nelts
+ 1) * m_limb_size
);
3658 tree v1
= build2 (MEM_REF
, atype
,
3659 build_fold_addr_expr (unshare_expr (obj
)),
3661 g
= gimple_build_assign (v1
, build_zero_cst (atype
));
/* Case 3: LHS is _Complex -- rewrite each IMAGPART_EXPR consumer of LHS
   to be assigned the overflow flag directly.  */
3665 else if (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
)
3667 imm_use_iterator ui
;
3668 use_operand_p use_p
;
3669 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
3671 g
= USE_STMT (use_p
);
3672 if (!is_gimple_assign (g
)
3673 || gimple_assign_rhs_code (g
) != IMAGPART_EXPR
)
3675 tree lhs2
= gimple_assign_lhs (g
);
3677 single_imm_use (lhs2
, &use_p
, &use_stmt
);
3678 lhs2
= gimple_assign_lhs (use_stmt
);
3679 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
3680 if (useless_type_conversion_p (TREE_TYPE (lhs2
), TREE_TYPE (ovf
)))
3681 g
= gimple_build_assign (lhs2
, ovf
);
3683 g
= gimple_build_assign (lhs2
, NOP_EXPR
, ovf
);
3684 gsi_replace (&gsi
, g
, true);
/* Case 4: UBSAN-style check -- if OVF isn't statically false, branch on
   it and emit the sanitizer overflow-report builtin on the unlikely
   path.  */
3688 else if (ovf
!= boolean_false_node
)
3690 g
= gimple_build_cond (NE_EXPR
, ovf
, boolean_false_node
,
3691 NULL_TREE
, NULL_TREE
);
3692 edge edge_true
, edge_false
;
3693 if_then (g
, profile_probability::very_unlikely (),
3694 edge_true
, edge_false
);
3695 tree zero
= build_zero_cst (TREE_TYPE (lhs
));
3696 tree fn
= ubsan_build_overflow_builtin (code
, m_loc
,
3699 force_gimple_operand_gsi (&m_gsi
, fn
, true, NULL_TREE
,
3700 true, GSI_SAME_STMT
);
3701 m_gsi
= gsi_after_labels (edge_true
->dest
);
/* Clobber the temporary VAR so later passes know its lifetime ended.  */
3706 tree clobber
= build_clobber (TREE_TYPE (var
), CLOBBER_EOL
);
3707 g
= gimple_build_assign (var
, clobber
);
3708 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
/* NOTE(review): garbled extraction -- statements split across lines, the
   leading "NNNN" tokens are fused original line numbers; several branch
   bodies are missing entirely from this view (note the gaps in the fused
   numbering, e.g. 3746..3752).  Code kept byte-identical; comments only.  */
3712 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3713 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3714 argument 1 precision PREC1 and minimum precision for the result
3715 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3718 arith_overflow (tree_code code
, tree type
, int prec
, int prec0
, int prec1
,
3719 int prec2
, unsigned *start
, unsigned *end
, bool *check_zero
)
3724 /* Ignore this special rule for subtraction, even if both
3725 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3726 in infinite precision. */
3727 if (code
!= MINUS_EXPR
&& prec0
>= 0 && prec1
>= 0)
3729 /* Result in [0, prec2) is unsigned, if prec > prec2,
3730 all bits above it will be zero. */
3731 if ((prec
- !TYPE_UNSIGNED (type
)) >= prec2
)
3732 return boolean_false_node
;
3735 /* ovf if any of bits in [start, end) is non-zero. */
3736 *start
= prec
- !TYPE_UNSIGNED (type
);
3740 else if (TYPE_UNSIGNED (type
))
3742 /* If result in [0, prec2) is signed and if prec > prec2,
3743 all bits above it will be sign bit copies. */
3746 /* ovf if bit prec - 1 is non-zero. */
3752 /* ovf if any of bits in [start, end) is non-zero. */
3757 else if (prec
>= prec2
)
3758 return boolean_false_node
;
3761 /* ovf if [start, end) bits aren't all zeros or all ones. */
3764 *check_zero
= false;
/* NOTE(review): garbled extraction -- statements split across physical
   lines, leading "NNNN" tokens are fused original line numbers, and many
   original lines (braces, else clauses, trailing call arguments) are
   elided.  This function builds control flow (loops, phis) whose exact
   statement order matters, so it is kept byte-identical; comments only.  */
3769 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3770 argument or return type _Complex large/huge _BitInt. */
3773 bitint_large_huge::lower_addsub_overflow (tree obj
, gimple
*stmt
)
3775 tree arg0
= gimple_call_arg (stmt
, 0);
3776 tree arg1
= gimple_call_arg (stmt
, 1);
3777 tree lhs
= gimple_call_lhs (stmt
);
/* If LHS is absent the call is dead; remove it (rest of the handling
   for that case is elided from this view).  */
3782 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3783 gsi_remove (&gsi
, true);
3786 gimple
*final_stmt
= gsi_stmt (m_gsi
);
3787 tree type
= TREE_TYPE (lhs
);
3788 if (TREE_CODE (type
) == COMPLEX_TYPE
)
3789 type
= TREE_TYPE (type
);
/* prec0/prec1 encode operand precisions (negative for signed -- see
   range_to_prec); prec2 is the minimum precision that can hold the
   infinite-precision result, prec3 the widest operand/result prec.  */
3790 int prec
= TYPE_PRECISION (type
);
3791 int prec0
= range_to_prec (arg0
, stmt
);
3792 int prec1
= range_to_prec (arg1
, stmt
);
3793 int prec2
= ((prec0
< 0) == (prec1
< 0)
3794 ? MAX (prec0
< 0 ? -prec0
: prec0
,
3795 prec1
< 0 ? -prec1
: prec1
) + 1
3796 : MAX (prec0
< 0 ? -prec0
: prec0
+ 1,
3797 prec1
< 0 ? -prec1
: prec1
+ 1) + 1);
3798 int prec3
= MAX (prec0
< 0 ? -prec0
: prec0
,
3799 prec1
< 0 ? -prec1
: prec1
);
3800 prec3
= MAX (prec3
, prec
);
3801 tree var
= NULL_TREE
;
3802 tree orig_obj
= obj
;
/* Reuse the coalesced partition variable for LHS as the destination if
   possible.  */
3803 if (obj
== NULL_TREE
3804 && TREE_CODE (type
) == BITINT_TYPE
3805 && bitint_precision_kind (type
) >= bitint_prec_large
3807 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
3809 int part
= var_to_partition (m_map
, lhs
);
3810 gcc_assert (m_vars
[part
] != NULL_TREE
);
3812 if (TREE_TYPE (lhs
) == type
)
/* Otherwise compute into a fresh limb-array temporary VAR.  */
3815 if (TREE_CODE (type
) != BITINT_TYPE
3816 || bitint_precision_kind (type
) < bitint_prec_large
)
3818 unsigned HOST_WIDE_INT nelts
= CEIL (prec
, limb_prec
);
3819 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
3820 var
= create_tmp_var (atype
);
3823 enum tree_code code
;
3824 switch (gimple_call_internal_fn (stmt
))
3826 case IFN_ADD_OVERFLOW
:
3827 case IFN_UBSAN_CHECK_ADD
:
3830 case IFN_SUB_OVERFLOW
:
3831 case IFN_UBSAN_CHECK_SUB
:
3837 unsigned start
, end
;
3839 tree ovf
= arith_overflow (code
, type
, prec
, prec0
, prec1
, prec2
,
3840 &start
, &end
, &check_zero
);
3842 unsigned startlimb
, endlimb
;
3850 startlimb
= start
/ limb_prec
;
3851 endlimb
= (end
- 1) / limb_prec
;
/* prec4 selects how many limbs actually need computing; huge-precision
   kinds are processed two limbs per loop iteration (m_upwards_2limb).  */
3854 int prec4
= ovf
!= NULL_TREE
? prec
: prec3
;
3855 bitint_prec_kind kind
= bitint_precision_kind (prec4
);
3856 unsigned cnt
, rem
= 0, fin
= 0;
3857 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
3858 bool last_ovf
= (ovf
== NULL_TREE
3859 && CEIL (prec2
, limb_prec
) > CEIL (prec3
, limb_prec
));
3860 if (kind
!= bitint_prec_huge
)
3861 cnt
= CEIL (prec4
, limb_prec
) + last_ovf
;
3864 rem
= (prec4
% (2 * limb_prec
));
3865 fin
= (prec4
- rem
) / limb_prec
;
3866 cnt
= 2 + CEIL (rem
, limb_prec
) + last_ovf
;
3867 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
3870 if (kind
== bitint_prec_huge
)
3871 m_upwards_2limb
= fin
;
/* Widen the argument types to prec3 so both operands are processed with
   the same limb count; constants are folded to the widened type here.  */
3874 tree type0
= TREE_TYPE (arg0
);
3875 tree type1
= TREE_TYPE (arg1
);
3876 if (TYPE_PRECISION (type0
) < prec3
)
3878 type0
= build_bitint_type (prec3
, TYPE_UNSIGNED (type0
));
3879 if (TREE_CODE (arg0
) == INTEGER_CST
)
3880 arg0
= fold_convert (type0
, arg0
);
3882 if (TYPE_PRECISION (type1
) < prec3
)
3884 type1
= build_bitint_type (prec3
, TYPE_UNSIGNED (type1
));
3885 if (TREE_CODE (arg1
) == INTEGER_CST
)
3886 arg1
= fold_convert (type1
, arg1
);
3888 unsigned int data_cnt
= 0;
3889 tree last_rhs1
= NULL_TREE
, last_rhs2
= NULL_TREE
;
3890 tree cmp
= build_zero_cst (m_limb_type
);
3891 unsigned prec_limbs
= CEIL ((unsigned) prec
, limb_prec
);
3892 tree ovf_out
= NULL_TREE
, cmp_out
= NULL_TREE
;
/* Main per-limb loop: iteration i produces limb IDX of the sum or
   difference and, when needed, the overflow bit checks.  */
3893 for (unsigned i
= 0; i
< cnt
; i
++)
3897 if (kind
!= bitint_prec_huge
)
3900 idx
= size_int (fin
+ (i
> 2));
3901 if (!last_ovf
|| i
< cnt
- 1)
3903 if (type0
!= TREE_TYPE (arg0
))
3904 rhs1
= handle_cast (type0
, arg0
, idx
);
3906 rhs1
= handle_operand (arg0
, idx
);
3907 if (type1
!= TREE_TYPE (arg1
))
3908 rhs2
= handle_cast (type1
, arg1
, idx
);
3910 rhs2
= handle_operand (arg1
, idx
);
3912 data_cnt
= m_data_cnt
;
3913 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
3914 rhs1
= add_cast (m_limb_type
, rhs1
);
3915 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs2
)))
3916 rhs2
= add_cast (m_limb_type
, rhs2
);
/* Extra final iteration (last_ovf): materialize the sign-extension
   limb of each operand (0 for unsigned, broadcast sign bit for
   signed) instead of reading a real limb.  */
3922 m_data_cnt
= data_cnt
;
3923 if (TYPE_UNSIGNED (type0
))
3924 rhs1
= build_zero_cst (m_limb_type
);
3927 rhs1
= add_cast (signed_type_for (m_limb_type
), last_rhs1
);
3928 if (TREE_CODE (rhs1
) == INTEGER_CST
)
3929 rhs1
= build_int_cst (m_limb_type
,
3930 tree_int_cst_sgn (rhs1
) < 0 ? -1 : 0);
3933 tree lpm1
= build_int_cst (unsigned_type_node
,
3935 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
3936 RSHIFT_EXPR
, rhs1
, lpm1
);
3938 rhs1
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3941 if (TYPE_UNSIGNED (type1
))
3942 rhs2
= build_zero_cst (m_limb_type
);
3945 rhs2
= add_cast (signed_type_for (m_limb_type
), last_rhs2
);
3946 if (TREE_CODE (rhs2
) == INTEGER_CST
)
3947 rhs2
= build_int_cst (m_limb_type
,
3948 tree_int_cst_sgn (rhs2
) < 0 ? -1 : 0);
3951 tree lpm1
= build_int_cst (unsigned_type_node
,
3953 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2
)),
3954 RSHIFT_EXPR
, rhs2
, lpm1
);
3956 rhs2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
/* The actual limb addition/subtraction with carry handling.  */
3960 tree rhs
= handle_plus_minus (code
, rhs1
, rhs2
, idx
);
/* Overflow-bit accumulation, unless statically known false.  */
3961 if (ovf
!= boolean_false_node
)
3963 if (tree_fits_uhwi_p (idx
))
3965 unsigned limb
= tree_to_uhwi (idx
);
3966 if (limb
>= startlimb
&& limb
<= endlimb
)
3968 tree l
= arith_overflow_extract_bits (start
, end
, rhs
,
3970 tree this_ovf
= make_ssa_name (boolean_type_node
);
3971 if (ovf
== NULL_TREE
&& !check_zero
)
3974 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3976 build_int_cst (m_limb_type
, 1));
3978 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
3979 gimple_assign_lhs (g
),
3980 build_int_cst (m_limb_type
, 1));
3983 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
3985 if (ovf
== NULL_TREE
)
3989 tree b
= make_ssa_name (boolean_type_node
);
3990 g
= gimple_build_assign (b
, BIT_IOR_EXPR
, ovf
, this_ovf
);
/* Non-constant IDX (huge-precision loop): the overflow range may be
   entered mid-loop, so carry OVF/CMP through loop-carried data slots
   and emit runtime comparisons of IDX against STARTLIMB.  */
3996 else if (startlimb
< fin
)
3998 if (m_first
&& startlimb
+ 2 < fin
)
4001 ovf
= prepare_data_in_out (boolean_false_node
, idx
, &data_out
);
4002 ovf_out
= m_data
.pop ();
4006 cmp
= prepare_data_in_out (cmp
, idx
, &data_out
);
4007 cmp_out
= m_data
.pop ();
4011 if (i
!= 0 || startlimb
!= fin
- 1)
4014 bool single_comparison
4015 = (startlimb
+ 2 >= fin
|| (startlimb
& 1) != (i
& 1));
4016 if (!single_comparison
)
4019 if (!check_zero
&& (start
% limb_prec
) == 0)
4020 single_comparison
= true;
4022 else if ((startlimb
& 1) == (i
& 1))
4026 g
= gimple_build_cond (cmp_code
, idx
, size_int (startlimb
),
4027 NULL_TREE
, NULL_TREE
);
4028 edge edge_true_true
, edge_true_false
, edge_false
;
4030 if (!single_comparison
)
4031 g2
= gimple_build_cond (EQ_EXPR
, idx
,
4032 size_int (startlimb
), NULL_TREE
,
4034 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4035 profile_probability::unlikely (),
4036 edge_true_true
, edge_true_false
,
4038 unsigned tidx
= startlimb
+ (cmp_code
== GT_EXPR
);
4039 tree l
= arith_overflow_extract_bits (start
, end
, rhs
, tidx
,
4041 tree this_ovf
= make_ssa_name (boolean_type_node
);
4042 if (cmp_code
!= GT_EXPR
&& !check_zero
)
4044 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4046 build_int_cst (m_limb_type
, 1));
4048 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4049 gimple_assign_lhs (g
),
4050 build_int_cst (m_limb_type
, 1));
4053 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4055 if (cmp_code
== GT_EXPR
)
4057 tree t
= make_ssa_name (boolean_type_node
);
4058 g
= gimple_build_assign (t
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4062 tree this_ovf2
= NULL_TREE
;
4063 if (!single_comparison
)
4065 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4066 tree t
= make_ssa_name (boolean_type_node
);
4067 g
= gimple_build_assign (t
, NE_EXPR
, rhs
, cmp
);
4069 this_ovf2
= make_ssa_name (boolean_type_node
);
4070 g
= gimple_build_assign (this_ovf2
, BIT_IOR_EXPR
,
4074 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
/* Merge the per-branch overflow values with phis at the join.  */
4076 if (i
== 1 && ovf_out
)
4079 t
= make_ssa_name (boolean_type_node
);
4080 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
4081 add_phi_arg (phi
, this_ovf
, edge_true_false
,
4083 add_phi_arg (phi
, ovf
? ovf
4084 : boolean_false_node
, edge_false
,
4087 add_phi_arg (phi
, this_ovf2
, edge_true_true
,
4090 if (!check_zero
&& cmp_code
!= GT_EXPR
)
4092 t
= cmp_out
? cmp_out
: make_ssa_name (m_limb_type
);
4093 phi
= create_phi_node (t
, edge_true_false
->dest
);
4094 add_phi_arg (phi
, l
, edge_true_false
, UNKNOWN_LOCATION
);
4095 add_phi_arg (phi
, cmp
, edge_false
, UNKNOWN_LOCATION
);
4097 add_phi_arg (phi
, cmp
, edge_true_true
,
/* Store limb IDX of the result, guarding against writes past the
   destination's prec_limbs limbs when the loop runs further.  */
4107 if (tree_fits_uhwi_p (idx
) && tree_to_uhwi (idx
) >= prec_limbs
)
4109 else if (!tree_fits_uhwi_p (idx
)
4110 && (unsigned) prec
< (fin
- (i
== 0)) * limb_prec
)
4112 bool single_comparison
4113 = (((unsigned) prec
% limb_prec
) == 0
4114 || prec_limbs
+ 1 >= fin
4115 || (prec_limbs
& 1) == (i
& 1));
4116 g
= gimple_build_cond (LE_EXPR
, idx
, size_int (prec_limbs
- 1),
4117 NULL_TREE
, NULL_TREE
);
4119 if (!single_comparison
)
4120 g2
= gimple_build_cond (LT_EXPR
, idx
,
4121 size_int (prec_limbs
- 1),
4122 NULL_TREE
, NULL_TREE
);
4123 edge edge_true_true
, edge_true_false
, edge_false
;
4124 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4125 profile_probability::likely (),
4126 edge_true_true
, edge_true_false
,
4128 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4129 g
= gimple_build_assign (l
, rhs
);
4131 if (!single_comparison
)
4133 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4134 l
= limb_access (type
, var
? var
: obj
,
4135 size_int (prec_limbs
- 1), true);
4136 if (!useless_type_conversion_p (TREE_TYPE (l
),
4138 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4139 g
= gimple_build_assign (l
, rhs
);
4142 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4146 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4147 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4148 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4149 g
= gimple_build_assign (l
, rhs
);
/* Huge kind: advance the two-limbs-per-iteration loop counter and
   close the loop when IDX_NEXT reaches FIN.  */
4154 if (kind
== bitint_prec_huge
&& i
<= 1)
4158 idx
= make_ssa_name (sizetype
);
4159 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4165 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4168 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4169 NULL_TREE
, NULL_TREE
);
4171 m_gsi
= gsi_for_stmt (final_stmt
);
/* Store the result and overflow flag into their final locations.  */
4176 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
/* NOTE(review): garbled extraction -- statements split across physical
   lines, leading "NNNN" tokens are fused original line numbers, and many
   original lines are elided.  Kept byte-identical; comments only.  */
4179 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4180 argument or return type _Complex large/huge _BitInt. */
4183 bitint_large_huge::lower_mul_overflow (tree obj
, gimple
*stmt
)
4185 tree arg0
= gimple_call_arg (stmt
, 0);
4186 tree arg1
= gimple_call_arg (stmt
, 1);
4187 tree lhs
= gimple_call_lhs (stmt
);
4190 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4191 gsi_remove (&gsi
, true);
4194 gimple
*final_stmt
= gsi_stmt (m_gsi
);
4195 tree type
= TREE_TYPE (lhs
);
4196 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4197 type
= TREE_TYPE (type
);
/* Operands are passed by address to the IFN_MULBITINT libgcc helper;
   prec0/prec1 come back encoding precision and signedness.  prec2 is the
   precision of the infinite-precision product.  */
4198 int prec
= TYPE_PRECISION (type
), prec0
, prec1
;
4199 arg0
= handle_operand_addr (arg0
, stmt
, NULL
, &prec0
);
4200 arg1
= handle_operand_addr (arg1
, stmt
, NULL
, &prec1
);
4201 int prec2
= ((prec0
< 0 ? -prec0
: prec0
)
4202 + (prec1
< 0 ? -prec1
: prec1
)
4203 + ((prec0
< 0) != (prec1
< 0)));
4204 tree var
= NULL_TREE
;
4205 tree orig_obj
= obj
;
4206 bool force_var
= false;
4207 if (obj
== NULL_TREE
4208 && TREE_CODE (type
) == BITINT_TYPE
4209 && bitint_precision_kind (type
) >= bitint_prec_large
4211 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4213 int part
= var_to_partition (m_map
, lhs
);
4214 gcc_assert (m_vars
[part
] != NULL_TREE
);
4216 if (TREE_TYPE (lhs
) == type
)
/* Don't compute directly into OBJ if one of the operands aliases it.  */
4219 else if (obj
!= NULL_TREE
&& DECL_P (obj
))
4221 for (int i
= 0; i
< 2; ++i
)
4223 tree arg
= i
? arg1
: arg0
;
4224 if (TREE_CODE (arg
) == ADDR_EXPR
)
4225 arg
= TREE_OPERAND (arg
, 0);
4226 if (get_base_address (arg
) == obj
)
/* Use a temporary when OBJ is unusable or too small for prec2 limbs.  */
4233 if (obj
== NULL_TREE
4235 || TREE_CODE (type
) != BITINT_TYPE
4236 || bitint_precision_kind (type
) < bitint_prec_large
4237 || prec2
> (CEIL (prec
, limb_prec
) * limb_prec
* (orig_obj
? 1 : 2)))
4239 unsigned HOST_WIDE_INT nelts
= CEIL (MAX (prec
, prec2
), limb_prec
);
4240 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4241 var
= create_tmp_var (atype
);
4243 tree addr
= build_fold_addr_expr (var
? var
: obj
);
4244 addr
= force_gimple_operand_gsi (&m_gsi
, addr
, true,
4245 NULL_TREE
, true, GSI_SAME_STMT
);
4246 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
/* The multiplication itself is done out of line.  */
4248 = gimple_build_call_internal (IFN_MULBITINT
, 6,
4249 addr
, build_int_cst (sitype
,
4251 arg0
, build_int_cst (sitype
, prec0
),
4252 arg1
, build_int_cst (sitype
, prec1
));
4255 unsigned start
, end
;
4257 tree ovf
= arith_overflow (MULT_EXPR
, type
, prec
, prec0
, prec1
, prec2
,
4258 &start
, &end
, &check_zero
);
/* If overflow isn't statically known, inspect limbs [startlimb, endlimb]
   of the product for the required all-zero / all-sign pattern.  */
4259 if (ovf
== NULL_TREE
)
4261 unsigned startlimb
= start
/ limb_prec
;
4262 unsigned endlimb
= (end
- 1) / limb_prec
;
4264 bool use_loop
= false;
4265 if (startlimb
== endlimb
)
4267 else if (startlimb
+ 1 == endlimb
)
4269 else if ((end
% limb_prec
) == 0)
4277 use_loop
= startlimb
+ 2 < endlimb
;
/* Single-limb check variant.  */
4281 tree l
= limb_access (NULL_TREE
, var
? var
: obj
,
4282 size_int (startlimb
), true);
4283 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4285 l
= arith_overflow_extract_bits (start
, end
, gimple_assign_lhs (g
),
4286 startlimb
, check_zero
);
4287 ovf
= make_ssa_name (boolean_type_node
);
4289 g
= gimple_build_assign (ovf
, NE_EXPR
, l
,
4290 build_zero_cst (m_limb_type
));
4293 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4295 build_int_cst (m_limb_type
, 1));
4297 g
= gimple_build_assign (ovf
, GT_EXPR
, gimple_assign_lhs (g
),
4298 build_int_cst (m_limb_type
, 1));
/* Multi-limb variant: check each relevant limb, early-exiting to the
   final block on the first mismatch; middle limbs may be scanned with
   a loop when there are more than two of them.  */
4304 basic_block edge_bb
= NULL
;
4305 gimple_stmt_iterator gsi
= m_gsi
;
4307 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4309 m_gsi
= gsi_end_bb (edge_bb
);
4311 tree cmp
= build_zero_cst (m_limb_type
);
4312 for (unsigned i
= 0; i
< cnt
; i
++)
4314 tree idx
, idx_next
= NULL_TREE
;
4316 idx
= size_int (startlimb
);
4318 idx
= size_int (endlimb
);
4320 idx
= create_loop (size_int (startlimb
+ 1), &idx_next
);
4322 idx
= size_int (startlimb
+ 1);
4323 tree l
= limb_access (NULL_TREE
, var
? var
: obj
, idx
, true);
4324 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4326 l
= gimple_assign_lhs (g
);
4327 if (i
== 0 || i
== 2)
4328 l
= arith_overflow_extract_bits (start
, end
, l
,
4331 if (i
== 0 && !check_zero
)
4334 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4336 build_int_cst (m_limb_type
, 1));
4338 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4339 build_int_cst (m_limb_type
, 1),
4340 NULL_TREE
, NULL_TREE
);
4343 g
= gimple_build_cond (NE_EXPR
, l
, cmp
, NULL_TREE
, NULL_TREE
);
4345 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4346 e1
->flags
= EDGE_FALSE_VALUE
;
4347 edge e2
= make_edge (e1
->src
, gimple_bb (final_stmt
),
4349 e1
->probability
= profile_probability::likely ();
4350 e2
->probability
= e1
->probability
.invert ();
4352 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4353 m_gsi
= gsi_after_labels (e1
->dest
);
4354 if (i
== 1 && use_loop
)
4356 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4359 g
= gimple_build_cond (NE_EXPR
, idx_next
,
4360 size_int (endlimb
+ (cnt
== 1)),
4361 NULL_TREE
, NULL_TREE
);
4363 edge true_edge
, false_edge
;
4364 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4367 m_gsi
= gsi_after_labels (false_edge
->dest
);
/* Merge: OVF is false when control reaches the final block through the
   fallthrough edge, true through any early-exit edge.  */
4371 ovf
= make_ssa_name (boolean_type_node
);
4372 basic_block bb
= gimple_bb (final_stmt
);
4373 gphi
*phi
= create_phi_node (ovf
, bb
);
4374 edge e1
= find_edge (gsi_bb (m_gsi
), bb
);
4376 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4378 tree val
= e
== e1
? boolean_false_node
: boolean_true_node
;
4379 add_phi_arg (phi
, val
, e
, UNKNOWN_LOCATION
);
4381 m_gsi
= gsi_for_stmt (final_stmt
);
4385 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, MULT_EXPR
);
/* NOTE(review): garbled extraction -- statements split across lines, the
   leading "NNNN" tokens are fused original line numbers; some original
   lines are elided.  Kept byte-identical; comments only.  */
4388 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4389 .{ADD,SUB,MUL}_OVERFLOW call. */
4392 bitint_large_huge::lower_cplxpart_stmt (tree obj
, gimple
*stmt
)
4394 tree rhs1
= gimple_assign_rhs1 (stmt
);
/* RHS1 is the _Complex operand of the {REAL,IMAG}PART_EXPR.  */
4395 rhs1
= TREE_OPERAND (rhs1
, 0);
4396 if (obj
== NULL_TREE
)
4398 int part
= var_to_partition (m_map
, gimple_assign_lhs (stmt
));
4399 gcc_assert (m_vars
[part
] != NULL_TREE
);
/* If the defining overflow call hasn't been assigned a partition var,
   lower it now directly into OBJ.  */
4402 if (TREE_CODE (rhs1
) == SSA_NAME
4404 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
4406 lower_call (obj
, SSA_NAME_DEF_STMT (rhs1
));
/* Otherwise block-copy the requested half out of the partition variable
   holding the complex value: REALPART at offset 0, IMAGPART at
   nelts * m_limb_size.  */
4409 int part
= var_to_partition (m_map
, rhs1
);
4410 gcc_assert (m_vars
[part
] != NULL_TREE
);
4411 tree var
= m_vars
[part
];
4412 unsigned HOST_WIDE_INT nelts
4413 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
4414 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4415 if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
4416 obj
= build1 (VIEW_CONVERT_EXPR
, atype
, obj
);
4417 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (var
)),
4418 gimple_assign_rhs_code (stmt
) == REALPART_EXPR
4419 ? 0 : nelts
* m_limb_size
);
4420 tree v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), off
);
4421 gimple
*g
= gimple_build_assign (obj
, v2
);
/* NOTE(review): garbled extraction -- statements split across lines, the
   leading "NNNN" tokens are fused original line numbers; some original
   lines are elided.  Kept byte-identical; comments only.  */
4425 /* Lower COMPLEX_EXPR stmt. */
4428 bitint_large_huge::lower_complexexpr_stmt (gimple
*stmt
)
4430 tree lhs
= gimple_assign_lhs (stmt
);
4431 tree rhs1
= gimple_assign_rhs1 (stmt
);
4432 tree rhs2
= gimple_assign_rhs2 (stmt
);
4433 int part
= var_to_partition (m_map
, lhs
);
4434 gcc_assert (m_vars
[part
] != NULL_TREE
);
/* Copy the real part (RHS1) into the first half of the LHS storage.  */
4436 unsigned HOST_WIDE_INT nelts
4437 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1
))) / limb_prec
;
4438 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4439 tree zero
= build_zero_cst (build_pointer_type (TREE_TYPE (lhs
)));
4440 tree v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), zero
);
4442 if (TREE_CODE (rhs1
) == SSA_NAME
)
4444 part
= var_to_partition (m_map
, rhs1
);
4445 gcc_assert (m_vars
[part
] != NULL_TREE
);
4448 else if (integer_zerop (rhs1
))
4449 v2
= build_zero_cst (atype
);
/* Non-zero constant operand: materialize it in the constant pool.  */
4451 v2
= tree_output_constant_def (rhs1
);
4452 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4453 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4454 gimple
*g
= gimple_build_assign (v1
, v2
);
/* Copy the imaginary part (RHS2) into the second half, offset by the
   size of the real-part array.  */
4456 tree off
= fold_convert (build_pointer_type (TREE_TYPE (lhs
)),
4457 TYPE_SIZE_UNIT (atype
));
4458 v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), off
);
4459 if (TREE_CODE (rhs2
) == SSA_NAME
)
4461 part
= var_to_partition (m_map
, rhs2
);
4462 gcc_assert (m_vars
[part
] != NULL_TREE
);
4465 else if (integer_zerop (rhs2
))
4466 v2
= build_zero_cst (atype
);
4468 v2
= tree_output_constant_def (rhs2
);
4469 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4470 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4471 g
= gimple_build_assign (v1
, v2
);
4475 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4479 bitint_large_huge::lower_bit_query (gimple
*stmt
)
4481 tree arg0
= gimple_call_arg (stmt
, 0);
4482 tree arg1
= (gimple_call_num_args (stmt
) == 2
4483 ? gimple_call_arg (stmt
, 1) : NULL_TREE
);
4484 tree lhs
= gimple_call_lhs (stmt
);
4489 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4490 gsi_remove (&gsi
, true);
4493 tree type
= TREE_TYPE (arg0
);
4494 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
4495 bitint_prec_kind kind
= bitint_precision_kind (type
);
4496 gcc_assert (kind
>= bitint_prec_large
);
4497 enum internal_fn ifn
= gimple_call_internal_fn (stmt
);
4498 enum built_in_function fcode
= END_BUILTINS
;
4499 gcc_assert (TYPE_PRECISION (unsigned_type_node
) == limb_prec
4500 || TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
4501 || TYPE_PRECISION (long_long_unsigned_type_node
) == limb_prec
);
4505 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4506 fcode
= BUILT_IN_CLZ
;
4507 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4508 fcode
= BUILT_IN_CLZL
;
4510 fcode
= BUILT_IN_CLZLL
;
4513 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4514 we don't add the addend at the end. */
4515 arg1
= integer_zero_node
;
4518 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4519 fcode
= BUILT_IN_CTZ
;
4520 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4521 fcode
= BUILT_IN_CTZL
;
4523 fcode
= BUILT_IN_CTZLL
;
4527 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4528 fcode
= BUILT_IN_CLRSB
;
4529 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4530 fcode
= BUILT_IN_CLRSBL
;
4532 fcode
= BUILT_IN_CLRSBLL
;
4535 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4536 fcode
= BUILT_IN_PARITY
;
4537 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4538 fcode
= BUILT_IN_PARITYL
;
4540 fcode
= BUILT_IN_PARITYLL
;
4544 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4545 fcode
= BUILT_IN_POPCOUNT
;
4546 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4547 fcode
= BUILT_IN_POPCOUNTL
;
4549 fcode
= BUILT_IN_POPCOUNTLL
;
4555 tree fndecl
= builtin_decl_explicit (fcode
), res
= NULL_TREE
;
4556 unsigned cnt
= 0, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
4557 struct bq_details
{ edge e
; tree val
, addend
; } *bqp
= NULL
;
4558 basic_block edge_bb
= NULL
;
4561 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
4562 if (kind
== bitint_prec_large
)
4563 cnt
= CEIL (prec
, limb_prec
);
4566 rem
= (prec
% (2 * limb_prec
));
4567 end
= (prec
- rem
) / limb_prec
;
4568 cnt
= 2 + CEIL (rem
, limb_prec
);
4569 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4572 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4574 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4576 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4578 if (kind
== bitint_prec_large
)
4579 m_gsi
= gsi_end_bb (edge_bb
);
4580 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4583 m_after_stmt
= stmt
;
4584 if (kind
!= bitint_prec_large
)
4585 m_upwards_2limb
= end
;
4587 for (unsigned i
= 0; i
< cnt
; i
++)
4590 if (kind
== bitint_prec_large
)
4593 idx
= size_int (end
+ (i
> 2));
4595 tree rhs1
= handle_operand (arg0
, idx
);
4596 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4598 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4599 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4600 rhs1
= add_cast (m_limb_type
, rhs1
);
4604 if (ifn
== IFN_PARITY
)
4605 in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
4606 else if (ifn
== IFN_FFS
)
4607 in
= prepare_data_in_out (integer_one_node
, idx
, &out
);
4609 in
= prepare_data_in_out (integer_zero_node
, idx
, &out
);
4615 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4616 build_zero_cst (m_limb_type
),
4617 NULL_TREE
, NULL_TREE
);
4620 e1
= split_block (gsi_bb (m_gsi
), g
);
4621 e1
->flags
= EDGE_FALSE_VALUE
;
4622 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4623 e1
->probability
= profile_probability::unlikely ();
4624 e2
->probability
= e1
->probability
.invert ();
4626 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4627 m_gsi
= gsi_after_labels (e1
->dest
);
4630 if (tree_fits_uhwi_p (idx
))
4632 = build_int_cst (integer_type_node
,
4633 tree_to_uhwi (idx
) * limb_prec
4634 + (ifn
== IFN_FFS
));
4641 res
= make_ssa_name (integer_type_node
);
4642 g
= gimple_build_assign (res
, PLUS_EXPR
, in
,
4643 build_int_cst (integer_type_node
,
4646 m_data
[m_data_cnt
] = res
;
4650 if (!integer_zerop (in
))
4652 if (kind
== bitint_prec_huge
&& i
== 1)
4655 res
= make_ssa_name (m_limb_type
);
4656 g
= gimple_build_assign (res
, BIT_XOR_EXPR
, in
, rhs1
);
4661 m_data
[m_data_cnt
] = res
;
4664 g
= gimple_build_call (fndecl
, 1, rhs1
);
4665 tem
= make_ssa_name (integer_type_node
);
4666 gimple_call_set_lhs (g
, tem
);
4668 if (!integer_zerop (in
))
4670 if (kind
== bitint_prec_huge
&& i
== 1)
4673 res
= make_ssa_name (integer_type_node
);
4674 g
= gimple_build_assign (res
, PLUS_EXPR
, in
, tem
);
4679 m_data
[m_data_cnt
] = res
;
4686 if (kind
== bitint_prec_huge
&& i
<= 1)
4690 idx
= make_ssa_name (sizetype
);
4691 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4697 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4700 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
4701 NULL_TREE
, NULL_TREE
);
4703 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4704 m_gsi
= gsi_after_labels (edge_bb
);
4706 m_gsi
= gsi_for_stmt (stmt
);
4713 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
, first
= NULL_TREE
;
4715 if (kind
== bitint_prec_large
)
4716 cnt
= CEIL (prec
, limb_prec
);
4719 rem
= prec
% limb_prec
;
4720 if (rem
== 0 && (!TYPE_UNSIGNED (type
) || ifn
== IFN_CLRSB
))
4722 end
= (prec
- rem
) / limb_prec
;
4723 cnt
= 1 + (rem
!= 0);
4724 if (ifn
== IFN_CLRSB
)
4728 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4730 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4732 m_gsi
= gsi_end_bb (edge_bb
);
4735 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4738 gsi
= gsi_for_stmt (stmt
);
4740 e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4742 bqp
= XALLOCAVEC (struct bq_details
, 2 * cnt
);
4745 for (unsigned i
= 0; i
< cnt
; i
++)
4748 if (kind
== bitint_prec_large
)
4749 idx
= size_int (cnt
- i
- 1);
4750 else if (i
== cnt
- 1)
4751 idx
= create_loop (size_int (end
- 1), &idx_next
);
4753 idx
= size_int (end
);
4755 tree rhs1
= handle_operand (arg0
, idx
);
4756 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4758 if (ifn
== IFN_CLZ
&& !TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4759 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4760 else if (ifn
== IFN_CLRSB
&& TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4761 rhs1
= add_cast (signed_type_for (TREE_TYPE (rhs1
)), rhs1
);
4762 rhs1
= add_cast (m_limb_type
, rhs1
);
4767 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4768 build_zero_cst (m_limb_type
),
4769 NULL_TREE
, NULL_TREE
);
4771 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4772 e1
->flags
= EDGE_FALSE_VALUE
;
4773 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4774 e1
->probability
= profile_probability::unlikely ();
4775 e2
->probability
= e1
->probability
.invert ();
4777 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4778 m_gsi
= gsi_after_labels (e1
->dest
);
4787 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4789 build_int_cst (m_limb_type
, 1));
4791 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4792 build_int_cst (m_limb_type
, 1),
4793 NULL_TREE
, NULL_TREE
);
4798 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4799 BIT_XOR_EXPR
, rhs1
, first
);
4801 tree stype
= signed_type_for (m_limb_type
);
4802 g
= gimple_build_cond (LT_EXPR
,
4804 gimple_assign_lhs (g
)),
4805 build_zero_cst (stype
),
4806 NULL_TREE
, NULL_TREE
);
4808 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4809 e1
->flags
= EDGE_FALSE_VALUE
;
4810 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
),
4812 e1
->probability
= profile_probability::unlikely ();
4813 e2
->probability
= e1
->probability
.invert ();
4815 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
,
4817 m_gsi
= gsi_after_labels (e1
->dest
);
4819 g
= gimple_build_cond (NE_EXPR
, rhs1
, first
,
4820 NULL_TREE
, NULL_TREE
);
4823 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4824 e1
->flags
= EDGE_FALSE_VALUE
;
4825 edge e2
= make_edge (e1
->src
, edge_bb
, EDGE_TRUE_VALUE
);
4826 e1
->probability
= profile_probability::unlikely ();
4827 e2
->probability
= e1
->probability
.invert ();
4829 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4830 m_gsi
= gsi_after_labels (e1
->dest
);
4831 bqp
[2 * i
+ 1].e
= e2
;
4834 if (tree_fits_uhwi_p (idx
))
4836 = build_int_cst (integer_type_node
,
4838 - (((int) tree_to_uhwi (idx
) + 1)
4839 * limb_prec
) - sub_one
);
4843 in
= build_int_cst (integer_type_node
, rem
- sub_one
);
4845 in
= prepare_data_in_out (in
, idx
, &out
);
4846 out
= m_data
[m_data_cnt
+ 1];
4848 g
= gimple_build_assign (out
, PLUS_EXPR
, in
,
4849 build_int_cst (integer_type_node
,
4852 m_data
[m_data_cnt
] = out
;
4856 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
4858 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4861 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
4862 NULL_TREE
, NULL_TREE
);
4864 edge true_edge
, false_edge
;
4865 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4866 &true_edge
, &false_edge
);
4867 m_gsi
= gsi_after_labels (false_edge
->dest
);
4876 gphi
*phi1
, *phi2
, *phi3
;
4878 bb
= gsi_bb (m_gsi
);
4879 remove_edge (find_edge (bb
, gimple_bb (stmt
)));
4880 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
4882 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
4884 for (unsigned i
= 0; i
< cnt
; i
++)
4886 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[i
].e
, UNKNOWN_LOCATION
);
4887 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[i
].e
, UNKNOWN_LOCATION
);
4889 if (arg1
== NULL_TREE
)
4891 g
= gimple_build_builtin_unreachable (m_loc
);
4894 m_gsi
= gsi_for_stmt (stmt
);
4895 g
= gimple_build_call (fndecl
, 1, gimple_phi_result (phi1
));
4896 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
4898 if (arg1
== NULL_TREE
)
4899 g
= gimple_build_assign (lhs
, PLUS_EXPR
,
4900 gimple_phi_result (phi2
),
4901 gimple_call_lhs (g
));
4904 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
4905 PLUS_EXPR
, gimple_phi_result (phi2
),
4906 gimple_call_lhs (g
));
4908 edge e1
= split_block (gimple_bb (stmt
), g
);
4909 edge e2
= make_edge (bb
, e1
->dest
, EDGE_FALLTHRU
);
4910 e2
->probability
= profile_probability::always ();
4911 set_immediate_dominator (CDI_DOMINATORS
, e1
->dest
,
4912 get_immediate_dominator (CDI_DOMINATORS
,
4914 phi3
= create_phi_node (make_ssa_name (integer_type_node
), e1
->dest
);
4915 add_phi_arg (phi3
, gimple_assign_lhs (g
), e1
, UNKNOWN_LOCATION
);
4916 add_phi_arg (phi3
, arg1
, e2
, UNKNOWN_LOCATION
);
4917 m_gsi
= gsi_for_stmt (stmt
);
4918 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
4920 gsi_replace (&m_gsi
, g
, true);
4923 bb
= gsi_bb (m_gsi
);
4924 remove_edge (find_edge (bb
, edge_bb
));
4926 e
= make_edge (bb
, gimple_bb (stmt
), EDGE_FALLTHRU
);
4927 e
->probability
= profile_probability::always ();
4928 set_immediate_dominator (CDI_DOMINATORS
, gimple_bb (stmt
),
4929 get_immediate_dominator (CDI_DOMINATORS
,
4931 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
4933 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
4935 phi3
= create_phi_node (make_ssa_name (integer_type_node
),
4937 for (unsigned i
= 0; i
< cnt
; i
++)
4939 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[2 * i
+ 1].e
, UNKNOWN_LOCATION
);
4940 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[2 * i
+ 1].e
,
4942 tree a
= bqp
[i
].addend
;
4943 if (i
&& kind
== bitint_prec_large
)
4944 a
= int_const_binop (PLUS_EXPR
, a
, integer_minus_one_node
);
4946 add_phi_arg (phi3
, a
, bqp
[2 * i
].e
, UNKNOWN_LOCATION
);
4948 add_phi_arg (phi3
, build_int_cst (integer_type_node
, prec
- 1), e
,
4950 m_gsi
= gsi_after_labels (edge_bb
);
4951 g
= gimple_build_call (fndecl
, 1,
4952 add_cast (signed_type_for (m_limb_type
),
4953 gimple_phi_result (phi1
)));
4954 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
4956 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
4957 PLUS_EXPR
, gimple_call_lhs (g
),
4958 gimple_phi_result (phi2
));
4960 if (kind
!= bitint_prec_large
)
4962 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
4963 PLUS_EXPR
, gimple_assign_lhs (g
),
4967 add_phi_arg (phi3
, gimple_assign_lhs (g
),
4968 find_edge (edge_bb
, gimple_bb (stmt
)), UNKNOWN_LOCATION
);
4969 m_gsi
= gsi_for_stmt (stmt
);
4970 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
4971 gsi_replace (&m_gsi
, g
, true);
4974 g
= gimple_build_call (fndecl
, 1, res
);
4975 gimple_call_set_lhs (g
, lhs
);
4976 gsi_replace (&m_gsi
, g
, true);
4979 g
= gimple_build_assign (lhs
, res
);
4980 gsi_replace (&m_gsi
, g
, true);
4987 /* Lower a call statement with one or more large/huge _BitInt
4988 arguments or large/huge _BitInt return value. */
4991 bitint_large_huge::lower_call (tree obj
, gimple
*stmt
)
4993 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4994 unsigned int nargs
= gimple_call_num_args (stmt
);
4995 if (gimple_call_internal_p (stmt
))
4996 switch (gimple_call_internal_fn (stmt
))
4998 case IFN_ADD_OVERFLOW
:
4999 case IFN_SUB_OVERFLOW
:
5000 case IFN_UBSAN_CHECK_ADD
:
5001 case IFN_UBSAN_CHECK_SUB
:
5002 lower_addsub_overflow (obj
, stmt
);
5004 case IFN_MUL_OVERFLOW
:
5005 case IFN_UBSAN_CHECK_MUL
:
5006 lower_mul_overflow (obj
, stmt
);
5014 lower_bit_query (stmt
);
5019 for (unsigned int i
= 0; i
< nargs
; ++i
)
5021 tree arg
= gimple_call_arg (stmt
, i
);
5022 if (TREE_CODE (arg
) != SSA_NAME
5023 || TREE_CODE (TREE_TYPE (arg
)) != BITINT_TYPE
5024 || bitint_precision_kind (TREE_TYPE (arg
)) <= bitint_prec_middle
)
5026 int p
= var_to_partition (m_map
, arg
);
5028 gcc_assert (v
!= NULL_TREE
);
5029 if (!types_compatible_p (TREE_TYPE (arg
), TREE_TYPE (v
)))
5030 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (arg
), v
);
5031 arg
= make_ssa_name (TREE_TYPE (arg
));
5032 gimple
*g
= gimple_build_assign (arg
, v
);
5033 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5034 gimple_call_set_arg (stmt
, i
, arg
);
5035 if (m_preserved
== NULL
)
5036 m_preserved
= BITMAP_ALLOC (NULL
);
5037 bitmap_set_bit (m_preserved
, SSA_NAME_VERSION (arg
));
5039 tree lhs
= gimple_call_lhs (stmt
);
5041 && TREE_CODE (lhs
) == SSA_NAME
5042 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5043 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5045 int p
= var_to_partition (m_map
, lhs
);
5047 gcc_assert (v
!= NULL_TREE
);
5048 if (!types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (v
)))
5049 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
), v
);
5050 gimple_call_set_lhs (stmt
, v
);
5051 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5056 /* Lower __asm STMT which involves large/huge _BitInt values. */
5059 bitint_large_huge::lower_asm (gimple
*stmt
)
5061 gasm
*g
= as_a
<gasm
*> (stmt
);
5062 unsigned noutputs
= gimple_asm_noutputs (g
);
5063 unsigned ninputs
= gimple_asm_ninputs (g
);
5065 for (unsigned i
= 0; i
< noutputs
; ++i
)
5067 tree t
= gimple_asm_output_op (g
, i
);
5068 tree s
= TREE_VALUE (t
);
5069 if (TREE_CODE (s
) == SSA_NAME
5070 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5071 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5073 int part
= var_to_partition (m_map
, s
);
5074 gcc_assert (m_vars
[part
] != NULL_TREE
);
5075 TREE_VALUE (t
) = m_vars
[part
];
5078 for (unsigned i
= 0; i
< ninputs
; ++i
)
5080 tree t
= gimple_asm_input_op (g
, i
);
5081 tree s
= TREE_VALUE (t
);
5082 if (TREE_CODE (s
) == SSA_NAME
5083 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5084 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5086 int part
= var_to_partition (m_map
, s
);
5087 gcc_assert (m_vars
[part
] != NULL_TREE
);
5088 TREE_VALUE (t
) = m_vars
[part
];
5094 /* Lower statement STMT which involves large/huge _BitInt values
5095 into code accessing individual limbs. */
5098 bitint_large_huge::lower_stmt (gimple
*stmt
)
5102 m_data
.truncate (0);
5104 m_gsi
= gsi_for_stmt (stmt
);
5105 m_after_stmt
= NULL
;
5108 gsi_prev (&m_init_gsi
);
5109 m_preheader_bb
= NULL
;
5110 m_upwards_2limb
= 0;
5113 m_cast_conditional
= false;
5115 m_loc
= gimple_location (stmt
);
5116 if (is_gimple_call (stmt
))
5118 lower_call (NULL_TREE
, stmt
);
5121 if (gimple_code (stmt
) == GIMPLE_ASM
)
5126 tree lhs
= NULL_TREE
, cmp_op1
= NULL_TREE
, cmp_op2
= NULL_TREE
;
5127 tree_code cmp_code
= comparison_op (stmt
, &cmp_op1
, &cmp_op2
);
5128 bool eq_p
= (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
);
5129 bool mergeable_cast_p
= false;
5130 bool final_cast_p
= false;
5131 if (gimple_assign_cast_p (stmt
))
5133 lhs
= gimple_assign_lhs (stmt
);
5134 tree rhs1
= gimple_assign_rhs1 (stmt
);
5135 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5136 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5137 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)))
5138 mergeable_cast_p
= true;
5139 else if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
5140 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
5141 && INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
5143 final_cast_p
= true;
5144 if (TREE_CODE (rhs1
) == SSA_NAME
5146 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5148 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5149 if (is_gimple_assign (g
)
5150 && gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
5152 tree rhs2
= TREE_OPERAND (gimple_assign_rhs1 (g
), 0);
5153 if (TREE_CODE (rhs2
) == SSA_NAME
5155 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs2
))))
5157 g
= SSA_NAME_DEF_STMT (rhs2
);
5158 int ovf
= optimizable_arith_overflow (g
);
5160 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5161 and IMAGPART_EXPR uses, where the latter is cast to
5162 non-_BitInt, it will be optimized when handling
5163 the REALPART_EXPR. */
5167 lower_call (NULL_TREE
, g
);
5175 if (gimple_store_p (stmt
))
5177 tree rhs1
= gimple_assign_rhs1 (stmt
);
5178 if (TREE_CODE (rhs1
) == SSA_NAME
5180 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5182 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5183 m_loc
= gimple_location (g
);
5184 lhs
= gimple_assign_lhs (stmt
);
5185 if (is_gimple_assign (g
) && !mergeable_op (g
))
5186 switch (gimple_assign_rhs_code (g
))
5190 lower_shift_stmt (lhs
, g
);
5192 m_gsi
= gsi_for_stmt (stmt
);
5193 unlink_stmt_vdef (stmt
);
5194 release_ssa_name (gimple_vdef (stmt
));
5195 gsi_remove (&m_gsi
, true);
5198 case TRUNC_DIV_EXPR
:
5199 case TRUNC_MOD_EXPR
:
5200 lower_muldiv_stmt (lhs
, g
);
5202 case FIX_TRUNC_EXPR
:
5203 lower_float_conv_stmt (lhs
, g
);
5207 lower_cplxpart_stmt (lhs
, g
);
5212 else if (optimizable_arith_overflow (g
) == 3)
5214 lower_call (lhs
, g
);
5217 m_loc
= gimple_location (stmt
);
5220 if (mergeable_op (stmt
)
5221 || gimple_store_p (stmt
)
5222 || gimple_assign_load_p (stmt
)
5224 || mergeable_cast_p
)
5226 lhs
= lower_mergeable_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5230 else if (cmp_code
!= ERROR_MARK
)
5231 lhs
= lower_comparison_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5232 if (cmp_code
!= ERROR_MARK
)
5234 if (gimple_code (stmt
) == GIMPLE_COND
)
5236 gcond
*cstmt
= as_a
<gcond
*> (stmt
);
5237 gimple_cond_set_lhs (cstmt
, lhs
);
5238 gimple_cond_set_rhs (cstmt
, boolean_false_node
);
5239 gimple_cond_set_code (cstmt
, cmp_code
);
5243 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
5245 tree cond
= build2 (cmp_code
, boolean_type_node
, lhs
,
5246 boolean_false_node
);
5247 gimple_assign_set_rhs1 (stmt
, cond
);
5248 lhs
= gimple_assign_lhs (stmt
);
5249 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5250 || (bitint_precision_kind (TREE_TYPE (lhs
))
5251 <= bitint_prec_middle
));
5255 gimple_assign_set_rhs1 (stmt
, lhs
);
5256 gimple_assign_set_rhs2 (stmt
, boolean_false_node
);
5257 gimple_assign_set_rhs_code (stmt
, cmp_code
);
5263 tree lhs_type
= TREE_TYPE (lhs
);
5264 /* Add support for 3 or more limbs filled in from normal integral
5265 type if this assert fails. If no target chooses limb mode smaller
5266 than half of largest supported normal integral type, this will not
5268 gcc_assert (TYPE_PRECISION (lhs_type
) <= 2 * limb_prec
);
5270 if (TREE_CODE (lhs_type
) == BITINT_TYPE
5271 && bitint_precision_kind (lhs_type
) == bitint_prec_middle
)
5272 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (lhs_type
),
5273 TYPE_UNSIGNED (lhs_type
));
5275 tree rhs1
= gimple_assign_rhs1 (stmt
);
5276 tree r1
= handle_operand (rhs1
, size_int (0));
5277 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
5278 r1
= add_cast (lhs_type
, r1
);
5279 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
5283 tree r2
= handle_operand (rhs1
, size_int (1));
5284 r2
= add_cast (lhs_type
, r2
);
5285 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
5286 build_int_cst (unsigned_type_node
,
5289 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
5290 gimple_assign_lhs (g
));
5292 r1
= gimple_assign_lhs (g
);
5294 if (lhs_type
!= TREE_TYPE (lhs
))
5295 g
= gimple_build_assign (lhs
, NOP_EXPR
, r1
);
5297 g
= gimple_build_assign (lhs
, r1
);
5298 gsi_replace (&m_gsi
, g
, true);
5301 if (is_gimple_assign (stmt
))
5302 switch (gimple_assign_rhs_code (stmt
))
5306 lower_shift_stmt (NULL_TREE
, stmt
);
5309 case TRUNC_DIV_EXPR
:
5310 case TRUNC_MOD_EXPR
:
5311 lower_muldiv_stmt (NULL_TREE
, stmt
);
5313 case FIX_TRUNC_EXPR
:
5315 lower_float_conv_stmt (NULL_TREE
, stmt
);
5319 lower_cplxpart_stmt (NULL_TREE
, stmt
);
5322 lower_complexexpr_stmt (stmt
);
5330 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5331 the desired memory state. */
5334 vuse_eq (ao_ref
*, tree vuse1
, void *data
)
5336 tree vuse2
= (tree
) data
;
5343 /* Return true if STMT uses a library function and needs to take
5344 address of its inputs. We need to avoid bit-fields in those
5348 stmt_needs_operand_addr (gimple
*stmt
)
5350 if (is_gimple_assign (stmt
))
5351 switch (gimple_assign_rhs_code (stmt
))
5354 case TRUNC_DIV_EXPR
:
5355 case TRUNC_MOD_EXPR
:
5361 else if (gimple_call_internal_p (stmt
, IFN_MUL_OVERFLOW
)
5362 || gimple_call_internal_p (stmt
, IFN_UBSAN_CHECK_MUL
))
5367 /* Dominator walker used to discover which large/huge _BitInt
5368 loads could be sunk into all their uses. */
5370 class bitint_dom_walker
: public dom_walker
5373 bitint_dom_walker (bitmap names
, bitmap loads
)
5374 : dom_walker (CDI_DOMINATORS
), m_names (names
), m_loads (loads
) {}
5376 edge
before_dom_children (basic_block
) final override
;
5379 bitmap m_names
, m_loads
;
5383 bitint_dom_walker::before_dom_children (basic_block bb
)
5385 gphi
*phi
= get_virtual_phi (bb
);
5388 vop
= gimple_phi_result (phi
);
5389 else if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5392 vop
= (tree
) get_immediate_dominator (CDI_DOMINATORS
, bb
)->aux
;
5394 auto_vec
<tree
, 16> worklist
;
5395 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5396 !gsi_end_p (gsi
); gsi_next (&gsi
))
5398 gimple
*stmt
= gsi_stmt (gsi
);
5399 if (is_gimple_debug (stmt
))
5402 if (!vop
&& gimple_vuse (stmt
))
5403 vop
= gimple_vuse (stmt
);
5406 if (gimple_vdef (stmt
))
5407 vop
= gimple_vdef (stmt
);
5409 tree lhs
= gimple_get_lhs (stmt
);
5411 && TREE_CODE (lhs
) == SSA_NAME
5412 && TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5413 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5414 && !bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
5415 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5416 it means it will be handled in a loop or straight line code
5417 at the location of its (ultimate) immediate use, so for
5418 vop checking purposes check these only at the ultimate
5423 use_operand_p use_p
;
5424 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, oi
, SSA_OP_USE
)
5426 tree s
= USE_FROM_PTR (use_p
);
5427 if (TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5428 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5429 worklist
.safe_push (s
);
5432 bool needs_operand_addr
= stmt_needs_operand_addr (stmt
);
5433 while (worklist
.length () > 0)
5435 tree s
= worklist
.pop ();
5437 if (!bitmap_bit_p (m_names
, SSA_NAME_VERSION (s
)))
5439 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5440 needs_operand_addr
|= stmt_needs_operand_addr (g
);
5441 FOR_EACH_SSA_USE_OPERAND (use_p
, g
, oi
, SSA_OP_USE
)
5443 tree s2
= USE_FROM_PTR (use_p
);
5444 if (TREE_CODE (TREE_TYPE (s2
)) == BITINT_TYPE
5445 && (bitint_precision_kind (TREE_TYPE (s2
))
5446 >= bitint_prec_large
))
5447 worklist
.safe_push (s2
);
5451 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5452 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5454 tree rhs
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5455 if (TREE_CODE (rhs
) == SSA_NAME
5456 && bitmap_bit_p (m_loads
, SSA_NAME_VERSION (rhs
)))
5461 else if (!bitmap_bit_p (m_loads
, SSA_NAME_VERSION (s
)))
5464 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5465 if (needs_operand_addr
5466 && TREE_CODE (rhs1
) == COMPONENT_REF
5467 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
5469 tree fld
= TREE_OPERAND (rhs1
, 1);
5470 /* For little-endian, we can allow as inputs bit-fields
5471 which start at a limb boundary. */
5472 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
5473 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
))
5474 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
5479 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5485 ao_ref_init (&ref
, rhs1
);
5486 tree lvop
= gimple_vuse (SSA_NAME_DEF_STMT (s
));
5487 unsigned limit
= 64;
5490 && is_gimple_assign (stmt
)
5491 && gimple_store_p (stmt
)
5492 && !operand_equal_p (lhs
,
5493 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
)),
5497 && walk_non_aliased_vuses (&ref
, vuse
, false, vuse_eq
,
5498 NULL
, NULL
, limit
, lvop
) == NULL
)
5499 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5503 bb
->aux
= (void *) vop
;
5509 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5510 build_ssa_conflict_graph.
5511 The differences are:
5512 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5513 2) for large/huge _BitInt multiplication/division/modulo process def
5514 only after processing uses rather than before to make uses conflict
5516 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5517 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5518 the final statement. */
5521 build_bitint_stmt_ssa_conflicts (gimple
*stmt
, live_track
*live
,
5522 ssa_conflicts
*graph
, bitmap names
,
5523 void (*def
) (live_track
*, tree
,
5525 void (*use
) (live_track
*, tree
))
5527 bool muldiv_p
= false;
5528 tree lhs
= NULL_TREE
;
5529 if (is_gimple_assign (stmt
))
5531 lhs
= gimple_assign_lhs (stmt
);
5532 if (TREE_CODE (lhs
) == SSA_NAME
5533 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5534 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5536 if (!bitmap_bit_p (names
, SSA_NAME_VERSION (lhs
)))
5538 switch (gimple_assign_rhs_code (stmt
))
5541 case TRUNC_DIV_EXPR
:
5542 case TRUNC_MOD_EXPR
:
5554 /* For stmts with more than one SSA_NAME definition pretend all the
5555 SSA_NAME outputs but the first one are live at this point, so
5556 that conflicts are added in between all those even when they are
5557 actually not really live after the asm, because expansion might
5558 copy those into pseudos after the asm and if multiple outputs
5559 share the same partition, it might overwrite those that should
5561 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5565 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5571 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5572 def (live
, var
, graph
);
5575 auto_vec
<tree
, 16> worklist
;
5576 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_USE
)
5577 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5578 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5580 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5583 worklist
.safe_push (var
);
5586 while (worklist
.length () > 0)
5588 tree s
= worklist
.pop ();
5589 FOR_EACH_SSA_TREE_OPERAND (var
, SSA_NAME_DEF_STMT (s
), iter
, SSA_OP_USE
)
5590 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5591 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5593 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5596 worklist
.safe_push (var
);
5601 def (live
, lhs
, graph
);
5604 /* Entry point for _BitInt(N) operation lowering during optimization. */
5607 gimple_lower_bitint (void)
5609 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
5613 for (i
= 0; i
< num_ssa_names
; ++i
)
5615 tree s
= ssa_name (i
);
5618 tree type
= TREE_TYPE (s
);
5619 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5620 type
= TREE_TYPE (type
);
5621 if (TREE_CODE (type
) == BITINT_TYPE
5622 && bitint_precision_kind (type
) != bitint_prec_small
)
5624 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5625 into memory. Such functions could have no large/huge SSA_NAMEs. */
5626 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5628 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5629 if (is_gimple_assign (g
) && gimple_store_p (g
))
5631 tree t
= gimple_assign_rhs1 (g
);
5632 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5633 && (bitint_precision_kind (TREE_TYPE (t
))
5634 >= bitint_prec_large
))
5638 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5639 to floating point types need to be rewritten. */
5640 else if (SCALAR_FLOAT_TYPE_P (type
))
5642 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5643 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5645 tree t
= gimple_assign_rhs1 (g
);
5646 if (TREE_CODE (t
) == INTEGER_CST
5647 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5648 && (bitint_precision_kind (TREE_TYPE (t
))
5649 != bitint_prec_small
))
5654 if (i
== num_ssa_names
)
5658 auto_vec
<gimple
*, 4> switch_statements
;
5659 FOR_EACH_BB_FN (bb
, cfun
)
5661 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
5663 tree idx
= gimple_switch_index (swtch
);
5664 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
5665 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
5669 group_case_labels_stmt (swtch
);
5670 switch_statements
.safe_push (swtch
);
5674 if (!switch_statements
.is_empty ())
5676 bool expanded
= false;
5680 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
5682 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
5683 tree_switch_conversion::switch_decision_tree
dt (swtch
);
5684 expanded
|= dt
.analyze_switch_statement ();
5689 free_dominance_info (CDI_DOMINATORS
);
5690 free_dominance_info (CDI_POST_DOMINATORS
);
5691 mark_virtual_operands_for_renaming (cfun
);
5692 cleanup_tree_cfg (TODO_update_ssa
);
5696 struct bitint_large_huge large_huge
;
5697 bool has_large_huge_parm_result
= false;
5698 bool has_large_huge
= false;
5699 unsigned int ret
= 0, first_large_huge
= ~0U;
5700 bool edge_insertions
= false;
5701 for (; i
< num_ssa_names
; ++i
)
5703 tree s
= ssa_name (i
);
5706 tree type
= TREE_TYPE (s
);
5707 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5708 type
= TREE_TYPE (type
);
5709 if (TREE_CODE (type
) == BITINT_TYPE
5710 && bitint_precision_kind (type
) >= bitint_prec_large
)
5712 if (first_large_huge
== ~0U)
5713 first_large_huge
= i
;
5714 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
5715 gimple_stmt_iterator gsi
;
5717 /* Unoptimize certain constructs to simpler alternatives to
5718 avoid having to lower all of them. */
5719 if (is_gimple_assign (stmt
))
5720 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
5727 first_large_huge
= 0;
5728 location_t loc
= gimple_location (stmt
);
5729 gsi
= gsi_for_stmt (stmt
);
5730 tree rhs1
= gimple_assign_rhs1 (stmt
);
5731 tree type
= TREE_TYPE (rhs1
);
5732 tree n
= gimple_assign_rhs2 (stmt
), m
;
5733 tree p
= build_int_cst (TREE_TYPE (n
),
5734 TYPE_PRECISION (type
));
5735 if (TREE_CODE (n
) == INTEGER_CST
)
5736 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
5739 m
= make_ssa_name (TREE_TYPE (n
));
5740 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
5741 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5742 gimple_set_location (g
, loc
);
5744 if (!TYPE_UNSIGNED (type
))
5746 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
5748 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5749 rhs1
= fold_convert (utype
, rhs1
);
5752 tree t
= make_ssa_name (type
);
5753 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
5754 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5755 gimple_set_location (g
, loc
);
5758 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5759 rhs_code
== LROTATE_EXPR
5760 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
5762 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5763 gimple_set_location (g
, loc
);
5764 tree op1
= gimple_assign_lhs (g
);
5765 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5766 rhs_code
== LROTATE_EXPR
5767 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
5769 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5770 gimple_set_location (g
, loc
);
5771 tree op2
= gimple_assign_lhs (g
);
5772 tree lhs
= gimple_assign_lhs (stmt
);
5773 if (!TYPE_UNSIGNED (type
))
5775 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
5776 BIT_IOR_EXPR
, op1
, op2
);
5777 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5778 gimple_set_location (g
, loc
);
5779 g
= gimple_build_assign (lhs
, NOP_EXPR
,
5780 gimple_assign_lhs (g
));
5783 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
5784 gsi_replace (&gsi
, g
, true);
5785 gimple_set_location (g
, loc
);
5793 first_large_huge
= 0;
5794 gsi
= gsi_for_stmt (stmt
);
5795 tree lhs
= gimple_assign_lhs (stmt
);
5796 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
5797 location_t loc
= gimple_location (stmt
);
5798 if (rhs_code
== ABS_EXPR
)
5799 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5800 build_zero_cst (TREE_TYPE (rhs1
)),
5801 NULL_TREE
, NULL_TREE
);
5802 else if (rhs_code
== ABSU_EXPR
)
5804 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
5805 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
5806 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5807 gimple_set_location (g
, loc
);
5808 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5809 build_zero_cst (TREE_TYPE (rhs1
)),
5810 NULL_TREE
, NULL_TREE
);
5813 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
5815 rhs2
= gimple_assign_rhs2 (stmt
);
5816 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5817 std::swap (rhs1
, rhs2
);
5818 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
5819 NULL_TREE
, NULL_TREE
);
5820 if (rhs_code
== MAX_EXPR
)
5821 std::swap (rhs1
, rhs2
);
5825 g
= gimple_build_cond (NE_EXPR
, rhs1
,
5826 build_zero_cst (TREE_TYPE (rhs1
)),
5827 NULL_TREE
, NULL_TREE
);
5828 rhs1
= gimple_assign_rhs2 (stmt
);
5829 rhs2
= gimple_assign_rhs3 (stmt
);
5831 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5832 gimple_set_location (g
, loc
);
5833 edge e1
= split_block (gsi_bb (gsi
), g
);
5834 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
5835 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
5836 e3
->probability
= profile_probability::even ();
5837 e1
->flags
= EDGE_TRUE_VALUE
;
5838 e1
->probability
= e3
->probability
.invert ();
5839 if (dom_info_available_p (CDI_DOMINATORS
))
5840 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
5841 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
5843 gsi
= gsi_after_labels (e1
->dest
);
5844 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5846 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5847 gimple_set_location (g
, loc
);
5848 rhs2
= gimple_assign_lhs (g
);
5849 std::swap (rhs1
, rhs2
);
5851 gsi
= gsi_for_stmt (stmt
);
5852 gsi_remove (&gsi
, true);
5853 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
5854 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
5855 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
5859 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5860 into memory. Such functions could have no large/huge SSA_NAMEs. */
5861 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5863 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5864 if (is_gimple_assign (g
) && gimple_store_p (g
))
5866 tree t
= gimple_assign_rhs1 (g
);
5867 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5868 && (bitint_precision_kind (TREE_TYPE (t
))
5869 >= bitint_prec_large
))
5870 has_large_huge
= true;
5873 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5874 to floating point types need to be rewritten. */
5875 else if (SCALAR_FLOAT_TYPE_P (type
))
5877 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5878 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5880 tree t
= gimple_assign_rhs1 (g
);
5881 if (TREE_CODE (t
) == INTEGER_CST
5882 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5883 && (bitint_precision_kind (TREE_TYPE (t
))
5884 >= bitint_prec_large
))
5885 has_large_huge
= true;
5889 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
5891 tree s
= ssa_name (i
);
5894 tree type
= TREE_TYPE (s
);
5895 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5896 type
= TREE_TYPE (type
);
5897 if (TREE_CODE (type
) == BITINT_TYPE
5898 && bitint_precision_kind (type
) >= bitint_prec_large
)
5900 use_operand_p use_p
;
5902 has_large_huge
= true;
5904 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
5906 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5907 the same bb and could be handled in the same loop with the
5910 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5911 && single_imm_use (s
, &use_p
, &use_stmt
)
5912 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
5914 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
5916 if (mergeable_op (use_stmt
))
5918 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
5919 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
5921 if (gimple_assign_cast_p (use_stmt
))
5923 tree lhs
= gimple_assign_lhs (use_stmt
);
5924 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
5927 else if (gimple_store_p (use_stmt
)
5928 && is_gimple_assign (use_stmt
)
5929 && !gimple_has_volatile_ops (use_stmt
)
5930 && !stmt_ends_bb_p (use_stmt
))
5933 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5935 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5936 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
5937 && ((is_gimple_assign (use_stmt
)
5938 && (gimple_assign_rhs_code (use_stmt
)
5940 || gimple_code (use_stmt
) == GIMPLE_COND
)
5941 && (!gimple_store_p (use_stmt
)
5942 || (is_gimple_assign (use_stmt
)
5943 && !gimple_has_volatile_ops (use_stmt
)
5944 && !stmt_ends_bb_p (use_stmt
)))
5945 && (TREE_CODE (rhs1
) != SSA_NAME
5946 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
5948 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
5949 || (bitint_precision_kind (TREE_TYPE (rhs1
))
5950 < bitint_prec_large
)
5951 || (TYPE_PRECISION (TREE_TYPE (rhs1
))
5952 >= TYPE_PRECISION (TREE_TYPE (s
)))
5953 || mergeable_op (SSA_NAME_DEF_STMT (s
)))
5955 /* Prevent merging a widening non-mergeable cast
5956 on result of some narrower mergeable op
5957 together with later mergeable operations. E.g.
5958 result of _BitInt(223) addition shouldn't be
5959 sign-extended to _BitInt(513) and have another
5960 _BitInt(513) added to it, as handle_plus_minus
5961 with its PHI node handling inside of handle_cast
5962 will not work correctly. An exception is if
5963 use_stmt is a store, this is handled directly
5964 in lower_mergeable_stmt. */
5965 if (TREE_CODE (rhs1
) != SSA_NAME
5966 || !has_single_use (rhs1
)
5967 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
5968 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
5969 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
5970 || gimple_store_p (use_stmt
))
5972 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
5974 /* Another exception is if the widening cast is
5975 from mergeable same precision cast from something
5978 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
5979 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
5980 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
5981 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
5983 if (TREE_CODE (rhs2
) != SSA_NAME
5984 || !has_single_use (rhs2
)
5985 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
5986 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
5987 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
5993 if (is_gimple_assign (SSA_NAME_DEF_STMT (s
)))
5994 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s
)))
5998 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5999 rhs1
= TREE_OPERAND (rhs1
, 0);
6000 if (TREE_CODE (rhs1
) == SSA_NAME
)
6002 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
6003 if (optimizable_arith_overflow (g
))
6011 case TRUNC_DIV_EXPR
:
6012 case TRUNC_MOD_EXPR
:
6013 case FIX_TRUNC_EXPR
:
6015 if (gimple_store_p (use_stmt
)
6016 && is_gimple_assign (use_stmt
)
6017 && !gimple_has_volatile_ops (use_stmt
)
6018 && !stmt_ends_bb_p (use_stmt
))
6020 tree lhs
= gimple_assign_lhs (use_stmt
);
6021 /* As multiply/division passes address of the lhs
6022 to library function and that assumes it can extend
6023 it to whole number of limbs, avoid merging those
6024 with bit-field stores. Don't allow it for
6025 shifts etc. either, so that the bit-field store
6026 handling doesn't have to be done everywhere. */
6027 if (TREE_CODE (lhs
) == COMPONENT_REF
6028 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6038 /* Also ignore uninitialized uses. */
6039 if (SSA_NAME_IS_DEFAULT_DEF (s
)
6040 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
6043 if (!large_huge
.m_names
)
6044 large_huge
.m_names
= BITMAP_ALLOC (NULL
);
6045 bitmap_set_bit (large_huge
.m_names
, SSA_NAME_VERSION (s
));
6046 if (has_single_use (s
))
6048 if (!large_huge
.m_single_use_names
)
6049 large_huge
.m_single_use_names
= BITMAP_ALLOC (NULL
);
6050 bitmap_set_bit (large_huge
.m_single_use_names
,
6051 SSA_NAME_VERSION (s
));
6053 if (SSA_NAME_VAR (s
)
6054 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6055 && SSA_NAME_IS_DEFAULT_DEF (s
))
6056 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6057 has_large_huge_parm_result
= true;
6059 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6060 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s
))
6061 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s
))
6062 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6064 use_operand_p use_p
;
6065 imm_use_iterator iter
;
6066 bool optimizable_load
= true;
6067 FOR_EACH_IMM_USE_FAST (use_p
, iter
, s
)
6069 gimple
*use_stmt
= USE_STMT (use_p
);
6070 if (is_gimple_debug (use_stmt
))
6072 if (gimple_code (use_stmt
) == GIMPLE_PHI
6073 || is_gimple_call (use_stmt
))
6075 optimizable_load
= false;
6081 FOR_EACH_SSA_USE_OPERAND (use_p
, SSA_NAME_DEF_STMT (s
),
6084 tree s2
= USE_FROM_PTR (use_p
);
6085 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2
))
6087 optimizable_load
= false;
6092 if (optimizable_load
&& !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6094 if (!large_huge
.m_loads
)
6095 large_huge
.m_loads
= BITMAP_ALLOC (NULL
);
6096 bitmap_set_bit (large_huge
.m_loads
, SSA_NAME_VERSION (s
));
6100 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6101 into memory. Such functions could have no large/huge SSA_NAMEs. */
6102 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6104 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6105 if (is_gimple_assign (g
) && gimple_store_p (g
))
6107 tree t
= gimple_assign_rhs1 (g
);
6108 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6109 && bitint_precision_kind (TREE_TYPE (t
)) >= bitint_prec_large
)
6110 has_large_huge
= true;
6115 if (large_huge
.m_names
|| has_large_huge
)
6117 ret
= TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
6118 calculate_dominance_info (CDI_DOMINATORS
);
6120 enable_ranger (cfun
);
6121 if (large_huge
.m_loads
)
6123 basic_block entry
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
6125 bitint_dom_walker (large_huge
.m_names
,
6126 large_huge
.m_loads
).walk (entry
);
6127 bitmap_and_compl_into (large_huge
.m_names
, large_huge
.m_loads
);
6128 clear_aux_for_blocks ();
6129 BITMAP_FREE (large_huge
.m_loads
);
6131 large_huge
.m_limb_type
= build_nonstandard_integer_type (limb_prec
, 1);
6132 large_huge
.m_limb_size
6133 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge
.m_limb_type
));
6135 if (large_huge
.m_names
)
6138 = init_var_map (num_ssa_names
, NULL
, large_huge
.m_names
);
6139 coalesce_ssa_name (large_huge
.m_map
);
6140 partition_view_normal (large_huge
.m_map
);
6141 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6143 fprintf (dump_file
, "After Coalescing:\n");
6144 dump_var_map (dump_file
, large_huge
.m_map
);
6147 = XCNEWVEC (tree
, num_var_partitions (large_huge
.m_map
));
6149 if (has_large_huge_parm_result
)
6150 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6152 tree s
= ssa_name (i
);
6153 if (SSA_NAME_VAR (s
)
6154 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6155 && SSA_NAME_IS_DEFAULT_DEF (s
))
6156 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6158 int p
= var_to_partition (large_huge
.m_map
, s
);
6159 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6161 large_huge
.m_vars
[p
] = SSA_NAME_VAR (s
);
6162 mark_addressable (SSA_NAME_VAR (s
));
6166 tree atype
= NULL_TREE
;
6167 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6169 tree s
= ssa_name (i
);
6170 int p
= var_to_partition (large_huge
.m_map
, s
);
6171 if (large_huge
.m_vars
[p
] != NULL_TREE
)
6173 if (atype
== NULL_TREE
6174 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6175 TYPE_SIZE (TREE_TYPE (s
))))
6177 unsigned HOST_WIDE_INT nelts
6178 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s
))) / limb_prec
;
6179 atype
= build_array_type_nelts (large_huge
.m_limb_type
, nelts
);
6181 large_huge
.m_vars
[p
] = create_tmp_var (atype
, "bitint");
6182 mark_addressable (large_huge
.m_vars
[p
]);
6186 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
6188 gimple_stmt_iterator prev
;
6189 for (gimple_stmt_iterator gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);
6195 gimple
*stmt
= gsi_stmt (gsi
);
6196 if (is_gimple_debug (stmt
))
6198 bitint_prec_kind kind
= bitint_prec_small
;
6200 FOR_EACH_SSA_TREE_OPERAND (t
, stmt
, iter
, SSA_OP_ALL_OPERANDS
)
6201 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6203 bitint_prec_kind this_kind
6204 = bitint_precision_kind (TREE_TYPE (t
));
6205 if (this_kind
> kind
)
6208 if (is_gimple_assign (stmt
) && gimple_store_p (stmt
))
6210 t
= gimple_assign_rhs1 (stmt
);
6211 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6213 bitint_prec_kind this_kind
6214 = bitint_precision_kind (TREE_TYPE (t
));
6215 if (this_kind
> kind
)
6219 if (is_gimple_assign (stmt
)
6220 && gimple_assign_rhs_code (stmt
) == FLOAT_EXPR
)
6222 t
= gimple_assign_rhs1 (stmt
);
6223 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6224 && TREE_CODE (t
) == INTEGER_CST
)
6226 bitint_prec_kind this_kind
6227 = bitint_precision_kind (TREE_TYPE (t
));
6228 if (this_kind
> kind
)
6232 if (is_gimple_call (stmt
))
6234 t
= gimple_call_lhs (stmt
);
6236 && TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
6237 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == BITINT_TYPE
)
6239 bitint_prec_kind this_kind
6240 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t
)));
6241 if (this_kind
> kind
)
6245 if (kind
== bitint_prec_small
)
6247 switch (gimple_code (stmt
))
6250 /* For now. We'll need to handle some internal functions and
6251 perhaps some builtins. */
6252 if (kind
== bitint_prec_middle
)
6256 if (kind
== bitint_prec_middle
)
6262 if (gimple_clobber_p (stmt
))
6264 if (kind
>= bitint_prec_large
)
6266 if (gimple_assign_single_p (stmt
))
6267 /* No need to lower copies, loads or stores. */
6269 if (gimple_assign_cast_p (stmt
))
6271 tree lhs
= gimple_assign_lhs (stmt
);
6272 tree rhs
= gimple_assign_rhs1 (stmt
);
6273 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6274 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6275 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6276 == TYPE_PRECISION (TREE_TYPE (rhs
))))
6277 /* No need to lower casts to same precision. */
6285 if (kind
== bitint_prec_middle
)
6287 tree type
= NULL_TREE
;
6288 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6289 with the same precision and back. */
6290 if (tree lhs
= gimple_get_lhs (stmt
))
6291 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
6292 && (bitint_precision_kind (TREE_TYPE (lhs
))
6293 == bitint_prec_middle
))
6295 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
6296 int uns
= TYPE_UNSIGNED (TREE_TYPE (lhs
));
6297 type
= build_nonstandard_integer_type (prec
, uns
);
6298 tree lhs2
= make_ssa_name (type
);
6299 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, lhs2
);
6300 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
6301 gimple_set_lhs (stmt
, lhs2
);
6303 unsigned int nops
= gimple_num_ops (stmt
);
6304 for (unsigned int i
= 0; i
< nops
; ++i
)
6305 if (tree op
= gimple_op (stmt
, i
))
6307 tree nop
= maybe_cast_middle_bitint (&gsi
, op
, type
);
6309 gimple_set_op (stmt
, i
, nop
);
6310 else if (COMPARISON_CLASS_P (op
))
6312 TREE_OPERAND (op
, 0)
6313 = maybe_cast_middle_bitint (&gsi
,
6314 TREE_OPERAND (op
, 0),
6316 TREE_OPERAND (op
, 1)
6317 = maybe_cast_middle_bitint (&gsi
,
6318 TREE_OPERAND (op
, 1),
6321 else if (TREE_CODE (op
) == CASE_LABEL_EXPR
)
6324 = maybe_cast_middle_bitint (&gsi
, CASE_LOW (op
),
6327 = maybe_cast_middle_bitint (&gsi
, CASE_HIGH (op
),
6335 if (tree lhs
= gimple_get_lhs (stmt
))
6336 if (TREE_CODE (lhs
) == SSA_NAME
)
6338 tree type
= TREE_TYPE (lhs
);
6339 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6340 type
= TREE_TYPE (type
);
6341 if (TREE_CODE (type
) == BITINT_TYPE
6342 && bitint_precision_kind (type
) >= bitint_prec_large
6343 && (large_huge
.m_names
== NULL
6344 || !bitmap_bit_p (large_huge
.m_names
,
6345 SSA_NAME_VERSION (lhs
))))
6349 large_huge
.lower_stmt (stmt
);
6352 tree atype
= NULL_TREE
;
6353 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6356 gphi
*phi
= gsi
.phi ();
6357 tree lhs
= gimple_phi_result (phi
);
6358 if (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
6359 || bitint_precision_kind (TREE_TYPE (lhs
)) < bitint_prec_large
)
6361 int p1
= var_to_partition (large_huge
.m_map
, lhs
);
6362 gcc_assert (large_huge
.m_vars
[p1
] != NULL_TREE
);
6363 tree v1
= large_huge
.m_vars
[p1
];
6364 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
6366 tree arg
= gimple_phi_arg_def (phi
, i
);
6367 edge e
= gimple_phi_arg_edge (phi
, i
);
6369 switch (TREE_CODE (arg
))
6372 if (integer_zerop (arg
) && VAR_P (v1
))
6374 tree zero
= build_zero_cst (TREE_TYPE (v1
));
6375 g
= gimple_build_assign (v1
, zero
);
6376 gsi_insert_on_edge (e
, g
);
6377 edge_insertions
= true;
6381 unsigned int min_prec
, prec
, rem
;
6383 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
6384 rem
= prec
% (2 * limb_prec
);
6385 min_prec
= bitint_min_cst_precision (arg
, ext
);
6386 if (min_prec
> prec
- rem
- 2 * limb_prec
6387 && min_prec
> (unsigned) limb_prec
)
6388 /* Constant which has enough significant bits that it
6389 isn't worth trying to save .rodata space by extending
6390 from smaller number. */
6393 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
6396 else if (min_prec
== prec
)
6397 c
= tree_output_constant_def (arg
);
6398 else if (min_prec
== (unsigned) limb_prec
)
6399 c
= fold_convert (large_huge
.m_limb_type
, arg
);
6402 tree ctype
= build_bitint_type (min_prec
, 1);
6403 c
= tree_output_constant_def (fold_convert (ctype
, arg
));
6407 if (VAR_P (v1
) && min_prec
== prec
)
6409 tree v2
= build1 (VIEW_CONVERT_EXPR
,
6411 g
= gimple_build_assign (v1
, v2
);
6412 gsi_insert_on_edge (e
, g
);
6413 edge_insertions
= true;
6416 if (TREE_CODE (TREE_TYPE (c
)) == INTEGER_TYPE
)
6417 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6422 unsigned HOST_WIDE_INT nelts
6423 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c
)))
6426 = build_array_type_nelts (large_huge
.m_limb_type
,
6428 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6430 build1 (VIEW_CONVERT_EXPR
,
6433 gsi_insert_on_edge (e
, g
);
6437 unsigned HOST_WIDE_INT nelts
6438 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1
)))
6439 - min_prec
) / limb_prec
;
6441 = build_array_type_nelts (large_huge
.m_limb_type
,
6443 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6444 tree off
= fold_convert (ptype
,
6445 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6446 tree vd
= build2 (MEM_REF
, vtype
,
6447 build_fold_addr_expr (v1
), off
);
6448 g
= gimple_build_assign (vd
, build_zero_cst (vtype
));
6455 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6457 = fold_convert (ptype
,
6458 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6459 vd
= build2 (MEM_REF
, large_huge
.m_limb_type
,
6460 build_fold_addr_expr (v1
), off
);
6462 vd
= build_fold_addr_expr (vd
);
6463 unsigned HOST_WIDE_INT nbytes
6464 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1
)));
6467 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6468 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
6469 g
= gimple_build_call (fn
, 3, vd
,
6470 integer_minus_one_node
,
6471 build_int_cst (sizetype
,
6474 gsi_insert_on_edge (e
, g
);
6475 edge_insertions
= true;
6480 if (gimple_code (SSA_NAME_DEF_STMT (arg
)) == GIMPLE_NOP
)
6482 if (large_huge
.m_names
== NULL
6483 || !bitmap_bit_p (large_huge
.m_names
,
6484 SSA_NAME_VERSION (arg
)))
6487 int p2
= var_to_partition (large_huge
.m_map
, arg
);
6490 gcc_assert (large_huge
.m_vars
[p2
] != NULL_TREE
);
6491 tree v2
= large_huge
.m_vars
[p2
];
6492 if (VAR_P (v1
) && VAR_P (v2
))
6493 g
= gimple_build_assign (v1
, v2
);
6494 else if (VAR_P (v1
))
6495 g
= gimple_build_assign (v1
, build1 (VIEW_CONVERT_EXPR
,
6496 TREE_TYPE (v1
), v2
));
6497 else if (VAR_P (v2
))
6498 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6499 TREE_TYPE (v2
), v1
), v2
);
6502 if (atype
== NULL_TREE
6503 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6504 TYPE_SIZE (TREE_TYPE (lhs
))))
6506 unsigned HOST_WIDE_INT nelts
6507 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))
6510 = build_array_type_nelts (large_huge
.m_limb_type
,
6513 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6515 build1 (VIEW_CONVERT_EXPR
,
6518 gsi_insert_on_edge (e
, g
);
6519 edge_insertions
= true;
6526 if (large_huge
.m_names
|| has_large_huge
)
6529 for (i
= 0; i
< num_ssa_names
; ++i
)
6531 tree s
= ssa_name (i
);
6534 tree type
= TREE_TYPE (s
);
6535 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6536 type
= TREE_TYPE (type
);
6537 if (TREE_CODE (type
) == BITINT_TYPE
6538 && bitint_precision_kind (type
) >= bitint_prec_large
)
6540 if (large_huge
.m_preserved
6541 && bitmap_bit_p (large_huge
.m_preserved
,
6542 SSA_NAME_VERSION (s
)))
6544 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6545 if (gimple_code (g
) == GIMPLE_NOP
)
6547 if (SSA_NAME_VAR (s
))
6548 set_ssa_default_def (cfun
, SSA_NAME_VAR (s
), NULL_TREE
);
6549 release_ssa_name (s
);
6552 if (gimple_code (g
) != GIMPLE_ASM
)
6554 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
6555 bool save_vta
= flag_var_tracking_assignments
;
6556 flag_var_tracking_assignments
= false;
6557 gsi_remove (&gsi
, true);
6558 flag_var_tracking_assignments
= save_vta
;
6561 nop
= gimple_build_nop ();
6562 SSA_NAME_DEF_STMT (s
) = nop
;
6563 release_ssa_name (s
);
6567 disable_ranger (cfun
);
6570 if (edge_insertions
)
6571 gsi_commit_edge_inserts ();
6578 const pass_data pass_data_lower_bitint
=
6580 GIMPLE_PASS
, /* type */
6581 "bitintlower", /* name */
6582 OPTGROUP_NONE
, /* optinfo_flags */
6583 TV_NONE
, /* tv_id */
6584 PROP_ssa
, /* properties_required */
6585 PROP_gimple_lbitint
, /* properties_provided */
6586 0, /* properties_destroyed */
6587 0, /* todo_flags_start */
6588 0, /* todo_flags_finish */
6591 class pass_lower_bitint
: public gimple_opt_pass
6594 pass_lower_bitint (gcc::context
*ctxt
)
6595 : gimple_opt_pass (pass_data_lower_bitint
, ctxt
)
6598 /* opt_pass methods: */
6599 opt_pass
* clone () final override
{ return new pass_lower_bitint (m_ctxt
); }
6600 unsigned int execute (function
*) final override
6602 return gimple_lower_bitint ();
6605 }; // class pass_lower_bitint
6610 make_pass_lower_bitint (gcc::context
*ctxt
)
6612 return new pass_lower_bitint (ctxt
);
6618 const pass_data pass_data_lower_bitint_O0
=
6620 GIMPLE_PASS
, /* type */
6621 "bitintlower0", /* name */
6622 OPTGROUP_NONE
, /* optinfo_flags */
6623 TV_NONE
, /* tv_id */
6624 PROP_cfg
, /* properties_required */
6625 PROP_gimple_lbitint
, /* properties_provided */
6626 0, /* properties_destroyed */
6627 0, /* todo_flags_start */
6628 0, /* todo_flags_finish */
6631 class pass_lower_bitint_O0
: public gimple_opt_pass
6634 pass_lower_bitint_O0 (gcc::context
*ctxt
)
6635 : gimple_opt_pass (pass_data_lower_bitint_O0
, ctxt
)
6638 /* opt_pass methods: */
6639 bool gate (function
*fun
) final override
6641 /* With errors, normal optimization passes are not run. If we don't
6642 lower bitint operations at all, rtl expansion will abort. */
6643 return !(fun
->curr_properties
& PROP_gimple_lbitint
);
6646 unsigned int execute (function
*) final override
6648 return gimple_lower_bitint ();
6651 }; // class pass_lower_bitint_O0
6656 make_pass_lower_bitint_O0 (gcc::context
*ctxt
)
6658 return new pass_lower_bitint_O0 (ctxt
);