1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
31 #include "fold-const.h"
33 #include "gimple-iterator.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
57 #include "stor-layout.h"
58 #include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  Zero means "not yet
   computed"; once a query of the target hook establishes a boundary
   it is remembered here so subsequent queries are O(1).  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;

/* Number of bits in one limb, cached from the target's limb mode.  */

static int limb_prec;
81 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
83 static bitint_prec_kind
84 bitint_precision_kind (int prec
)
86 if (prec
<= small_max_prec
)
87 return bitint_prec_small
;
88 if (huge_min_prec
&& prec
>= huge_min_prec
)
89 return bitint_prec_huge
;
90 if (large_min_prec
&& prec
>= large_min_prec
)
91 return bitint_prec_large
;
92 if (mid_min_prec
&& prec
>= mid_min_prec
)
93 return bitint_prec_middle
;
95 struct bitint_info info
;
96 bool ok
= targetm
.c
.bitint_type_info (prec
, &info
);
98 scalar_int_mode limb_mode
= as_a
<scalar_int_mode
> (info
.limb_mode
);
99 if (prec
<= GET_MODE_PRECISION (limb_mode
))
101 small_max_prec
= prec
;
102 return bitint_prec_small
;
105 && GET_MODE_PRECISION (limb_mode
) < MAX_FIXED_MODE_SIZE
)
106 large_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
108 limb_prec
= GET_MODE_PRECISION (limb_mode
);
111 if (4 * limb_prec
>= MAX_FIXED_MODE_SIZE
)
112 huge_min_prec
= 4 * limb_prec
;
114 huge_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
116 if (prec
<= MAX_FIXED_MODE_SIZE
)
118 if (!mid_min_prec
|| prec
< mid_min_prec
)
120 return bitint_prec_middle
;
122 if (large_min_prec
&& prec
<= large_min_prec
)
123 return bitint_prec_large
;
124 return bitint_prec_huge
;
127 /* Same for a TYPE. */
129 static bitint_prec_kind
130 bitint_precision_kind (tree type
)
132 return bitint_precision_kind (TYPE_PRECISION (type
));
135 /* Return minimum precision needed to describe INTEGER_CST
136 CST. All bits above that precision up to precision of
137 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
138 if EXT is set to -1. */
141 bitint_min_cst_precision (tree cst
, int &ext
)
143 ext
= tree_int_cst_sgn (cst
) < 0 ? -1 : 0;
144 wide_int w
= wi::to_wide (cst
);
145 unsigned min_prec
= wi::min_precision (w
, TYPE_SIGN (TREE_TYPE (cst
)));
146 /* For signed values, we don't need to count the sign bit,
147 we'll use constant 0 or -1 for the upper bits. */
148 if (!TYPE_UNSIGNED (TREE_TYPE (cst
)))
152 /* For unsigned values, also try signed min_precision
153 in case the constant has lots of most significant bits set. */
154 unsigned min_prec2
= wi::min_precision (w
, SIGNED
) - 1;
155 if (min_prec2
< min_prec
)
166 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
167 cached in TYPE and return it. */
170 maybe_cast_middle_bitint (gimple_stmt_iterator
*gsi
, tree op
, tree
&type
)
173 || TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
174 || bitint_precision_kind (TREE_TYPE (op
)) != bitint_prec_middle
)
177 int prec
= TYPE_PRECISION (TREE_TYPE (op
));
178 int uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
179 if (type
== NULL_TREE
180 || TYPE_PRECISION (type
) != prec
181 || TYPE_UNSIGNED (type
) != uns
)
182 type
= build_nonstandard_integer_type (prec
, uns
);
184 if (TREE_CODE (op
) != SSA_NAME
)
186 tree nop
= fold_convert (type
, op
);
187 if (is_gimple_val (nop
))
191 tree nop
= make_ssa_name (type
);
192 gimple
*g
= gimple_build_assign (nop
, NOP_EXPR
, op
);
193 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
197 /* Return true if STMT can be handled in a loop from least to most
198 significant limb together with its dependencies. */
201 mergeable_op (gimple
*stmt
)
203 if (!is_gimple_assign (stmt
))
205 switch (gimple_assign_rhs_code (stmt
))
220 tree cnt
= gimple_assign_rhs2 (stmt
);
221 if (tree_fits_uhwi_p (cnt
)
222 && tree_to_uhwi (cnt
) < (unsigned HOST_WIDE_INT
) limb_prec
)
227 case VIEW_CONVERT_EXPR
:
229 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
230 tree rhs_type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
231 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
232 && TREE_CODE (lhs_type
) == BITINT_TYPE
233 && TREE_CODE (rhs_type
) == BITINT_TYPE
234 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
235 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
236 && (CEIL (TYPE_PRECISION (lhs_type
), limb_prec
)
237 == CEIL (TYPE_PRECISION (rhs_type
), limb_prec
)))
239 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
))
241 if ((unsigned) TYPE_PRECISION (lhs_type
) % (2 * limb_prec
) != 0)
243 if (bitint_precision_kind (lhs_type
) == bitint_prec_large
)
254 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
255 _Complex large/huge _BitInt lhs which has at most two immediate uses,
256 at most one use in REALPART_EXPR stmt in the same bb and exactly one
257 IMAGPART_EXPR use in the same bb with a single use which casts it to
258 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
259 return 2. Such cases (most common uses of those builtins) can be
260 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
261 of REALPART_EXPR as not needed to be backed up by a stack variable.
262 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
265 optimizable_arith_overflow (gimple
*stmt
)
267 bool is_ubsan
= false;
268 if (!is_gimple_call (stmt
) || !gimple_call_internal_p (stmt
))
270 switch (gimple_call_internal_fn (stmt
))
272 case IFN_ADD_OVERFLOW
:
273 case IFN_SUB_OVERFLOW
:
274 case IFN_MUL_OVERFLOW
:
276 case IFN_UBSAN_CHECK_ADD
:
277 case IFN_UBSAN_CHECK_SUB
:
278 case IFN_UBSAN_CHECK_MUL
:
284 tree lhs
= gimple_call_lhs (stmt
);
287 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
289 tree type
= is_ubsan
? TREE_TYPE (lhs
) : TREE_TYPE (TREE_TYPE (lhs
));
290 if (TREE_CODE (type
) != BITINT_TYPE
291 || bitint_precision_kind (type
) < bitint_prec_large
)
298 if (!single_imm_use (lhs
, &use_p
, &use_stmt
)
299 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
300 || !gimple_store_p (use_stmt
)
301 || !is_gimple_assign (use_stmt
)
302 || gimple_has_volatile_ops (use_stmt
)
303 || stmt_ends_bb_p (use_stmt
))
311 gimple
*realpart
= NULL
, *cast
= NULL
;
312 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
314 gimple
*g
= USE_STMT (use_p
);
315 if (is_gimple_debug (g
))
317 if (!is_gimple_assign (g
) || gimple_bb (g
) != gimple_bb (stmt
))
319 if (gimple_assign_rhs_code (g
) == REALPART_EXPR
)
326 else if (gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
332 use_operand_p use2_p
;
334 tree lhs2
= gimple_assign_lhs (g
);
335 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2
))
337 if (!single_imm_use (lhs2
, &use2_p
, &use_stmt
)
338 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
339 || !gimple_assign_cast_p (use_stmt
))
342 lhs2
= gimple_assign_lhs (use_stmt
);
343 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2
))
344 || TREE_CODE (TREE_TYPE (lhs2
)) == BITINT_TYPE
)
355 /* Punt if the cast stmt appears before realpart stmt, because
356 if both appear, the lowering wants to emit all the code
357 at the location of realpart stmt. */
358 gimple_stmt_iterator gsi
= gsi_for_stmt (realpart
);
359 unsigned int cnt
= 0;
362 gsi_prev_nondebug (&gsi
);
363 if (gsi_end_p (gsi
) || gsi_stmt (gsi
) == cast
)
365 if (gsi_stmt (gsi
) == stmt
)
367 /* If realpart is too far from stmt, punt as well.
368 Usually it will appear right after it. */
377 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
378 comparing large/huge _BitInt types, return the comparison code and if
379 non-NULL fill in the comparison operands to *POP1 and *POP2. */
382 comparison_op (gimple
*stmt
, tree
*pop1
, tree
*pop2
)
384 tree op1
= NULL_TREE
, op2
= NULL_TREE
;
385 tree_code code
= ERROR_MARK
;
386 if (gimple_code (stmt
) == GIMPLE_COND
)
388 code
= gimple_cond_code (stmt
);
389 op1
= gimple_cond_lhs (stmt
);
390 op2
= gimple_cond_rhs (stmt
);
392 else if (is_gimple_assign (stmt
))
394 code
= gimple_assign_rhs_code (stmt
);
395 op1
= gimple_assign_rhs1 (stmt
);
396 if (TREE_CODE_CLASS (code
) == tcc_comparison
397 || TREE_CODE_CLASS (code
) == tcc_binary
)
398 op2
= gimple_assign_rhs2 (stmt
);
400 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
402 tree type
= TREE_TYPE (op1
);
403 if (TREE_CODE (type
) != BITINT_TYPE
404 || bitint_precision_kind (type
) < bitint_prec_large
)
414 /* Class used during large/huge _BitInt lowering containing all the
415 state for the methods. */
417 struct bitint_large_huge
420 : m_names (NULL
), m_loads (NULL
), m_preserved (NULL
),
421 m_single_use_names (NULL
), m_map (NULL
), m_vars (NULL
),
422 m_limb_type (NULL_TREE
), m_data (vNULL
) {}
424 ~bitint_large_huge ();
426 void insert_before (gimple
*);
427 tree
limb_access_type (tree
, tree
);
428 tree
limb_access (tree
, tree
, tree
, bool);
429 void if_then (gimple
*, profile_probability
, edge
&, edge
&);
430 void if_then_else (gimple
*, profile_probability
, edge
&, edge
&);
431 void if_then_if_then_else (gimple
*g
, gimple
*,
432 profile_probability
, profile_probability
,
433 edge
&, edge
&, edge
&);
434 tree
handle_operand (tree
, tree
);
435 tree
prepare_data_in_out (tree
, tree
, tree
*, tree
= NULL_TREE
);
436 tree
add_cast (tree
, tree
);
437 tree
handle_plus_minus (tree_code
, tree
, tree
, tree
);
438 tree
handle_lshift (tree
, tree
, tree
);
439 tree
handle_cast (tree
, tree
, tree
);
440 tree
handle_bit_field_ref (tree
, tree
);
441 tree
handle_load (gimple
*, tree
);
442 tree
handle_stmt (gimple
*, tree
);
443 tree
handle_operand_addr (tree
, gimple
*, int *, int *);
444 tree
create_loop (tree
, tree
*);
445 tree
lower_mergeable_stmt (gimple
*, tree_code
&, tree
, tree
);
446 tree
lower_comparison_stmt (gimple
*, tree_code
&, tree
, tree
);
447 void lower_shift_stmt (tree
, gimple
*);
448 void lower_muldiv_stmt (tree
, gimple
*);
449 void lower_float_conv_stmt (tree
, gimple
*);
450 tree
arith_overflow_extract_bits (unsigned int, unsigned int, tree
,
452 void finish_arith_overflow (tree
, tree
, tree
, tree
, tree
, tree
, gimple
*,
454 void lower_addsub_overflow (tree
, gimple
*);
455 void lower_mul_overflow (tree
, gimple
*);
456 void lower_cplxpart_stmt (tree
, gimple
*);
457 void lower_complexexpr_stmt (gimple
*);
458 void lower_bit_query (gimple
*);
459 void lower_call (tree
, gimple
*);
460 void lower_asm (gimple
*);
461 void lower_stmt (gimple
*);
463 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
464 merged with their uses. */
466 /* Subset of those for lhs of load statements. These will be
467 cleared in m_names if the loads will be mergeable with all
470 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
471 to later passes (arguments or return values of calls). */
473 /* Subset of m_names which have a single use. As the lowering
474 can replace various original statements with their lowered
475 form even before it is done iterating over all basic blocks,
476 testing has_single_use for the purpose of emitting clobbers
477 doesn't work properly. */
478 bitmap m_single_use_names
;
479 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
482 /* Mapping of the partitions to corresponding decls. */
484 /* Unsigned integer type with limb precision. */
486 /* Its TYPE_SIZE_UNIT. */
487 unsigned HOST_WIDE_INT m_limb_size
;
488 /* Location of a gimple stmt which is being currently lowered. */
490 /* Current stmt iterator where code is being lowered currently. */
491 gimple_stmt_iterator m_gsi
;
492 /* Statement after which any clobbers should be added if non-NULL. */
493 gimple
*m_after_stmt
;
494 /* Set when creating loops to the loop header bb and its preheader. */
495 basic_block m_bb
, m_preheader_bb
;
496 /* Stmt iterator after which initialization statements should be emitted. */
497 gimple_stmt_iterator m_init_gsi
;
498 /* Decl into which a mergeable statement stores result. */
500 /* handle_operand/handle_stmt can be invoked in various ways.
502 lower_mergeable_stmt for large _BitInt calls those with constant
503 idx only, expanding to straight line code, for huge _BitInt
504 emits a loop from least significant limb upwards, where each loop
505 iteration handles 2 limbs, plus there can be up to one full limb
506 and one partial limb processed after the loop, where handle_operand
507 and/or handle_stmt are called with constant idx. m_upwards_2limb
508 is set for this case, false otherwise. m_upwards is true if it
509 is either large or huge _BitInt handled by lower_mergeable_stmt,
510 i.e. indexes always increase.
512 Another way is used by lower_comparison_stmt, which walks limbs
513 from most significant to least significant, partial limb if any
514 processed first with constant idx and then loop processing a single
515 limb per iteration with non-constant idx.
517 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
518 destination limbs are processed from most significant to least
519 significant or for RSHIFT_EXPR the other way around, in loops or
520 straight line code, but idx usually is non-constant (so from
521 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
522 handling there can access even partial limbs using non-constant
523 idx (then m_var_msb should be true, for all the other cases
524 including lower_mergeable_stmt/lower_comparison_stmt that is
525 not the case and so m_var_msb should be false.
527 m_first should be set the first time handle_operand/handle_stmt
528 is called and clear when it is called for some other limb with
529 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
530 or statement (e.g. +/-/<< with < limb_prec constant) needs some
531 state between the different calls, when m_first is true it should
532 push some trees to m_data vector and also make sure m_data_cnt is
533 incremented by how many trees were pushed, and when m_first is
534 false, it can use the m_data[m_data_cnt] etc. data or update them,
535 just needs to bump m_data_cnt by the same amount as when it was
536 called with m_first set. The toplevel calls to
537 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
538 m_data vector when setting m_first to true.
540 m_cast_conditional and m_bitfld_load are used when handling a
541 bit-field load inside of a widening cast. handle_cast sometimes
542 needs to do runtime comparisons and handle_operand only conditionally
543 or even in two separate conditional blocks for one idx (once with
544 constant index after comparing the runtime one for equality with the
545 constant). In these cases, m_cast_conditional is set to true and
546 the bit-field load then communicates its m_data_cnt to handle_cast
547 using m_bitfld_load. */
550 unsigned m_upwards_2limb
;
552 bool m_cast_conditional
;
553 unsigned m_bitfld_load
;
555 unsigned int m_data_cnt
;
558 bitint_large_huge::~bitint_large_huge ()
560 BITMAP_FREE (m_names
);
561 BITMAP_FREE (m_loads
);
562 BITMAP_FREE (m_preserved
);
563 BITMAP_FREE (m_single_use_names
);
565 delete_var_map (m_map
);
570 /* Insert gimple statement G before current location
571 and set its gimple_location. */
574 bitint_large_huge::insert_before (gimple
*g
)
576 gimple_set_location (g
, m_loc
);
577 gsi_insert_before (&m_gsi
, g
, GSI_SAME_STMT
);
580 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
581 This is normally m_limb_type, except for a partial most
582 significant limb if any. */
585 bitint_large_huge::limb_access_type (tree type
, tree idx
)
587 if (type
== NULL_TREE
)
589 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
590 unsigned int prec
= TYPE_PRECISION (type
);
591 gcc_assert (i
* limb_prec
< prec
);
592 if ((i
+ 1) * limb_prec
<= prec
)
595 return build_nonstandard_integer_type (prec
% limb_prec
,
596 TYPE_UNSIGNED (type
));
599 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
600 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
603 bitint_large_huge::limb_access (tree type
, tree var
, tree idx
, bool write_p
)
605 tree atype
= (tree_fits_uhwi_p (idx
)
606 ? limb_access_type (type
, idx
) : m_limb_type
);
607 tree ltype
= m_limb_type
;
608 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (var
));
609 if (as
!= TYPE_ADDR_SPACE (ltype
))
610 ltype
= build_qualified_type (ltype
, TYPE_QUALS (ltype
)
611 | ENCODE_QUAL_ADDR_SPACE (as
));
613 if (DECL_P (var
) && tree_fits_uhwi_p (idx
))
615 tree ptype
= build_pointer_type (strip_array_types (TREE_TYPE (var
)));
616 unsigned HOST_WIDE_INT off
= tree_to_uhwi (idx
) * m_limb_size
;
617 ret
= build2 (MEM_REF
, ltype
,
618 build_fold_addr_expr (var
),
619 build_int_cst (ptype
, off
));
620 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
621 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
623 else if (TREE_CODE (var
) == MEM_REF
&& tree_fits_uhwi_p (idx
))
626 = build2 (MEM_REF
, ltype
, unshare_expr (TREE_OPERAND (var
, 0)),
627 size_binop (PLUS_EXPR
, TREE_OPERAND (var
, 1),
628 build_int_cst (TREE_TYPE (TREE_OPERAND (var
, 1)),
631 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
632 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
633 TREE_THIS_NOTRAP (ret
) = TREE_THIS_NOTRAP (var
);
637 var
= unshare_expr (var
);
638 if (TREE_CODE (TREE_TYPE (var
)) != ARRAY_TYPE
639 || !useless_type_conversion_p (m_limb_type
,
640 TREE_TYPE (TREE_TYPE (var
))))
642 unsigned HOST_WIDE_INT nelts
643 = CEIL (tree_to_uhwi (TYPE_SIZE (type
)), limb_prec
);
644 tree atype
= build_array_type_nelts (ltype
, nelts
);
645 var
= build1 (VIEW_CONVERT_EXPR
, atype
, var
);
647 ret
= build4 (ARRAY_REF
, ltype
, var
, idx
, NULL_TREE
, NULL_TREE
);
649 if (!write_p
&& !useless_type_conversion_p (atype
, m_limb_type
))
651 gimple
*g
= gimple_build_assign (make_ssa_name (m_limb_type
), ret
);
653 ret
= gimple_assign_lhs (g
);
654 ret
= build1 (NOP_EXPR
, atype
, ret
);
659 /* Emit a half diamond,
668 or if (COND) new_bb1;
669 PROB is the probability that the condition is true.
670 Updates m_gsi to start of new_bb1.
671 Sets EDGE_TRUE to edge from new_bb1 to successor and
672 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
675 bitint_large_huge::if_then (gimple
*cond
, profile_probability prob
,
676 edge
&edge_true
, edge
&edge_false
)
678 insert_before (cond
);
679 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
680 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
681 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
682 e1
->flags
= EDGE_TRUE_VALUE
;
683 e1
->probability
= prob
;
684 e3
->probability
= prob
.invert ();
685 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
688 m_gsi
= gsi_after_labels (e1
->dest
);
691 /* Emit a full diamond,
700 or if (COND) new_bb2; else new_bb1;
701 PROB is the probability that the condition is true.
702 Updates m_gsi to start of new_bb2.
703 Sets EDGE_TRUE to edge from new_bb1 to successor and
704 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
707 bitint_large_huge::if_then_else (gimple
*cond
, profile_probability prob
,
708 edge
&edge_true
, edge
&edge_false
)
710 insert_before (cond
);
711 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
712 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
713 basic_block bb
= create_empty_bb (e1
->dest
);
714 add_bb_to_loop (bb
, e1
->dest
->loop_father
);
715 edge e3
= make_edge (e1
->src
, bb
, EDGE_TRUE_VALUE
);
716 e1
->flags
= EDGE_FALSE_VALUE
;
717 e3
->probability
= prob
;
718 e1
->probability
= prob
.invert ();
719 bb
->count
= e1
->src
->count
.apply_probability (prob
);
720 set_immediate_dominator (CDI_DOMINATORS
, bb
, e1
->src
);
721 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
722 edge_true
= make_single_succ_edge (bb
, e2
->dest
, EDGE_FALLTHRU
);
724 m_gsi
= gsi_after_labels (bb
);
727 /* Emit a half diamond with full diamond in it
741 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
742 PROB1 is the probability that the condition 1 is true.
743 PROB2 is the probability that the condition 2 is true.
744 Updates m_gsi to start of new_bb1.
745 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
746 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
747 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
748 If COND2 is NULL, this is equivalent to
749 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
750 EDGE_TRUE_TRUE = NULL; */
753 bitint_large_huge::if_then_if_then_else (gimple
*cond1
, gimple
*cond2
,
754 profile_probability prob1
,
755 profile_probability prob2
,
756 edge
&edge_true_true
,
757 edge
&edge_true_false
,
760 edge e2
, e3
, e4
= NULL
;
761 if_then (cond1
, prob1
, e2
, e3
);
764 edge_true_true
= NULL
;
765 edge_true_false
= e2
;
769 insert_before (cond2
);
770 e2
= split_block (gsi_bb (m_gsi
), cond2
);
771 basic_block bb
= create_empty_bb (e2
->dest
);
772 add_bb_to_loop (bb
, e2
->dest
->loop_father
);
773 e4
= make_edge (e2
->src
, bb
, EDGE_TRUE_VALUE
);
774 set_immediate_dominator (CDI_DOMINATORS
, bb
, e2
->src
);
775 e4
->probability
= prob2
;
776 e2
->flags
= EDGE_FALSE_VALUE
;
777 e2
->probability
= prob2
.invert ();
778 bb
->count
= e2
->src
->count
.apply_probability (prob2
);
779 e4
= make_single_succ_edge (bb
, e3
->dest
, EDGE_FALLTHRU
);
780 e2
= find_edge (e2
->dest
, e3
->dest
);
782 edge_true_false
= e2
;
784 m_gsi
= gsi_after_labels (e2
->src
);
787 /* Emit code to access limb IDX from OP. */
790 bitint_large_huge::handle_operand (tree op
, tree idx
)
792 switch (TREE_CODE (op
))
796 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
798 if (SSA_NAME_IS_DEFAULT_DEF (op
))
802 tree v
= create_tmp_reg (m_limb_type
);
803 if (SSA_NAME_VAR (op
) && VAR_P (SSA_NAME_VAR (op
)))
805 DECL_NAME (v
) = DECL_NAME (SSA_NAME_VAR (op
));
806 DECL_SOURCE_LOCATION (v
)
807 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op
));
809 v
= get_or_create_ssa_default_def (cfun
, v
);
810 m_data
.safe_push (v
);
812 tree ret
= m_data
[m_data_cnt
];
814 if (tree_fits_uhwi_p (idx
))
816 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
817 ret
= add_cast (type
, ret
);
821 location_t loc_save
= m_loc
;
822 m_loc
= gimple_location (SSA_NAME_DEF_STMT (op
));
823 tree ret
= handle_stmt (SSA_NAME_DEF_STMT (op
), idx
);
830 p
= var_to_partition (m_map
, op
);
831 gcc_assert (m_vars
[p
] != NULL_TREE
);
832 t
= limb_access (TREE_TYPE (op
), m_vars
[p
], idx
, false);
833 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
835 t
= gimple_assign_lhs (g
);
837 && m_single_use_names
838 && m_vars
[p
] != m_lhs
840 && bitmap_bit_p (m_single_use_names
, SSA_NAME_VERSION (op
)))
842 tree clobber
= build_clobber (TREE_TYPE (m_vars
[p
]),
843 CLOBBER_STORAGE_END
);
844 g
= gimple_build_assign (m_vars
[p
], clobber
);
845 gimple_stmt_iterator gsi
= gsi_for_stmt (m_after_stmt
);
846 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
850 if (tree_fits_uhwi_p (idx
))
852 tree c
, type
= limb_access_type (TREE_TYPE (op
), idx
);
853 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
856 m_data
.safe_push (NULL_TREE
);
857 m_data
.safe_push (NULL_TREE
);
859 if (limb_prec
!= HOST_BITS_PER_WIDE_INT
)
861 wide_int w
= wi::rshift (wi::to_wide (op
), i
* limb_prec
,
862 TYPE_SIGN (TREE_TYPE (op
)));
863 c
= wide_int_to_tree (type
,
864 wide_int::from (w
, TYPE_PRECISION (type
),
867 else if (i
>= TREE_INT_CST_EXT_NUNITS (op
))
868 c
= build_int_cst (type
,
869 tree_int_cst_sgn (op
) < 0 ? -1 : 0);
871 c
= build_int_cst (type
, TREE_INT_CST_ELT (op
, i
));
876 || (m_data
[m_data_cnt
] == NULL_TREE
877 && m_data
[m_data_cnt
+ 1] == NULL_TREE
))
879 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
880 unsigned int rem
= prec
% ((m_upwards_2limb
? 2 : 1) * limb_prec
);
882 unsigned min_prec
= bitint_min_cst_precision (op
, ext
);
885 m_data
.safe_push (NULL_TREE
);
886 m_data
.safe_push (NULL_TREE
);
888 if (integer_zerop (op
))
890 tree c
= build_zero_cst (m_limb_type
);
891 m_data
[m_data_cnt
] = c
;
892 m_data
[m_data_cnt
+ 1] = c
;
894 else if (integer_all_onesp (op
))
896 tree c
= build_all_ones_cst (m_limb_type
);
897 m_data
[m_data_cnt
] = c
;
898 m_data
[m_data_cnt
+ 1] = c
;
900 else if (m_upwards_2limb
&& min_prec
<= (unsigned) limb_prec
)
902 /* Single limb constant. Use a phi with that limb from
903 the preheader edge and 0 or -1 constant from the other edge
904 and for the second limb in the loop. */
906 gcc_assert (m_first
);
909 prepare_data_in_out (fold_convert (m_limb_type
, op
), idx
, &out
,
910 build_int_cst (m_limb_type
, ext
));
912 else if (min_prec
> prec
- rem
- 2 * limb_prec
)
914 /* Constant which has enough significant bits that it isn't
915 worth trying to save .rodata space by extending from smaller
919 type
= TREE_TYPE (op
);
921 /* If we have a guarantee the most significant partial limb
922 (if any) will be only accessed through handle_operand
923 with INTEGER_CST idx, we don't need to include the partial
925 type
= build_bitint_type (prec
- rem
, 1);
926 tree c
= tree_output_constant_def (fold_convert (type
, op
));
927 m_data
[m_data_cnt
] = c
;
928 m_data
[m_data_cnt
+ 1] = NULL_TREE
;
930 else if (m_upwards_2limb
)
932 /* Constant with smaller number of bits. Trade conditional
933 code for .rodata space by extending from smaller number. */
934 min_prec
= CEIL (min_prec
, 2 * limb_prec
) * (2 * limb_prec
);
935 tree type
= build_bitint_type (min_prec
, 1);
936 tree c
= tree_output_constant_def (fold_convert (type
, op
));
937 tree idx2
= make_ssa_name (sizetype
);
938 g
= gimple_build_assign (idx2
, PLUS_EXPR
, idx
, size_one_node
);
940 g
= gimple_build_cond (LT_EXPR
, idx
,
941 size_int (min_prec
/ limb_prec
),
942 NULL_TREE
, NULL_TREE
);
943 edge edge_true
, edge_false
;
944 if_then (g
, (min_prec
>= (prec
- rem
) / 2
945 ? profile_probability::likely ()
946 : profile_probability::unlikely ()),
947 edge_true
, edge_false
);
948 tree c1
= limb_access (TREE_TYPE (op
), c
, idx
, false);
949 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c1
)), c1
);
951 c1
= gimple_assign_lhs (g
);
952 tree c2
= limb_access (TREE_TYPE (op
), c
, idx2
, false);
953 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c2
)), c2
);
955 c2
= gimple_assign_lhs (g
);
956 tree c3
= build_int_cst (m_limb_type
, ext
);
957 m_gsi
= gsi_after_labels (edge_true
->dest
);
958 m_data
[m_data_cnt
] = make_ssa_name (m_limb_type
);
959 m_data
[m_data_cnt
+ 1] = make_ssa_name (m_limb_type
);
960 gphi
*phi
= create_phi_node (m_data
[m_data_cnt
],
962 add_phi_arg (phi
, c1
, edge_true
, UNKNOWN_LOCATION
);
963 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
964 phi
= create_phi_node (m_data
[m_data_cnt
+ 1], edge_true
->dest
);
965 add_phi_arg (phi
, c2
, edge_true
, UNKNOWN_LOCATION
);
966 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
970 /* Constant with smaller number of bits. Trade conditional
971 code for .rodata space by extending from smaller number.
972 Version for loops with random access to the limbs or
974 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
976 if (min_prec
<= (unsigned) limb_prec
)
977 c
= fold_convert (m_limb_type
, op
);
980 tree type
= build_bitint_type (min_prec
, 1);
981 c
= tree_output_constant_def (fold_convert (type
, op
));
983 m_data
[m_data_cnt
] = c
;
984 m_data
[m_data_cnt
+ 1] = integer_type_node
;
986 t
= m_data
[m_data_cnt
];
987 if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
989 t
= limb_access (TREE_TYPE (op
), t
, idx
, false);
990 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
992 t
= gimple_assign_lhs (g
);
995 else if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
997 t
= limb_access (TREE_TYPE (op
), m_data
[m_data_cnt
], idx
, false);
998 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
1000 t
= gimple_assign_lhs (g
);
1003 t
= m_data
[m_data_cnt
+ 1];
1004 if (m_data
[m_data_cnt
+ 1] == integer_type_node
)
1006 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
1007 unsigned rem
= prec
% ((m_upwards_2limb
? 2 : 1) * limb_prec
);
1008 int ext
= wi::neg_p (wi::to_wide (op
)) ? -1 : 0;
1009 tree c
= m_data
[m_data_cnt
];
1010 unsigned min_prec
= TYPE_PRECISION (TREE_TYPE (c
));
1011 g
= gimple_build_cond (LT_EXPR
, idx
,
1012 size_int (min_prec
/ limb_prec
),
1013 NULL_TREE
, NULL_TREE
);
1014 edge edge_true
, edge_false
;
1015 if_then (g
, (min_prec
>= (prec
- rem
) / 2
1016 ? profile_probability::likely ()
1017 : profile_probability::unlikely ()),
1018 edge_true
, edge_false
);
1019 if (min_prec
> (unsigned) limb_prec
)
1021 c
= limb_access (TREE_TYPE (op
), c
, idx
, false);
1022 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c
)), c
);
1024 c
= gimple_assign_lhs (g
);
1026 tree c2
= build_int_cst (m_limb_type
, ext
);
1027 m_gsi
= gsi_after_labels (edge_true
->dest
);
1028 t
= make_ssa_name (m_limb_type
);
1029 gphi
*phi
= create_phi_node (t
, edge_true
->dest
);
1030 add_phi_arg (phi
, c
, edge_true
, UNKNOWN_LOCATION
);
1031 add_phi_arg (phi
, c2
, edge_false
, UNKNOWN_LOCATION
);
1040 /* Helper method, add a PHI node with VAL from preheader edge if
1041 inside of a loop and m_first. Keep state in a pair of m_data
1042 elements. If VAL_OUT is non-NULL, use that as PHI argument from
1043 the latch edge, otherwise create a new SSA_NAME for it and let
1044 caller initialize it. */
1047 bitint_large_huge::prepare_data_in_out (tree val
, tree idx
, tree
*data_out
,
1052 *data_out
= tree_fits_uhwi_p (idx
) ? NULL_TREE
: m_data
[m_data_cnt
+ 1];
1053 return m_data
[m_data_cnt
];
1056 *data_out
= NULL_TREE
;
1057 if (tree_fits_uhwi_p (idx
))
1059 m_data
.safe_push (val
);
1060 m_data
.safe_push (NULL_TREE
);
1064 tree in
= make_ssa_name (TREE_TYPE (val
));
1065 gphi
*phi
= create_phi_node (in
, m_bb
);
1066 edge e1
= find_edge (m_preheader_bb
, m_bb
);
1067 edge e2
= EDGE_PRED (m_bb
, 0);
1069 e2
= EDGE_PRED (m_bb
, 1);
1070 add_phi_arg (phi
, val
, e1
, UNKNOWN_LOCATION
);
1071 tree out
= val_out
? val_out
: make_ssa_name (TREE_TYPE (val
));
1072 add_phi_arg (phi
, out
, e2
, UNKNOWN_LOCATION
);
1073 m_data
.safe_push (in
);
1074 m_data
.safe_push (out
);
1078 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1079 convert it without emitting any code, otherwise emit
1080 the conversion statement before the current location. */
1083 bitint_large_huge::add_cast (tree type
, tree val
)
1085 if (TREE_CODE (val
) == INTEGER_CST
)
1086 return fold_convert (type
, val
);
1088 tree lhs
= make_ssa_name (type
);
1089 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, val
);
1094 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1097 bitint_large_huge::handle_plus_minus (tree_code code
, tree rhs1
, tree rhs2
,
1100 tree lhs
, data_out
, ctype
;
1101 tree rhs1_type
= TREE_TYPE (rhs1
);
1103 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1106 if (optab_handler (code
== PLUS_EXPR
? uaddc5_optab
: usubc5_optab
,
1107 TYPE_MODE (m_limb_type
)) != CODE_FOR_nothing
)
1109 ctype
= build_complex_type (m_limb_type
);
1110 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1112 if (!TYPE_UNSIGNED (rhs1_type
))
1114 tree type
= unsigned_type_for (rhs1_type
);
1115 rhs1
= add_cast (type
, rhs1
);
1116 rhs2
= add_cast (type
, rhs2
);
1118 rhs1
= add_cast (m_limb_type
, rhs1
);
1119 rhs2
= add_cast (m_limb_type
, rhs2
);
1121 lhs
= make_ssa_name (ctype
);
1122 g
= gimple_build_call_internal (code
== PLUS_EXPR
1123 ? IFN_UADDC
: IFN_USUBC
,
1124 3, rhs1
, rhs2
, data_in
);
1125 gimple_call_set_lhs (g
, lhs
);
1127 if (data_out
== NULL_TREE
)
1128 data_out
= make_ssa_name (m_limb_type
);
1129 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1130 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1133 else if (types_compatible_p (rhs1_type
, m_limb_type
))
1135 ctype
= build_complex_type (m_limb_type
);
1136 lhs
= make_ssa_name (ctype
);
1137 g
= gimple_build_call_internal (code
== PLUS_EXPR
1138 ? IFN_ADD_OVERFLOW
: IFN_SUB_OVERFLOW
,
1140 gimple_call_set_lhs (g
, lhs
);
1142 if (data_out
== NULL_TREE
)
1143 data_out
= make_ssa_name (m_limb_type
);
1144 if (!integer_zerop (data_in
))
1146 rhs1
= make_ssa_name (m_limb_type
);
1147 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1148 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1150 rhs2
= make_ssa_name (m_limb_type
);
1151 g
= gimple_build_assign (rhs2
, IMAGPART_EXPR
,
1152 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1154 lhs
= make_ssa_name (ctype
);
1155 g
= gimple_build_call_internal (code
== PLUS_EXPR
1159 gimple_call_set_lhs (g
, lhs
);
1161 data_in
= make_ssa_name (m_limb_type
);
1162 g
= gimple_build_assign (data_in
, IMAGPART_EXPR
,
1163 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1165 g
= gimple_build_assign (data_out
, PLUS_EXPR
, rhs2
, data_in
);
1170 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1171 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1177 tree in
= add_cast (rhs1_type
, data_in
);
1178 lhs
= make_ssa_name (rhs1_type
);
1179 g
= gimple_build_assign (lhs
, code
, rhs1
, rhs2
);
1181 rhs1
= make_ssa_name (rhs1_type
);
1182 g
= gimple_build_assign (rhs1
, code
, lhs
, in
);
1184 m_data
[m_data_cnt
] = NULL_TREE
;
1188 rhs1
= make_ssa_name (m_limb_type
);
1189 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1190 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1192 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1193 rhs1
= add_cast (rhs1_type
, rhs1
);
1194 m_data
[m_data_cnt
] = data_out
;
1199 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1200 count in [0, limb_prec - 1] range. */
1203 bitint_large_huge::handle_lshift (tree rhs1
, tree rhs2
, tree idx
)
1205 unsigned HOST_WIDE_INT cnt
= tree_to_uhwi (rhs2
);
1206 gcc_checking_assert (cnt
< (unsigned) limb_prec
);
1210 tree lhs
, data_out
, rhs1_type
= TREE_TYPE (rhs1
);
1212 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1215 if (!integer_zerop (data_in
))
1217 lhs
= make_ssa_name (m_limb_type
);
1218 g
= gimple_build_assign (lhs
, RSHIFT_EXPR
, data_in
,
1219 build_int_cst (unsigned_type_node
,
1222 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1223 lhs
= add_cast (rhs1_type
, lhs
);
1226 if (types_compatible_p (rhs1_type
, m_limb_type
))
1228 if (data_out
== NULL_TREE
)
1229 data_out
= make_ssa_name (m_limb_type
);
1230 g
= gimple_build_assign (data_out
, rhs1
);
1233 if (cnt
< (unsigned) TYPE_PRECISION (rhs1_type
))
1235 lhs
= make_ssa_name (rhs1_type
);
1236 g
= gimple_build_assign (lhs
, LSHIFT_EXPR
, rhs1
, rhs2
);
1238 if (!integer_zerop (data_in
))
1241 lhs
= make_ssa_name (rhs1_type
);
1242 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, rhs1
, data_in
);
1248 m_data
[m_data_cnt
] = data_out
;
1253 /* Helper function for handle_stmt method, handle an integral
1254 to integral conversion. */
1257 bitint_large_huge::handle_cast (tree lhs_type
, tree rhs1
, tree idx
)
1259 tree rhs_type
= TREE_TYPE (rhs1
);
1261 if ((TREE_CODE (rhs1
) == SSA_NAME
|| TREE_CODE (rhs1
) == INTEGER_CST
)
1262 && TREE_CODE (lhs_type
) == BITINT_TYPE
1263 && TREE_CODE (rhs_type
) == BITINT_TYPE
1264 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1265 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
)
1267 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
)
1268 /* If lhs has bigger precision than rhs, we can use
1269 the simple case only if there is a guarantee that
1270 the most significant limb is handled in straight
1271 line code. If m_var_msb (on left shifts) or
1272 if m_upwards_2limb * limb_prec is equal to
1273 lhs precision or if not m_upwards_2limb and lhs_type
1274 has precision which is multiple of limb_prec that is
1277 && (CEIL (TYPE_PRECISION (lhs_type
), limb_prec
)
1278 == CEIL (TYPE_PRECISION (rhs_type
), limb_prec
))
1279 && ((!m_upwards_2limb
1280 && (TYPE_PRECISION (lhs_type
) % limb_prec
!= 0))
1282 && (m_upwards_2limb
* limb_prec
1283 < TYPE_PRECISION (lhs_type
))))))
1285 rhs1
= handle_operand (rhs1
, idx
);
1286 if (tree_fits_uhwi_p (idx
))
1288 tree type
= limb_access_type (lhs_type
, idx
);
1289 if (!types_compatible_p (type
, TREE_TYPE (rhs1
)))
1290 rhs1
= add_cast (type
, rhs1
);
1295 /* Indexes lower than this don't need any special processing. */
1296 unsigned low
= ((unsigned) TYPE_PRECISION (rhs_type
)
1297 - !TYPE_UNSIGNED (rhs_type
)) / limb_prec
;
1298 /* Indexes >= than this always contain an extension. */
1299 unsigned high
= CEIL ((unsigned) TYPE_PRECISION (rhs_type
), limb_prec
);
1300 bool save_first
= m_first
;
1303 m_data
.safe_push (NULL_TREE
);
1304 m_data
.safe_push (NULL_TREE
);
1305 m_data
.safe_push (NULL_TREE
);
1306 if (TYPE_UNSIGNED (rhs_type
))
1307 /* No need to keep state between iterations. */
1309 else if (m_upwards
&& !m_upwards_2limb
)
1310 /* We need to keep state between iterations, but
1311 not within any loop, everything is straight line
1312 code with only increasing indexes. */
1314 else if (!m_upwards_2limb
)
1316 unsigned save_data_cnt
= m_data_cnt
;
1317 gimple_stmt_iterator save_gsi
= m_gsi
;
1319 if (gsi_end_p (m_gsi
))
1320 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1323 m_data_cnt
= save_data_cnt
+ 3;
1324 t
= handle_operand (rhs1
, size_int (low
));
1326 m_data
[save_data_cnt
+ 2]
1327 = build_int_cst (NULL_TREE
, m_data_cnt
);
1328 m_data_cnt
= save_data_cnt
;
1329 t
= add_cast (signed_type_for (m_limb_type
), t
);
1330 tree lpm1
= build_int_cst (unsigned_type_node
, limb_prec
- 1);
1331 tree n
= make_ssa_name (TREE_TYPE (t
));
1332 g
= gimple_build_assign (n
, RSHIFT_EXPR
, t
, lpm1
);
1334 m_data
[save_data_cnt
+ 1] = add_cast (m_limb_type
, n
);
1336 if (gsi_end_p (m_init_gsi
))
1337 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1339 gsi_prev (&m_init_gsi
);
1342 else if (m_upwards_2limb
* limb_prec
< TYPE_PRECISION (rhs_type
))
1343 /* We need to keep state between iterations, but
1344 fortunately not within the loop, only afterwards. */
1349 m_data
.truncate (m_data_cnt
);
1350 prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
1351 m_data
.safe_push (NULL_TREE
);
1355 unsigned save_data_cnt
= m_data_cnt
;
1357 if (!tree_fits_uhwi_p (idx
))
1360 && low
>= m_upwards_2limb
- m_first
)
1362 rhs1
= handle_operand (rhs1
, idx
);
1364 m_data
[save_data_cnt
+ 2]
1365 = build_int_cst (NULL_TREE
, m_data_cnt
);
1366 m_first
= save_first
;
1369 bool single_comparison
1370 = low
== high
|| (m_upwards_2limb
&& (low
& 1) == m_first
);
1372 if (!single_comparison
1375 && low
+ 1 == m_upwards_2limb
)
1376 /* In this case we know that idx <= low always,
1377 so effectively we just needs a single comparison,
1378 idx < low or idx == low, but we'd need to emit different
1379 code for the 2 branches than single_comparison normally
1380 emits. So, instead of special-casing that, emit a
1381 low <= low comparison which cfg cleanup will clean up
1382 at the end of the pass. */
1383 idxc
= size_int (low
);
1384 g
= gimple_build_cond (single_comparison
? LT_EXPR
: LE_EXPR
,
1385 idxc
, size_int (low
), NULL_TREE
, NULL_TREE
);
1386 edge edge_true_true
, edge_true_false
, edge_false
;
1387 if_then_if_then_else (g
, (single_comparison
? NULL
1388 : gimple_build_cond (EQ_EXPR
, idx
,
1392 profile_probability::likely (),
1393 profile_probability::unlikely (),
1394 edge_true_true
, edge_true_false
, edge_false
);
1395 bool save_cast_conditional
= m_cast_conditional
;
1396 m_cast_conditional
= true;
1398 tree t1
= handle_operand (rhs1
, idx
), t2
= NULL_TREE
;
1400 m_data
[save_data_cnt
+ 2]
1401 = build_int_cst (NULL_TREE
, m_data_cnt
);
1402 tree ext
= NULL_TREE
;
1403 tree bitfld
= NULL_TREE
;
1404 if (!single_comparison
)
1406 m_gsi
= gsi_after_labels (edge_true_true
->src
);
1408 m_data_cnt
= save_data_cnt
+ 3;
1411 bitfld
= m_data
[m_bitfld_load
];
1412 m_data
[m_bitfld_load
] = m_data
[m_bitfld_load
+ 2];
1415 t2
= handle_operand (rhs1
, size_int (low
));
1416 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t2
)))
1417 t2
= add_cast (m_limb_type
, t2
);
1418 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards_2limb
)
1420 ext
= add_cast (signed_type_for (m_limb_type
), t2
);
1421 tree lpm1
= build_int_cst (unsigned_type_node
,
1423 tree n
= make_ssa_name (TREE_TYPE (ext
));
1424 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1426 ext
= add_cast (m_limb_type
, n
);
1430 if (TYPE_UNSIGNED (rhs_type
))
1431 t3
= build_zero_cst (m_limb_type
);
1432 else if (m_upwards_2limb
&& (save_first
|| ext
!= NULL_TREE
))
1433 t3
= m_data
[save_data_cnt
];
1435 t3
= m_data
[save_data_cnt
+ 1];
1436 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
1437 t
= make_ssa_name (m_limb_type
);
1438 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
1439 add_phi_arg (phi
, t1
, edge_true_false
, UNKNOWN_LOCATION
);
1440 add_phi_arg (phi
, t3
, edge_false
, UNKNOWN_LOCATION
);
1442 add_phi_arg (phi
, t2
, edge_true_true
, UNKNOWN_LOCATION
);
1445 tree t4
= make_ssa_name (m_limb_type
);
1446 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1447 add_phi_arg (phi
, build_zero_cst (m_limb_type
), edge_true_false
,
1449 add_phi_arg (phi
, m_data
[save_data_cnt
], edge_false
,
1451 add_phi_arg (phi
, ext
, edge_true_true
, UNKNOWN_LOCATION
);
1452 if (!save_cast_conditional
)
1454 g
= gimple_build_assign (m_data
[save_data_cnt
+ 1], t4
);
1458 for (basic_block bb
= gsi_bb (m_gsi
);;)
1460 edge e1
= single_succ_edge (bb
);
1461 edge e2
= find_edge (e1
->dest
, m_bb
), e3
;
1462 tree t5
= (e2
? m_data
[save_data_cnt
+ 1]
1463 : make_ssa_name (m_limb_type
));
1464 phi
= create_phi_node (t5
, e1
->dest
);
1466 FOR_EACH_EDGE (e3
, ei
, e1
->dest
->preds
)
1467 add_phi_arg (phi
, (e3
== e1
? t4
1468 : build_zero_cst (m_limb_type
)),
1469 e3
, UNKNOWN_LOCATION
);
1480 t4
= m_data
[m_bitfld_load
+ 1];
1482 t4
= make_ssa_name (m_limb_type
);
1483 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1485 edge_true_true
? bitfld
: m_data
[m_bitfld_load
],
1486 edge_true_false
, UNKNOWN_LOCATION
);
1487 add_phi_arg (phi
, m_data
[m_bitfld_load
+ 2],
1488 edge_false
, UNKNOWN_LOCATION
);
1490 add_phi_arg (phi
, m_data
[m_bitfld_load
], edge_true_true
,
1492 m_data
[m_bitfld_load
] = t4
;
1493 m_data
[m_bitfld_load
+ 2] = t4
;
1496 m_cast_conditional
= save_cast_conditional
;
1497 m_first
= save_first
;
1502 if (tree_to_uhwi (idx
) < low
)
1504 t
= handle_operand (rhs1
, idx
);
1506 m_data
[save_data_cnt
+ 2]
1507 = build_int_cst (NULL_TREE
, m_data_cnt
);
1509 else if (tree_to_uhwi (idx
) < high
)
1511 t
= handle_operand (rhs1
, size_int (low
));
1513 m_data
[save_data_cnt
+ 2]
1514 = build_int_cst (NULL_TREE
, m_data_cnt
);
1515 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t
)))
1516 t
= add_cast (m_limb_type
, t
);
1517 tree ext
= NULL_TREE
;
1518 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards
)
1520 ext
= add_cast (signed_type_for (m_limb_type
), t
);
1521 tree lpm1
= build_int_cst (unsigned_type_node
,
1523 tree n
= make_ssa_name (TREE_TYPE (ext
));
1524 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1526 ext
= add_cast (m_limb_type
, n
);
1527 m_data
[save_data_cnt
+ 1] = ext
;
1532 if (TYPE_UNSIGNED (rhs_type
) && m_first
)
1534 handle_operand (rhs1
, size_zero_node
);
1535 m_data
[save_data_cnt
+ 2]
1536 = build_int_cst (NULL_TREE
, m_data_cnt
);
1539 m_data_cnt
= tree_to_uhwi (m_data
[save_data_cnt
+ 2]);
1540 if (TYPE_UNSIGNED (rhs_type
))
1541 t
= build_zero_cst (m_limb_type
);
1542 else if (m_bb
&& m_data
[save_data_cnt
])
1543 t
= m_data
[save_data_cnt
];
1545 t
= m_data
[save_data_cnt
+ 1];
1547 tree type
= limb_access_type (lhs_type
, idx
);
1548 if (!useless_type_conversion_p (type
, m_limb_type
))
1549 t
= add_cast (type
, t
);
1550 m_first
= save_first
;
1554 else if (TREE_CODE (lhs_type
) == BITINT_TYPE
1555 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1556 && INTEGRAL_TYPE_P (rhs_type
))
1558 /* Add support for 3 or more limbs filled in from normal integral
1559 type if this assert fails. If no target chooses limb mode smaller
1560 than half of largest supported normal integral type, this will not
1562 gcc_assert (TYPE_PRECISION (rhs_type
) <= 2 * limb_prec
);
1563 tree r1
= NULL_TREE
, r2
= NULL_TREE
, rext
= NULL_TREE
;
1566 gimple_stmt_iterator save_gsi
= m_gsi
;
1568 if (gsi_end_p (m_gsi
))
1569 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1572 if (TREE_CODE (rhs_type
) == BITINT_TYPE
1573 && bitint_precision_kind (rhs_type
) == bitint_prec_middle
)
1575 tree type
= NULL_TREE
;
1576 rhs1
= maybe_cast_middle_bitint (&m_gsi
, rhs1
, type
);
1577 rhs_type
= TREE_TYPE (rhs1
);
1580 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
1581 r1
= add_cast (m_limb_type
, rhs1
);
1582 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1584 g
= gimple_build_assign (make_ssa_name (rhs_type
),
1586 build_int_cst (unsigned_type_node
,
1589 r2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1591 if (TYPE_UNSIGNED (rhs_type
))
1592 rext
= build_zero_cst (m_limb_type
);
1595 rext
= add_cast (signed_type_for (m_limb_type
), r2
? r2
: r1
);
1596 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rext
)),
1598 build_int_cst (unsigned_type_node
,
1601 rext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1604 if (gsi_end_p (m_init_gsi
))
1605 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1607 gsi_prev (&m_init_gsi
);
1611 if (m_upwards_2limb
)
1616 prepare_data_in_out (r1
, idx
, &out1
, rext
);
1617 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1619 prepare_data_in_out (r2
, idx
, &out2
, rext
);
1622 m_data
[m_data_cnt
+ 1] = t
;
1625 m_data
[m_data_cnt
+ 1] = rext
;
1626 m_data
.safe_push (rext
);
1627 t
= m_data
[m_data_cnt
];
1629 else if (!tree_fits_uhwi_p (idx
))
1630 t
= m_data
[m_data_cnt
+ 1];
1633 tree type
= limb_access_type (lhs_type
, idx
);
1634 t
= m_data
[m_data_cnt
+ 2];
1635 if (!useless_type_conversion_p (type
, m_limb_type
))
1636 t
= add_cast (type
, t
);
1643 m_data
.safe_push (r1
);
1644 m_data
.safe_push (r2
);
1645 m_data
.safe_push (rext
);
1647 if (tree_fits_uhwi_p (idx
))
1649 tree type
= limb_access_type (lhs_type
, idx
);
1650 if (integer_zerop (idx
))
1651 t
= m_data
[m_data_cnt
];
1652 else if (TYPE_PRECISION (rhs_type
) > limb_prec
1653 && integer_onep (idx
))
1654 t
= m_data
[m_data_cnt
+ 1];
1656 t
= m_data
[m_data_cnt
+ 2];
1657 if (!useless_type_conversion_p (type
, m_limb_type
))
1658 t
= add_cast (type
, t
);
1662 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
1663 NULL_TREE
, NULL_TREE
);
1664 edge e2
, e3
, e4
= NULL
;
1665 if_then (g
, profile_probability::likely (), e2
, e3
);
1666 if (m_data
[m_data_cnt
+ 1])
1668 g
= gimple_build_cond (EQ_EXPR
, idx
, size_one_node
,
1669 NULL_TREE
, NULL_TREE
);
1671 edge e5
= split_block (gsi_bb (m_gsi
), g
);
1672 e4
= make_edge (e5
->src
, e2
->dest
, EDGE_TRUE_VALUE
);
1673 e2
= find_edge (e5
->dest
, e2
->dest
);
1674 e4
->probability
= profile_probability::unlikely ();
1675 e5
->flags
= EDGE_FALSE_VALUE
;
1676 e5
->probability
= e4
->probability
.invert ();
1678 m_gsi
= gsi_after_labels (e2
->dest
);
1679 t
= make_ssa_name (m_limb_type
);
1680 gphi
*phi
= create_phi_node (t
, e2
->dest
);
1681 add_phi_arg (phi
, m_data
[m_data_cnt
+ 2], e2
, UNKNOWN_LOCATION
);
1682 add_phi_arg (phi
, m_data
[m_data_cnt
], e3
, UNKNOWN_LOCATION
);
1684 add_phi_arg (phi
, m_data
[m_data_cnt
+ 1], e4
, UNKNOWN_LOCATION
);
1691 /* Helper function for handle_stmt method, handle a BIT_FIELD_REF. */
1694 bitint_large_huge::handle_bit_field_ref (tree op
, tree idx
)
1696 if (tree_fits_uhwi_p (idx
))
1699 m_data
.safe_push (NULL
);
1701 unsigned HOST_WIDE_INT sz
= tree_to_uhwi (TYPE_SIZE (m_limb_type
));
1702 tree bfr
= build3 (BIT_FIELD_REF
, m_limb_type
,
1703 TREE_OPERAND (op
, 0),
1704 TYPE_SIZE (m_limb_type
),
1705 size_binop (PLUS_EXPR
, TREE_OPERAND (op
, 2),
1706 bitsize_int (tree_to_uhwi (idx
) * sz
)));
1707 tree r
= make_ssa_name (m_limb_type
);
1708 gimple
*g
= gimple_build_assign (r
, bfr
);
1710 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
1711 if (!useless_type_conversion_p (type
, m_limb_type
))
1712 r
= add_cast (type
, r
);
1718 unsigned HOST_WIDE_INT sz
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op
)));
1721 if (bitwise_mode_for_size (sz
).exists (&mode
)
1722 && known_eq (GET_MODE_BITSIZE (mode
), sz
))
1723 type
= bitwise_type_for_mode (mode
);
1727 type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op
, 0)));
1729 if (TYPE_ALIGN (type
) < TYPE_ALIGN (TREE_TYPE (op
)))
1730 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op
)));
1731 var
= create_tmp_var (type
);
1732 TREE_ADDRESSABLE (var
) = 1;
1734 if (mode
!= VOIDmode
)
1736 bfr
= build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (op
, 0),
1737 TYPE_SIZE (type
), TREE_OPERAND (op
, 2));
1738 g
= gimple_build_assign (make_ssa_name (type
),
1739 BIT_FIELD_REF
, bfr
);
1740 gimple_set_location (g
, m_loc
);
1741 gsi_insert_after (&m_init_gsi
, g
, GSI_NEW_STMT
);
1742 bfr
= gimple_assign_lhs (g
);
1745 bfr
= TREE_OPERAND (op
, 0);
1746 g
= gimple_build_assign (var
, bfr
);
1747 gimple_set_location (g
, m_loc
);
1748 gsi_insert_after (&m_init_gsi
, g
, GSI_NEW_STMT
);
1749 if (mode
== VOIDmode
)
1751 unsigned HOST_WIDE_INT nelts
1752 = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op
))), limb_prec
);
1753 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
1754 var
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
),
1755 build_int_cst (build_pointer_type (type
),
1756 tree_to_uhwi (TREE_OPERAND (op
, 2))
1759 m_data
.safe_push (var
);
1762 var
= unshare_expr (m_data
[m_data_cnt
]);
1764 var
= limb_access (TREE_TYPE (op
), var
, idx
, false);
1765 tree r
= make_ssa_name (m_limb_type
);
1766 gimple
*g
= gimple_build_assign (r
, var
);
1771 /* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
1772 is an older EH edge, and except for virtual PHIs duplicate the
1773 PHI argument from the EH_EDGE to the new EH edge. */
1776 add_eh_edge (basic_block src
, edge eh_edge
)
1778 edge e
= make_edge (src
, eh_edge
->dest
, EDGE_EH
);
1779 e
->probability
= profile_probability::very_unlikely ();
1780 for (gphi_iterator gsi
= gsi_start_phis (eh_edge
->dest
);
1781 !gsi_end_p (gsi
); gsi_next (&gsi
))
1783 gphi
*phi
= gsi
.phi ();
1784 tree lhs
= gimple_phi_result (phi
);
1785 if (virtual_operand_p (lhs
))
1787 const phi_arg_d
*arg
= gimple_phi_arg (phi
, eh_edge
->dest_idx
);
1788 add_phi_arg (phi
, arg
->def
, e
, arg
->locus
);
1792 /* Helper function for handle_stmt method, handle a load from memory. */
1795 bitint_large_huge::handle_load (gimple
*stmt
, tree idx
)
1797 tree rhs1
= gimple_assign_rhs1 (stmt
);
1798 tree rhs_type
= TREE_TYPE (rhs1
);
1799 bool eh
= stmt_ends_bb_p (stmt
);
1800 edge eh_edge
= NULL
;
1806 basic_block bb
= gimple_bb (stmt
);
1808 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
1809 if (eh_edge
->flags
& EDGE_EH
)
1813 if (TREE_CODE (rhs1
) == COMPONENT_REF
1814 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
1816 tree fld
= TREE_OPERAND (rhs1
, 1);
1817 /* For little-endian, we can allow as inputs bit-fields
1818 which start at a limb boundary. */
1819 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
1820 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
1821 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % limb_prec
) == 0)
1823 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1824 handle it normally for now. */
1825 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
1827 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
1828 poly_int64 bitoffset
;
1829 poly_uint64 field_offset
, repr_offset
;
1830 bool var_field_off
= false;
1831 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
1832 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
1833 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
1837 var_field_off
= true;
1839 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
1840 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
1841 tree nrhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1842 TREE_OPERAND (rhs1
, 0), repr
,
1843 var_field_off
? TREE_OPERAND (rhs1
, 2) : NULL_TREE
);
1844 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
1845 unsigned bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
1846 unsigned bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
1851 gimple_stmt_iterator save_gsi
= m_gsi
;
1853 if (gsi_end_p (m_gsi
))
1854 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1857 tree t
= limb_access (rhs_type
, nrhs1
, size_int (bo_idx
), true);
1858 tree iv
= make_ssa_name (m_limb_type
);
1859 g
= gimple_build_assign (iv
, t
);
1863 maybe_duplicate_eh_stmt (g
, stmt
);
1866 edge e
= split_block (gsi_bb (m_gsi
), g
);
1867 add_eh_edge (e
->src
, eh_edge
);
1868 m_gsi
= gsi_after_labels (e
->dest
);
1869 if (gsi_bb (save_gsi
) == e
->src
)
1871 if (gsi_end_p (save_gsi
))
1872 save_gsi
= gsi_end_bb (e
->dest
);
1874 save_gsi
= gsi_for_stmt (gsi_stmt (save_gsi
));
1876 if (m_preheader_bb
== e
->src
)
1877 m_preheader_bb
= e
->dest
;
1881 if (gsi_end_p (m_init_gsi
))
1882 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1884 gsi_prev (&m_init_gsi
);
1887 prepare_data_in_out (iv
, idx
, &out
);
1888 out
= m_data
[m_data_cnt
];
1889 m_data
.safe_push (out
);
1893 m_data
.safe_push (NULL_TREE
);
1894 m_data
.safe_push (NULL_TREE
);
1895 m_data
.safe_push (NULL_TREE
);
1899 tree nidx0
= NULL_TREE
, nidx1
;
1900 tree iv
= m_data
[m_data_cnt
];
1901 if (m_cast_conditional
&& iv
)
1903 gcc_assert (!m_bitfld_load
);
1904 m_bitfld_load
= m_data_cnt
;
1906 if (tree_fits_uhwi_p (idx
))
1908 unsigned prec
= TYPE_PRECISION (rhs_type
);
1909 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
1910 gcc_assert (i
* limb_prec
< prec
);
1911 nidx1
= size_int (i
+ bo_idx
+ 1);
1912 if ((i
+ 1) * limb_prec
> prec
)
1915 if (prec
+ bo_bit
<= (unsigned) limb_prec
)
1919 nidx0
= size_int (i
+ bo_idx
);
1929 nidx0
= make_ssa_name (sizetype
);
1930 g
= gimple_build_assign (nidx0
, PLUS_EXPR
, idx
,
1935 nidx1
= make_ssa_name (sizetype
);
1936 g
= gimple_build_assign (nidx1
, PLUS_EXPR
, idx
,
1937 size_int (bo_idx
+ 1));
1941 tree iv2
= NULL_TREE
;
1944 tree t
= limb_access (rhs_type
, nrhs1
, nidx0
, true);
1945 iv
= make_ssa_name (m_limb_type
);
1946 g
= gimple_build_assign (iv
, t
);
1952 bool conditional
= m_var_msb
&& !tree_fits_uhwi_p (idx
);
1953 unsigned prec
= TYPE_PRECISION (rhs_type
);
1956 if ((prec
% limb_prec
) == 0
1957 || ((prec
% limb_prec
) + bo_bit
> (unsigned) limb_prec
))
1958 conditional
= false;
1960 edge edge_true
= NULL
, edge_false
= NULL
;
1963 g
= gimple_build_cond (NE_EXPR
, idx
,
1964 size_int (prec
/ limb_prec
),
1965 NULL_TREE
, NULL_TREE
);
1966 if_then (g
, profile_probability::likely (),
1967 edge_true
, edge_false
);
1969 tree t
= limb_access (rhs_type
, nrhs1
, nidx1
, true);
1973 && !tree_fits_uhwi_p (idx
))
1974 iv2
= m_data
[m_data_cnt
+ 1];
1976 iv2
= make_ssa_name (m_limb_type
);
1977 g
= gimple_build_assign (iv2
, t
);
1981 maybe_duplicate_eh_stmt (g
, stmt
);
1984 edge e
= split_block (gsi_bb (m_gsi
), g
);
1985 m_gsi
= gsi_after_labels (e
->dest
);
1986 add_eh_edge (e
->src
, eh_edge
);
1991 tree iv3
= make_ssa_name (m_limb_type
);
1993 edge_true
= find_edge (gsi_bb (m_gsi
), edge_false
->dest
);
1994 gphi
*phi
= create_phi_node (iv3
, edge_true
->dest
);
1995 add_phi_arg (phi
, iv2
, edge_true
, UNKNOWN_LOCATION
);
1996 add_phi_arg (phi
, build_zero_cst (m_limb_type
),
1997 edge_false
, UNKNOWN_LOCATION
);
1998 m_gsi
= gsi_after_labels (edge_true
->dest
);
2001 g
= gimple_build_assign (make_ssa_name (m_limb_type
), RSHIFT_EXPR
,
2002 iv
, build_int_cst (unsigned_type_node
, bo_bit
));
2004 iv
= gimple_assign_lhs (g
);
2007 g
= gimple_build_assign (make_ssa_name (m_limb_type
), LSHIFT_EXPR
,
2008 iv2
, build_int_cst (unsigned_type_node
,
2009 limb_prec
- bo_bit
));
2011 g
= gimple_build_assign (make_ssa_name (m_limb_type
), BIT_IOR_EXPR
,
2012 gimple_assign_lhs (g
), iv
);
2014 iv
= gimple_assign_lhs (g
);
2015 if (m_data
[m_data_cnt
])
2016 m_data
[m_data_cnt
] = iv2
;
2018 if (tree_fits_uhwi_p (idx
))
2020 tree atype
= limb_access_type (rhs_type
, idx
);
2021 if (!useless_type_conversion_p (atype
, TREE_TYPE (iv
)))
2022 iv
= add_cast (atype
, iv
);
2029 /* Use write_p = true for loads with EH edges to make
2030 sure limb_access doesn't add a cast as separate
2031 statement after it. */
2032 rhs1
= limb_access (rhs_type
, rhs1
, idx
, eh
);
2033 tree ret
= make_ssa_name (TREE_TYPE (rhs1
));
2034 g
= gimple_build_assign (ret
, rhs1
);
2038 maybe_duplicate_eh_stmt (g
, stmt
);
2041 edge e
= split_block (gsi_bb (m_gsi
), g
);
2042 m_gsi
= gsi_after_labels (e
->dest
);
2043 add_eh_edge (e
->src
, eh_edge
);
2045 if (tree_fits_uhwi_p (idx
))
2047 tree atype
= limb_access_type (rhs_type
, idx
);
2048 if (!useless_type_conversion_p (atype
, TREE_TYPE (rhs1
)))
2049 ret
= add_cast (atype
, ret
);
2055 /* Return a limb IDX from a mergeable statement STMT. */
2058 bitint_large_huge::handle_stmt (gimple
*stmt
, tree idx
)
2060 tree lhs
, rhs1
, rhs2
= NULL_TREE
;
2062 switch (gimple_code (stmt
))
2065 if (gimple_assign_load_p (stmt
))
2066 return handle_load (stmt
, idx
);
2067 switch (gimple_assign_rhs_code (stmt
))
2072 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
2075 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2076 lhs
= make_ssa_name (TREE_TYPE (rhs1
));
2077 g
= gimple_build_assign (lhs
, gimple_assign_rhs_code (stmt
),
2083 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2084 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
2085 return handle_plus_minus (gimple_assign_rhs_code (stmt
),
2088 rhs2
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2089 rhs1
= build_zero_cst (TREE_TYPE (rhs2
));
2090 return handle_plus_minus (MINUS_EXPR
, rhs1
, rhs2
, idx
);
2092 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt
),
2094 gimple_assign_rhs2 (stmt
), idx
);
2097 return handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2099 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
2100 gimple_assign_rhs1 (stmt
), idx
);
2101 case VIEW_CONVERT_EXPR
:
2102 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
2103 TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0),
2106 return handle_bit_field_ref (gimple_assign_rhs1 (stmt
), idx
);
2117 /* Return minimum precision of OP at STMT.
2118 Positive value is minimum precision above which all bits
2119 are zero, negative means all bits above negation of the
2120 value are copies of the sign bit. */
2123 range_to_prec (tree op
, gimple
*stmt
)
2127 tree type
= TREE_TYPE (op
);
2128 unsigned int prec
= TYPE_PRECISION (type
);
2131 || !get_range_query (cfun
)->range_of_expr (r
, op
, stmt
)
2132 || r
.undefined_p ())
2134 if (TYPE_UNSIGNED (type
))
2137 return MIN ((int) -prec
, -2);
2140 if (!TYPE_UNSIGNED (TREE_TYPE (op
)))
2142 w
= r
.lower_bound ();
2145 int min_prec1
= wi::min_precision (w
, SIGNED
);
2146 w
= r
.upper_bound ();
2147 int min_prec2
= wi::min_precision (w
, SIGNED
);
2148 int min_prec
= MAX (min_prec1
, min_prec2
);
2149 return MIN (-min_prec
, -2);
2153 w
= r
.upper_bound ();
2154 int min_prec
= wi::min_precision (w
, UNSIGNED
);
2155 return MAX (min_prec
, 1);
2158 /* Return address of the first limb of OP and write into *PREC
2159 its precision. If positive, the operand is zero extended
2160 from that precision, if it is negative, the operand is sign-extended
2161 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2162 otherwise *PREC_STORED is prec from the innermost call without
2163 range optimizations. */
2166 bitint_large_huge::handle_operand_addr (tree op
, gimple
*stmt
,
2167 int *prec_stored
, int *prec
)
2170 location_t loc_save
= m_loc
;
2171 if ((TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
2172 || bitint_precision_kind (TREE_TYPE (op
)) < bitint_prec_large
)
2173 && TREE_CODE (op
) != INTEGER_CST
)
2176 *prec
= range_to_prec (op
, stmt
);
2177 bitint_prec_kind kind
= bitint_prec_small
;
2178 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op
)));
2179 if (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
)
2180 kind
= bitint_precision_kind (TREE_TYPE (op
));
2181 if (kind
== bitint_prec_middle
)
2183 tree type
= NULL_TREE
;
2184 op
= maybe_cast_middle_bitint (&m_gsi
, op
, type
);
2186 tree op_type
= TREE_TYPE (op
);
2187 unsigned HOST_WIDE_INT nelts
2188 = CEIL (TYPE_PRECISION (op_type
), limb_prec
);
2189 /* Add support for 3 or more limbs filled in from normal
2190 integral type if this assert fails. If no target chooses
2191 limb mode smaller than half of largest supported normal
2192 integral type, this will not be needed. */
2193 gcc_assert (nelts
<= 2);
2195 *prec_stored
= (TYPE_UNSIGNED (op_type
)
2196 ? TYPE_PRECISION (op_type
)
2197 : -TYPE_PRECISION (op_type
));
2198 if (*prec
<= limb_prec
&& *prec
>= -limb_prec
)
2203 if (TYPE_UNSIGNED (op_type
))
2205 if (*prec_stored
> limb_prec
)
2206 *prec_stored
= limb_prec
;
2208 else if (*prec_stored
< -limb_prec
)
2209 *prec_stored
= -limb_prec
;
2212 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
2213 tree var
= create_tmp_var (atype
);
2215 if (!useless_type_conversion_p (m_limb_type
, op_type
))
2216 t1
= add_cast (m_limb_type
, t1
);
2217 tree v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_zero_node
,
2218 NULL_TREE
, NULL_TREE
);
2219 gimple
*g
= gimple_build_assign (v
, t1
);
2223 tree lp
= build_int_cst (unsigned_type_node
, limb_prec
);
2224 g
= gimple_build_assign (make_ssa_name (op_type
),
2225 RSHIFT_EXPR
, op
, lp
);
2227 tree t2
= gimple_assign_lhs (g
);
2228 t2
= add_cast (m_limb_type
, t2
);
2229 v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_one_node
,
2230 NULL_TREE
, NULL_TREE
);
2231 g
= gimple_build_assign (v
, t2
);
2234 tree ret
= build_fold_addr_expr (var
);
2235 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2237 tree clobber
= build_clobber (atype
, CLOBBER_STORAGE_END
);
2238 g
= gimple_build_assign (var
, clobber
);
2239 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2244 switch (TREE_CODE (op
))
2248 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
2250 gimple
*g
= SSA_NAME_DEF_STMT (op
);
2252 m_loc
= gimple_location (g
);
2253 if (gimple_assign_load_p (g
))
2255 *prec
= range_to_prec (op
, NULL
);
2257 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2258 ? TYPE_PRECISION (TREE_TYPE (op
))
2259 : -TYPE_PRECISION (TREE_TYPE (op
)));
2260 ret
= build_fold_addr_expr (gimple_assign_rhs1 (g
));
2261 ret
= force_gimple_operand_gsi (&m_gsi
, ret
, true,
2262 NULL_TREE
, true, GSI_SAME_STMT
);
2264 else if (gimple_code (g
) == GIMPLE_NOP
)
2266 *prec
= TYPE_UNSIGNED (TREE_TYPE (op
)) ? limb_prec
: -limb_prec
;
2268 *prec_stored
= *prec
;
2269 tree var
= create_tmp_var (m_limb_type
);
2270 TREE_ADDRESSABLE (var
) = 1;
2271 ret
= build_fold_addr_expr (var
);
2272 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2274 tree clobber
= build_clobber (m_limb_type
,
2275 CLOBBER_STORAGE_END
);
2276 g
= gimple_build_assign (var
, clobber
);
2277 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2282 gcc_assert (gimple_assign_cast_p (g
));
2283 tree rhs1
= gimple_assign_rhs1 (g
);
2284 bitint_prec_kind kind
= bitint_prec_small
;
2285 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
2286 rhs1
= TREE_OPERAND (rhs1
, 0);
2287 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)));
2288 if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
)
2289 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2290 if (kind
>= bitint_prec_large
)
2292 tree lhs_type
= TREE_TYPE (op
);
2293 tree rhs_type
= TREE_TYPE (rhs1
);
2294 int prec_stored_val
= 0;
2295 ret
= handle_operand_addr (rhs1
, g
, &prec_stored_val
, prec
);
2296 if (TYPE_PRECISION (lhs_type
) > TYPE_PRECISION (rhs_type
))
2298 if (TYPE_UNSIGNED (lhs_type
)
2299 && !TYPE_UNSIGNED (rhs_type
))
2300 gcc_assert (*prec
>= 0 || prec_stored
== NULL
);
2304 if (*prec
> 0 && *prec
< TYPE_PRECISION (lhs_type
))
2306 else if (TYPE_UNSIGNED (lhs_type
))
2308 gcc_assert (*prec
> 0
2309 || prec_stored_val
> 0
2310 || (-prec_stored_val
2311 >= TYPE_PRECISION (lhs_type
)));
2312 *prec
= TYPE_PRECISION (lhs_type
);
2314 else if (*prec
< 0 && -*prec
< TYPE_PRECISION (lhs_type
))
2317 *prec
= -TYPE_PRECISION (lhs_type
);
2332 int p
= var_to_partition (m_map
, op
);
2333 gcc_assert (m_vars
[p
] != NULL_TREE
);
2334 *prec
= range_to_prec (op
, stmt
);
2336 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2337 ? TYPE_PRECISION (TREE_TYPE (op
))
2338 : -TYPE_PRECISION (TREE_TYPE (op
)));
2339 return build_fold_addr_expr (m_vars
[p
]);
2342 unsigned int min_prec
, mp
;
2344 w
= wi::to_wide (op
);
2345 if (tree_int_cst_sgn (op
) >= 0)
2347 min_prec
= wi::min_precision (w
, UNSIGNED
);
2348 *prec
= MAX (min_prec
, 1);
2352 min_prec
= wi::min_precision (w
, SIGNED
);
2353 *prec
= MIN ((int) -min_prec
, -2);
2355 mp
= CEIL (min_prec
, limb_prec
) * limb_prec
;
2358 if (mp
>= (unsigned) TYPE_PRECISION (TREE_TYPE (op
))
2359 && (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
2360 || TYPE_PRECISION (TREE_TYPE (op
)) <= limb_prec
))
2361 type
= TREE_TYPE (op
);
2363 type
= build_bitint_type (mp
, 1);
2364 if (TREE_CODE (type
) != BITINT_TYPE
2365 || bitint_precision_kind (type
) == bitint_prec_small
)
2367 if (TYPE_PRECISION (type
) <= limb_prec
)
2371 while (bitint_precision_kind (mp
) == bitint_prec_small
)
2373 /* This case is for targets which e.g. have 64-bit
2374 limb but categorize up to 128-bits _BitInts as
2375 small. We could use type of m_limb_type[2] and
2376 similar instead to save space. */
2377 type
= build_bitint_type (mp
, 1);
2382 if (tree_int_cst_sgn (op
) >= 0)
2383 *prec_stored
= MAX (TYPE_PRECISION (type
), 1);
2385 *prec_stored
= MIN ((int) -TYPE_PRECISION (type
), -2);
2387 op
= tree_output_constant_def (fold_convert (type
, op
));
2388 return build_fold_addr_expr (op
);
2394 /* Helper function, create a loop before the current location,
2395 start with sizetype INIT value from the preheader edge. Return
2396 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2397 from the latch edge. */
2400 bitint_large_huge::create_loop (tree init
, tree
*idx_next
)
2402 if (!gsi_end_p (m_gsi
))
2405 m_gsi
= gsi_last_bb (gsi_bb (m_gsi
));
2406 edge e1
= split_block (gsi_bb (m_gsi
), gsi_stmt (m_gsi
));
2407 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
2408 edge e3
= make_edge (e1
->dest
, e1
->dest
, EDGE_TRUE_VALUE
);
2409 e3
->probability
= profile_probability::very_unlikely ();
2410 e2
->flags
= EDGE_FALSE_VALUE
;
2411 e2
->probability
= e3
->probability
.invert ();
2412 tree idx
= make_ssa_name (sizetype
);
2413 gphi
*phi
= create_phi_node (idx
, e1
->dest
);
2414 add_phi_arg (phi
, init
, e1
, UNKNOWN_LOCATION
);
2415 *idx_next
= make_ssa_name (sizetype
);
2416 add_phi_arg (phi
, *idx_next
, e3
, UNKNOWN_LOCATION
);
2417 m_gsi
= gsi_after_labels (e1
->dest
);
2419 m_preheader_bb
= e1
->src
;
2420 class loop
*loop
= alloc_loop ();
2421 loop
->header
= e1
->dest
;
2422 add_loop (loop
, e1
->src
->loop_father
);
2426 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2427 lowered using iteration from the least significant limb up to the most
2428 significant limb. For large _BitInt it is emitted as straight line code
2429 before current location, for huge _BitInt as a loop handling two limbs
2430 at once, followed by handling up to limbs in straight line code (at most
2431 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2432 comparisons, in that case CMP_CODE should be the comparison code and
2433 CMP_OP1/CMP_OP2 the comparison operands. */
2436 bitint_large_huge::lower_mergeable_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2437 tree cmp_op1
, tree cmp_op2
)
2439 bool eq_p
= cmp_code
!= ERROR_MARK
;
2442 type
= TREE_TYPE (cmp_op1
);
2444 type
= TREE_TYPE (gimple_assign_lhs (stmt
));
2445 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2446 bitint_prec_kind kind
= bitint_precision_kind (type
);
2447 gcc_assert (kind
>= bitint_prec_large
);
2449 tree lhs
= gimple_get_lhs (stmt
);
2450 tree rhs1
, lhs_type
= lhs
? TREE_TYPE (lhs
) : NULL_TREE
;
2452 && TREE_CODE (lhs
) == SSA_NAME
2453 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
2454 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
2456 int p
= var_to_partition (m_map
, lhs
);
2457 gcc_assert (m_vars
[p
] != NULL_TREE
);
2458 m_lhs
= lhs
= m_vars
[p
];
2460 unsigned cnt
, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
2462 tree ext
= NULL_TREE
, store_operand
= NULL_TREE
;
2464 basic_block eh_pad
= NULL
;
2465 tree nlhs
= NULL_TREE
;
2466 unsigned HOST_WIDE_INT bo_idx
= 0;
2467 unsigned HOST_WIDE_INT bo_bit
= 0;
2468 tree bf_cur
= NULL_TREE
, bf_next
= NULL_TREE
;
2469 if (gimple_store_p (stmt
))
2471 store_operand
= gimple_assign_rhs1 (stmt
);
2472 eh
= stmt_ends_bb_p (stmt
);
2477 basic_block bb
= gimple_bb (stmt
);
2479 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2480 if (e
->flags
& EDGE_EH
)
2486 if (TREE_CODE (lhs
) == COMPONENT_REF
2487 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
2489 tree fld
= TREE_OPERAND (lhs
, 1);
2490 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
2491 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
2492 poly_int64 bitoffset
;
2493 poly_uint64 field_offset
, repr_offset
;
2494 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
2498 bool var_field_off
= false;
2499 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
2500 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
2501 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
2505 var_field_off
= true;
2507 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
2508 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
2509 nlhs
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
2510 TREE_OPERAND (lhs
, 0), repr
,
2512 ? TREE_OPERAND (lhs
, 2) : NULL_TREE
);
2513 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
2514 bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
2515 bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
2520 && TREE_CODE (store_operand
) == SSA_NAME
2522 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (store_operand
)))
2523 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand
)))
2524 || gimple_assign_cast_p (stmt
))
2526 rhs1
= gimple_assign_rhs1 (store_operand
2527 ? SSA_NAME_DEF_STMT (store_operand
)
2529 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
2530 rhs1
= TREE_OPERAND (rhs1
, 0);
2531 /* Optimize mergeable ops ending with widening cast to _BitInt
2532 (or followed by store). We can lower just the limbs of the
2533 cast operand and widen afterwards. */
2534 if (TREE_CODE (rhs1
) == SSA_NAME
2536 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
2537 && TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
2538 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
2539 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1
)),
2540 limb_prec
) < CEIL (prec
, limb_prec
)
2541 || (kind
== bitint_prec_huge
2542 && TYPE_PRECISION (TREE_TYPE (rhs1
)) < prec
)))
2544 store_operand
= rhs1
;
2545 prec
= TYPE_PRECISION (TREE_TYPE (rhs1
));
2546 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2547 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2551 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
2552 if (kind
== bitint_prec_large
)
2553 cnt
= CEIL (prec
, limb_prec
);
2556 rem
= (prec
% (2 * limb_prec
));
2557 end
= (prec
- rem
) / limb_prec
;
2558 cnt
= 2 + CEIL (rem
, limb_prec
);
2559 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
2562 basic_block edge_bb
= NULL
;
2565 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2567 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2569 if (kind
== bitint_prec_large
)
2570 m_gsi
= gsi_end_bb (edge_bb
);
2573 m_after_stmt
= stmt
;
2574 if (kind
!= bitint_prec_large
)
2575 m_upwards_2limb
= end
;
2579 = (prec
!= (unsigned) TYPE_PRECISION (type
)
2580 && (CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
)
2581 > CEIL (prec
, limb_prec
)));
2583 for (unsigned i
= 0; i
< cnt
; i
++)
2586 if (kind
== bitint_prec_large
)
2589 idx
= size_int (end
+ (i
> 2));
2592 rhs1
= handle_operand (cmp_op1
, idx
);
2593 tree rhs2
= handle_operand (cmp_op2
, idx
);
2594 g
= gimple_build_cond (NE_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2596 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2597 e1
->flags
= EDGE_FALSE_VALUE
;
2598 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2599 e1
->probability
= profile_probability::unlikely ();
2600 e2
->probability
= e1
->probability
.invert ();
2602 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2603 m_gsi
= gsi_after_labels (e1
->dest
);
2608 rhs1
= handle_operand (store_operand
, idx
);
2610 rhs1
= handle_stmt (stmt
, idx
);
2611 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
2612 rhs1
= add_cast (m_limb_type
, rhs1
);
2613 if (sext
&& i
== cnt
- 1)
2618 if (tree_fits_uhwi_p (idx
))
2619 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2622 nidx
= make_ssa_name (sizetype
);
2623 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2629 basic_block new_bb
= NULL
;
2630 /* Handle stores into bit-fields. */
2636 if (kind
!= bitint_prec_large
)
2638 prepare_data_in_out (build_zero_cst (m_limb_type
),
2640 bf_next
= m_data
.pop ();
2641 bf_cur
= m_data
.pop ();
2642 g
= gimple_build_cond (EQ_EXPR
, idx
, size_zero_node
,
2643 NULL_TREE
, NULL_TREE
);
2645 if_then_else (g
, profile_probability::unlikely (),
2650 = build_nonstandard_integer_type (limb_prec
- bo_bit
, 1);
2651 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2652 bitsize_int (limb_prec
- bo_bit
),
2653 bitsize_int (bo_idx
* limb_prec
+ bo_bit
));
2654 tree t
= add_cast (ftype
, rhs1
);
2655 g
= gimple_build_assign (bfr
, t
);
2659 maybe_duplicate_eh_stmt (g
, stmt
);
2662 edge e
= split_block (gsi_bb (m_gsi
), g
);
2663 m_gsi
= gsi_after_labels (e
->dest
);
2664 add_eh_edge (e
->src
,
2665 find_edge (gimple_bb (stmt
), eh_pad
));
2668 if (kind
== bitint_prec_large
)
2674 m_gsi
= gsi_after_labels (e2
->src
);
2678 tree t1
= make_ssa_name (m_limb_type
);
2679 tree t2
= make_ssa_name (m_limb_type
);
2680 tree t3
= make_ssa_name (m_limb_type
);
2681 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2682 build_int_cst (unsigned_type_node
,
2683 limb_prec
- bo_bit
));
2685 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, rhs1
,
2686 build_int_cst (unsigned_type_node
,
2690 g
= gimple_build_assign (t3
, BIT_IOR_EXPR
, t1
, t2
);
2693 if (bf_next
&& i
== 1)
2695 g
= gimple_build_assign (bf_next
, bf_cur
);
2702 /* Handle bit-field access to partial last limb if needed. */
2706 && tree_fits_uhwi_p (idx
))
2708 unsigned int tprec
= TYPE_PRECISION (type
);
2709 unsigned int rprec
= tprec
% limb_prec
;
2710 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2713 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2714 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2715 unshare_expr (nlhs
),
2716 bitsize_int (rprec
+ bo_bit
),
2717 bitsize_int ((bo_idx
2718 + tprec
/ limb_prec
)
2720 tree t
= add_cast (ftype
, rhs1
);
2721 g
= gimple_build_assign (bfr
, t
);
2725 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2728 /* Otherwise, stores to any other lhs. */
2731 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
,
2733 g
= gimple_build_assign (l
, rhs1
);
2738 maybe_duplicate_eh_stmt (g
, stmt
);
2741 edge e
= split_block (gsi_bb (m_gsi
), g
);
2742 m_gsi
= gsi_after_labels (e
->dest
);
2743 add_eh_edge (e
->src
,
2744 find_edge (gimple_bb (stmt
), eh_pad
));
2748 m_gsi
= gsi_after_labels (new_bb
);
2752 if (kind
== bitint_prec_huge
&& i
<= 1)
2756 idx
= make_ssa_name (sizetype
);
2757 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
2763 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
2766 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2767 NULL_TREE
, NULL_TREE
);
2770 m_gsi
= gsi_after_labels (edge_bb
);
2772 m_gsi
= gsi_for_stmt (stmt
);
2782 ext
= add_cast (signed_type_for (m_limb_type
), ext
);
2783 tree lpm1
= build_int_cst (unsigned_type_node
,
2785 tree n
= make_ssa_name (TREE_TYPE (ext
));
2786 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
2788 ext
= add_cast (m_limb_type
, n
);
2791 ext
= build_zero_cst (m_limb_type
);
2792 kind
= bitint_precision_kind (type
);
2793 unsigned start
= CEIL (prec
, limb_prec
);
2794 prec
= TYPE_PRECISION (type
);
2795 idx
= idx_first
= idx_next
= NULL_TREE
;
2796 if (prec
<= (start
+ 2 + (bo_bit
!= 0)) * limb_prec
)
2797 kind
= bitint_prec_large
;
2798 if (kind
== bitint_prec_large
)
2799 cnt
= CEIL (prec
, limb_prec
) - start
;
2802 rem
= prec
% limb_prec
;
2803 end
= (prec
- rem
) / limb_prec
;
2804 cnt
= (bo_bit
!= 0) + 1 + (rem
!= 0);
2806 for (unsigned i
= 0; i
< cnt
; i
++)
2808 if (kind
== bitint_prec_large
|| (i
== 0 && bo_bit
!= 0))
2809 idx
= size_int (start
+ i
);
2810 else if (i
== cnt
- 1 && (rem
!= 0))
2811 idx
= size_int (end
);
2812 else if (i
== (bo_bit
!= 0))
2813 idx
= create_loop (size_int (start
+ i
), &idx_next
);
2815 if (bf_cur
!= NULL_TREE
&& bf_cur
!= ext
)
2817 tree t1
= make_ssa_name (m_limb_type
);
2818 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2819 build_int_cst (unsigned_type_node
,
2820 limb_prec
- bo_bit
));
2822 if (integer_zerop (ext
))
2826 tree t2
= make_ssa_name (m_limb_type
);
2827 rhs1
= make_ssa_name (m_limb_type
);
2828 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, ext
,
2829 build_int_cst (unsigned_type_node
,
2832 g
= gimple_build_assign (rhs1
, BIT_IOR_EXPR
, t1
, t2
);
2840 if (tree_fits_uhwi_p (idx
))
2841 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2844 nidx
= make_ssa_name (sizetype
);
2845 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2851 /* Handle bit-field access to partial last limb if needed. */
2852 if (nlhs
&& i
== cnt
- 1)
2854 unsigned int tprec
= TYPE_PRECISION (type
);
2855 unsigned int rprec
= tprec
% limb_prec
;
2856 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2859 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2860 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2861 unshare_expr (nlhs
),
2862 bitsize_int (rprec
+ bo_bit
),
2863 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2865 tree t
= add_cast (ftype
, rhs1
);
2866 g
= gimple_build_assign (bfr
, t
);
2870 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2873 /* Otherwise, stores to any other lhs. */
2876 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
, nidx
, true);
2877 g
= gimple_build_assign (l
, rhs1
);
2882 maybe_duplicate_eh_stmt (g
, stmt
);
2885 edge e
= split_block (gsi_bb (m_gsi
), g
);
2886 m_gsi
= gsi_after_labels (e
->dest
);
2887 add_eh_edge (e
->src
, find_edge (gimple_bb (stmt
), eh_pad
));
2890 if (kind
== bitint_prec_huge
&& i
== (bo_bit
!= 0))
2892 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
2895 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2896 NULL_TREE
, NULL_TREE
);
2898 m_gsi
= gsi_for_stmt (stmt
);
2903 if (bf_cur
!= NULL_TREE
)
2905 unsigned int tprec
= TYPE_PRECISION (type
);
2906 unsigned int rprec
= tprec
% limb_prec
;
2907 tree ftype
= build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2908 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2909 bitsize_int (rprec
+ bo_bit
),
2910 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2915 rhs1
= make_ssa_name (TREE_TYPE (rhs1
));
2916 g
= gimple_build_assign (rhs1
, RSHIFT_EXPR
, bf_cur
,
2917 build_int_cst (unsigned_type_node
,
2918 limb_prec
- bo_bit
));
2921 rhs1
= add_cast (ftype
, rhs1
);
2922 g
= gimple_build_assign (bfr
, rhs1
);
2926 maybe_duplicate_eh_stmt (g
, stmt
);
2929 edge e
= split_block (gsi_bb (m_gsi
), g
);
2930 m_gsi
= gsi_after_labels (e
->dest
);
2931 add_eh_edge (e
->src
, find_edge (gimple_bb (stmt
), eh_pad
));
2936 if (gimple_store_p (stmt
))
2938 unlink_stmt_vdef (stmt
);
2939 release_ssa_name (gimple_vdef (stmt
));
2940 gsi_remove (&m_gsi
, true);
2944 lhs
= make_ssa_name (boolean_type_node
);
2945 basic_block bb
= gimple_bb (stmt
);
2946 gphi
*phi
= create_phi_node (lhs
, bb
);
2947 edge e
= find_edge (gsi_bb (m_gsi
), bb
);
2948 unsigned int n
= EDGE_COUNT (bb
->preds
);
2949 for (unsigned int i
= 0; i
< n
; i
++)
2951 edge e2
= EDGE_PRED (bb
, i
);
2952 add_phi_arg (phi
, e
== e2
? boolean_true_node
: boolean_false_node
,
2953 e2
, UNKNOWN_LOCATION
);
2955 cmp_code
= cmp_code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
2962 /* Handle a large/huge _BitInt comparison statement STMT other than
2963 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2964 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2965 lowered by iteration from the most significant limb downwards to
2966 the least significant one, for large _BitInt in straight line code,
2967 otherwise with most significant limb handled in
2968 straight line code followed by a loop handling one limb at a time.
2969 Comparisons with unsigned huge _BitInt with precisions which are
2970 multiples of limb precision can use just the loop and don't need to
2971 handle most significant limb before the loop. The loop or straight
2972 line code jumps to final basic block if a particular pair of limbs
2976 bitint_large_huge::lower_comparison_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2977 tree cmp_op1
, tree cmp_op2
)
2979 tree type
= TREE_TYPE (cmp_op1
);
2980 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2981 bitint_prec_kind kind
= bitint_precision_kind (type
);
2982 gcc_assert (kind
>= bitint_prec_large
);
2984 if (!TYPE_UNSIGNED (type
)
2985 && integer_zerop (cmp_op2
)
2986 && (cmp_code
== GE_EXPR
|| cmp_code
== LT_EXPR
))
2988 unsigned end
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
) - 1;
2989 tree idx
= size_int (end
);
2991 tree rhs1
= handle_operand (cmp_op1
, idx
);
2992 if (TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2994 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2995 rhs1
= add_cast (stype
, rhs1
);
2997 tree lhs
= make_ssa_name (boolean_type_node
);
2998 g
= gimple_build_assign (lhs
, cmp_code
, rhs1
,
2999 build_zero_cst (TREE_TYPE (rhs1
)));
3005 unsigned cnt
, rem
= 0, end
= 0;
3006 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
;
3007 if (kind
== bitint_prec_large
)
3008 cnt
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
);
3011 rem
= ((unsigned) TYPE_PRECISION (type
) % limb_prec
);
3012 if (rem
== 0 && !TYPE_UNSIGNED (type
))
3014 end
= ((unsigned) TYPE_PRECISION (type
) - rem
) / limb_prec
;
3015 cnt
= 1 + (rem
!= 0);
3018 basic_block edge_bb
= NULL
;
3019 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3021 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
3023 m_gsi
= gsi_end_bb (edge_bb
);
3025 edge
*edges
= XALLOCAVEC (edge
, cnt
* 2);
3026 for (unsigned i
= 0; i
< cnt
; i
++)
3029 if (kind
== bitint_prec_large
)
3030 idx
= size_int (cnt
- i
- 1);
3031 else if (i
== cnt
- 1)
3032 idx
= create_loop (size_int (end
- 1), &idx_next
);
3034 idx
= size_int (end
);
3035 tree rhs1
= handle_operand (cmp_op1
, idx
);
3036 tree rhs2
= handle_operand (cmp_op2
, idx
);
3038 && !TYPE_UNSIGNED (type
)
3039 && TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
3041 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
3042 rhs1
= add_cast (stype
, rhs1
);
3043 rhs2
= add_cast (stype
, rhs2
);
3045 g
= gimple_build_cond (GT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
3047 edge e1
= split_block (gsi_bb (m_gsi
), g
);
3048 e1
->flags
= EDGE_FALSE_VALUE
;
3049 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
3050 e1
->probability
= profile_probability::likely ();
3051 e2
->probability
= e1
->probability
.invert ();
3053 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
3054 m_gsi
= gsi_after_labels (e1
->dest
);
3056 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
3058 e1
= split_block (gsi_bb (m_gsi
), g
);
3059 e1
->flags
= EDGE_FALSE_VALUE
;
3060 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
3061 e1
->probability
= profile_probability::unlikely ();
3062 e2
->probability
= e1
->probability
.invert ();
3063 m_gsi
= gsi_after_labels (e1
->dest
);
3064 edges
[2 * i
+ 1] = e2
;
3066 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
3068 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3070 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
3071 NULL_TREE
, NULL_TREE
);
3073 edge true_edge
, false_edge
;
3074 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
3075 &true_edge
, &false_edge
);
3076 m_gsi
= gsi_after_labels (false_edge
->dest
);
3081 tree lhs
= make_ssa_name (boolean_type_node
);
3082 basic_block bb
= gimple_bb (stmt
);
3083 gphi
*phi
= create_phi_node (lhs
, bb
);
3084 for (unsigned int i
= 0; i
< cnt
* 2; i
++)
3086 tree val
= ((cmp_code
== GT_EXPR
|| cmp_code
== GE_EXPR
)
3087 ^ (i
& 1)) ? boolean_true_node
: boolean_false_node
;
3088 add_phi_arg (phi
, val
, edges
[i
], UNKNOWN_LOCATION
);
3090 add_phi_arg (phi
, (cmp_code
== GE_EXPR
|| cmp_code
== LE_EXPR
)
3091 ? boolean_true_node
: boolean_false_node
,
3092 find_edge (gsi_bb (m_gsi
), bb
), UNKNOWN_LOCATION
);
3097 /* Lower large/huge _BitInt left and right shift except for left
3098 shift by < limb_prec constant. */
3101 bitint_large_huge::lower_shift_stmt (tree obj
, gimple
*stmt
)
3103 tree rhs1
= gimple_assign_rhs1 (stmt
);
3104 tree lhs
= gimple_assign_lhs (stmt
);
3105 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3106 tree type
= TREE_TYPE (rhs1
);
3107 gimple
*final_stmt
= gsi_stmt (m_gsi
);
3108 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3109 && bitint_precision_kind (type
) >= bitint_prec_large
);
3110 int prec
= TYPE_PRECISION (type
);
3111 tree n
= gimple_assign_rhs2 (stmt
), n1
, n2
, n3
, n4
;
3113 if (obj
== NULL_TREE
)
3115 int part
= var_to_partition (m_map
, lhs
);
3116 gcc_assert (m_vars
[part
] != NULL_TREE
);
3119 /* Preparation code common for both left and right shifts.
3120 unsigned n1 = n % limb_prec;
3121 size_t n2 = n / limb_prec;
3122 size_t n3 = n1 != 0;
3123 unsigned n4 = (limb_prec - n1) % limb_prec;
3124 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
3125 if (TREE_CODE (n
) == INTEGER_CST
)
3127 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
3128 n1
= int_const_binop (TRUNC_MOD_EXPR
, n
, lp
);
3129 n2
= fold_convert (sizetype
, int_const_binop (TRUNC_DIV_EXPR
, n
, lp
));
3130 n3
= size_int (!integer_zerop (n1
));
3131 n4
= int_const_binop (TRUNC_MOD_EXPR
,
3132 int_const_binop (MINUS_EXPR
, lp
, n1
), lp
);
3136 n1
= make_ssa_name (TREE_TYPE (n
));
3137 n2
= make_ssa_name (sizetype
);
3138 n3
= make_ssa_name (sizetype
);
3139 n4
= make_ssa_name (TREE_TYPE (n
));
3140 if (pow2p_hwi (limb_prec
))
3142 tree lpm1
= build_int_cst (TREE_TYPE (n
), limb_prec
- 1);
3143 g
= gimple_build_assign (n1
, BIT_AND_EXPR
, n
, lpm1
);
3145 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
3147 ? n2
: make_ssa_name (TREE_TYPE (n
)),
3149 build_int_cst (TREE_TYPE (n
),
3150 exact_log2 (limb_prec
)));
3152 if (gimple_assign_lhs (g
) != n2
)
3154 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
3157 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
3160 g
= gimple_build_assign (n4
, BIT_AND_EXPR
, gimple_assign_lhs (g
),
3166 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
3167 g
= gimple_build_assign (n1
, TRUNC_MOD_EXPR
, n
, lp
);
3169 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
3171 ? n2
: make_ssa_name (TREE_TYPE (n
)),
3172 TRUNC_DIV_EXPR
, n
, lp
);
3174 if (gimple_assign_lhs (g
) != n2
)
3176 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
3179 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
3180 MINUS_EXPR
, lp
, n1
);
3182 g
= gimple_build_assign (n4
, TRUNC_MOD_EXPR
, gimple_assign_lhs (g
),
3186 g
= gimple_build_assign (make_ssa_name (boolean_type_node
), NE_EXPR
, n1
,
3187 build_zero_cst (TREE_TYPE (n
)));
3189 g
= gimple_build_assign (n3
, NOP_EXPR
, gimple_assign_lhs (g
));
3192 tree p
= build_int_cst (sizetype
,
3193 prec
/ limb_prec
- (prec
% limb_prec
== 0));
3194 if (rhs_code
== RSHIFT_EXPR
)
3199 unsigned n1 = n % limb_prec;
3200 size_t n2 = n / limb_prec;
3201 size_t n3 = n1 != 0;
3202 unsigned n4 = (limb_prec - n1) % limb_prec;
3204 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3205 int signed_p = (typeof (src) -1) < 0;
3206 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3207 ? p : p - n3); ++idx)
3208 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3210 if (prec % limb_prec == 0)
3213 ext = ((signed limb_type) (src[p] << (limb_prec
3214 - (prec % limb_prec))))
3215 >> (limb_prec - (prec % limb_prec));
3217 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3218 if (!signed_p && (prec % limb_prec == 0))
3220 else if (idx < prec / 64)
3222 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3228 dst[idx] = ((signed limb_type) ext) >> n1;
3229 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3233 dst[idx] = ext >> n1;
3236 for (++idx; idx <= p; ++idx)
3239 if (TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3241 else if (TREE_CODE (n3
) == INTEGER_CST
)
3242 pmn3
= int_const_binop (MINUS_EXPR
, p
, n3
);
3245 pmn3
= make_ssa_name (sizetype
);
3246 g
= gimple_build_assign (pmn3
, MINUS_EXPR
, p
, n3
);
3249 g
= gimple_build_cond (LT_EXPR
, n2
, pmn3
, NULL_TREE
, NULL_TREE
);
3250 edge edge_true
, edge_false
;
3251 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3253 tree idx
= create_loop (n2
, &idx_next
);
3254 tree idxmn2
= make_ssa_name (sizetype
);
3255 tree idxpn3
= make_ssa_name (sizetype
);
3256 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3258 g
= gimple_build_assign (idxpn3
, PLUS_EXPR
, idx
, n3
);
3261 tree t1
= handle_operand (rhs1
, idx
);
3263 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3264 RSHIFT_EXPR
, t1
, n1
);
3266 t1
= gimple_assign_lhs (g
);
3267 if (!integer_zerop (n3
))
3270 tree t2
= handle_operand (rhs1
, idxpn3
);
3271 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3272 LSHIFT_EXPR
, t2
, n4
);
3274 t2
= gimple_assign_lhs (g
);
3275 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3276 BIT_IOR_EXPR
, t1
, t2
);
3278 t1
= gimple_assign_lhs (g
);
3280 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3281 g
= gimple_build_assign (l
, t1
);
3283 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3285 g
= gimple_build_cond (LT_EXPR
, idx_next
, pmn3
, NULL_TREE
, NULL_TREE
);
3287 idx
= make_ssa_name (sizetype
);
3288 m_gsi
= gsi_for_stmt (final_stmt
);
3289 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3290 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3291 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3292 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3293 add_phi_arg (phi
, n2
, edge_false
, UNKNOWN_LOCATION
);
3294 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3296 tree ms
= handle_operand (rhs1
, p
);
3298 if (!types_compatible_p (TREE_TYPE (ms
), m_limb_type
))
3299 ext
= add_cast (m_limb_type
, ms
);
3300 if (!(TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3301 && !integer_zerop (n3
))
3303 g
= gimple_build_cond (LT_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3304 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3306 t1
= handle_operand (rhs1
, idx
);
3307 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3308 RSHIFT_EXPR
, t1
, n1
);
3310 t1
= gimple_assign_lhs (g
);
3311 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3312 LSHIFT_EXPR
, ext
, n4
);
3314 tree t2
= gimple_assign_lhs (g
);
3315 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3316 BIT_IOR_EXPR
, t1
, t2
);
3318 t1
= gimple_assign_lhs (g
);
3319 idxmn2
= make_ssa_name (sizetype
);
3320 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3322 l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3323 g
= gimple_build_assign (l
, t1
);
3325 idx_next
= make_ssa_name (sizetype
);
3326 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3328 m_gsi
= gsi_for_stmt (final_stmt
);
3329 tree nidx
= make_ssa_name (sizetype
);
3330 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3331 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3332 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3333 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3334 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3335 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3338 g
= gimple_build_assign (make_ssa_name (sizetype
), MINUS_EXPR
, idx
, n2
);
3340 idx
= gimple_assign_lhs (g
);
3342 if (!TYPE_UNSIGNED (type
))
3343 sext
= add_cast (signed_type_for (m_limb_type
), ext
);
3344 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3345 RSHIFT_EXPR
, sext
, n1
);
3347 t1
= gimple_assign_lhs (g
);
3348 if (!TYPE_UNSIGNED (type
))
3350 t1
= add_cast (m_limb_type
, t1
);
3351 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3353 build_int_cst (TREE_TYPE (n
),
3356 ext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3359 ext
= build_zero_cst (m_limb_type
);
3360 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3361 g
= gimple_build_assign (l
, t1
);
3363 g
= gimple_build_assign (make_ssa_name (sizetype
), PLUS_EXPR
, idx
,
3366 idx
= gimple_assign_lhs (g
);
3367 g
= gimple_build_cond (LE_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3368 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3369 idx
= create_loop (idx
, &idx_next
);
3370 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3371 g
= gimple_build_assign (l
, ext
);
3373 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3375 g
= gimple_build_cond (LE_EXPR
, idx_next
, p
, NULL_TREE
, NULL_TREE
);
3383 unsigned n1 = n % limb_prec;
3384 size_t n2 = n / limb_prec;
3385 size_t n3 = n1 != 0;
3386 unsigned n4 = (limb_prec - n1) % limb_prec;
3388 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3389 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3390 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3393 dst[idx] = src[idx - n2] << n1;
3396 for (; (ssize_t) idx >= 0; --idx)
3399 if (TREE_CODE (n2
) == INTEGER_CST
&& TREE_CODE (n3
) == INTEGER_CST
)
3400 n2pn3
= int_const_binop (PLUS_EXPR
, n2
, n3
);
3403 n2pn3
= make_ssa_name (sizetype
);
3404 g
= gimple_build_assign (n2pn3
, PLUS_EXPR
, n2
, n3
);
3407 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3408 idx even to access the most significant partial limb. */
3410 if (integer_zerop (n3
))
3411 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3412 counts. Emit if (true) condition that can be optimized later. */
3413 g
= gimple_build_cond (NE_EXPR
, boolean_true_node
, boolean_false_node
,
3414 NULL_TREE
, NULL_TREE
);
3416 g
= gimple_build_cond (LE_EXPR
, n2pn3
, p
, NULL_TREE
, NULL_TREE
);
3417 edge edge_true
, edge_false
;
3418 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3420 tree idx
= create_loop (p
, &idx_next
);
3421 tree idxmn2
= make_ssa_name (sizetype
);
3422 tree idxmn2mn3
= make_ssa_name (sizetype
);
3423 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3425 g
= gimple_build_assign (idxmn2mn3
, MINUS_EXPR
, idxmn2
, n3
);
3428 tree t1
= handle_operand (rhs1
, idxmn2
);
3430 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3431 LSHIFT_EXPR
, t1
, n1
);
3433 t1
= gimple_assign_lhs (g
);
3434 if (!integer_zerop (n3
))
3437 tree t2
= handle_operand (rhs1
, idxmn2mn3
);
3438 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3439 RSHIFT_EXPR
, t2
, n4
);
3441 t2
= gimple_assign_lhs (g
);
3442 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3443 BIT_IOR_EXPR
, t1
, t2
);
3445 t1
= gimple_assign_lhs (g
);
3447 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3448 g
= gimple_build_assign (l
, t1
);
3450 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3452 tree sn2pn3
= add_cast (ssizetype
, n2pn3
);
3453 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
), sn2pn3
,
3454 NULL_TREE
, NULL_TREE
);
3456 idx
= make_ssa_name (sizetype
);
3457 m_gsi
= gsi_for_stmt (final_stmt
);
3458 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3459 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3460 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3461 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3462 add_phi_arg (phi
, p
, edge_false
, UNKNOWN_LOCATION
);
3463 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3465 if (!integer_zerop (n3
))
3467 g
= gimple_build_cond (NE_EXPR
, n3
, size_zero_node
,
3468 NULL_TREE
, NULL_TREE
);
3469 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3470 idxmn2
= make_ssa_name (sizetype
);
3471 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3474 t1
= handle_operand (rhs1
, idxmn2
);
3475 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3476 LSHIFT_EXPR
, t1
, n1
);
3478 t1
= gimple_assign_lhs (g
);
3479 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3480 g
= gimple_build_assign (l
, t1
);
3482 idx_next
= make_ssa_name (sizetype
);
3483 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3485 m_gsi
= gsi_for_stmt (final_stmt
);
3486 tree nidx
= make_ssa_name (sizetype
);
3487 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3488 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3489 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3490 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3491 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3492 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3495 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx
),
3496 ssize_int (0), NULL_TREE
, NULL_TREE
);
3497 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3498 idx
= create_loop (idx
, &idx_next
);
3499 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3500 g
= gimple_build_assign (l
, build_zero_cst (m_limb_type
));
3502 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3504 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
),
3505 ssize_int (0), NULL_TREE
, NULL_TREE
);
3510 /* Lower large/huge _BitInt multiplication or division. */
3513 bitint_large_huge::lower_muldiv_stmt (tree obj
, gimple
*stmt
)
3515 tree rhs1
= gimple_assign_rhs1 (stmt
);
3516 tree rhs2
= gimple_assign_rhs2 (stmt
);
3517 tree lhs
= gimple_assign_lhs (stmt
);
3518 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3519 tree type
= TREE_TYPE (rhs1
);
3520 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3521 && bitint_precision_kind (type
) >= bitint_prec_large
);
3522 int prec
= TYPE_PRECISION (type
), prec1
, prec2
;
3523 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec1
);
3524 rhs2
= handle_operand_addr (rhs2
, stmt
, NULL
, &prec2
);
3525 if (obj
== NULL_TREE
)
3527 int part
= var_to_partition (m_map
, lhs
);
3528 gcc_assert (m_vars
[part
] != NULL_TREE
);
3530 lhs
= build_fold_addr_expr (obj
);
3534 lhs
= build_fold_addr_expr (obj
);
3535 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3536 NULL_TREE
, true, GSI_SAME_STMT
);
3538 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
3543 g
= gimple_build_call_internal (IFN_MULBITINT
, 6,
3544 lhs
, build_int_cst (sitype
, prec
),
3545 rhs1
, build_int_cst (sitype
, prec1
),
3546 rhs2
, build_int_cst (sitype
, prec2
));
3549 case TRUNC_DIV_EXPR
:
3550 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8,
3551 lhs
, build_int_cst (sitype
, prec
),
3553 build_int_cst (sitype
, 0),
3554 rhs1
, build_int_cst (sitype
, prec1
),
3555 rhs2
, build_int_cst (sitype
, prec2
));
3556 if (!stmt_ends_bb_p (stmt
))
3557 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3560 case TRUNC_MOD_EXPR
:
3561 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8, null_pointer_node
,
3562 build_int_cst (sitype
, 0),
3563 lhs
, build_int_cst (sitype
, prec
),
3564 rhs1
, build_int_cst (sitype
, prec1
),
3565 rhs2
, build_int_cst (sitype
, prec2
));
3566 if (!stmt_ends_bb_p (stmt
))
3567 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3573 if (stmt_ends_bb_p (stmt
))
3575 maybe_duplicate_eh_stmt (g
, stmt
);
3578 basic_block bb
= gimple_bb (stmt
);
3580 FOR_EACH_EDGE (e1
, ei
, bb
->succs
)
3581 if (e1
->flags
& EDGE_EH
)
3585 edge e2
= split_block (gsi_bb (m_gsi
), g
);
3586 m_gsi
= gsi_after_labels (e2
->dest
);
3587 add_eh_edge (e2
->src
, e1
);
3592 /* Lower large/huge _BitInt conversion to/from floating point. */
3595 bitint_large_huge::lower_float_conv_stmt (tree obj
, gimple
*stmt
)
3597 tree rhs1
= gimple_assign_rhs1 (stmt
);
3598 tree lhs
= gimple_assign_lhs (stmt
);
3599 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3600 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
3602 if (rhs_code
== FIX_TRUNC_EXPR
)
3604 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
3605 if (!TYPE_UNSIGNED (TREE_TYPE (lhs
)))
3607 if (obj
== NULL_TREE
)
3609 int part
= var_to_partition (m_map
, lhs
);
3610 gcc_assert (m_vars
[part
] != NULL_TREE
);
3612 lhs
= build_fold_addr_expr (obj
);
3616 lhs
= build_fold_addr_expr (obj
);
3617 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3618 NULL_TREE
, true, GSI_SAME_STMT
);
3620 scalar_mode from_mode
3621 = as_a
<scalar_mode
> (TYPE_MODE (TREE_TYPE (rhs1
)));
3623 /* IEEE single is a full superset of both IEEE half and
3624 bfloat formats, convert to float first and then to _BitInt
3625 to avoid the need of another 2 library routines. */
3626 if ((REAL_MODE_FORMAT (from_mode
) == &arm_bfloat_half_format
3627 || REAL_MODE_FORMAT (from_mode
) == &ieee_half_format
)
3628 && REAL_MODE_FORMAT (SFmode
) == &ieee_single_format
)
3630 tree type
= lang_hooks
.types
.type_for_mode (SFmode
, 0);
3632 rhs1
= add_cast (type
, rhs1
);
3635 g
= gimple_build_call_internal (IFN_FLOATTOBITINT
, 3,
3636 lhs
, build_int_cst (sitype
, prec
),
3643 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec
);
3644 g
= gimple_build_call_internal (IFN_BITINTTOFLOAT
, 2,
3645 rhs1
, build_int_cst (sitype
, prec
));
3646 gimple_call_set_lhs (g
, lhs
);
3647 if (!stmt_ends_bb_p (stmt
))
3648 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3649 gsi_replace (&m_gsi
, g
, true);
3653 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3654 If check_zero is true, caller wants to check if all bits in [start, end)
3655 are zero, otherwise if bits in [start, end) are either all zero or
3656 all ones. L is the limb with index LIMB, START and END are measured
3660 bitint_large_huge::arith_overflow_extract_bits (unsigned int start
,
3661 unsigned int end
, tree l
,
3665 unsigned startlimb
= start
/ limb_prec
;
3666 unsigned endlimb
= (end
- 1) / limb_prec
;
3669 if ((start
% limb_prec
) == 0 && (end
% limb_prec
) == 0)
3671 if (startlimb
== endlimb
&& limb
== startlimb
)
3675 wide_int w
= wi::shifted_mask (start
% limb_prec
,
3676 end
- start
, false, limb_prec
);
3677 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3679 wide_int_to_tree (m_limb_type
, w
));
3681 return gimple_assign_lhs (g
);
3683 unsigned int shift
= start
% limb_prec
;
3684 if ((end
% limb_prec
) != 0)
3686 unsigned int lshift
= (-end
) % limb_prec
;
3688 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3690 build_int_cst (unsigned_type_node
,
3693 l
= gimple_assign_lhs (g
);
3695 l
= add_cast (signed_type_for (m_limb_type
), l
);
3696 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3698 build_int_cst (unsigned_type_node
, shift
));
3700 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
3702 else if (limb
== startlimb
)
3704 if ((start
% limb_prec
) == 0)
3707 l
= add_cast (signed_type_for (m_limb_type
), l
);
3708 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3710 build_int_cst (unsigned_type_node
,
3711 start
% limb_prec
));
3713 l
= gimple_assign_lhs (g
);
3715 l
= add_cast (m_limb_type
, l
);
3718 else if (limb
== endlimb
)
3720 if ((end
% limb_prec
) == 0)
3724 wide_int w
= wi::mask (end
% limb_prec
, false, limb_prec
);
3725 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3727 wide_int_to_tree (m_limb_type
, w
));
3729 return gimple_assign_lhs (g
);
3731 unsigned int shift
= (-end
) % limb_prec
;
3732 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3734 build_int_cst (unsigned_type_node
, shift
));
3736 l
= add_cast (signed_type_for (m_limb_type
), gimple_assign_lhs (g
));
3737 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3739 build_int_cst (unsigned_type_node
, shift
));
3741 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
3746 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3747 result including overflow flag into the right locations. */
/* NOTE(review): this listing is corrupted -- each statement is split
   across several physical lines, the original file's line numbers
   (3750, 3751, ...) are fused into the text, and some source lines are
   missing entirely.  The code below is kept byte-identical; only
   comments were added.  Verify against upstream
   gcc/gimple-lower-bitint.cc before editing.
   Purpose (from visible fragments): store the result of an overflow
   arithmetic lowering, including the overflow flag OVF, into the right
   locations -- either building a COMPLEX_EXPR directly for small/middle
   results, or copying limbs of VAR into OBJ, rewriting IMAGPART_EXPR
   users of LHS, or emitting a UBSan overflow diagnostic.  */
3750 bitint_large_huge::finish_arith_overflow (tree var
, tree obj
, tree type
,
3751 tree ovf
, tree lhs
, tree orig_obj
,
3752 gimple
*stmt
, tree_code code
)
/* Case 1: no backing object and the result type fits in at most two
   limbs -- build the complex value directly.  */
3756 if (obj
== NULL_TREE
3757 && (TREE_CODE (type
) != BITINT_TYPE
3758 || bitint_precision_kind (type
) < bitint_prec_large
))
3760 /* Add support for 3 or more limbs filled in from normal integral
3761 type if this assert fails. If no target chooses limb mode smaller
3762 than half of largest supported normal integral type, this will not
3764 gcc_assert (TYPE_PRECISION (type
) <= 2 * limb_prec
);
3765 tree lhs_type
= type
;
3766 if (TREE_CODE (type
) == BITINT_TYPE
3767 && bitint_precision_kind (type
) == bitint_prec_middle
)
3768 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (type
),
3769 TYPE_UNSIGNED (type
));
/* Load limb 0 of VAR, widen, and if the type spans two limbs load limb 1,
   shift it into place and OR the halves together.  */
3770 tree r1
= limb_access (NULL_TREE
, var
, size_int (0), true);
3771 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r1
);
3773 r1
= gimple_assign_lhs (g
);
3774 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
3775 r1
= add_cast (lhs_type
, r1
);
3776 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
3778 tree r2
= limb_access (NULL_TREE
, var
, size_int (1), true);
3779 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r2
);
3781 r2
= gimple_assign_lhs (g
);
3782 r2
= add_cast (lhs_type
, r2
);
3783 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
3784 build_int_cst (unsigned_type_node
,
3787 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
3788 gimple_assign_lhs (g
));
3790 r1
= gimple_assign_lhs (g
);
3792 if (lhs_type
!= type
)
3793 r1
= add_cast (type
, r1
);
3794 ovf
= add_cast (lhs_type
, ovf
);
3795 if (lhs_type
!= type
)
3796 ovf
= add_cast (type
, ovf
);
/* Replace the original statement with lhs = COMPLEX_EXPR <r1, ovf>.  */
3797 g
= gimple_build_assign (lhs
, COMPLEX_EXPR
, r1
, ovf
);
3798 m_gsi
= gsi_for_stmt (stmt
);
3799 gsi_replace (&m_gsi
, g
, true);
/* Case 2: large/huge result -- copy VAR's limbs into OBJ; for an
   internally created OBJ also store the overflow flag as an extra limb
   and zero the remaining padding limbs.  */
3803 unsigned HOST_WIDE_INT nelts
= 0;
3804 tree atype
= NULL_TREE
;
3807 nelts
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
3808 if (orig_obj
== NULL_TREE
)
3810 atype
= build_array_type_nelts (m_limb_type
, nelts
);
3816 if (orig_obj
== NULL_TREE
)
3818 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (obj
)));
3819 v1
= build2 (MEM_REF
, atype
,
3820 build_fold_addr_expr (unshare_expr (obj
)), zero
);
3822 else if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
3823 v1
= build1 (VIEW_CONVERT_EXPR
, atype
, unshare_expr (obj
));
3825 v1
= unshare_expr (obj
);
3826 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (var
)));
3827 v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), zero
);
3828 g
= gimple_build_assign (v1
, v2
);
3831 if (orig_obj
== NULL_TREE
&& obj
)
3833 ovf
= add_cast (m_limb_type
, ovf
);
3834 tree l
= limb_access (NULL_TREE
, obj
, size_int (nelts
), true);
3835 g
= gimple_build_assign (l
, ovf
);
3839 atype
= build_array_type_nelts (m_limb_type
, nelts
- 1);
3840 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (obj
)),
3841 (nelts
+ 1) * m_limb_size
);
3842 tree v1
= build2 (MEM_REF
, atype
,
3843 build_fold_addr_expr (unshare_expr (obj
)),
3845 g
= gimple_build_assign (v1
, build_zero_cst (atype
));
/* Case 3: lhs is _Complex -- rewrite IMAGPART_EXPR users of LHS to use
   the computed overflow flag OVF directly.  */
3849 else if (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
)
3851 imm_use_iterator ui
;
3852 use_operand_p use_p
;
3853 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
3855 g
= USE_STMT (use_p
);
3856 if (!is_gimple_assign (g
)
3857 || gimple_assign_rhs_code (g
) != IMAGPART_EXPR
)
3859 tree lhs2
= gimple_assign_lhs (g
);
3861 single_imm_use (lhs2
, &use_p
, &use_stmt
);
3862 lhs2
= gimple_assign_lhs (use_stmt
);
3863 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
3864 if (useless_type_conversion_p (TREE_TYPE (lhs2
), TREE_TYPE (ovf
)))
3865 g
= gimple_build_assign (lhs2
, ovf
);
3867 g
= gimple_build_assign (lhs2
, NOP_EXPR
, ovf
);
3868 gsi_replace (&gsi
, g
, true);
3869 if (gsi_stmt (m_gsi
) == use_stmt
)
3870 m_gsi
= gsi_for_stmt (g
);
/* Case 4: UBSan-style check -- on the (very unlikely) overflow path emit
   the runtime overflow diagnostic via ubsan_build_overflow_builtin.  */
3874 else if (ovf
!= boolean_false_node
)
3876 g
= gimple_build_cond (NE_EXPR
, ovf
, boolean_false_node
,
3877 NULL_TREE
, NULL_TREE
);
3878 edge edge_true
, edge_false
;
3879 if_then (g
, profile_probability::very_unlikely (),
3880 edge_true
, edge_false
);
3881 tree zero
= build_zero_cst (TREE_TYPE (lhs
));
3882 tree fn
= ubsan_build_overflow_builtin (code
, m_loc
,
3885 force_gimple_operand_gsi (&m_gsi
, fn
, true, NULL_TREE
,
3886 true, GSI_SAME_STMT
);
3887 m_gsi
= gsi_after_labels (edge_true
->dest
);
/* Clobber the temporary VAR once its value has been consumed.  */
3892 tree clobber
= build_clobber (TREE_TYPE (var
), CLOBBER_STORAGE_END
);
3893 g
= gimple_build_assign (var
, clobber
);
3894 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
3898 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3899 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3900 argument 1 precision PREC1 and minimum precision for the result
3901 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3904 arith_overflow (tree_code code
, tree type
, int prec
, int prec0
, int prec1
,
3905 int prec2
, unsigned *start
, unsigned *end
, bool *check_zero
)
3910 /* Ignore this special rule for subtraction, even if both
3911 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3912 in infinite precision. */
3913 if (code
!= MINUS_EXPR
&& prec0
>= 0 && prec1
>= 0)
3915 /* Result in [0, prec2) is unsigned, if prec > prec2,
3916 all bits above it will be zero. */
3917 if ((prec
- !TYPE_UNSIGNED (type
)) >= prec2
)
3918 return boolean_false_node
;
3921 /* ovf if any of bits in [start, end) is non-zero. */
3922 *start
= prec
- !TYPE_UNSIGNED (type
);
3926 else if (TYPE_UNSIGNED (type
))
3928 /* If result in [0, prec2) is signed and if prec > prec2,
3929 all bits above it will be sign bit copies. */
3932 /* ovf if bit prec - 1 is non-zero. */
3938 /* ovf if any of bits in [start, end) is non-zero. */
3943 else if (prec
>= prec2
)
3944 return boolean_false_node
;
3947 /* ovf if [start, end) bits aren't all zeros or all ones. */
3950 *check_zero
= false;
3955 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3956 argument or return type _Complex large/huge _BitInt. */
/* NOTE(review): this listing is corrupted -- each statement is split
   across several physical lines, the original file's line numbers
   (3959, 3961, ...) are fused into the text, and some source lines are
   missing entirely.  The code below is kept byte-identical; only
   comments were added.  Verify against upstream
   gcc/gimple-lower-bitint.cc before editing.
   Purpose (from visible fragments): lower a .{ADD,SUB}_OVERFLOW or
   UBSAN_CHECK_{ADD,SUB} call with at least one large/huge _BitInt
   argument or a _Complex large/huge _BitInt return, computing the limbs
   of the result together with an overflow flag.  */
3959 bitint_large_huge::lower_addsub_overflow (tree obj
, gimple
*stmt
)
3961 tree arg0
= gimple_call_arg (stmt
, 0);
3962 tree arg1
= gimple_call_arg (stmt
, 1);
3963 tree lhs
= gimple_call_lhs (stmt
);
3968 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3969 gsi_remove (&gsi
, true);
3972 gimple
*final_stmt
= gsi_stmt (m_gsi
);
3973 tree type
= TREE_TYPE (lhs
);
3974 if (TREE_CODE (type
) == COMPLEX_TYPE
)
3975 type
= TREE_TYPE (type
);
3976 int prec
= TYPE_PRECISION (type
);
/* range_to_prec encodes value-range derived precision; negative means
   a signed range (see the table below).  */
3977 int prec0
= range_to_prec (arg0
, stmt
);
3978 int prec1
= range_to_prec (arg1
, stmt
);
3979 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3980 the be minimum unsigned precision of any possible operation's
3981 result, otherwise it is minimum signed precision.
3983 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3984 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3985 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3986 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3987 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3988 8 + 8 [0, 0x1fe] 9 UNSIGNED
3989 8 + 10 [0, 0x4fe] 11 UNSIGNED
3990 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3991 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3992 8 + -8 [-0x80, 0x17e] 10 SIGNED
3993 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3994 10 + -8 [-0x80, 0x47e] 12 SIGNED
3995 8 - 8 [-0xff, 0xff] 9 SIGNED
3996 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3997 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3998 -8 - -8 [-0xff, 0xff] 9 SIGNED
3999 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
4000 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
4001 8 - -8 [-0x7f, 0x17f] 10 SIGNED
4002 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
4003 10 - -8 [-0x7f, 0x47f] 12 SIGNED
4004 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
4005 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
4006 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
4007 int prec2
= MAX (prec0
< 0 ? -prec0
: prec0
,
4008 prec1
< 0 ? -prec1
: prec1
);
4009 /* If operands are either both signed or both unsigned,
4010 we need just one additional bit. */
4011 prec2
= (((prec0
< 0) == (prec1
< 0)
4012 /* If one operand is signed and one unsigned and
4013 the signed one has larger precision, we need
4014 just one extra bit, otherwise two. */
4015 || (prec0
< 0 ? (prec2
== -prec0
&& prec2
!= prec1
)
4016 : (prec2
== -prec1
&& prec2
!= prec0
)))
4017 ? prec2
+ 1 : prec2
+ 2);
4018 int prec3
= MAX (prec0
< 0 ? -prec0
: prec0
,
4019 prec1
< 0 ? -prec1
: prec1
);
4020 prec3
= MAX (prec3
, prec
);
4021 tree var
= NULL_TREE
;
4022 tree orig_obj
= obj
;
/* Pick a destination: the coalesced partition var for LHS when usable,
   otherwise a temporary limb array VAR.  */
4023 if (obj
== NULL_TREE
4024 && TREE_CODE (type
) == BITINT_TYPE
4025 && bitint_precision_kind (type
) >= bitint_prec_large
4027 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4029 int part
= var_to_partition (m_map
, lhs
);
4030 gcc_assert (m_vars
[part
] != NULL_TREE
);
4032 if (TREE_TYPE (lhs
) == type
)
4035 if (TREE_CODE (type
) != BITINT_TYPE
4036 || bitint_precision_kind (type
) < bitint_prec_large
)
4038 unsigned HOST_WIDE_INT nelts
= CEIL (prec
, limb_prec
);
4039 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4040 var
= create_tmp_var (atype
);
/* Map the internal function to a PLUS_EXPR/MINUS_EXPR tree code.  */
4043 enum tree_code code
;
4044 switch (gimple_call_internal_fn (stmt
))
4046 case IFN_ADD_OVERFLOW
:
4047 case IFN_UBSAN_CHECK_ADD
:
4050 case IFN_SUB_OVERFLOW
:
4051 case IFN_UBSAN_CHECK_SUB
:
4057 unsigned start
, end
;
4059 tree ovf
= arith_overflow (code
, type
, prec
, prec0
, prec1
, prec2
,
4060 &start
, &end
, &check_zero
);
4062 unsigned startlimb
, endlimb
;
4070 startlimb
= start
/ limb_prec
;
4071 endlimb
= (end
- 1) / limb_prec
;
4074 int prec4
= ovf
!= NULL_TREE
? prec
: prec3
;
4075 bitint_prec_kind kind
= bitint_precision_kind (prec4
);
4076 unsigned cnt
, rem
= 0, fin
= 0;
4077 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
4078 bool last_ovf
= (ovf
== NULL_TREE
4079 && CEIL (prec2
, limb_prec
) > CEIL (prec3
, limb_prec
));
4080 if (kind
!= bitint_prec_huge
)
4081 cnt
= CEIL (prec4
, limb_prec
) + last_ovf
;
4084 rem
= (prec4
% (2 * limb_prec
));
4085 fin
= (prec4
- rem
) / limb_prec
;
4086 cnt
= 2 + CEIL (rem
, limb_prec
) + last_ovf
;
4087 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4090 if (kind
== bitint_prec_huge
)
4091 m_upwards_2limb
= fin
;
/* Widen argument types as needed so both operands can be accessed
   limb by limb at the working precision.  */
4094 tree type0
= TREE_TYPE (arg0
);
4095 tree type1
= TREE_TYPE (arg1
);
4097 if (bitint_precision_kind (prec5
) < bitint_prec_large
)
4098 prec5
= MAX (TYPE_PRECISION (type0
), TYPE_PRECISION (type1
));
4099 if (TYPE_PRECISION (type0
) < prec5
)
4101 type0
= build_bitint_type (prec5
, TYPE_UNSIGNED (type0
));
4102 if (TREE_CODE (arg0
) == INTEGER_CST
)
4103 arg0
= fold_convert (type0
, arg0
);
4105 if (TYPE_PRECISION (type1
) < prec5
)
4107 type1
= build_bitint_type (prec5
, TYPE_UNSIGNED (type1
));
4108 if (TREE_CODE (arg1
) == INTEGER_CST
)
4109 arg1
= fold_convert (type1
, arg1
);
4111 unsigned int data_cnt
= 0;
4112 tree last_rhs1
= NULL_TREE
, last_rhs2
= NULL_TREE
;
4113 tree cmp
= build_zero_cst (m_limb_type
);
4114 unsigned prec_limbs
= CEIL ((unsigned) prec
, limb_prec
);
4115 tree ovf_out
= NULL_TREE
, cmp_out
= NULL_TREE
;
/* Main limb loop; for the huge kind this iterates over two limbs per
   loop iteration (cf. m_upwards_2limb above).  */
4116 for (unsigned i
= 0; i
< cnt
; i
++)
4120 if (kind
!= bitint_prec_huge
)
4123 idx
= size_int (fin
+ i
- 2);
4124 if (!last_ovf
|| i
< cnt
- 1)
4126 if (type0
!= TREE_TYPE (arg0
))
4127 rhs1
= handle_cast (type0
, arg0
, idx
);
4129 rhs1
= handle_operand (arg0
, idx
);
4130 if (type1
!= TREE_TYPE (arg1
))
4131 rhs2
= handle_cast (type1
, arg1
, idx
);
4133 rhs2
= handle_operand (arg1
, idx
);
4135 data_cnt
= m_data_cnt
;
4136 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4137 rhs1
= add_cast (m_limb_type
, rhs1
);
4138 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs2
)))
4139 rhs2
= add_cast (m_limb_type
, rhs2
);
/* Extra iteration for LAST_OVF: extend the last fetched limbs (zero- or
   sign-extension depending on the argument's signedness).  */
4145 m_data_cnt
= data_cnt
;
4146 if (TYPE_UNSIGNED (type0
))
4147 rhs1
= build_zero_cst (m_limb_type
);
4150 rhs1
= add_cast (signed_type_for (m_limb_type
), last_rhs1
);
4151 if (TREE_CODE (rhs1
) == INTEGER_CST
)
4152 rhs1
= build_int_cst (m_limb_type
,
4153 tree_int_cst_sgn (rhs1
) < 0 ? -1 : 0);
4156 tree lpm1
= build_int_cst (unsigned_type_node
,
4158 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
4159 RSHIFT_EXPR
, rhs1
, lpm1
);
4161 rhs1
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
4164 if (TYPE_UNSIGNED (type1
))
4165 rhs2
= build_zero_cst (m_limb_type
);
4168 rhs2
= add_cast (signed_type_for (m_limb_type
), last_rhs2
);
4169 if (TREE_CODE (rhs2
) == INTEGER_CST
)
4170 rhs2
= build_int_cst (m_limb_type
,
4171 tree_int_cst_sgn (rhs2
) < 0 ? -1 : 0);
4174 tree lpm1
= build_int_cst (unsigned_type_node
,
4176 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2
)),
4177 RSHIFT_EXPR
, rhs2
, lpm1
);
4179 rhs2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
4183 tree rhs
= handle_plus_minus (code
, rhs1
, rhs2
, idx
);
/* Overflow detection: inspect bits [start, end) of the result limbs,
   either immediately (constant idx) or via conditional/phi logic.  */
4184 if (ovf
!= boolean_false_node
)
4186 if (tree_fits_uhwi_p (idx
))
4188 unsigned limb
= tree_to_uhwi (idx
);
4189 if (limb
>= startlimb
&& limb
<= endlimb
)
4191 tree l
= arith_overflow_extract_bits (start
, end
, rhs
,
4193 tree this_ovf
= make_ssa_name (boolean_type_node
);
4194 if (ovf
== NULL_TREE
&& !check_zero
)
4197 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4199 build_int_cst (m_limb_type
, 1));
4201 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4202 gimple_assign_lhs (g
),
4203 build_int_cst (m_limb_type
, 1));
4206 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4208 if (ovf
== NULL_TREE
)
4212 tree b
= make_ssa_name (boolean_type_node
);
4213 g
= gimple_build_assign (b
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4219 else if (startlimb
< fin
)
4221 if (m_first
&& startlimb
+ 2 < fin
)
4224 ovf
= prepare_data_in_out (boolean_false_node
, idx
, &data_out
);
4225 ovf_out
= m_data
.pop ();
4229 cmp
= prepare_data_in_out (cmp
, idx
, &data_out
);
4230 cmp_out
= m_data
.pop ();
4234 if (i
!= 0 || startlimb
!= fin
- 1)
4237 bool single_comparison
4238 = (startlimb
+ 2 >= fin
|| (startlimb
& 1) != (i
& 1));
4239 if (!single_comparison
)
4242 if (!check_zero
&& (start
% limb_prec
) == 0)
4243 single_comparison
= true;
4245 else if ((startlimb
& 1) == (i
& 1))
4249 g
= gimple_build_cond (cmp_code
, idx
, size_int (startlimb
),
4250 NULL_TREE
, NULL_TREE
);
4251 edge edge_true_true
, edge_true_false
, edge_false
;
4253 if (!single_comparison
)
4254 g2
= gimple_build_cond (NE_EXPR
, idx
,
4255 size_int (startlimb
), NULL_TREE
,
4257 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4258 profile_probability::likely (),
4259 edge_true_true
, edge_true_false
,
4261 unsigned tidx
= startlimb
+ (cmp_code
== GT_EXPR
);
4262 tree l
= arith_overflow_extract_bits (start
, end
, rhs
, tidx
,
4264 tree this_ovf
= make_ssa_name (boolean_type_node
);
4265 if (cmp_code
!= GT_EXPR
&& !check_zero
)
4267 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4269 build_int_cst (m_limb_type
, 1));
4271 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4272 gimple_assign_lhs (g
),
4273 build_int_cst (m_limb_type
, 1));
4276 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4278 if (cmp_code
== GT_EXPR
)
4280 tree t
= make_ssa_name (boolean_type_node
);
4281 g
= gimple_build_assign (t
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4285 tree this_ovf2
= NULL_TREE
;
4286 if (!single_comparison
)
4288 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4289 tree t
= make_ssa_name (boolean_type_node
);
4290 g
= gimple_build_assign (t
, NE_EXPR
, rhs
, cmp
);
4292 this_ovf2
= make_ssa_name (boolean_type_node
);
4293 g
= gimple_build_assign (this_ovf2
, BIT_IOR_EXPR
,
4297 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4299 if (i
== 1 && ovf_out
)
4302 t
= make_ssa_name (boolean_type_node
);
4303 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
4304 add_phi_arg (phi
, this_ovf
, edge_true_false
,
4306 add_phi_arg (phi
, ovf
? ovf
4307 : boolean_false_node
, edge_false
,
4310 add_phi_arg (phi
, this_ovf2
, edge_true_true
,
4313 if (!check_zero
&& cmp_code
!= GT_EXPR
)
4315 t
= cmp_out
? cmp_out
: make_ssa_name (m_limb_type
);
4316 phi
= create_phi_node (t
, edge_true_false
->dest
);
4317 add_phi_arg (phi
, l
, edge_true_false
, UNKNOWN_LOCATION
);
4318 add_phi_arg (phi
, cmp
, edge_false
, UNKNOWN_LOCATION
);
4320 add_phi_arg (phi
, cmp
, edge_true_true
,
/* Store the current result limb, unless it lies past the result's
   precision (guarded at runtime for non-constant idx).  */
4330 if (tree_fits_uhwi_p (idx
) && tree_to_uhwi (idx
) >= prec_limbs
)
4332 else if (!tree_fits_uhwi_p (idx
)
4333 && (unsigned) prec
< (fin
- (i
== 0)) * limb_prec
)
4335 bool single_comparison
4336 = (((unsigned) prec
% limb_prec
) == 0
4337 || prec_limbs
+ 1 >= fin
4338 || (prec_limbs
& 1) == (i
& 1));
4339 g
= gimple_build_cond (LE_EXPR
, idx
, size_int (prec_limbs
- 1),
4340 NULL_TREE
, NULL_TREE
);
4342 if (!single_comparison
)
4343 g2
= gimple_build_cond (EQ_EXPR
, idx
,
4344 size_int (prec_limbs
- 1),
4345 NULL_TREE
, NULL_TREE
);
4346 edge edge_true_true
, edge_true_false
, edge_false
;
4347 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4348 profile_probability::unlikely (),
4349 edge_true_true
, edge_true_false
,
4351 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4352 g
= gimple_build_assign (l
, rhs
);
4354 if (!single_comparison
)
4356 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4357 tree plm1idx
= size_int (prec_limbs
- 1);
4358 tree plm1type
= limb_access_type (type
, plm1idx
);
4359 l
= limb_access (type
, var
? var
: obj
, plm1idx
, true);
4360 if (!useless_type_conversion_p (plm1type
, TREE_TYPE (rhs
)))
4361 rhs
= add_cast (plm1type
, rhs
);
4362 if (!useless_type_conversion_p (TREE_TYPE (l
),
4364 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4365 g
= gimple_build_assign (l
, rhs
);
4368 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4372 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4373 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4374 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4375 g
= gimple_build_assign (l
, rhs
);
/* Advance the induction variables of the huge-kind loop.  */
4380 if (kind
== bitint_prec_huge
&& i
<= 1)
4384 idx
= make_ssa_name (sizetype
);
4385 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4391 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4394 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4395 NULL_TREE
, NULL_TREE
);
4397 m_gsi
= gsi_for_stmt (final_stmt
);
/* Finally store the result limbs and the overflow flag.  */
4403 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
4406 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4407 argument or return type _Complex large/huge _BitInt. */
/* NOTE(review): this listing is corrupted -- each statement is split
   across several physical lines, the original file's line numbers
   (4410, 4412, ...) are fused into the text, and some source lines are
   missing entirely.  The code below is kept byte-identical; only
   comments were added.  Verify against upstream
   gcc/gimple-lower-bitint.cc before editing.
   Purpose (from visible fragments): lower a .MUL_OVERFLOW call with at
   least one large/huge _BitInt argument or a _Complex large/huge _BitInt
   return -- compute the widened product via IFN_MULBITINT, then inspect
   bits [start, end) of the product to derive the overflow flag.  */
4410 bitint_large_huge::lower_mul_overflow (tree obj
, gimple
*stmt
)
4412 tree arg0
= gimple_call_arg (stmt
, 0);
4413 tree arg1
= gimple_call_arg (stmt
, 1);
4414 tree lhs
= gimple_call_lhs (stmt
);
4417 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4418 gsi_remove (&gsi
, true);
4421 gimple
*final_stmt
= gsi_stmt (m_gsi
);
4422 tree type
= TREE_TYPE (lhs
);
4423 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4424 type
= TREE_TYPE (type
);
4425 int prec
= TYPE_PRECISION (type
), prec0
, prec1
;
4426 arg0
= handle_operand_addr (arg0
, stmt
, NULL
, &prec0
);
4427 arg1
= handle_operand_addr (arg1
, stmt
, NULL
, &prec1
);
/* prec2 bounds the infinite-precision product's width.  */
4428 int prec2
= ((prec0
< 0 ? -prec0
: prec0
)
4429 + (prec1
< 0 ? -prec1
: prec1
));
4430 if (prec0
== 1 || prec1
== 1)
4432 tree var
= NULL_TREE
;
4433 tree orig_obj
= obj
;
4434 bool force_var
= false;
/* Pick a destination: the coalesced partition var when usable; but if
   OBJ overlaps an argument, or the widened product doesn't fit in it,
   force a temporary limb array VAR.  */
4435 if (obj
== NULL_TREE
4436 && TREE_CODE (type
) == BITINT_TYPE
4437 && bitint_precision_kind (type
) >= bitint_prec_large
4439 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4441 int part
= var_to_partition (m_map
, lhs
);
4442 gcc_assert (m_vars
[part
] != NULL_TREE
);
4444 if (TREE_TYPE (lhs
) == type
)
4447 else if (obj
!= NULL_TREE
&& DECL_P (obj
))
4449 for (int i
= 0; i
< 2; ++i
)
4451 tree arg
= i
? arg1
: arg0
;
4452 if (TREE_CODE (arg
) == ADDR_EXPR
)
4453 arg
= TREE_OPERAND (arg
, 0);
4454 if (get_base_address (arg
) == obj
)
4461 if (obj
== NULL_TREE
4463 || TREE_CODE (type
) != BITINT_TYPE
4464 || bitint_precision_kind (type
) < bitint_prec_large
4465 || prec2
> (CEIL (prec
, limb_prec
) * limb_prec
* (orig_obj
? 1 : 2)))
4467 unsigned HOST_WIDE_INT nelts
= CEIL (MAX (prec
, prec2
), limb_prec
);
4468 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4469 var
= create_tmp_var (atype
);
/* Compute the full product into VAR/OBJ via the libgcc helper.  */
4471 tree addr
= build_fold_addr_expr (var
? var
: obj
);
4472 addr
= force_gimple_operand_gsi (&m_gsi
, addr
, true,
4473 NULL_TREE
, true, GSI_SAME_STMT
);
4474 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
4476 = gimple_build_call_internal (IFN_MULBITINT
, 6,
4477 addr
, build_int_cst (sitype
,
4479 arg0
, build_int_cst (sitype
, prec0
),
4480 arg1
, build_int_cst (sitype
, prec1
));
/* Determine overflow by inspecting the product's bits [start, end).  */
4483 unsigned start
, end
;
4485 tree ovf
= arith_overflow (MULT_EXPR
, type
, prec
, prec0
, prec1
, prec2
,
4486 &start
, &end
, &check_zero
);
4487 if (ovf
== NULL_TREE
)
4489 unsigned startlimb
= start
/ limb_prec
;
4490 unsigned endlimb
= (end
- 1) / limb_prec
;
4492 bool use_loop
= false;
4493 if (startlimb
== endlimb
)
4495 else if (startlimb
+ 1 == endlimb
)
4497 else if ((end
% limb_prec
) == 0)
4505 use_loop
= startlimb
+ 2 < endlimb
;
/* Single-limb check: extract the relevant bits and compare once.  */
4509 tree l
= limb_access (NULL_TREE
, var
? var
: obj
,
4510 size_int (startlimb
), true);
4511 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4513 l
= arith_overflow_extract_bits (start
, end
, gimple_assign_lhs (g
),
4514 startlimb
, check_zero
);
4515 ovf
= make_ssa_name (boolean_type_node
);
4517 g
= gimple_build_assign (ovf
, NE_EXPR
, l
,
4518 build_zero_cst (m_limb_type
));
4521 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4523 build_int_cst (m_limb_type
, 1));
4525 g
= gimple_build_assign (ovf
, GT_EXPR
, gimple_assign_lhs (g
),
4526 build_int_cst (m_limb_type
, 1));
/* Multi-limb check: scan the relevant limbs (optionally in a loop),
   branching straight to FINAL_STMT's block on detected overflow.  */
4532 basic_block edge_bb
= NULL
;
4533 gimple_stmt_iterator gsi
= m_gsi
;
4535 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4537 m_gsi
= gsi_end_bb (edge_bb
);
4539 tree cmp
= build_zero_cst (m_limb_type
);
4540 for (unsigned i
= 0; i
< cnt
; i
++)
4542 tree idx
, idx_next
= NULL_TREE
;
4544 idx
= size_int (startlimb
);
4546 idx
= size_int (endlimb
);
4548 idx
= create_loop (size_int (startlimb
+ 1), &idx_next
);
4550 idx
= size_int (startlimb
+ 1);
4551 tree l
= limb_access (NULL_TREE
, var
? var
: obj
, idx
, true);
4552 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4554 l
= gimple_assign_lhs (g
);
4555 if (i
== 0 || i
== 2)
4556 l
= arith_overflow_extract_bits (start
, end
, l
,
4559 if (i
== 0 && !check_zero
)
4562 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4564 build_int_cst (m_limb_type
, 1));
4566 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4567 build_int_cst (m_limb_type
, 1),
4568 NULL_TREE
, NULL_TREE
);
4571 g
= gimple_build_cond (NE_EXPR
, l
, cmp
, NULL_TREE
, NULL_TREE
);
4573 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4574 e1
->flags
= EDGE_FALSE_VALUE
;
4575 edge e2
= make_edge (e1
->src
, gimple_bb (final_stmt
),
4577 e1
->probability
= profile_probability::likely ();
4578 e2
->probability
= e1
->probability
.invert ();
4580 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4581 m_gsi
= gsi_after_labels (e1
->dest
);
4582 if (i
== 1 && use_loop
)
4584 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4587 g
= gimple_build_cond (NE_EXPR
, idx_next
,
4588 size_int (endlimb
+ (cnt
== 2)),
4589 NULL_TREE
, NULL_TREE
);
4591 edge true_edge
, false_edge
;
4592 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4595 m_gsi
= gsi_after_labels (false_edge
->dest
);
/* Merge: OVF is false on the fall-through edge, true on the edges that
   jumped out early on detected overflow.  */
4600 ovf
= make_ssa_name (boolean_type_node
);
4601 basic_block bb
= gimple_bb (final_stmt
);
4602 gphi
*phi
= create_phi_node (ovf
, bb
);
4603 edge e1
= find_edge (gsi_bb (m_gsi
), bb
);
4605 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4607 tree val
= e
== e1
? boolean_false_node
: boolean_true_node
;
4608 add_phi_arg (phi
, val
, e
, UNKNOWN_LOCATION
);
4610 m_gsi
= gsi_for_stmt (final_stmt
);
/* Finally store the result limbs and the overflow flag.  */
4614 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, MULT_EXPR
);
4617 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4618 .{ADD,SUB,MUL}_OVERFLOW call. */
4621 bitint_large_huge::lower_cplxpart_stmt (tree obj
, gimple
*stmt
)
4623 tree rhs1
= gimple_assign_rhs1 (stmt
);
4624 rhs1
= TREE_OPERAND (rhs1
, 0);
4625 if (obj
== NULL_TREE
)
4627 int part
= var_to_partition (m_map
, gimple_assign_lhs (stmt
));
4628 gcc_assert (m_vars
[part
] != NULL_TREE
);
4631 if (TREE_CODE (rhs1
) == SSA_NAME
4633 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
4635 lower_call (obj
, SSA_NAME_DEF_STMT (rhs1
));
4638 int part
= var_to_partition (m_map
, rhs1
);
4639 gcc_assert (m_vars
[part
] != NULL_TREE
);
4640 tree var
= m_vars
[part
];
4641 unsigned HOST_WIDE_INT nelts
4642 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
4643 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4644 if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
4645 obj
= build1 (VIEW_CONVERT_EXPR
, atype
, obj
);
4646 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (var
)),
4647 gimple_assign_rhs_code (stmt
) == REALPART_EXPR
4648 ? 0 : nelts
* m_limb_size
);
4649 tree v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), off
);
4650 gimple
*g
= gimple_build_assign (obj
, v2
);
4654 /* Lower COMPLEX_EXPR stmt. */
4657 bitint_large_huge::lower_complexexpr_stmt (gimple
*stmt
)
4659 tree lhs
= gimple_assign_lhs (stmt
);
4660 tree rhs1
= gimple_assign_rhs1 (stmt
);
4661 tree rhs2
= gimple_assign_rhs2 (stmt
);
4662 int part
= var_to_partition (m_map
, lhs
);
4663 gcc_assert (m_vars
[part
] != NULL_TREE
);
4665 unsigned HOST_WIDE_INT nelts
4666 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1
))) / limb_prec
;
4667 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4668 tree zero
= build_zero_cst (build_pointer_type (TREE_TYPE (lhs
)));
4669 tree v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), zero
);
4671 if (TREE_CODE (rhs1
) == SSA_NAME
)
4673 part
= var_to_partition (m_map
, rhs1
);
4674 gcc_assert (m_vars
[part
] != NULL_TREE
);
4677 else if (integer_zerop (rhs1
))
4678 v2
= build_zero_cst (atype
);
4680 v2
= tree_output_constant_def (rhs1
);
4681 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4682 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4683 gimple
*g
= gimple_build_assign (v1
, v2
);
4685 tree off
= fold_convert (build_pointer_type (TREE_TYPE (lhs
)),
4686 TYPE_SIZE_UNIT (atype
));
4687 v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), off
);
4688 if (TREE_CODE (rhs2
) == SSA_NAME
)
4690 part
= var_to_partition (m_map
, rhs2
);
4691 gcc_assert (m_vars
[part
] != NULL_TREE
);
4694 else if (integer_zerop (rhs2
))
4695 v2
= build_zero_cst (atype
);
4697 v2
= tree_output_constant_def (rhs2
);
4698 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4699 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4700 g
= gimple_build_assign (v1
, v2
);
4704 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4708 bitint_large_huge::lower_bit_query (gimple
*stmt
)
4710 tree arg0
= gimple_call_arg (stmt
, 0);
4711 tree arg1
= (gimple_call_num_args (stmt
) == 2
4712 ? gimple_call_arg (stmt
, 1) : NULL_TREE
);
4713 tree lhs
= gimple_call_lhs (stmt
);
4718 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4719 gsi_remove (&gsi
, true);
4722 tree type
= TREE_TYPE (arg0
);
4723 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
4724 bitint_prec_kind kind
= bitint_precision_kind (type
);
4725 gcc_assert (kind
>= bitint_prec_large
);
4726 enum internal_fn ifn
= gimple_call_internal_fn (stmt
);
4727 enum built_in_function fcode
= END_BUILTINS
;
4728 gcc_assert (TYPE_PRECISION (unsigned_type_node
) == limb_prec
4729 || TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
4730 || TYPE_PRECISION (long_long_unsigned_type_node
) == limb_prec
);
4734 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4735 fcode
= BUILT_IN_CLZ
;
4736 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4737 fcode
= BUILT_IN_CLZL
;
4739 fcode
= BUILT_IN_CLZLL
;
4742 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4743 we don't add the addend at the end. */
4744 arg1
= integer_zero_node
;
4747 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4748 fcode
= BUILT_IN_CTZ
;
4749 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4750 fcode
= BUILT_IN_CTZL
;
4752 fcode
= BUILT_IN_CTZLL
;
4756 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4757 fcode
= BUILT_IN_CLRSB
;
4758 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4759 fcode
= BUILT_IN_CLRSBL
;
4761 fcode
= BUILT_IN_CLRSBLL
;
4764 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4765 fcode
= BUILT_IN_PARITY
;
4766 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4767 fcode
= BUILT_IN_PARITYL
;
4769 fcode
= BUILT_IN_PARITYLL
;
4773 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4774 fcode
= BUILT_IN_POPCOUNT
;
4775 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4776 fcode
= BUILT_IN_POPCOUNTL
;
4778 fcode
= BUILT_IN_POPCOUNTLL
;
4784 tree fndecl
= builtin_decl_explicit (fcode
), res
= NULL_TREE
;
4785 unsigned cnt
= 0, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
4786 struct bq_details
{ edge e
; tree val
, addend
; } *bqp
= NULL
;
4787 basic_block edge_bb
= NULL
;
4790 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
4791 if (kind
== bitint_prec_large
)
4792 cnt
= CEIL (prec
, limb_prec
);
4795 rem
= (prec
% (2 * limb_prec
));
4796 end
= (prec
- rem
) / limb_prec
;
4797 cnt
= 2 + CEIL (rem
, limb_prec
);
4798 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4801 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4803 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4805 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4807 if (kind
== bitint_prec_large
)
4808 m_gsi
= gsi_end_bb (edge_bb
);
4809 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4812 m_after_stmt
= stmt
;
4813 if (kind
!= bitint_prec_large
)
4814 m_upwards_2limb
= end
;
4816 for (unsigned i
= 0; i
< cnt
; i
++)
4819 if (kind
== bitint_prec_large
)
4822 idx
= size_int (end
+ (i
> 2));
4824 tree rhs1
= handle_operand (arg0
, idx
);
4825 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4827 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4828 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4829 rhs1
= add_cast (m_limb_type
, rhs1
);
4833 if (ifn
== IFN_PARITY
)
4834 in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
4835 else if (ifn
== IFN_FFS
)
4836 in
= prepare_data_in_out (integer_one_node
, idx
, &out
);
4838 in
= prepare_data_in_out (integer_zero_node
, idx
, &out
);
4844 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4845 build_zero_cst (m_limb_type
),
4846 NULL_TREE
, NULL_TREE
);
4849 e1
= split_block (gsi_bb (m_gsi
), g
);
4850 e1
->flags
= EDGE_FALSE_VALUE
;
4851 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4852 e1
->probability
= profile_probability::unlikely ();
4853 e2
->probability
= e1
->probability
.invert ();
4855 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4856 m_gsi
= gsi_after_labels (e1
->dest
);
4859 if (tree_fits_uhwi_p (idx
))
4861 = build_int_cst (integer_type_node
,
4862 tree_to_uhwi (idx
) * limb_prec
4863 + (ifn
== IFN_FFS
));
4870 res
= make_ssa_name (integer_type_node
);
4871 g
= gimple_build_assign (res
, PLUS_EXPR
, in
,
4872 build_int_cst (integer_type_node
,
4875 m_data
[m_data_cnt
] = res
;
4879 if (!integer_zerop (in
))
4881 if (kind
== bitint_prec_huge
&& i
== 1)
4884 res
= make_ssa_name (m_limb_type
);
4885 g
= gimple_build_assign (res
, BIT_XOR_EXPR
, in
, rhs1
);
4890 m_data
[m_data_cnt
] = res
;
4893 g
= gimple_build_call (fndecl
, 1, rhs1
);
4894 tem
= make_ssa_name (integer_type_node
);
4895 gimple_call_set_lhs (g
, tem
);
4897 if (!integer_zerop (in
))
4899 if (kind
== bitint_prec_huge
&& i
== 1)
4902 res
= make_ssa_name (integer_type_node
);
4903 g
= gimple_build_assign (res
, PLUS_EXPR
, in
, tem
);
4908 m_data
[m_data_cnt
] = res
;
4915 if (kind
== bitint_prec_huge
&& i
<= 1)
4919 idx
= make_ssa_name (sizetype
);
4920 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4926 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4929 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
4930 NULL_TREE
, NULL_TREE
);
4932 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4933 m_gsi
= gsi_after_labels (edge_bb
);
4935 m_gsi
= gsi_for_stmt (stmt
);
4943 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
, first
= NULL_TREE
;
4945 if (kind
== bitint_prec_large
)
4946 cnt
= CEIL (prec
, limb_prec
);
4949 rem
= prec
% limb_prec
;
4950 if (rem
== 0 && (!TYPE_UNSIGNED (type
) || ifn
== IFN_CLRSB
))
4952 end
= (prec
- rem
) / limb_prec
;
4953 cnt
= 1 + (rem
!= 0);
4954 if (ifn
== IFN_CLRSB
)
4958 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4960 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4962 m_gsi
= gsi_end_bb (edge_bb
);
4965 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4968 gsi
= gsi_for_stmt (stmt
);
4970 e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4972 bqp
= XALLOCAVEC (struct bq_details
, 2 * cnt
);
4975 for (unsigned i
= 0; i
< cnt
; i
++)
4978 if (kind
== bitint_prec_large
)
4979 idx
= size_int (cnt
- i
- 1);
4980 else if (i
== cnt
- 1)
4981 idx
= create_loop (size_int (end
- 1), &idx_next
);
4983 idx
= size_int (end
);
4985 tree rhs1
= handle_operand (arg0
, idx
);
4986 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4988 if (ifn
== IFN_CLZ
&& !TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4989 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4990 else if (ifn
== IFN_CLRSB
&& TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4991 rhs1
= add_cast (signed_type_for (TREE_TYPE (rhs1
)), rhs1
);
4992 rhs1
= add_cast (m_limb_type
, rhs1
);
4997 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4998 build_zero_cst (m_limb_type
),
4999 NULL_TREE
, NULL_TREE
);
5001 edge e1
= split_block (gsi_bb (m_gsi
), g
);
5002 e1
->flags
= EDGE_FALSE_VALUE
;
5003 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
5004 e1
->probability
= profile_probability::unlikely ();
5005 e2
->probability
= e1
->probability
.invert ();
5007 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
5008 m_gsi
= gsi_after_labels (e1
->dest
);
5017 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
5019 build_int_cst (m_limb_type
, 1));
5021 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
5022 build_int_cst (m_limb_type
, 1),
5023 NULL_TREE
, NULL_TREE
);
5028 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
5029 BIT_XOR_EXPR
, rhs1
, first
);
5031 tree stype
= signed_type_for (m_limb_type
);
5032 g
= gimple_build_cond (LT_EXPR
,
5034 gimple_assign_lhs (g
)),
5035 build_zero_cst (stype
),
5036 NULL_TREE
, NULL_TREE
);
5038 edge e1
= split_block (gsi_bb (m_gsi
), g
);
5039 e1
->flags
= EDGE_FALSE_VALUE
;
5040 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
),
5042 e1
->probability
= profile_probability::unlikely ();
5043 e2
->probability
= e1
->probability
.invert ();
5045 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
,
5047 m_gsi
= gsi_after_labels (e1
->dest
);
5049 g
= gimple_build_cond (NE_EXPR
, rhs1
, first
,
5050 NULL_TREE
, NULL_TREE
);
5053 edge e1
= split_block (gsi_bb (m_gsi
), g
);
5054 e1
->flags
= EDGE_FALSE_VALUE
;
5055 edge e2
= make_edge (e1
->src
, edge_bb
, EDGE_TRUE_VALUE
);
5056 e1
->probability
= profile_probability::unlikely ();
5057 e2
->probability
= e1
->probability
.invert ();
5059 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
5060 m_gsi
= gsi_after_labels (e1
->dest
);
5061 bqp
[2 * i
+ 1].e
= e2
;
5064 if (tree_fits_uhwi_p (idx
))
5066 = build_int_cst (integer_type_node
,
5068 - (((int) tree_to_uhwi (idx
) + 1)
5069 * limb_prec
) - sub_one
);
5073 in
= build_int_cst (integer_type_node
, rem
- sub_one
);
5075 in
= prepare_data_in_out (in
, idx
, &out
);
5076 out
= m_data
[m_data_cnt
+ 1];
5078 g
= gimple_build_assign (out
, PLUS_EXPR
, in
,
5079 build_int_cst (integer_type_node
,
5082 m_data
[m_data_cnt
] = out
;
5086 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
5088 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
5091 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
5092 NULL_TREE
, NULL_TREE
);
5094 edge true_edge
, false_edge
;
5095 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
5096 &true_edge
, &false_edge
);
5097 m_gsi
= gsi_after_labels (false_edge
->dest
);
5107 gphi
*phi1
, *phi2
, *phi3
;
5109 bb
= gsi_bb (m_gsi
);
5110 remove_edge (find_edge (bb
, gimple_bb (stmt
)));
5111 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
5113 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
5115 for (unsigned i
= 0; i
< cnt
; i
++)
5117 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[i
].e
, UNKNOWN_LOCATION
);
5118 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[i
].e
, UNKNOWN_LOCATION
);
5120 if (arg1
== NULL_TREE
)
5122 g
= gimple_build_builtin_unreachable (m_loc
);
5125 m_gsi
= gsi_for_stmt (stmt
);
5126 g
= gimple_build_call (fndecl
, 1, gimple_phi_result (phi1
));
5127 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
5129 if (arg1
== NULL_TREE
)
5130 g
= gimple_build_assign (lhs
, PLUS_EXPR
,
5131 gimple_phi_result (phi2
),
5132 gimple_call_lhs (g
));
5135 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5136 PLUS_EXPR
, gimple_phi_result (phi2
),
5137 gimple_call_lhs (g
));
5139 edge e1
= split_block (gimple_bb (stmt
), g
);
5140 edge e2
= make_edge (bb
, e1
->dest
, EDGE_FALLTHRU
);
5141 e2
->probability
= profile_probability::always ();
5142 set_immediate_dominator (CDI_DOMINATORS
, e1
->dest
,
5143 get_immediate_dominator (CDI_DOMINATORS
,
5145 phi3
= create_phi_node (make_ssa_name (integer_type_node
), e1
->dest
);
5146 add_phi_arg (phi3
, gimple_assign_lhs (g
), e1
, UNKNOWN_LOCATION
);
5147 add_phi_arg (phi3
, arg1
, e2
, UNKNOWN_LOCATION
);
5148 m_gsi
= gsi_for_stmt (stmt
);
5149 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
5151 gsi_replace (&m_gsi
, g
, true);
5154 bb
= gsi_bb (m_gsi
);
5155 remove_edge (find_edge (bb
, edge_bb
));
5157 e
= make_edge (bb
, gimple_bb (stmt
), EDGE_FALLTHRU
);
5158 e
->probability
= profile_probability::always ();
5159 set_immediate_dominator (CDI_DOMINATORS
, gimple_bb (stmt
),
5160 get_immediate_dominator (CDI_DOMINATORS
,
5162 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
5164 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
5166 phi3
= create_phi_node (make_ssa_name (integer_type_node
),
5168 for (unsigned i
= 0; i
< cnt
; i
++)
5170 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[2 * i
+ 1].e
, UNKNOWN_LOCATION
);
5171 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[2 * i
+ 1].e
,
5173 tree a
= bqp
[i
].addend
;
5174 if (i
&& kind
== bitint_prec_large
)
5175 a
= int_const_binop (PLUS_EXPR
, a
, integer_minus_one_node
);
5177 add_phi_arg (phi3
, a
, bqp
[2 * i
].e
, UNKNOWN_LOCATION
);
5179 add_phi_arg (phi3
, build_int_cst (integer_type_node
, prec
- 1), e
,
5181 m_gsi
= gsi_after_labels (edge_bb
);
5182 g
= gimple_build_call (fndecl
, 1,
5183 add_cast (signed_type_for (m_limb_type
),
5184 gimple_phi_result (phi1
)));
5185 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
5187 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5188 PLUS_EXPR
, gimple_call_lhs (g
),
5189 gimple_phi_result (phi2
));
5191 if (kind
!= bitint_prec_large
)
5193 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5194 PLUS_EXPR
, gimple_assign_lhs (g
),
5198 add_phi_arg (phi3
, gimple_assign_lhs (g
),
5199 find_edge (edge_bb
, gimple_bb (stmt
)), UNKNOWN_LOCATION
);
5200 m_gsi
= gsi_for_stmt (stmt
);
5201 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
5202 gsi_replace (&m_gsi
, g
, true);
5205 g
= gimple_build_call (fndecl
, 1, res
);
5206 gimple_call_set_lhs (g
, lhs
);
5207 gsi_replace (&m_gsi
, g
, true);
5210 g
= gimple_build_assign (lhs
, res
);
5211 gsi_replace (&m_gsi
, g
, true);
5218 /* Lower a call statement with one or more large/huge _BitInt
5219 arguments or large/huge _BitInt return value. */
5222 bitint_large_huge::lower_call (tree obj
, gimple
*stmt
)
5224 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
5225 unsigned int nargs
= gimple_call_num_args (stmt
);
5226 if (gimple_call_internal_p (stmt
))
5227 switch (gimple_call_internal_fn (stmt
))
5229 case IFN_ADD_OVERFLOW
:
5230 case IFN_SUB_OVERFLOW
:
5231 case IFN_UBSAN_CHECK_ADD
:
5232 case IFN_UBSAN_CHECK_SUB
:
5233 lower_addsub_overflow (obj
, stmt
);
5235 case IFN_MUL_OVERFLOW
:
5236 case IFN_UBSAN_CHECK_MUL
:
5237 lower_mul_overflow (obj
, stmt
);
5245 lower_bit_query (stmt
);
5250 for (unsigned int i
= 0; i
< nargs
; ++i
)
5252 tree arg
= gimple_call_arg (stmt
, i
);
5253 if (TREE_CODE (arg
) != SSA_NAME
5254 || TREE_CODE (TREE_TYPE (arg
)) != BITINT_TYPE
5255 || bitint_precision_kind (TREE_TYPE (arg
)) <= bitint_prec_middle
)
5257 if (SSA_NAME_IS_DEFAULT_DEF (arg
)
5258 && (!SSA_NAME_VAR (arg
) || VAR_P (SSA_NAME_VAR (arg
))))
5260 tree var
= create_tmp_reg (TREE_TYPE (arg
));
5261 arg
= get_or_create_ssa_default_def (cfun
, var
);
5265 int p
= var_to_partition (m_map
, arg
);
5267 gcc_assert (v
!= NULL_TREE
);
5268 if (!types_compatible_p (TREE_TYPE (arg
), TREE_TYPE (v
)))
5269 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (arg
), v
);
5270 arg
= make_ssa_name (TREE_TYPE (arg
));
5271 gimple
*g
= gimple_build_assign (arg
, v
);
5272 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5274 gimple_call_set_arg (stmt
, i
, arg
);
5275 if (m_preserved
== NULL
)
5276 m_preserved
= BITMAP_ALLOC (NULL
);
5277 bitmap_set_bit (m_preserved
, SSA_NAME_VERSION (arg
));
5279 tree lhs
= gimple_call_lhs (stmt
);
5281 && TREE_CODE (lhs
) == SSA_NAME
5282 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5283 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5285 int p
= var_to_partition (m_map
, lhs
);
5287 gcc_assert (v
!= NULL_TREE
);
5288 if (!types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (v
)))
5289 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
), v
);
5290 gimple_call_set_lhs (stmt
, v
);
5291 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5296 /* Lower __asm STMT which involves large/huge _BitInt values. */
5299 bitint_large_huge::lower_asm (gimple
*stmt
)
5301 gasm
*g
= as_a
<gasm
*> (stmt
);
5302 unsigned noutputs
= gimple_asm_noutputs (g
);
5303 unsigned ninputs
= gimple_asm_ninputs (g
);
5305 for (unsigned i
= 0; i
< noutputs
; ++i
)
5307 tree t
= gimple_asm_output_op (g
, i
);
5308 tree s
= TREE_VALUE (t
);
5309 if (TREE_CODE (s
) == SSA_NAME
5310 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5311 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5313 int part
= var_to_partition (m_map
, s
);
5314 gcc_assert (m_vars
[part
] != NULL_TREE
);
5315 TREE_VALUE (t
) = m_vars
[part
];
5318 for (unsigned i
= 0; i
< ninputs
; ++i
)
5320 tree t
= gimple_asm_input_op (g
, i
);
5321 tree s
= TREE_VALUE (t
);
5322 if (TREE_CODE (s
) == SSA_NAME
5323 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5324 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5326 if (SSA_NAME_IS_DEFAULT_DEF (s
)
5327 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
5329 TREE_VALUE (t
) = create_tmp_var (TREE_TYPE (s
), "bitint");
5330 mark_addressable (TREE_VALUE (t
));
5334 int part
= var_to_partition (m_map
, s
);
5335 gcc_assert (m_vars
[part
] != NULL_TREE
);
5336 TREE_VALUE (t
) = m_vars
[part
];
5343 /* Lower statement STMT which involves large/huge _BitInt values
5344 into code accessing individual limbs. */
5347 bitint_large_huge::lower_stmt (gimple
*stmt
)
5351 m_data
.truncate (0);
5353 m_gsi
= gsi_for_stmt (stmt
);
5354 m_after_stmt
= NULL
;
5357 gsi_prev (&m_init_gsi
);
5358 m_preheader_bb
= NULL
;
5359 m_upwards_2limb
= 0;
5362 m_cast_conditional
= false;
5364 m_loc
= gimple_location (stmt
);
5365 if (is_gimple_call (stmt
))
5367 lower_call (NULL_TREE
, stmt
);
5370 if (gimple_code (stmt
) == GIMPLE_ASM
)
5375 tree lhs
= NULL_TREE
, cmp_op1
= NULL_TREE
, cmp_op2
= NULL_TREE
;
5376 tree_code cmp_code
= comparison_op (stmt
, &cmp_op1
, &cmp_op2
);
5377 bool eq_p
= (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
);
5378 bool mergeable_cast_p
= false;
5379 bool final_cast_p
= false;
5380 if (gimple_assign_cast_p (stmt
))
5382 lhs
= gimple_assign_lhs (stmt
);
5383 tree rhs1
= gimple_assign_rhs1 (stmt
);
5384 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
5385 rhs1
= TREE_OPERAND (rhs1
, 0);
5386 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5387 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5388 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)))
5389 mergeable_cast_p
= true;
5390 else if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
5391 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
5392 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
5393 || POINTER_TYPE_P (TREE_TYPE (lhs
))
5394 || gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
))
5396 final_cast_p
= true;
5397 if (((TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
5398 && TYPE_PRECISION (TREE_TYPE (lhs
)) > MAX_FIXED_MODE_SIZE
)
5399 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
5400 && !POINTER_TYPE_P (TREE_TYPE (lhs
))))
5401 && gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
)
5403 /* Handle VIEW_CONVERT_EXPRs to not generally supported
5404 huge INTEGER_TYPEs like uint256_t or uint512_t. These
5405 are usually emitted from memcpy folding and backends
5406 support moves with them but that is usually it.
5407 Similarly handle VCEs to vector/complex types etc. */
5408 gcc_assert (TREE_CODE (rhs1
) == SSA_NAME
);
5409 if (SSA_NAME_IS_DEFAULT_DEF (rhs1
)
5410 && (!SSA_NAME_VAR (rhs1
) || VAR_P (SSA_NAME_VAR (rhs1
))))
5412 tree var
= create_tmp_reg (TREE_TYPE (lhs
));
5413 rhs1
= get_or_create_ssa_default_def (cfun
, var
);
5414 gimple_assign_set_rhs1 (stmt
, rhs1
);
5415 gimple_assign_set_rhs_code (stmt
, SSA_NAME
);
5417 else if (m_names
== NULL
5418 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
5420 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5421 gcc_assert (gimple_assign_load_p (g
));
5422 tree mem
= gimple_assign_rhs1 (g
);
5423 tree ltype
= TREE_TYPE (lhs
);
5424 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (mem
));
5425 if (as
!= TYPE_ADDR_SPACE (ltype
))
5427 = build_qualified_type (ltype
,
5429 | ENCODE_QUAL_ADDR_SPACE (as
));
5430 rhs1
= build1 (VIEW_CONVERT_EXPR
, ltype
, unshare_expr (mem
));
5431 gimple_assign_set_rhs1 (stmt
, rhs1
);
5435 int part
= var_to_partition (m_map
, rhs1
);
5436 gcc_assert (m_vars
[part
] != NULL_TREE
);
5437 rhs1
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
),
5439 gimple_assign_set_rhs1 (stmt
, rhs1
);
5444 if (TREE_CODE (rhs1
) == SSA_NAME
5446 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5448 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5449 if (is_gimple_assign (g
)
5450 && gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
5452 tree rhs2
= TREE_OPERAND (gimple_assign_rhs1 (g
), 0);
5453 if (TREE_CODE (rhs2
) == SSA_NAME
5455 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs2
))))
5457 g
= SSA_NAME_DEF_STMT (rhs2
);
5458 int ovf
= optimizable_arith_overflow (g
);
5460 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5461 and IMAGPART_EXPR uses, where the latter is cast to
5462 non-_BitInt, it will be optimized when handling
5463 the REALPART_EXPR. */
5467 lower_call (NULL_TREE
, g
);
5474 else if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5475 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5476 && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
5477 && !POINTER_TYPE_P (TREE_TYPE (rhs1
))
5478 && gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
)
5480 int part
= var_to_partition (m_map
, lhs
);
5481 gcc_assert (m_vars
[part
] != NULL_TREE
);
5482 lhs
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs1
), m_vars
[part
]);
5483 insert_before (gimple_build_assign (lhs
, rhs1
));
5487 if (gimple_store_p (stmt
))
5489 tree rhs1
= gimple_assign_rhs1 (stmt
);
5490 if (TREE_CODE (rhs1
) == SSA_NAME
5492 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5494 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5495 m_loc
= gimple_location (g
);
5496 lhs
= gimple_assign_lhs (stmt
);
5497 if (is_gimple_assign (g
) && !mergeable_op (g
))
5498 switch (gimple_assign_rhs_code (g
))
5502 lower_shift_stmt (lhs
, g
);
5504 m_gsi
= gsi_for_stmt (stmt
);
5505 unlink_stmt_vdef (stmt
);
5506 release_ssa_name (gimple_vdef (stmt
));
5507 gsi_remove (&m_gsi
, true);
5510 case TRUNC_DIV_EXPR
:
5511 case TRUNC_MOD_EXPR
:
5512 lower_muldiv_stmt (lhs
, g
);
5514 case FIX_TRUNC_EXPR
:
5515 lower_float_conv_stmt (lhs
, g
);
5519 lower_cplxpart_stmt (lhs
, g
);
5521 case VIEW_CONVERT_EXPR
:
5523 tree rhs1
= gimple_assign_rhs1 (g
);
5524 rhs1
= TREE_OPERAND (rhs1
, 0);
5525 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
5526 && !POINTER_TYPE_P (TREE_TYPE (rhs1
)))
5528 tree ltype
= TREE_TYPE (rhs1
);
5529 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (lhs
));
5531 = build_qualified_type (ltype
,
5532 TYPE_QUALS (TREE_TYPE (lhs
))
5533 | ENCODE_QUAL_ADDR_SPACE (as
));
5534 lhs
= build1 (VIEW_CONVERT_EXPR
, ltype
, lhs
);
5535 gimple_assign_set_lhs (stmt
, lhs
);
5536 gimple_assign_set_rhs1 (stmt
, rhs1
);
5537 gimple_assign_set_rhs_code (stmt
, TREE_CODE (rhs1
));
5546 else if (optimizable_arith_overflow (g
) == 3)
5548 lower_call (lhs
, g
);
5551 m_loc
= gimple_location (stmt
);
5554 if (mergeable_op (stmt
)
5555 || gimple_store_p (stmt
)
5556 || gimple_assign_load_p (stmt
)
5558 || mergeable_cast_p
)
5560 lhs
= lower_mergeable_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5564 else if (cmp_code
!= ERROR_MARK
)
5565 lhs
= lower_comparison_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5566 if (cmp_code
!= ERROR_MARK
)
5568 if (gimple_code (stmt
) == GIMPLE_COND
)
5570 gcond
*cstmt
= as_a
<gcond
*> (stmt
);
5571 gimple_cond_set_lhs (cstmt
, lhs
);
5572 gimple_cond_set_rhs (cstmt
, boolean_false_node
);
5573 gimple_cond_set_code (cstmt
, cmp_code
);
5577 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
5579 tree cond
= build2 (cmp_code
, boolean_type_node
, lhs
,
5580 boolean_false_node
);
5581 gimple_assign_set_rhs1 (stmt
, cond
);
5582 lhs
= gimple_assign_lhs (stmt
);
5583 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5584 || (bitint_precision_kind (TREE_TYPE (lhs
))
5585 <= bitint_prec_middle
));
5589 gimple_assign_set_rhs1 (stmt
, lhs
);
5590 gimple_assign_set_rhs2 (stmt
, boolean_false_node
);
5591 gimple_assign_set_rhs_code (stmt
, cmp_code
);
5597 tree lhs_type
= TREE_TYPE (lhs
);
5598 /* Add support for 3 or more limbs filled in from normal integral
5599 type if this assert fails. If no target chooses limb mode smaller
5600 than half of largest supported normal integral type, this will not
5602 gcc_assert (TYPE_PRECISION (lhs_type
) <= 2 * limb_prec
);
5604 if ((TREE_CODE (lhs_type
) == BITINT_TYPE
5605 && bitint_precision_kind (lhs_type
) == bitint_prec_middle
)
5606 || POINTER_TYPE_P (lhs_type
))
5607 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (lhs_type
),
5608 TYPE_UNSIGNED (lhs_type
));
5610 tree rhs1
= gimple_assign_rhs1 (stmt
);
5611 tree r1
= handle_operand (rhs1
, size_int (0));
5612 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
5613 r1
= add_cast (lhs_type
, r1
);
5614 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
5618 tree r2
= handle_operand (rhs1
, size_int (1));
5619 r2
= add_cast (lhs_type
, r2
);
5620 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
5621 build_int_cst (unsigned_type_node
,
5624 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
5625 gimple_assign_lhs (g
));
5627 r1
= gimple_assign_lhs (g
);
5629 if (lhs_type
!= TREE_TYPE (lhs
))
5630 g
= gimple_build_assign (lhs
, NOP_EXPR
, r1
);
5632 g
= gimple_build_assign (lhs
, r1
);
5633 gsi_replace (&m_gsi
, g
, true);
5636 if (is_gimple_assign (stmt
))
5637 switch (gimple_assign_rhs_code (stmt
))
5641 lower_shift_stmt (NULL_TREE
, stmt
);
5644 case TRUNC_DIV_EXPR
:
5645 case TRUNC_MOD_EXPR
:
5646 lower_muldiv_stmt (NULL_TREE
, stmt
);
5648 case FIX_TRUNC_EXPR
:
5650 lower_float_conv_stmt (NULL_TREE
, stmt
);
5654 lower_cplxpart_stmt (NULL_TREE
, stmt
);
5657 lower_complexexpr_stmt (stmt
);
5665 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5666 the desired memory state. */
5669 vuse_eq (ao_ref
*, tree vuse1
, void *data
)
5671 tree vuse2
= (tree
) data
;
5678 /* Return true if STMT uses a library function and needs to take
5679 address of its inputs. We need to avoid bit-fields in those
5680 cases. Similarly, we need to avoid overlap between destination
5681 and source limb arrays. */
5684 stmt_needs_operand_addr (gimple
*stmt
)
5686 if (is_gimple_assign (stmt
))
5687 switch (gimple_assign_rhs_code (stmt
))
5690 case TRUNC_DIV_EXPR
:
5691 case TRUNC_MOD_EXPR
:
5697 else if (gimple_call_internal_p (stmt
, IFN_MUL_OVERFLOW
)
5698 || gimple_call_internal_p (stmt
, IFN_UBSAN_CHECK_MUL
))
5703 /* Dominator walker used to discover which large/huge _BitInt
5704 loads could be sunk into all their uses. */
5706 class bitint_dom_walker
: public dom_walker
5709 bitint_dom_walker (bitmap names
, bitmap loads
)
5710 : dom_walker (CDI_DOMINATORS
), m_names (names
), m_loads (loads
) {}
5712 edge
before_dom_children (basic_block
) final override
;
5715 bitmap m_names
, m_loads
;
5719 bitint_dom_walker::before_dom_children (basic_block bb
)
5721 gphi
*phi
= get_virtual_phi (bb
);
5724 vop
= gimple_phi_result (phi
);
5725 else if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5728 vop
= (tree
) get_immediate_dominator (CDI_DOMINATORS
, bb
)->aux
;
5730 auto_vec
<tree
, 16> worklist
;
5731 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5732 !gsi_end_p (gsi
); gsi_next (&gsi
))
5734 gimple
*stmt
= gsi_stmt (gsi
);
5735 if (is_gimple_debug (stmt
))
5738 if (!vop
&& gimple_vuse (stmt
))
5739 vop
= gimple_vuse (stmt
);
5742 if (gimple_vdef (stmt
))
5743 vop
= gimple_vdef (stmt
);
5745 tree lhs
= gimple_get_lhs (stmt
);
5747 && TREE_CODE (lhs
) == SSA_NAME
5748 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5749 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5750 && !bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
5751 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5752 it means it will be handled in a loop or straight line code
5753 at the location of its (ultimate) immediate use, so for
5754 vop checking purposes check these only at the ultimate
5759 use_operand_p use_p
;
5760 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, oi
, SSA_OP_USE
)
5762 tree s
= USE_FROM_PTR (use_p
);
5763 if (TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5764 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5765 worklist
.safe_push (s
);
5768 bool needs_operand_addr
= stmt_needs_operand_addr (stmt
);
5769 while (worklist
.length () > 0)
5771 tree s
= worklist
.pop ();
5773 if (!bitmap_bit_p (m_names
, SSA_NAME_VERSION (s
)))
5775 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5776 needs_operand_addr
|= stmt_needs_operand_addr (g
);
5777 FOR_EACH_SSA_USE_OPERAND (use_p
, g
, oi
, SSA_OP_USE
)
5779 tree s2
= USE_FROM_PTR (use_p
);
5780 if (TREE_CODE (TREE_TYPE (s2
)) == BITINT_TYPE
5781 && (bitint_precision_kind (TREE_TYPE (s2
))
5782 >= bitint_prec_large
))
5783 worklist
.safe_push (s2
);
5787 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5788 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5790 tree rhs
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5791 if (TREE_CODE (rhs
) == SSA_NAME
5792 && bitmap_bit_p (m_loads
, SSA_NAME_VERSION (rhs
)))
5797 else if (!bitmap_bit_p (m_loads
, SSA_NAME_VERSION (s
)))
5800 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5801 tree rhs1
= gimple_assign_rhs1 (g
);
5802 if (needs_operand_addr
5803 && TREE_CODE (rhs1
) == COMPONENT_REF
5804 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
5806 tree fld
= TREE_OPERAND (rhs1
, 1);
5807 /* For little-endian, we can allow as inputs bit-fields
5808 which start at a limb boundary. */
5809 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
5810 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
))
5811 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
5816 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5822 ao_ref_init (&ref
, rhs1
);
5823 tree lvop
= gimple_vuse (g
);
5824 unsigned limit
= 64;
5827 && is_gimple_assign (stmt
)
5828 && gimple_store_p (stmt
)
5829 && (needs_operand_addr
5830 || !operand_equal_p (lhs
, gimple_assign_rhs1 (g
), 0)))
5833 && walk_non_aliased_vuses (&ref
, vuse
, false, vuse_eq
,
5834 NULL
, NULL
, limit
, lvop
) == NULL
)
5835 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5839 bb
->aux
= (void *) vop
;
5845 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5846 build_ssa_conflict_graph.
5847 The differences are:
5848 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5849 2) for large/huge _BitInt multiplication/division/modulo process def
5850 only after processing uses rather than before to make uses conflict
5852 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5853 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5854 the final statement. */
5857 build_bitint_stmt_ssa_conflicts (gimple
*stmt
, live_track
*live
,
5858 ssa_conflicts
*graph
, bitmap names
,
5859 void (*def
) (live_track
*, tree
,
5861 void (*use
) (live_track
*, tree
))
5863 bool muldiv_p
= false;
5864 tree lhs
= NULL_TREE
;
5865 if (is_gimple_assign (stmt
))
5867 lhs
= gimple_assign_lhs (stmt
);
5868 if (TREE_CODE (lhs
) == SSA_NAME
5869 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5870 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5872 if (!bitmap_bit_p (names
, SSA_NAME_VERSION (lhs
)))
5874 switch (gimple_assign_rhs_code (stmt
))
5877 case TRUNC_DIV_EXPR
:
5878 case TRUNC_MOD_EXPR
:
5890 /* For stmts with more than one SSA_NAME definition pretend all the
5891 SSA_NAME outputs but the first one are live at this point, so
5892 that conflicts are added in between all those even when they are
5893 actually not really live after the asm, because expansion might
5894 copy those into pseudos after the asm and if multiple outputs
5895 share the same partition, it might overwrite those that should
5897 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5901 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5907 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5908 def (live
, var
, graph
);
5911 auto_vec
<tree
, 16> worklist
;
5912 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_USE
)
5913 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5914 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5916 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5919 worklist
.safe_push (var
);
5922 while (worklist
.length () > 0)
5924 tree s
= worklist
.pop ();
5925 FOR_EACH_SSA_TREE_OPERAND (var
, SSA_NAME_DEF_STMT (s
), iter
, SSA_OP_USE
)
5926 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5927 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5929 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5932 worklist
.safe_push (var
);
5937 def (live
, lhs
, graph
);
5940 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5941 return the largest bitint_prec_kind of them, otherwise return
5942 bitint_prec_small. */
5944 static bitint_prec_kind
5945 arith_overflow_arg_kind (gimple
*stmt
)
5947 bitint_prec_kind ret
= bitint_prec_small
;
5948 if (is_gimple_call (stmt
) && gimple_call_internal_p (stmt
))
5949 switch (gimple_call_internal_fn (stmt
))
5951 case IFN_ADD_OVERFLOW
:
5952 case IFN_SUB_OVERFLOW
:
5953 case IFN_MUL_OVERFLOW
:
5954 for (int i
= 0; i
< 2; ++i
)
5956 tree a
= gimple_call_arg (stmt
, i
);
5957 if (TREE_CODE (a
) == INTEGER_CST
5958 && TREE_CODE (TREE_TYPE (a
)) == BITINT_TYPE
)
5960 bitint_prec_kind kind
= bitint_precision_kind (TREE_TYPE (a
));
5961 ret
= MAX (ret
, kind
);
5971 /* Entry point for _BitInt(N) operation lowering during optimization. */
5974 gimple_lower_bitint (void)
5976 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
5980 for (i
= 0; i
< num_ssa_names
; ++i
)
5982 tree s
= ssa_name (i
);
5985 tree type
= TREE_TYPE (s
);
5986 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5988 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
5989 != bitint_prec_small
)
5991 type
= TREE_TYPE (type
);
5993 if (TREE_CODE (type
) == BITINT_TYPE
5994 && bitint_precision_kind (type
) != bitint_prec_small
)
5996 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5997 into memory. Such functions could have no large/huge SSA_NAMEs. */
5998 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6000 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6001 if (is_gimple_assign (g
) && gimple_store_p (g
))
6003 tree t
= gimple_assign_rhs1 (g
);
6004 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6005 && (bitint_precision_kind (TREE_TYPE (t
))
6006 >= bitint_prec_large
))
6010 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6011 to floating point types need to be rewritten. */
6012 else if (SCALAR_FLOAT_TYPE_P (type
))
6014 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6015 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
6017 tree t
= gimple_assign_rhs1 (g
);
6018 if (TREE_CODE (t
) == INTEGER_CST
6019 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6020 && (bitint_precision_kind (TREE_TYPE (t
))
6021 != bitint_prec_small
))
6026 if (i
== num_ssa_names
)
6030 auto_vec
<gimple
*, 4> switch_statements
;
6031 FOR_EACH_BB_FN (bb
, cfun
)
6033 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
6035 tree idx
= gimple_switch_index (swtch
);
6036 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
6037 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
6041 group_case_labels_stmt (swtch
);
6042 if (gimple_switch_num_labels (swtch
) == 1)
6044 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
6045 gimple_stmt_iterator gsi
= gsi_for_stmt (swtch
);
6046 gsi_remove (&gsi
, true);
6049 switch_statements
.safe_push (swtch
);
6053 if (!switch_statements
.is_empty ())
6055 bool expanded
= false;
6059 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
6061 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
6062 tree_switch_conversion::switch_decision_tree
dt (swtch
);
6063 expanded
|= dt
.analyze_switch_statement ();
6068 free_dominance_info (CDI_DOMINATORS
);
6069 free_dominance_info (CDI_POST_DOMINATORS
);
6070 mark_virtual_operands_for_renaming (cfun
);
6071 cleanup_tree_cfg (TODO_update_ssa
);
6075 struct bitint_large_huge large_huge
;
6076 bool has_large_huge_parm_result
= false;
6077 bool has_large_huge
= false;
6078 unsigned int ret
= 0, first_large_huge
= ~0U;
6079 bool edge_insertions
= false;
6080 for (; i
< num_ssa_names
; ++i
)
6082 tree s
= ssa_name (i
);
6085 tree type
= TREE_TYPE (s
);
6086 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6088 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
6089 >= bitint_prec_large
)
6090 has_large_huge
= true;
6091 type
= TREE_TYPE (type
);
6093 if (TREE_CODE (type
) == BITINT_TYPE
6094 && bitint_precision_kind (type
) >= bitint_prec_large
)
6096 if (first_large_huge
== ~0U)
6097 first_large_huge
= i
;
6098 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
6099 gimple_stmt_iterator gsi
;
6101 /* Unoptimize certain constructs to simpler alternatives to
6102 avoid having to lower all of them. */
6103 if (is_gimple_assign (stmt
) && gimple_bb (stmt
))
6104 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
6109 case TRUNC_DIV_EXPR
:
6110 case TRUNC_MOD_EXPR
:
6111 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
))
6113 location_t loc
= gimple_location (stmt
);
6114 gsi
= gsi_for_stmt (stmt
);
6115 tree rhs1
= gimple_assign_rhs1 (stmt
);
6116 tree rhs2
= gimple_assign_rhs2 (stmt
);
6117 /* For multiplication and division with (ab)
6118 lhs and one or both operands force the operands
6119 into new SSA_NAMEs to avoid coalescing failures. */
6120 if (TREE_CODE (rhs1
) == SSA_NAME
6121 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
))
6123 first_large_huge
= 0;
6124 tree t
= make_ssa_name (TREE_TYPE (rhs1
));
6125 g
= gimple_build_assign (t
, SSA_NAME
, rhs1
);
6126 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6127 gimple_set_location (g
, loc
);
6128 gimple_assign_set_rhs1 (stmt
, t
);
6131 gimple_assign_set_rhs2 (stmt
, t
);
6136 if (TREE_CODE (rhs2
) == SSA_NAME
6137 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2
))
6139 first_large_huge
= 0;
6140 tree t
= make_ssa_name (TREE_TYPE (rhs2
));
6141 g
= gimple_build_assign (t
, SSA_NAME
, rhs2
);
6142 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6143 gimple_set_location (g
, loc
);
6144 gimple_assign_set_rhs2 (stmt
, t
);
6152 first_large_huge
= 0;
6153 location_t loc
= gimple_location (stmt
);
6154 gsi
= gsi_for_stmt (stmt
);
6155 tree rhs1
= gimple_assign_rhs1 (stmt
);
6156 tree type
= TREE_TYPE (rhs1
);
6157 tree n
= gimple_assign_rhs2 (stmt
), m
;
6158 tree p
= build_int_cst (TREE_TYPE (n
),
6159 TYPE_PRECISION (type
));
6160 if (TREE_CODE (n
) == INTEGER_CST
)
6161 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
6164 m
= make_ssa_name (TREE_TYPE (n
));
6165 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
6166 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6167 gimple_set_location (g
, loc
);
6169 if (!TYPE_UNSIGNED (type
))
6171 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
6173 if (TREE_CODE (rhs1
) == INTEGER_CST
)
6174 rhs1
= fold_convert (utype
, rhs1
);
6177 tree t
= make_ssa_name (type
);
6178 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
6179 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6180 gimple_set_location (g
, loc
);
6183 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6184 rhs_code
== LROTATE_EXPR
6185 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
6187 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6188 gimple_set_location (g
, loc
);
6189 tree op1
= gimple_assign_lhs (g
);
6190 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6191 rhs_code
== LROTATE_EXPR
6192 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
6194 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6195 gimple_set_location (g
, loc
);
6196 tree op2
= gimple_assign_lhs (g
);
6197 tree lhs
= gimple_assign_lhs (stmt
);
6198 if (!TYPE_UNSIGNED (type
))
6200 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
6201 BIT_IOR_EXPR
, op1
, op2
);
6202 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6203 gimple_set_location (g
, loc
);
6204 g
= gimple_build_assign (lhs
, NOP_EXPR
,
6205 gimple_assign_lhs (g
));
6208 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
6209 gsi_replace (&gsi
, g
, true);
6210 gimple_set_location (g
, loc
);
6218 first_large_huge
= 0;
6219 gsi
= gsi_for_stmt (stmt
);
6220 tree lhs
= gimple_assign_lhs (stmt
);
6221 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
6222 location_t loc
= gimple_location (stmt
);
6223 if (rhs_code
== ABS_EXPR
)
6224 g
= gimple_build_cond (LT_EXPR
, rhs1
,
6225 build_zero_cst (TREE_TYPE (rhs1
)),
6226 NULL_TREE
, NULL_TREE
);
6227 else if (rhs_code
== ABSU_EXPR
)
6229 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
6230 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
6231 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6232 gimple_set_location (g
, loc
);
6233 g
= gimple_build_cond (LT_EXPR
, rhs1
,
6234 build_zero_cst (TREE_TYPE (rhs1
)),
6235 NULL_TREE
, NULL_TREE
);
6238 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
6240 rhs2
= gimple_assign_rhs2 (stmt
);
6241 if (TREE_CODE (rhs1
) == INTEGER_CST
)
6242 std::swap (rhs1
, rhs2
);
6243 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
6244 NULL_TREE
, NULL_TREE
);
6245 if (rhs_code
== MAX_EXPR
)
6246 std::swap (rhs1
, rhs2
);
6250 g
= gimple_build_cond (NE_EXPR
, rhs1
,
6251 build_zero_cst (TREE_TYPE (rhs1
)),
6252 NULL_TREE
, NULL_TREE
);
6253 rhs1
= gimple_assign_rhs2 (stmt
);
6254 rhs2
= gimple_assign_rhs3 (stmt
);
6256 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6257 gimple_set_location (g
, loc
);
6258 edge e1
= split_block (gsi_bb (gsi
), g
);
6259 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
6260 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
6261 e3
->probability
= profile_probability::even ();
6262 e1
->flags
= EDGE_TRUE_VALUE
;
6263 e1
->probability
= e3
->probability
.invert ();
6264 if (dom_info_available_p (CDI_DOMINATORS
))
6265 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
6266 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
6268 gsi
= gsi_after_labels (e1
->dest
);
6269 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6271 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6272 gimple_set_location (g
, loc
);
6273 rhs2
= gimple_assign_lhs (g
);
6274 std::swap (rhs1
, rhs2
);
6276 gsi
= gsi_for_stmt (stmt
);
6277 gsi_remove (&gsi
, true);
6278 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
6279 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
6280 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
6284 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6285 into memory. Such functions could have no large/huge SSA_NAMEs. */
6286 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6288 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6289 if (is_gimple_assign (g
) && gimple_store_p (g
))
6291 tree t
= gimple_assign_rhs1 (g
);
6292 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6293 && (bitint_precision_kind (TREE_TYPE (t
))
6294 >= bitint_prec_large
))
6295 has_large_huge
= true;
6298 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6299 to floating point types need to be rewritten. */
6300 else if (SCALAR_FLOAT_TYPE_P (type
))
6302 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6303 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
6305 tree t
= gimple_assign_rhs1 (g
);
6306 if (TREE_CODE (t
) == INTEGER_CST
6307 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6308 && (bitint_precision_kind (TREE_TYPE (t
))
6309 >= bitint_prec_large
))
6310 has_large_huge
= true;
6314 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
6316 tree s
= ssa_name (i
);
6319 tree type
= TREE_TYPE (s
);
6320 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6321 type
= TREE_TYPE (type
);
6322 if (TREE_CODE (type
) == BITINT_TYPE
6323 && bitint_precision_kind (type
) >= bitint_prec_large
)
6325 use_operand_p use_p
;
6327 has_large_huge
= true;
6329 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
6331 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6332 the same bb and could be handled in the same loop with the
6335 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6336 && single_imm_use (s
, &use_p
, &use_stmt
)
6337 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
6339 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
6341 if (mergeable_op (use_stmt
))
6343 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
6344 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
6346 if (gimple_assign_cast_p (use_stmt
))
6348 tree lhs
= gimple_assign_lhs (use_stmt
);
6349 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6350 /* Don't merge with VIEW_CONVERT_EXPRs to
6351 huge INTEGER_TYPEs used sometimes in memcpy
6353 && (TREE_CODE (TREE_TYPE (lhs
)) != INTEGER_TYPE
6354 || (TYPE_PRECISION (TREE_TYPE (lhs
))
6355 <= MAX_FIXED_MODE_SIZE
)))
6358 else if (gimple_store_p (use_stmt
)
6359 && is_gimple_assign (use_stmt
)
6360 && !gimple_has_volatile_ops (use_stmt
)
6361 && !stmt_ends_bb_p (use_stmt
))
6364 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
6366 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6367 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
6369 rhs1
= TREE_OPERAND (rhs1
, 0);
6370 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6371 && !POINTER_TYPE_P (TREE_TYPE (rhs1
))
6372 && gimple_store_p (use_stmt
))
6375 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6376 && ((is_gimple_assign (use_stmt
)
6377 && (gimple_assign_rhs_code (use_stmt
)
6379 || gimple_code (use_stmt
) == GIMPLE_COND
)
6380 && (!gimple_store_p (use_stmt
)
6381 || (is_gimple_assign (use_stmt
)
6382 && !gimple_has_volatile_ops (use_stmt
)
6383 && !stmt_ends_bb_p (use_stmt
)))
6384 && (TREE_CODE (rhs1
) != SSA_NAME
6385 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
6387 if (is_gimple_assign (use_stmt
))
6388 switch (gimple_assign_rhs_code (use_stmt
))
6390 case TRUNC_DIV_EXPR
:
6391 case TRUNC_MOD_EXPR
:
6393 /* For division, modulo and casts to floating
6394 point, avoid representing unsigned operands
6395 using negative prec if they were sign-extended
6396 from narrower precision. */
6397 if (TYPE_UNSIGNED (TREE_TYPE (s
))
6398 && !TYPE_UNSIGNED (TREE_TYPE (rhs1
))
6399 && (TYPE_PRECISION (TREE_TYPE (s
))
6400 > TYPE_PRECISION (TREE_TYPE (rhs1
))))
6404 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6405 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6406 < bitint_prec_large
))
6408 /* Uses which use handle_operand_addr can't
6409 deal with nested casts. */
6410 if (TREE_CODE (rhs1
) == SSA_NAME
6411 && gimple_assign_cast_p
6412 (SSA_NAME_DEF_STMT (rhs1
))
6413 && has_single_use (rhs1
)
6414 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6415 == gimple_bb (SSA_NAME_DEF_STMT (s
))))
6418 case VIEW_CONVERT_EXPR
:
6420 tree lhs
= gimple_assign_lhs (use_stmt
);
6421 /* Don't merge with VIEW_CONVERT_EXPRs to
6422 non-integral types. */
6423 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
6425 /* Don't merge with VIEW_CONVERT_EXPRs to
6426 huge INTEGER_TYPEs used sometimes in memcpy
6428 if (TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
6429 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6430 > MAX_FIXED_MODE_SIZE
))
6437 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6438 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6439 < bitint_prec_large
))
6441 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6442 >= TYPE_PRECISION (TREE_TYPE (s
)))
6443 && mergeable_op (use_stmt
))
6445 /* Prevent merging a widening non-mergeable cast
6446 on result of some narrower mergeable op
6447 together with later mergeable operations. E.g.
6448 result of _BitInt(223) addition shouldn't be
6449 sign-extended to _BitInt(513) and have another
6450 _BitInt(513) added to it, as handle_plus_minus
6451 with its PHI node handling inside of handle_cast
6452 will not work correctly. An exception is if
6453 use_stmt is a store, this is handled directly
6454 in lower_mergeable_stmt. */
6455 if (TREE_CODE (rhs1
) != SSA_NAME
6456 || !has_single_use (rhs1
)
6457 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6458 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6459 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
6460 || gimple_store_p (use_stmt
))
6462 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6463 < TYPE_PRECISION (TREE_TYPE (s
)))
6464 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
6466 /* Another exception is if the widening cast is
6467 from mergeable same precision cast from something
6470 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
6471 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
6472 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
6473 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
6475 if (TREE_CODE (rhs2
) != SSA_NAME
6476 || !has_single_use (rhs2
)
6477 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
6478 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6479 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
6485 if (is_gimple_assign (SSA_NAME_DEF_STMT (s
)))
6486 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s
)))
6490 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6491 rhs1
= TREE_OPERAND (rhs1
, 0);
6492 if (TREE_CODE (rhs1
) == SSA_NAME
)
6494 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
6495 if (optimizable_arith_overflow (g
))
6503 case TRUNC_DIV_EXPR
:
6504 case TRUNC_MOD_EXPR
:
6505 case FIX_TRUNC_EXPR
:
6507 if (gimple_store_p (use_stmt
)
6508 && is_gimple_assign (use_stmt
)
6509 && !gimple_has_volatile_ops (use_stmt
)
6510 && !stmt_ends_bb_p (use_stmt
))
6512 tree lhs
= gimple_assign_lhs (use_stmt
);
6513 /* As multiply/division passes address of the lhs
6514 to library function and that assumes it can extend
6515 it to whole number of limbs, avoid merging those
6516 with bit-field stores. Don't allow it for
6517 shifts etc. either, so that the bit-field store
6518 handling doesn't have to be done everywhere. */
6519 if (TREE_CODE (lhs
) == COMPONENT_REF
6520 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6530 /* Also ignore uninitialized uses. */
6531 if (SSA_NAME_IS_DEFAULT_DEF (s
)
6532 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
6536 if (!large_huge
.m_names
)
6537 large_huge
.m_names
= BITMAP_ALLOC (NULL
);
6538 bitmap_set_bit (large_huge
.m_names
, SSA_NAME_VERSION (s
));
6539 if (has_single_use (s
))
6541 if (!large_huge
.m_single_use_names
)
6542 large_huge
.m_single_use_names
= BITMAP_ALLOC (NULL
);
6543 bitmap_set_bit (large_huge
.m_single_use_names
,
6544 SSA_NAME_VERSION (s
));
6546 if (SSA_NAME_VAR (s
)
6547 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6548 && SSA_NAME_IS_DEFAULT_DEF (s
))
6549 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6550 has_large_huge_parm_result
= true;
6552 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6553 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s
))
6554 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s
))
6555 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6557 use_operand_p use_p
;
6558 imm_use_iterator iter
;
6559 bool optimizable_load
= true;
6560 FOR_EACH_IMM_USE_FAST (use_p
, iter
, s
)
6562 gimple
*use_stmt
= USE_STMT (use_p
);
6563 if (is_gimple_debug (use_stmt
))
6565 if (gimple_code (use_stmt
) == GIMPLE_PHI
6566 || is_gimple_call (use_stmt
)
6567 || gimple_code (use_stmt
) == GIMPLE_ASM
)
6569 optimizable_load
= false;
6575 FOR_EACH_SSA_USE_OPERAND (use_p
, SSA_NAME_DEF_STMT (s
),
6578 tree s2
= USE_FROM_PTR (use_p
);
6579 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2
))
6581 optimizable_load
= false;
6586 if (optimizable_load
&& !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6588 if (!large_huge
.m_loads
)
6589 large_huge
.m_loads
= BITMAP_ALLOC (NULL
);
6590 bitmap_set_bit (large_huge
.m_loads
, SSA_NAME_VERSION (s
));
6594 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6595 into memory. Such functions could have no large/huge SSA_NAMEs. */
6596 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6598 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6599 if (is_gimple_assign (g
) && gimple_store_p (g
))
6601 tree t
= gimple_assign_rhs1 (g
);
6602 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6603 && bitint_precision_kind (TREE_TYPE (t
)) >= bitint_prec_large
)
6604 has_large_huge
= true;
6609 if (large_huge
.m_names
|| has_large_huge
)
6611 ret
= TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
6612 calculate_dominance_info (CDI_DOMINATORS
);
6614 enable_ranger (cfun
);
6615 if (large_huge
.m_loads
)
6617 basic_block entry
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
6619 bitint_dom_walker (large_huge
.m_names
,
6620 large_huge
.m_loads
).walk (entry
);
6621 bitmap_and_compl_into (large_huge
.m_names
, large_huge
.m_loads
);
6622 clear_aux_for_blocks ();
6623 BITMAP_FREE (large_huge
.m_loads
);
6625 large_huge
.m_limb_type
= build_nonstandard_integer_type (limb_prec
, 1);
6626 large_huge
.m_limb_size
6627 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge
.m_limb_type
));
6629 if (large_huge
.m_names
)
6632 = init_var_map (num_ssa_names
, NULL
, large_huge
.m_names
);
6633 coalesce_ssa_name (large_huge
.m_map
);
6634 partition_view_normal (large_huge
.m_map
);
6635 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6637 fprintf (dump_file
, "After Coalescing:\n");
6638 dump_var_map (dump_file
, large_huge
.m_map
);
6641 = XCNEWVEC (tree
, num_var_partitions (large_huge
.m_map
));
6643 if (has_large_huge_parm_result
)
6644 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6646 tree s
= ssa_name (i
);
6647 if (SSA_NAME_VAR (s
)
6648 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6649 && SSA_NAME_IS_DEFAULT_DEF (s
))
6650 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6652 int p
= var_to_partition (large_huge
.m_map
, s
);
6653 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6655 large_huge
.m_vars
[p
] = SSA_NAME_VAR (s
);
6656 mark_addressable (SSA_NAME_VAR (s
));
6660 tree atype
= NULL_TREE
;
6661 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6662 fprintf (dump_file
, "Mapping SSA_NAMEs to decls:\n");
6663 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6665 tree s
= ssa_name (i
);
6666 int p
= var_to_partition (large_huge
.m_map
, s
);
6667 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6669 if (atype
== NULL_TREE
6670 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6671 TYPE_SIZE (TREE_TYPE (s
))))
6673 unsigned HOST_WIDE_INT nelts
6674 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s
))) / limb_prec
;
6675 atype
= build_array_type_nelts (large_huge
.m_limb_type
,
6678 large_huge
.m_vars
[p
] = create_tmp_var (atype
, "bitint");
6679 mark_addressable (large_huge
.m_vars
[p
]);
6681 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6683 print_generic_expr (dump_file
, s
, TDF_SLIM
);
6684 fprintf (dump_file
, " -> ");
6685 print_generic_expr (dump_file
, large_huge
.m_vars
[p
], TDF_SLIM
);
6686 fprintf (dump_file
, "\n");
6691 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
6693 gimple_stmt_iterator prev
;
6694 for (gimple_stmt_iterator gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);
6700 gimple
*stmt
= gsi_stmt (gsi
);
6701 if (is_gimple_debug (stmt
))
6703 bitint_prec_kind kind
= bitint_prec_small
;
6705 FOR_EACH_SSA_TREE_OPERAND (t
, stmt
, iter
, SSA_OP_ALL_OPERANDS
)
6706 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6708 bitint_prec_kind this_kind
6709 = bitint_precision_kind (TREE_TYPE (t
));
6710 kind
= MAX (kind
, this_kind
);
6712 if (is_gimple_assign (stmt
) && gimple_store_p (stmt
))
6714 t
= gimple_assign_rhs1 (stmt
);
6715 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6717 bitint_prec_kind this_kind
6718 = bitint_precision_kind (TREE_TYPE (t
));
6719 kind
= MAX (kind
, this_kind
);
6722 if (is_gimple_assign (stmt
)
6723 && gimple_assign_rhs_code (stmt
) == FLOAT_EXPR
)
6725 t
= gimple_assign_rhs1 (stmt
);
6726 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6727 && TREE_CODE (t
) == INTEGER_CST
)
6729 bitint_prec_kind this_kind
6730 = bitint_precision_kind (TREE_TYPE (t
));
6731 kind
= MAX (kind
, this_kind
);
6734 if (is_gimple_call (stmt
))
6736 t
= gimple_call_lhs (stmt
);
6737 if (t
&& TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
)
6739 bitint_prec_kind this_kind
= arith_overflow_arg_kind (stmt
);
6740 kind
= MAX (kind
, this_kind
);
6741 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == BITINT_TYPE
)
6744 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t
)));
6745 kind
= MAX (kind
, this_kind
);
6749 if (kind
== bitint_prec_small
)
6751 switch (gimple_code (stmt
))
6754 /* For now. We'll need to handle some internal functions and
6755 perhaps some builtins. */
6756 if (kind
== bitint_prec_middle
)
6760 if (kind
== bitint_prec_middle
)
6766 if (gimple_clobber_p (stmt
))
6768 if (kind
>= bitint_prec_large
)
6770 if (gimple_assign_single_p (stmt
))
6771 /* No need to lower copies, loads or stores. */
6773 if (gimple_assign_cast_p (stmt
))
6775 tree lhs
= gimple_assign_lhs (stmt
);
6776 tree rhs
= gimple_assign_rhs1 (stmt
);
6777 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6778 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6779 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6780 == TYPE_PRECISION (TREE_TYPE (rhs
))))
6781 /* No need to lower casts to same precision. */
6789 if (kind
== bitint_prec_middle
)
6791 tree type
= NULL_TREE
;
6792 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6793 with the same precision and back. */
6794 unsigned int nops
= gimple_num_ops (stmt
);
6795 for (unsigned int i
= is_gimple_assign (stmt
) ? 1 : 0;
6797 if (tree op
= gimple_op (stmt
, i
))
6799 tree nop
= maybe_cast_middle_bitint (&gsi
, op
, type
);
6801 gimple_set_op (stmt
, i
, nop
);
6802 else if (COMPARISON_CLASS_P (op
))
6804 TREE_OPERAND (op
, 0)
6805 = maybe_cast_middle_bitint (&gsi
,
6806 TREE_OPERAND (op
, 0),
6808 TREE_OPERAND (op
, 1)
6809 = maybe_cast_middle_bitint (&gsi
,
6810 TREE_OPERAND (op
, 1),
6813 else if (TREE_CODE (op
) == CASE_LABEL_EXPR
)
6816 = maybe_cast_middle_bitint (&gsi
, CASE_LOW (op
),
6819 = maybe_cast_middle_bitint (&gsi
, CASE_HIGH (op
),
6823 if (tree lhs
= gimple_get_lhs (stmt
))
6824 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
6825 && (bitint_precision_kind (TREE_TYPE (lhs
))
6826 == bitint_prec_middle
))
6828 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
6829 int uns
= TYPE_UNSIGNED (TREE_TYPE (lhs
));
6830 type
= build_nonstandard_integer_type (prec
, uns
);
6831 tree lhs2
= make_ssa_name (type
);
6832 gimple_set_lhs (stmt
, lhs2
);
6833 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, lhs2
);
6834 if (stmt_ends_bb_p (stmt
))
6836 edge e
= find_fallthru_edge (gsi_bb (gsi
)->succs
);
6837 gsi_insert_on_edge_immediate (e
, g
);
6840 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
6846 if (tree lhs
= gimple_get_lhs (stmt
))
6847 if (TREE_CODE (lhs
) == SSA_NAME
)
6849 tree type
= TREE_TYPE (lhs
);
6850 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6851 type
= TREE_TYPE (type
);
6852 if (TREE_CODE (type
) == BITINT_TYPE
6853 && bitint_precision_kind (type
) >= bitint_prec_large
6854 && (large_huge
.m_names
== NULL
6855 || !bitmap_bit_p (large_huge
.m_names
,
6856 SSA_NAME_VERSION (lhs
))))
6860 large_huge
.lower_stmt (stmt
);
6863 tree atype
= NULL_TREE
;
6864 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6867 gphi
*phi
= gsi
.phi ();
6868 tree lhs
= gimple_phi_result (phi
);
6869 if (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
6870 || bitint_precision_kind (TREE_TYPE (lhs
)) < bitint_prec_large
)
6872 int p1
= var_to_partition (large_huge
.m_map
, lhs
);
6873 gcc_assert (large_huge
.m_vars
[p1
] != NULL_TREE
);
6874 tree v1
= large_huge
.m_vars
[p1
];
6875 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
6877 tree arg
= gimple_phi_arg_def (phi
, i
);
6878 edge e
= gimple_phi_arg_edge (phi
, i
);
6880 switch (TREE_CODE (arg
))
6883 if (integer_zerop (arg
) && VAR_P (v1
))
6885 tree zero
= build_zero_cst (TREE_TYPE (v1
));
6886 g
= gimple_build_assign (v1
, zero
);
6887 gsi_insert_on_edge (e
, g
);
6888 edge_insertions
= true;
6892 unsigned int min_prec
, prec
, rem
;
6894 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
6895 rem
= prec
% (2 * limb_prec
);
6896 min_prec
= bitint_min_cst_precision (arg
, ext
);
6897 if (min_prec
> prec
- rem
- 2 * limb_prec
6898 && min_prec
> (unsigned) limb_prec
)
6899 /* Constant which has enough significant bits that it
6900 isn't worth trying to save .rodata space by extending
6901 from smaller number. */
6904 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
6907 else if (min_prec
== prec
)
6908 c
= tree_output_constant_def (arg
);
6909 else if (min_prec
== (unsigned) limb_prec
)
6910 c
= fold_convert (large_huge
.m_limb_type
, arg
);
6913 tree ctype
= build_bitint_type (min_prec
, 1);
6914 c
= tree_output_constant_def (fold_convert (ctype
, arg
));
6918 if (VAR_P (v1
) && min_prec
== prec
)
6920 tree v2
= build1 (VIEW_CONVERT_EXPR
,
6922 g
= gimple_build_assign (v1
, v2
);
6923 gsi_insert_on_edge (e
, g
);
6924 edge_insertions
= true;
6927 if (TREE_CODE (TREE_TYPE (c
)) == INTEGER_TYPE
)
6928 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6933 unsigned HOST_WIDE_INT nelts
6934 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c
)))
6937 = build_array_type_nelts (large_huge
.m_limb_type
,
6939 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6941 build1 (VIEW_CONVERT_EXPR
,
6944 gsi_insert_on_edge (e
, g
);
6948 unsigned HOST_WIDE_INT nelts
6949 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1
)))
6950 - min_prec
) / limb_prec
;
6952 = build_array_type_nelts (large_huge
.m_limb_type
,
6954 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6957 off
= fold_convert (ptype
,
6958 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6960 off
= build_zero_cst (ptype
);
6961 tree vd
= build2 (MEM_REF
, vtype
,
6962 build_fold_addr_expr (v1
), off
);
6963 g
= gimple_build_assign (vd
, build_zero_cst (vtype
));
6970 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6972 = fold_convert (ptype
,
6973 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6974 vd
= build2 (MEM_REF
, large_huge
.m_limb_type
,
6975 build_fold_addr_expr (v1
), off
);
6977 vd
= build_fold_addr_expr (vd
);
6978 unsigned HOST_WIDE_INT nbytes
6979 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1
)));
6982 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6983 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
6984 g
= gimple_build_call (fn
, 3, vd
,
6985 integer_minus_one_node
,
6986 build_int_cst (sizetype
,
6989 gsi_insert_on_edge (e
, g
);
6990 edge_insertions
= true;
6995 if (gimple_code (SSA_NAME_DEF_STMT (arg
)) == GIMPLE_NOP
)
6997 if (large_huge
.m_names
== NULL
6998 || !bitmap_bit_p (large_huge
.m_names
,
6999 SSA_NAME_VERSION (arg
)))
7002 int p2
= var_to_partition (large_huge
.m_map
, arg
);
7005 gcc_assert (large_huge
.m_vars
[p2
] != NULL_TREE
);
7006 tree v2
= large_huge
.m_vars
[p2
];
7007 if (VAR_P (v1
) && VAR_P (v2
))
7008 g
= gimple_build_assign (v1
, v2
);
7009 else if (VAR_P (v1
))
7010 g
= gimple_build_assign (v1
, build1 (VIEW_CONVERT_EXPR
,
7011 TREE_TYPE (v1
), v2
));
7012 else if (VAR_P (v2
))
7013 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
7014 TREE_TYPE (v2
), v1
), v2
);
7017 if (atype
== NULL_TREE
7018 || !tree_int_cst_equal (TYPE_SIZE (atype
),
7019 TYPE_SIZE (TREE_TYPE (lhs
))))
7021 unsigned HOST_WIDE_INT nelts
7022 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))
7025 = build_array_type_nelts (large_huge
.m_limb_type
,
7028 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
7030 build1 (VIEW_CONVERT_EXPR
,
7033 gsi_insert_on_edge (e
, g
);
7034 edge_insertions
= true;
7041 if (large_huge
.m_names
|| has_large_huge
)
7044 for (i
= 0; i
< num_ssa_names
; ++i
)
7046 tree s
= ssa_name (i
);
7049 tree type
= TREE_TYPE (s
);
7050 if (TREE_CODE (type
) == COMPLEX_TYPE
)
7051 type
= TREE_TYPE (type
);
7052 if (TREE_CODE (type
) == BITINT_TYPE
7053 && bitint_precision_kind (type
) >= bitint_prec_large
)
7055 if (large_huge
.m_preserved
7056 && bitmap_bit_p (large_huge
.m_preserved
,
7057 SSA_NAME_VERSION (s
)))
7059 gimple
*g
= SSA_NAME_DEF_STMT (s
);
7060 if (gimple_code (g
) == GIMPLE_NOP
)
7062 if (SSA_NAME_VAR (s
))
7063 set_ssa_default_def (cfun
, SSA_NAME_VAR (s
), NULL_TREE
);
7064 release_ssa_name (s
);
7067 if (gimple_bb (g
) == NULL
)
7069 release_ssa_name (s
);
7072 if (gimple_code (g
) != GIMPLE_ASM
)
7074 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7075 bool save_vta
= flag_var_tracking_assignments
;
7076 flag_var_tracking_assignments
= false;
7077 gsi_remove (&gsi
, true);
7078 flag_var_tracking_assignments
= save_vta
;
7081 nop
= gimple_build_nop ();
7082 SSA_NAME_DEF_STMT (s
) = nop
;
7083 release_ssa_name (s
);
7087 disable_ranger (cfun
);
7090 if (edge_insertions
)
7091 gsi_commit_edge_inserts ();
7098 const pass_data pass_data_lower_bitint
=
7100 GIMPLE_PASS
, /* type */
7101 "bitintlower", /* name */
7102 OPTGROUP_NONE
, /* optinfo_flags */
7103 TV_NONE
, /* tv_id */
7104 PROP_ssa
, /* properties_required */
7105 PROP_gimple_lbitint
, /* properties_provided */
7106 0, /* properties_destroyed */
7107 0, /* todo_flags_start */
7108 0, /* todo_flags_finish */
7111 class pass_lower_bitint
: public gimple_opt_pass
7114 pass_lower_bitint (gcc::context
*ctxt
)
7115 : gimple_opt_pass (pass_data_lower_bitint
, ctxt
)
7118 /* opt_pass methods: */
7119 opt_pass
* clone () final override
{ return new pass_lower_bitint (m_ctxt
); }
7120 unsigned int execute (function
*) final override
7122 return gimple_lower_bitint ();
7125 }; // class pass_lower_bitint
7130 make_pass_lower_bitint (gcc::context
*ctxt
)
7132 return new pass_lower_bitint (ctxt
);
7138 const pass_data pass_data_lower_bitint_O0
=
7140 GIMPLE_PASS
, /* type */
7141 "bitintlower0", /* name */
7142 OPTGROUP_NONE
, /* optinfo_flags */
7143 TV_NONE
, /* tv_id */
7144 PROP_cfg
, /* properties_required */
7145 PROP_gimple_lbitint
, /* properties_provided */
7146 0, /* properties_destroyed */
7147 0, /* todo_flags_start */
7148 0, /* todo_flags_finish */
7151 class pass_lower_bitint_O0
: public gimple_opt_pass
7154 pass_lower_bitint_O0 (gcc::context
*ctxt
)
7155 : gimple_opt_pass (pass_data_lower_bitint_O0
, ctxt
)
7158 /* opt_pass methods: */
7159 bool gate (function
*fun
) final override
7161 /* With errors, normal optimization passes are not run. If we don't
7162 lower bitint operations at all, rtl expansion will abort. */
7163 return !(fun
->curr_properties
& PROP_gimple_lbitint
);
7166 unsigned int execute (function
*) final override
7168 return gimple_lower_bitint ();
7171 }; // class pass_lower_bitint_O0
7176 make_pass_lower_bitint_O0 (gcc::context
*ctxt
)
7178 return new pass_lower_bitint_O0 (ctxt
);