1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
31 #include "fold-const.h"
33 #include "gimple-iterator.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
57 #include "stor-layout.h"
58 #include "gimple-lower-bitint.h"
60 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
61 target hook says it is a single limb, middle _BitInt which per ABI
62 does not, but there is some INTEGER_TYPE in which arithmetics can be
63 performed (operations on such _BitInt are lowered to casts to that
64 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
65 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
66 ones), large _BitInt which should by straight line code and
67 finally huge _BitInt which should be handled by loops over the limbs. */
/* Category of a _BitInt precision, per the comment above: small (single
   limb), middle (some INTEGER_TYPE can hold it), large (straight-line
   lowering) and huge (loops over the limbs).
   NOTE(review): the enumerator list and closing brace were lost in the
   text extraction -- code below references bitint_prec_small,
   bitint_prec_middle, bitint_prec_large and bitint_prec_huge.  */
69 enum bitint_prec_kind
{
76 /* Caches to speed up bitint_precision_kind. */
/* Lazily computed threshold precisions; 0 means "not computed yet"
   (bitint_precision_kind below fills them in on first use).
   NOTE(review): the declaration of limb_prec, used throughout this
   file, was also dropped by the extraction.  */
78 static int small_max_prec
, mid_min_prec
, large_min_prec
, huge_min_prec
;
81 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
83 static bitint_prec_kind
84 bitint_precision_kind (int prec
)
86 if (prec
<= small_max_prec
)
87 return bitint_prec_small
;
88 if (huge_min_prec
&& prec
>= huge_min_prec
)
89 return bitint_prec_huge
;
90 if (large_min_prec
&& prec
>= large_min_prec
)
91 return bitint_prec_large
;
92 if (mid_min_prec
&& prec
>= mid_min_prec
)
93 return bitint_prec_middle
;
95 struct bitint_info info
;
96 bool ok
= targetm
.c
.bitint_type_info (prec
, &info
);
98 scalar_int_mode limb_mode
= as_a
<scalar_int_mode
> (info
.limb_mode
);
99 if (prec
<= GET_MODE_PRECISION (limb_mode
))
101 small_max_prec
= prec
;
102 return bitint_prec_small
;
105 && GET_MODE_PRECISION (limb_mode
) < MAX_FIXED_MODE_SIZE
)
106 large_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
108 limb_prec
= GET_MODE_PRECISION (limb_mode
);
111 if (4 * limb_prec
>= MAX_FIXED_MODE_SIZE
)
112 huge_min_prec
= 4 * limb_prec
;
114 huge_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
116 if (prec
<= MAX_FIXED_MODE_SIZE
)
118 if (!mid_min_prec
|| prec
< mid_min_prec
)
120 return bitint_prec_middle
;
122 if (large_min_prec
&& prec
<= large_min_prec
)
123 return bitint_prec_large
;
124 return bitint_prec_huge
;
127 /* Same for a TYPE. */
129 static bitint_prec_kind
130 bitint_precision_kind (tree type
)
132 return bitint_precision_kind (TYPE_PRECISION (type
));
135 /* Return minimum precision needed to describe INTEGER_CST
136 CST. All bits above that precision up to precision of
137 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
138 if EXT is set to -1. */
141 bitint_min_cst_precision (tree cst
, int &ext
)
143 ext
= tree_int_cst_sgn (cst
) < 0 ? -1 : 0;
144 wide_int w
= wi::to_wide (cst
);
145 unsigned min_prec
= wi::min_precision (w
, TYPE_SIGN (TREE_TYPE (cst
)));
146 /* For signed values, we don't need to count the sign bit,
147 we'll use constant 0 or -1 for the upper bits. */
148 if (!TYPE_UNSIGNED (TREE_TYPE (cst
)))
152 /* For unsigned values, also try signed min_precision
153 in case the constant has lots of most significant bits set. */
154 unsigned min_prec2
= wi::min_precision (w
, SIGNED
) - 1;
155 if (min_prec2
< min_prec
)
166 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
167 cached in TYPE and return it. */
170 maybe_cast_middle_bitint (gimple_stmt_iterator
*gsi
, tree op
, tree
&type
)
173 || TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
174 || bitint_precision_kind (TREE_TYPE (op
)) != bitint_prec_middle
)
177 int prec
= TYPE_PRECISION (TREE_TYPE (op
));
178 int uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
179 if (type
== NULL_TREE
180 || TYPE_PRECISION (type
) != prec
181 || TYPE_UNSIGNED (type
) != uns
)
182 type
= build_nonstandard_integer_type (prec
, uns
);
184 if (TREE_CODE (op
) != SSA_NAME
)
186 tree nop
= fold_convert (type
, op
);
187 if (is_gimple_val (nop
))
191 tree nop
= make_ssa_name (type
);
192 gimple
*g
= gimple_build_assign (nop
, NOP_EXPR
, op
);
193 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
197 /* Return true if STMT can be handled in a loop from least to most
198 significant limb together with its dependencies. */
/* NOTE(review): this extraction is lossy -- the function's return type,
   braces, the trivially-mergeable case labels of the switch (before the
   LSHIFT_EXPR shift-count check) and the final return statements were
   dropped; restore them from the original file before compiling.  */
201 mergeable_op (gimple
*stmt
)
203 if (!is_gimple_assign (stmt
))
/* Dispatch on the assignment's rhs code.  */
205 switch (gimple_assign_rhs_code (stmt
))
/* Shifts are mergeable only by less than one limb.  */
220 tree cnt
= gimple_assign_rhs2 (stmt
);
221 if (tree_fits_uhwi_p (cnt
)
222 && tree_to_uhwi (cnt
) < (unsigned HOST_WIDE_INT
) limb_prec
)
227 case VIEW_CONVERT_EXPR
:
229 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
230 tree rhs_type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
/* Conversions between large/huge _BitInts with the same limb count.  */
231 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
232 && TREE_CODE (lhs_type
) == BITINT_TYPE
233 && TREE_CODE (rhs_type
) == BITINT_TYPE
234 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
235 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
236 && (CEIL (TYPE_PRECISION (lhs_type
), limb_prec
)
237 == CEIL (TYPE_PRECISION (rhs_type
), limb_prec
)))
239 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
))
241 if ((unsigned) TYPE_PRECISION (lhs_type
) % (2 * limb_prec
) != 0)
243 if (bitint_precision_kind (lhs_type
) == bitint_prec_large
)
254 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
255 _Complex large/huge _BitInt lhs which has at most two immediate uses,
256 at most one use in REALPART_EXPR stmt in the same bb and exactly one
257 IMAGPART_EXPR use in the same bb with a single use which casts it to
258 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
259 return 2. Such cases (most common uses of those builtins) can be
260 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
261 of REALPART_EXPR as not needed to be backed up by a stack variable.
262 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
/* NOTE(review): lossy extraction -- the return-type line, braces, the
   early `return 0' paths, the is_ubsan = true assignment after the
   UBSAN case labels and the tail of the function (the distance check
   loop body and the final return) were dropped; restore from the
   original file before compiling.  */
265 optimizable_arith_overflow (gimple
*stmt
)
267 bool is_ubsan
= false;
268 if (!is_gimple_call (stmt
) || !gimple_call_internal_p (stmt
))
/* Only the arithmetic-overflow internal calls qualify.  */
270 switch (gimple_call_internal_fn (stmt
))
272 case IFN_ADD_OVERFLOW
:
273 case IFN_SUB_OVERFLOW
:
274 case IFN_MUL_OVERFLOW
:
276 case IFN_UBSAN_CHECK_ADD
:
277 case IFN_UBSAN_CHECK_SUB
:
278 case IFN_UBSAN_CHECK_MUL
:
284 tree lhs
= gimple_call_lhs (stmt
);
287 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
/* For UBSAN checks the lhs is the value itself, otherwise the
   _Complex pair whose element type must be large/huge _BitInt.  */
289 tree type
= is_ubsan
? TREE_TYPE (lhs
) : TREE_TYPE (TREE_TYPE (lhs
));
290 if (TREE_CODE (type
) != BITINT_TYPE
291 || bitint_precision_kind (type
) < bitint_prec_large
)
/* UBSAN case: the single use must be a store in the same bb.  */
298 if (!single_imm_use (lhs
, &use_p
, &use_stmt
)
299 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
300 || !gimple_store_p (use_stmt
)
301 || !is_gimple_assign (use_stmt
)
302 || gimple_has_volatile_ops (use_stmt
)
303 || stmt_ends_bb_p (use_stmt
))
/* Otherwise scan the immediate uses of the _Complex lhs.  */
311 gimple
*realpart
= NULL
, *cast
= NULL
;
312 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
314 gimple
*g
= USE_STMT (use_p
);
315 if (is_gimple_debug (g
))
317 if (!is_gimple_assign (g
) || gimple_bb (g
) != gimple_bb (stmt
))
319 if (gimple_assign_rhs_code (g
) == REALPART_EXPR
)
326 else if (gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
/* The IMAGPART_EXPR result must have a single cast use to a
   non-_BitInt integral type in the same bb.  */
332 use_operand_p use2_p
;
334 tree lhs2
= gimple_assign_lhs (g
);
335 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2
))
337 if (!single_imm_use (lhs2
, &use2_p
, &use_stmt
)
338 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
339 || !gimple_assign_cast_p (use_stmt
))
342 lhs2
= gimple_assign_lhs (use_stmt
);
343 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2
))
344 || TREE_CODE (TREE_TYPE (lhs2
)) == BITINT_TYPE
)
355 /* Punt if the cast stmt appears before realpart stmt, because
356 if both appear, the lowering wants to emit all the code
357 at the location of realpart stmt. */
358 gimple_stmt_iterator gsi
= gsi_for_stmt (realpart
);
359 unsigned int cnt
= 0;
362 gsi_prev_nondebug (&gsi
);
363 if (gsi_end_p (gsi
) || gsi_stmt (gsi
) == cast
)
365 if (gsi_stmt (gsi
) == stmt
)
367 /* If realpart is too far from stmt, punt as well.
368 Usually it will appear right after it. */
377 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
378 comparing large/huge _BitInt types, return the comparison code and if
379 non-NULL fill in the comparison operands to *POP1 and *POP2. */
382 comparison_op (gimple
*stmt
, tree
*pop1
, tree
*pop2
)
384 tree op1
= NULL_TREE
, op2
= NULL_TREE
;
385 tree_code code
= ERROR_MARK
;
386 if (gimple_code (stmt
) == GIMPLE_COND
)
388 code
= gimple_cond_code (stmt
);
389 op1
= gimple_cond_lhs (stmt
);
390 op2
= gimple_cond_rhs (stmt
);
392 else if (is_gimple_assign (stmt
))
394 code
= gimple_assign_rhs_code (stmt
);
395 op1
= gimple_assign_rhs1 (stmt
);
396 if (TREE_CODE_CLASS (code
) == tcc_comparison
397 || TREE_CODE_CLASS (code
) == tcc_binary
)
398 op2
= gimple_assign_rhs2 (stmt
);
400 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
402 tree type
= TREE_TYPE (op1
);
403 if (TREE_CODE (type
) != BITINT_TYPE
404 || bitint_precision_kind (type
) < bitint_prec_large
)
414 /* Class used during large/huge _BitInt lowering containing all the
415 state for the methods. */
/* NOTE(review): lossy extraction -- the constructor header line, the
   closing `};' of the struct and the declarations of several members
   that are initialized in the ctor and used by the methods (m_names,
   m_loads, m_preserved, m_map, m_vars, m_limb_type, m_loc, m_lhs,
   m_first, m_var_msb, m_upwards, m_data and others) were dropped;
   restore them from the original file before compiling.  */
417 struct bitint_large_huge
420 : m_names (NULL
), m_loads (NULL
), m_preserved (NULL
),
421 m_single_use_names (NULL
), m_map (NULL
), m_vars (NULL
),
422 m_limb_type (NULL_TREE
), m_data (vNULL
),
423 m_returns_twice_calls (vNULL
) {}
425 ~bitint_large_huge ();
/* Helpers that emit the lowered limb-wise code; see the definitions
   below for their contracts.  */
427 void insert_before (gimple
*);
428 tree
limb_access_type (tree
, tree
);
429 tree
limb_access (tree
, tree
, tree
, bool);
430 tree
build_bit_field_ref (tree
, tree
, unsigned HOST_WIDE_INT
,
431 unsigned HOST_WIDE_INT
);
432 void if_then (gimple
*, profile_probability
, edge
&, edge
&);
433 void if_then_else (gimple
*, profile_probability
, edge
&, edge
&);
434 void if_then_if_then_else (gimple
*g
, gimple
*,
435 profile_probability
, profile_probability
,
436 edge
&, edge
&, edge
&);
437 tree
handle_operand (tree
, tree
);
438 tree
prepare_data_in_out (tree
, tree
, tree
*, tree
= NULL_TREE
);
439 tree
add_cast (tree
, tree
);
440 tree
handle_plus_minus (tree_code
, tree
, tree
, tree
);
441 tree
handle_lshift (tree
, tree
, tree
);
442 tree
handle_cast (tree
, tree
, tree
);
443 tree
handle_bit_field_ref (tree
, tree
);
444 tree
handle_load (gimple
*, tree
);
445 tree
handle_stmt (gimple
*, tree
);
446 tree
handle_operand_addr (tree
, gimple
*, int *, int *);
447 tree
create_loop (tree
, tree
*);
448 tree
lower_mergeable_stmt (gimple
*, tree_code
&, tree
, tree
);
449 tree
lower_comparison_stmt (gimple
*, tree_code
&, tree
, tree
);
450 void lower_shift_stmt (tree
, gimple
*);
451 void lower_muldiv_stmt (tree
, gimple
*);
452 void lower_float_conv_stmt (tree
, gimple
*);
453 tree
arith_overflow_extract_bits (unsigned int, unsigned int, tree
,
455 void finish_arith_overflow (tree
, tree
, tree
, tree
, tree
, tree
, gimple
*,
457 void lower_addsub_overflow (tree
, gimple
*);
458 void lower_mul_overflow (tree
, gimple
*);
459 void lower_cplxpart_stmt (tree
, gimple
*);
460 void lower_complexexpr_stmt (gimple
*);
461 void lower_bit_query (gimple
*);
462 void lower_call (tree
, gimple
*);
463 void lower_asm (gimple
*);
464 void lower_stmt (gimple
*);
466 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
467 merged with their uses. */
469 /* Subset of those for lhs of load statements. These will be
470 cleared in m_names if the loads will be mergeable with all
473 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
474 to later passes (arguments or return values of calls). */
476 /* Subset of m_names which have a single use. As the lowering
477 can replace various original statements with their lowered
478 form even before it is done iterating over all basic blocks,
479 testing has_single_use for the purpose of emitting clobbers
480 doesn't work properly. */
481 bitmap m_single_use_names
;
482 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
485 /* Mapping of the partitions to corresponding decls. */
487 /* Unsigned integer type with limb precision. */
489 /* Its TYPE_SIZE_UNIT. */
490 unsigned HOST_WIDE_INT m_limb_size
;
491 /* Location of a gimple stmt which is being currently lowered. */
493 /* Current stmt iterator where code is being lowered currently. */
494 gimple_stmt_iterator m_gsi
;
495 /* Statement after which any clobbers should be added if non-NULL. */
496 gimple
*m_after_stmt
;
497 /* Set when creating loops to the loop header bb and its preheader. */
498 basic_block m_bb
, m_preheader_bb
;
499 /* Stmt iterator after which initialization statements should be emitted. */
500 gimple_stmt_iterator m_init_gsi
;
501 /* Decl into which a mergeable statement stores result. */
503 /* handle_operand/handle_stmt can be invoked in various ways.
505 lower_mergeable_stmt for large _BitInt calls those with constant
506 idx only, expanding to straight line code, for huge _BitInt
507 emits a loop from least significant limb upwards, where each loop
508 iteration handles 2 limbs, plus there can be up to one full limb
509 and one partial limb processed after the loop, where handle_operand
510 and/or handle_stmt are called with constant idx. m_upwards_2limb
511 is set for this case, false otherwise. m_upwards is true if it
512 is either large or huge _BitInt handled by lower_mergeable_stmt,
513 i.e. indexes always increase.
515 Another way is used by lower_comparison_stmt, which walks limbs
516 from most significant to least significant, partial limb if any
517 processed first with constant idx and then loop processing a single
518 limb per iteration with non-constant idx.
520 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
521 destination limbs are processed from most significant to least
522 significant or for RSHIFT_EXPR the other way around, in loops or
523 straight line code, but idx usually is non-constant (so from
524 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
525 handling there can access even partial limbs using non-constant
526 idx (then m_var_msb should be true, for all the other cases
527 including lower_mergeable_stmt/lower_comparison_stmt that is
528 not the case and so m_var_msb should be false.
530 m_first should be set the first time handle_operand/handle_stmt
531 is called and clear when it is called for some other limb with
532 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
533 or statement (e.g. +/-/<< with < limb_prec constant) needs some
534 state between the different calls, when m_first is true it should
535 push some trees to m_data vector and also make sure m_data_cnt is
536 incremented by how many trees were pushed, and when m_first is
537 false, it can use the m_data[m_data_cnt] etc. data or update them,
538 just needs to bump m_data_cnt by the same amount as when it was
539 called with m_first set. The toplevel calls to
540 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
541 m_data vector when setting m_first to true.
543 m_cast_conditional and m_bitfld_load are used when handling a
544 bit-field load inside of a widening cast. handle_cast sometimes
545 needs to do runtime comparisons and handle_operand only conditionally
546 or even in two separate conditional blocks for one idx (once with
547 constant index after comparing the runtime one for equality with the
548 constant). In these cases, m_cast_conditional is set to true and
549 the bit-field load then communicates its m_data_cnt to handle_cast
550 using m_bitfld_load. */
553 unsigned m_upwards_2limb
;
555 bool m_cast_conditional
;
556 unsigned m_bitfld_load
;
558 unsigned int m_data_cnt
;
559 vec
<gimple
*> m_returns_twice_calls
;
562 bitint_large_huge::~bitint_large_huge ()
564 BITMAP_FREE (m_names
);
565 BITMAP_FREE (m_loads
);
566 BITMAP_FREE (m_preserved
);
567 BITMAP_FREE (m_single_use_names
);
569 delete_var_map (m_map
);
572 m_returns_twice_calls
.release ();
575 /* Insert gimple statement G before current location
576 and set its gimple_location. */
579 bitint_large_huge::insert_before (gimple
*g
)
581 gimple_set_location (g
, m_loc
);
582 gsi_insert_before (&m_gsi
, g
, GSI_SAME_STMT
);
585 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
586 This is normally m_limb_type, except for a partial most
587 significant limb if any. */
590 bitint_large_huge::limb_access_type (tree type
, tree idx
)
592 if (type
== NULL_TREE
)
594 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
595 unsigned int prec
= TYPE_PRECISION (type
);
596 gcc_assert (i
* limb_prec
< prec
);
597 if ((i
+ 1) * limb_prec
<= prec
)
600 return build_nonstandard_integer_type (prec
% limb_prec
,
601 TYPE_UNSIGNED (type
));
604 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
605 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
608 bitint_large_huge::limb_access (tree type
, tree var
, tree idx
, bool write_p
)
610 tree atype
= (tree_fits_uhwi_p (idx
)
611 ? limb_access_type (type
, idx
) : m_limb_type
);
612 tree ltype
= m_limb_type
;
613 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (var
));
614 if (as
!= TYPE_ADDR_SPACE (ltype
))
615 ltype
= build_qualified_type (ltype
, TYPE_QUALS (ltype
)
616 | ENCODE_QUAL_ADDR_SPACE (as
));
618 if (DECL_P (var
) && tree_fits_uhwi_p (idx
))
620 tree ptype
= build_pointer_type (strip_array_types (TREE_TYPE (var
)));
621 unsigned HOST_WIDE_INT off
= tree_to_uhwi (idx
) * m_limb_size
;
622 ret
= build2 (MEM_REF
, ltype
,
623 build_fold_addr_expr (var
),
624 build_int_cst (ptype
, off
));
625 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
626 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
628 else if (TREE_CODE (var
) == MEM_REF
&& tree_fits_uhwi_p (idx
))
631 = build2 (MEM_REF
, ltype
, unshare_expr (TREE_OPERAND (var
, 0)),
632 size_binop (PLUS_EXPR
, TREE_OPERAND (var
, 1),
633 build_int_cst (TREE_TYPE (TREE_OPERAND (var
, 1)),
636 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
637 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
638 TREE_THIS_NOTRAP (ret
) = TREE_THIS_NOTRAP (var
);
642 var
= unshare_expr (var
);
643 if (TREE_CODE (TREE_TYPE (var
)) != ARRAY_TYPE
644 || !useless_type_conversion_p (m_limb_type
,
645 TREE_TYPE (TREE_TYPE (var
))))
647 unsigned HOST_WIDE_INT nelts
648 = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var
))), limb_prec
);
649 tree atype
= build_array_type_nelts (ltype
, nelts
);
650 var
= build1 (VIEW_CONVERT_EXPR
, atype
, var
);
652 ret
= build4 (ARRAY_REF
, ltype
, var
, idx
, NULL_TREE
, NULL_TREE
);
654 if (!write_p
&& !useless_type_conversion_p (atype
, m_limb_type
))
656 gimple
*g
= gimple_build_assign (make_ssa_name (m_limb_type
), ret
);
658 ret
= gimple_assign_lhs (g
);
659 ret
= build1 (NOP_EXPR
, atype
, ret
);
664 /* Build a BIT_FIELD_REF to access BITSIZE bits with FTYPE type at
665 offset BITPOS inside of OBJ. */
668 bitint_large_huge::build_bit_field_ref (tree ftype
, tree obj
,
669 unsigned HOST_WIDE_INT bitsize
,
670 unsigned HOST_WIDE_INT bitpos
)
672 if (INTEGRAL_TYPE_P (TREE_TYPE (obj
))
673 && !type_has_mode_precision_p (TREE_TYPE (obj
)))
675 unsigned HOST_WIDE_INT nelts
676 = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))), limb_prec
);
677 tree ltype
= m_limb_type
;
678 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (obj
));
679 if (as
!= TYPE_ADDR_SPACE (ltype
))
680 ltype
= build_qualified_type (ltype
, TYPE_QUALS (ltype
)
681 | ENCODE_QUAL_ADDR_SPACE (as
));
682 tree atype
= build_array_type_nelts (ltype
, nelts
);
683 obj
= build1 (VIEW_CONVERT_EXPR
, atype
, obj
);
685 return build3 (BIT_FIELD_REF
, ftype
, obj
, bitsize_int (bitsize
),
686 bitsize_int (bitpos
));
689 /* Emit a half diamond,
698 or if (COND) new_bb1;
699 PROB is the probability that the condition is true.
700 Updates m_gsi to start of new_bb1.
701 Sets EDGE_TRUE to edge from new_bb1 to successor and
702 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
705 bitint_large_huge::if_then (gimple
*cond
, profile_probability prob
,
706 edge
&edge_true
, edge
&edge_false
)
708 insert_before (cond
);
709 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
710 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
711 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
712 e1
->flags
= EDGE_TRUE_VALUE
;
713 e1
->probability
= prob
;
714 e3
->probability
= prob
.invert ();
715 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
718 m_gsi
= gsi_after_labels (e1
->dest
);
721 /* Emit a full diamond,
730 or if (COND) new_bb2; else new_bb1;
731 PROB is the probability that the condition is true.
732 Updates m_gsi to start of new_bb2.
733 Sets EDGE_TRUE to edge from new_bb1 to successor and
734 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
737 bitint_large_huge::if_then_else (gimple
*cond
, profile_probability prob
,
738 edge
&edge_true
, edge
&edge_false
)
740 insert_before (cond
);
741 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
742 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
743 basic_block bb
= create_empty_bb (e1
->dest
);
744 add_bb_to_loop (bb
, e1
->dest
->loop_father
);
745 edge e3
= make_edge (e1
->src
, bb
, EDGE_TRUE_VALUE
);
746 e1
->flags
= EDGE_FALSE_VALUE
;
747 e3
->probability
= prob
;
748 e1
->probability
= prob
.invert ();
749 bb
->count
= e1
->src
->count
.apply_probability (prob
);
750 set_immediate_dominator (CDI_DOMINATORS
, bb
, e1
->src
);
751 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
752 edge_true
= make_single_succ_edge (bb
, e2
->dest
, EDGE_FALLTHRU
);
754 m_gsi
= gsi_after_labels (bb
);
757 /* Emit a half diamond with full diamond in it
771 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
772 PROB1 is the probability that the condition 1 is true.
773 PROB2 is the probability that the condition 2 is true.
774 Updates m_gsi to start of new_bb1.
775 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
776 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
777 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
778 If COND2 is NULL, this is equivalent to
779 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
780 EDGE_TRUE_TRUE = NULL; */
783 bitint_large_huge::if_then_if_then_else (gimple
*cond1
, gimple
*cond2
,
784 profile_probability prob1
,
785 profile_probability prob2
,
786 edge
&edge_true_true
,
787 edge
&edge_true_false
,
790 edge e2
, e3
, e4
= NULL
;
791 if_then (cond1
, prob1
, e2
, e3
);
794 edge_true_true
= NULL
;
795 edge_true_false
= e2
;
799 insert_before (cond2
);
800 e2
= split_block (gsi_bb (m_gsi
), cond2
);
801 basic_block bb
= create_empty_bb (e2
->dest
);
802 add_bb_to_loop (bb
, e2
->dest
->loop_father
);
803 e4
= make_edge (e2
->src
, bb
, EDGE_TRUE_VALUE
);
804 set_immediate_dominator (CDI_DOMINATORS
, bb
, e2
->src
);
805 e4
->probability
= prob2
;
806 e2
->flags
= EDGE_FALSE_VALUE
;
807 e2
->probability
= prob2
.invert ();
808 bb
->count
= e2
->src
->count
.apply_probability (prob2
);
809 e4
= make_single_succ_edge (bb
, e3
->dest
, EDGE_FALLTHRU
);
810 e2
= find_edge (e2
->dest
, e3
->dest
);
812 edge_true_false
= e2
;
814 m_gsi
= gsi_after_labels (e2
->src
);
817 /* Emit code to access limb IDX from OP. */
/* NOTE(review): lossy extraction -- the return type, braces, the case
   labels of the switch (SSA_NAME and INTEGER_CST per the code below),
   several short statements and the function's terminating lines were
   dropped; restore them from the original file before compiling.  */
820 bitint_large_huge::handle_operand (tree op
, tree idx
)
822 switch (TREE_CODE (op
))
826 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
828 if (SSA_NAME_IS_DEFAULT_DEF (op
))
/* Default def SSA_NAME: materialize an uninitialized limb var once
   (m_first) and reuse it from m_data afterwards.  */
832 tree v
= create_tmp_reg (m_limb_type
);
833 if (SSA_NAME_VAR (op
) && VAR_P (SSA_NAME_VAR (op
)))
835 DECL_NAME (v
) = DECL_NAME (SSA_NAME_VAR (op
));
836 DECL_SOURCE_LOCATION (v
)
837 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op
));
839 v
= get_or_create_ssa_default_def (cfun
, v
);
840 m_data
.safe_push (v
);
842 tree ret
= m_data
[m_data_cnt
];
844 if (tree_fits_uhwi_p (idx
))
846 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
847 ret
= add_cast (type
, ret
);
/* Mergeable defining statement: recurse into it at this limb index,
   temporarily switching m_loc to its location.  */
851 location_t loc_save
= m_loc
;
852 m_loc
= gimple_location (SSA_NAME_DEF_STMT (op
));
853 tree ret
= handle_stmt (SSA_NAME_DEF_STMT (op
), idx
);
/* Non-mergeable SSA_NAME: read the limb from the partition's
   backing variable, emitting a clobber after m_after_stmt for
   single-use names.  */
860 p
= var_to_partition (m_map
, op
);
861 gcc_assert (m_vars
[p
] != NULL_TREE
);
862 t
= limb_access (TREE_TYPE (op
), m_vars
[p
], idx
, false);
863 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
865 t
= gimple_assign_lhs (g
);
867 && m_single_use_names
868 && m_vars
[p
] != m_lhs
870 && bitmap_bit_p (m_single_use_names
, SSA_NAME_VERSION (op
)))
872 tree clobber
= build_clobber (TREE_TYPE (m_vars
[p
]),
873 CLOBBER_STORAGE_END
);
874 g
= gimple_build_assign (m_vars
[p
], clobber
);
875 gimple_stmt_iterator gsi
= gsi_for_stmt (m_after_stmt
);
876 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
/* INTEGER_CST with constant limb index: extract that limb of the
   constant directly.  */
880 if (tree_fits_uhwi_p (idx
))
882 tree c
, type
= limb_access_type (TREE_TYPE (op
), idx
);
883 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
886 m_data
.safe_push (NULL_TREE
);
887 m_data
.safe_push (NULL_TREE
);
889 if (limb_prec
!= HOST_BITS_PER_WIDE_INT
)
891 wide_int w
= wi::rshift (wi::to_wide (op
), i
* limb_prec
,
892 TYPE_SIGN (TREE_TYPE (op
)));
893 c
= wide_int_to_tree (type
,
894 wide_int::from (w
, TYPE_PRECISION (type
),
897 else if (i
>= TREE_INT_CST_EXT_NUNITS (op
))
898 c
= build_int_cst (type
,
899 tree_int_cst_sgn (op
) < 0 ? -1 : 0);
901 c
= build_int_cst (type
, TREE_INT_CST_ELT (op
, i
));
906 || (m_data
[m_data_cnt
] == NULL_TREE
907 && m_data
[m_data_cnt
+ 1] == NULL_TREE
))
909 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
910 unsigned int rem
= prec
% ((m_upwards_2limb
? 2 : 1) * limb_prec
);
912 unsigned min_prec
= bitint_min_cst_precision (op
, ext
);
915 m_data
.safe_push (NULL_TREE
);
916 m_data
.safe_push (NULL_TREE
);
918 if (integer_zerop (op
))
920 tree c
= build_zero_cst (m_limb_type
);
921 m_data
[m_data_cnt
] = c
;
922 m_data
[m_data_cnt
+ 1] = c
;
924 else if (integer_all_onesp (op
))
926 tree c
= build_all_ones_cst (m_limb_type
);
927 m_data
[m_data_cnt
] = c
;
928 m_data
[m_data_cnt
+ 1] = c
;
930 else if (m_upwards_2limb
&& min_prec
<= (unsigned) limb_prec
)
932 /* Single limb constant. Use a phi with that limb from
933 the preheader edge and 0 or -1 constant from the other edge
934 and for the second limb in the loop. */
936 gcc_assert (m_first
);
939 prepare_data_in_out (fold_convert (m_limb_type
, op
), idx
, &out
,
940 build_int_cst (m_limb_type
, ext
));
942 else if (min_prec
> prec
- rem
- 2 * limb_prec
)
944 /* Constant which has enough significant bits that it isn't
945 worth trying to save .rodata space by extending from smaller
949 type
= TREE_TYPE (op
);
951 /* If we have a guarantee the most significant partial limb
952 (if any) will be only accessed through handle_operand
953 with INTEGER_CST idx, we don't need to include the partial
955 type
= build_bitint_type (prec
- rem
, 1);
956 tree c
= tree_output_constant_def (fold_convert (type
, op
));
957 m_data
[m_data_cnt
] = c
;
958 m_data
[m_data_cnt
+ 1] = NULL_TREE
;
960 else if (m_upwards_2limb
)
962 /* Constant with smaller number of bits. Trade conditional
963 code for .rodata space by extending from smaller number. */
964 min_prec
= CEIL (min_prec
, 2 * limb_prec
) * (2 * limb_prec
);
965 tree type
= build_bitint_type (min_prec
, 1);
966 tree c
= tree_output_constant_def (fold_convert (type
, op
));
967 tree idx2
= make_ssa_name (sizetype
);
968 g
= gimple_build_assign (idx2
, PLUS_EXPR
, idx
, size_one_node
);
970 g
= gimple_build_cond (LT_EXPR
, idx
,
971 size_int (min_prec
/ limb_prec
),
972 NULL_TREE
, NULL_TREE
);
973 edge edge_true
, edge_false
;
974 if_then (g
, (min_prec
>= (prec
- rem
) / 2
975 ? profile_probability::likely ()
976 : profile_probability::unlikely ()),
977 edge_true
, edge_false
);
978 tree c1
= limb_access (TREE_TYPE (op
), c
, idx
, false);
979 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c1
)), c1
);
981 c1
= gimple_assign_lhs (g
);
982 tree c2
= limb_access (TREE_TYPE (op
), c
, idx2
, false);
983 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c2
)), c2
);
985 c2
= gimple_assign_lhs (g
);
986 tree c3
= build_int_cst (m_limb_type
, ext
);
987 m_gsi
= gsi_after_labels (edge_true
->dest
);
988 m_data
[m_data_cnt
] = make_ssa_name (m_limb_type
);
989 m_data
[m_data_cnt
+ 1] = make_ssa_name (m_limb_type
);
990 gphi
*phi
= create_phi_node (m_data
[m_data_cnt
],
992 add_phi_arg (phi
, c1
, edge_true
, UNKNOWN_LOCATION
);
993 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
994 phi
= create_phi_node (m_data
[m_data_cnt
+ 1], edge_true
->dest
);
995 add_phi_arg (phi
, c2
, edge_true
, UNKNOWN_LOCATION
);
996 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
1000 /* Constant with smaller number of bits. Trade conditional
1001 code for .rodata space by extending from smaller number.
1002 Version for loops with random access to the limbs or
1004 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
1006 if (min_prec
<= (unsigned) limb_prec
)
1007 c
= fold_convert (m_limb_type
, op
);
1010 tree type
= build_bitint_type (min_prec
, 1);
1011 c
= tree_output_constant_def (fold_convert (type
, op
));
1013 m_data
[m_data_cnt
] = c
;
1014 m_data
[m_data_cnt
+ 1] = integer_type_node
;
1016 t
= m_data
[m_data_cnt
];
1017 if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
1019 t
= limb_access (TREE_TYPE (op
), t
, idx
, false);
1020 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
1022 t
= gimple_assign_lhs (g
);
/* Non-m_first path: reuse the state recorded in m_data above.  */
1025 else if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
1027 t
= limb_access (TREE_TYPE (op
), m_data
[m_data_cnt
], idx
, false);
1028 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
1030 t
= gimple_assign_lhs (g
);
1033 t
= m_data
[m_data_cnt
+ 1];
1034 if (m_data
[m_data_cnt
+ 1] == integer_type_node
)
1036 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
1037 unsigned rem
= prec
% ((m_upwards_2limb
? 2 : 1) * limb_prec
);
1038 int ext
= wi::neg_p (wi::to_wide (op
)) ? -1 : 0;
1039 tree c
= m_data
[m_data_cnt
];
1040 unsigned min_prec
= TYPE_PRECISION (TREE_TYPE (c
));
1041 g
= gimple_build_cond (LT_EXPR
, idx
,
1042 size_int (min_prec
/ limb_prec
),
1043 NULL_TREE
, NULL_TREE
);
1044 edge edge_true
, edge_false
;
1045 if_then (g
, (min_prec
>= (prec
- rem
) / 2
1046 ? profile_probability::likely ()
1047 : profile_probability::unlikely ()),
1048 edge_true
, edge_false
);
1049 if (min_prec
> (unsigned) limb_prec
)
1051 c
= limb_access (TREE_TYPE (op
), c
, idx
, false);
1052 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c
)), c
);
1054 c
= gimple_assign_lhs (g
);
1056 tree c2
= build_int_cst (m_limb_type
, ext
);
1057 m_gsi
= gsi_after_labels (edge_true
->dest
);
1058 t
= make_ssa_name (m_limb_type
);
1059 gphi
*phi
= create_phi_node (t
, edge_true
->dest
);
1060 add_phi_arg (phi
, c
, edge_true
, UNKNOWN_LOCATION
);
1061 add_phi_arg (phi
, c2
, edge_false
, UNKNOWN_LOCATION
);
1070 /* Helper method, add a PHI node with VAL from preheader edge if
1071 inside of a loop and m_first. Keep state in a pair of m_data
1072 elements. If VAL_OUT is non-NULL, use that as PHI argument from
1073 the latch edge, otherwise create a new SSA_NAME for it and let
1074 caller initialize it. */
1077 bitint_large_huge::prepare_data_in_out (tree val
, tree idx
, tree
*data_out
,
1082 *data_out
= tree_fits_uhwi_p (idx
) ? NULL_TREE
: m_data
[m_data_cnt
+ 1];
1083 return m_data
[m_data_cnt
];
1086 *data_out
= NULL_TREE
;
1087 if (tree_fits_uhwi_p (idx
))
1089 m_data
.safe_push (val
);
1090 m_data
.safe_push (NULL_TREE
);
1094 tree in
= make_ssa_name (TREE_TYPE (val
));
1095 gphi
*phi
= create_phi_node (in
, m_bb
);
1096 edge e1
= find_edge (m_preheader_bb
, m_bb
);
1097 edge e2
= EDGE_PRED (m_bb
, 0);
1099 e2
= EDGE_PRED (m_bb
, 1);
1100 add_phi_arg (phi
, val
, e1
, UNKNOWN_LOCATION
);
1101 tree out
= val_out
? val_out
: make_ssa_name (TREE_TYPE (val
));
1102 add_phi_arg (phi
, out
, e2
, UNKNOWN_LOCATION
);
1103 m_data
.safe_push (in
);
1104 m_data
.safe_push (out
);
1108 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1109 convert it without emitting any code, otherwise emit
1110 the conversion statement before the current location. */
1113 bitint_large_huge::add_cast (tree type
, tree val
)
1115 if (TREE_CODE (val
) == INTEGER_CST
)
1116 return fold_convert (type
, val
);
1118 tree lhs
= make_ssa_name (type
);
1119 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, val
);
1124 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1127 bitint_large_huge::handle_plus_minus (tree_code code
, tree rhs1
, tree rhs2
,
1130 tree lhs
, data_out
, ctype
;
1131 tree rhs1_type
= TREE_TYPE (rhs1
);
1133 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1136 if (optab_handler (code
== PLUS_EXPR
? uaddc5_optab
: usubc5_optab
,
1137 TYPE_MODE (m_limb_type
)) != CODE_FOR_nothing
)
1139 ctype
= build_complex_type (m_limb_type
);
1140 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1142 if (!TYPE_UNSIGNED (rhs1_type
))
1144 tree type
= unsigned_type_for (rhs1_type
);
1145 rhs1
= add_cast (type
, rhs1
);
1146 rhs2
= add_cast (type
, rhs2
);
1148 rhs1
= add_cast (m_limb_type
, rhs1
);
1149 rhs2
= add_cast (m_limb_type
, rhs2
);
1151 lhs
= make_ssa_name (ctype
);
1152 g
= gimple_build_call_internal (code
== PLUS_EXPR
1153 ? IFN_UADDC
: IFN_USUBC
,
1154 3, rhs1
, rhs2
, data_in
);
1155 gimple_call_set_lhs (g
, lhs
);
1157 if (data_out
== NULL_TREE
)
1158 data_out
= make_ssa_name (m_limb_type
);
1159 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1160 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1163 else if (types_compatible_p (rhs1_type
, m_limb_type
))
1165 ctype
= build_complex_type (m_limb_type
);
1166 lhs
= make_ssa_name (ctype
);
1167 g
= gimple_build_call_internal (code
== PLUS_EXPR
1168 ? IFN_ADD_OVERFLOW
: IFN_SUB_OVERFLOW
,
1170 gimple_call_set_lhs (g
, lhs
);
1172 if (data_out
== NULL_TREE
)
1173 data_out
= make_ssa_name (m_limb_type
);
1174 if (!integer_zerop (data_in
))
1176 rhs1
= make_ssa_name (m_limb_type
);
1177 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1178 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1180 rhs2
= make_ssa_name (m_limb_type
);
1181 g
= gimple_build_assign (rhs2
, IMAGPART_EXPR
,
1182 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1184 lhs
= make_ssa_name (ctype
);
1185 g
= gimple_build_call_internal (code
== PLUS_EXPR
1189 gimple_call_set_lhs (g
, lhs
);
1191 data_in
= make_ssa_name (m_limb_type
);
1192 g
= gimple_build_assign (data_in
, IMAGPART_EXPR
,
1193 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1195 g
= gimple_build_assign (data_out
, PLUS_EXPR
, rhs2
, data_in
);
1200 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1201 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1207 tree in
= add_cast (rhs1_type
, data_in
);
1208 lhs
= make_ssa_name (rhs1_type
);
1209 g
= gimple_build_assign (lhs
, code
, rhs1
, rhs2
);
1211 rhs1
= make_ssa_name (rhs1_type
);
1212 g
= gimple_build_assign (rhs1
, code
, lhs
, in
);
1214 m_data
[m_data_cnt
] = NULL_TREE
;
1218 rhs1
= make_ssa_name (m_limb_type
);
1219 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1220 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1222 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1223 rhs1
= add_cast (rhs1_type
, rhs1
);
1224 m_data
[m_data_cnt
] = data_out
;
1229 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1230 count in [0, limb_prec - 1] range. */
1233 bitint_large_huge::handle_lshift (tree rhs1
, tree rhs2
, tree idx
)
1235 unsigned HOST_WIDE_INT cnt
= tree_to_uhwi (rhs2
);
1236 gcc_checking_assert (cnt
< (unsigned) limb_prec
);
1240 tree lhs
, data_out
, rhs1_type
= TREE_TYPE (rhs1
);
1242 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1245 if (!integer_zerop (data_in
))
1247 lhs
= make_ssa_name (m_limb_type
);
1248 g
= gimple_build_assign (lhs
, RSHIFT_EXPR
, data_in
,
1249 build_int_cst (unsigned_type_node
,
1252 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1253 lhs
= add_cast (rhs1_type
, lhs
);
1256 if (types_compatible_p (rhs1_type
, m_limb_type
))
1258 if (data_out
== NULL_TREE
)
1259 data_out
= make_ssa_name (m_limb_type
);
1260 g
= gimple_build_assign (data_out
, rhs1
);
1263 if (cnt
< (unsigned) TYPE_PRECISION (rhs1_type
))
1265 lhs
= make_ssa_name (rhs1_type
);
1266 g
= gimple_build_assign (lhs
, LSHIFT_EXPR
, rhs1
, rhs2
);
1268 if (!integer_zerop (data_in
))
1271 lhs
= make_ssa_name (rhs1_type
);
1272 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, rhs1
, data_in
);
1278 m_data
[m_data_cnt
] = data_out
;
1283 /* Helper function for handle_stmt method, handle an integral
1284 to integral conversion. */
1287 bitint_large_huge::handle_cast (tree lhs_type
, tree rhs1
, tree idx
)
1289 tree rhs_type
= TREE_TYPE (rhs1
);
1291 if ((TREE_CODE (rhs1
) == SSA_NAME
|| TREE_CODE (rhs1
) == INTEGER_CST
)
1292 && TREE_CODE (lhs_type
) == BITINT_TYPE
1293 && TREE_CODE (rhs_type
) == BITINT_TYPE
1294 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1295 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
)
1297 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
)
1298 /* If lhs has bigger precision than rhs, we can use
1299 the simple case only if there is a guarantee that
1300 the most significant limb is handled in straight
1301 line code. If m_var_msb (on left shifts) or
1302 if m_upwards_2limb * limb_prec is equal to
1303 lhs precision or if not m_upwards_2limb and lhs_type
1304 has precision which is multiple of limb_prec that is
1307 && (CEIL (TYPE_PRECISION (lhs_type
), limb_prec
)
1308 == CEIL (TYPE_PRECISION (rhs_type
), limb_prec
))
1309 && ((!m_upwards_2limb
1310 && (TYPE_PRECISION (lhs_type
) % limb_prec
!= 0))
1312 && (m_upwards_2limb
* limb_prec
1313 < TYPE_PRECISION (lhs_type
))))))
1315 rhs1
= handle_operand (rhs1
, idx
);
1316 if (tree_fits_uhwi_p (idx
))
1318 tree type
= limb_access_type (lhs_type
, idx
);
1319 if (!types_compatible_p (type
, TREE_TYPE (rhs1
)))
1320 rhs1
= add_cast (type
, rhs1
);
1325 /* Indexes lower than this don't need any special processing. */
1326 unsigned low
= ((unsigned) TYPE_PRECISION (rhs_type
)
1327 - !TYPE_UNSIGNED (rhs_type
)) / limb_prec
;
1328 /* Indexes >= than this always contain an extension. */
1329 unsigned high
= CEIL ((unsigned) TYPE_PRECISION (rhs_type
), limb_prec
);
1330 bool save_first
= m_first
;
1333 m_data
.safe_push (NULL_TREE
);
1334 m_data
.safe_push (NULL_TREE
);
1335 m_data
.safe_push (NULL_TREE
);
1336 if (TYPE_UNSIGNED (rhs_type
))
1337 /* No need to keep state between iterations. */
1339 else if (m_upwards
&& !m_upwards_2limb
)
1340 /* We need to keep state between iterations, but
1341 not within any loop, everything is straight line
1342 code with only increasing indexes. */
1344 else if (!m_upwards_2limb
)
1346 unsigned save_data_cnt
= m_data_cnt
;
1347 gimple_stmt_iterator save_gsi
= m_gsi
;
1349 if (gsi_end_p (m_gsi
))
1350 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1353 m_data_cnt
= save_data_cnt
+ 3;
1354 t
= handle_operand (rhs1
, size_int (low
));
1356 m_data
[save_data_cnt
+ 2]
1357 = build_int_cst (NULL_TREE
, m_data_cnt
);
1358 m_data_cnt
= save_data_cnt
;
1359 t
= add_cast (signed_type_for (m_limb_type
), t
);
1360 tree lpm1
= build_int_cst (unsigned_type_node
, limb_prec
- 1);
1361 tree n
= make_ssa_name (TREE_TYPE (t
));
1362 g
= gimple_build_assign (n
, RSHIFT_EXPR
, t
, lpm1
);
1364 m_data
[save_data_cnt
+ 1] = add_cast (m_limb_type
, n
);
1366 if (gsi_end_p (m_init_gsi
))
1367 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1369 gsi_prev (&m_init_gsi
);
1372 else if (m_upwards_2limb
* limb_prec
< TYPE_PRECISION (rhs_type
))
1373 /* We need to keep state between iterations, but
1374 fortunately not within the loop, only afterwards. */
1379 m_data
.truncate (m_data_cnt
);
1380 prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
1381 m_data
.safe_push (NULL_TREE
);
1385 unsigned save_data_cnt
= m_data_cnt
;
1387 if (!tree_fits_uhwi_p (idx
))
1390 && low
>= m_upwards_2limb
- m_first
)
1392 rhs1
= handle_operand (rhs1
, idx
);
1394 m_data
[save_data_cnt
+ 2]
1395 = build_int_cst (NULL_TREE
, m_data_cnt
);
1396 m_first
= save_first
;
1399 bool single_comparison
1400 = low
== high
|| (m_upwards_2limb
&& (low
& 1) == m_first
);
1402 if (!single_comparison
1405 && low
+ 1 == m_upwards_2limb
)
1406 /* In this case we know that idx <= low always,
1407 so effectively we just needs a single comparison,
1408 idx < low or idx == low, but we'd need to emit different
1409 code for the 2 branches than single_comparison normally
1410 emits. So, instead of special-casing that, emit a
1411 low <= low comparison which cfg cleanup will clean up
1412 at the end of the pass. */
1413 idxc
= size_int (low
);
1414 g
= gimple_build_cond (single_comparison
? LT_EXPR
: LE_EXPR
,
1415 idxc
, size_int (low
), NULL_TREE
, NULL_TREE
);
1416 edge edge_true_true
, edge_true_false
, edge_false
;
1417 if_then_if_then_else (g
, (single_comparison
? NULL
1418 : gimple_build_cond (EQ_EXPR
, idx
,
1422 profile_probability::likely (),
1423 profile_probability::unlikely (),
1424 edge_true_true
, edge_true_false
, edge_false
);
1425 bool save_cast_conditional
= m_cast_conditional
;
1426 m_cast_conditional
= true;
1428 tree t1
= handle_operand (rhs1
, idx
), t2
= NULL_TREE
;
1430 m_data
[save_data_cnt
+ 2]
1431 = build_int_cst (NULL_TREE
, m_data_cnt
);
1432 tree ext
= NULL_TREE
;
1433 tree bitfld
= NULL_TREE
;
1434 if (!single_comparison
)
1436 m_gsi
= gsi_after_labels (edge_true_true
->src
);
1438 m_data_cnt
= save_data_cnt
+ 3;
1441 bitfld
= m_data
[m_bitfld_load
];
1442 m_data
[m_bitfld_load
] = m_data
[m_bitfld_load
+ 2];
1445 t2
= handle_operand (rhs1
, size_int (low
));
1446 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t2
)))
1447 t2
= add_cast (m_limb_type
, t2
);
1448 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards_2limb
)
1450 ext
= add_cast (signed_type_for (m_limb_type
), t2
);
1451 tree lpm1
= build_int_cst (unsigned_type_node
,
1453 tree n
= make_ssa_name (TREE_TYPE (ext
));
1454 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1456 ext
= add_cast (m_limb_type
, n
);
1460 if (TYPE_UNSIGNED (rhs_type
))
1461 t3
= build_zero_cst (m_limb_type
);
1462 else if (m_upwards_2limb
&& (save_first
|| ext
!= NULL_TREE
))
1463 t3
= m_data
[save_data_cnt
];
1465 t3
= m_data
[save_data_cnt
+ 1];
1466 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
1467 t
= make_ssa_name (m_limb_type
);
1468 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
1469 add_phi_arg (phi
, t1
, edge_true_false
, UNKNOWN_LOCATION
);
1470 add_phi_arg (phi
, t3
, edge_false
, UNKNOWN_LOCATION
);
1472 add_phi_arg (phi
, t2
, edge_true_true
, UNKNOWN_LOCATION
);
1475 tree t4
= make_ssa_name (m_limb_type
);
1476 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1477 add_phi_arg (phi
, build_zero_cst (m_limb_type
), edge_true_false
,
1479 add_phi_arg (phi
, m_data
[save_data_cnt
], edge_false
,
1481 add_phi_arg (phi
, ext
, edge_true_true
, UNKNOWN_LOCATION
);
1482 if (!save_cast_conditional
)
1484 g
= gimple_build_assign (m_data
[save_data_cnt
+ 1], t4
);
1488 for (basic_block bb
= gsi_bb (m_gsi
);;)
1490 edge e1
= single_succ_edge (bb
);
1491 edge e2
= find_edge (e1
->dest
, m_bb
), e3
;
1492 tree t5
= (e2
? m_data
[save_data_cnt
+ 1]
1493 : make_ssa_name (m_limb_type
));
1494 phi
= create_phi_node (t5
, e1
->dest
);
1496 FOR_EACH_EDGE (e3
, ei
, e1
->dest
->preds
)
1497 add_phi_arg (phi
, (e3
== e1
? t4
1498 : build_zero_cst (m_limb_type
)),
1499 e3
, UNKNOWN_LOCATION
);
1509 if (!save_first
&& !save_cast_conditional
)
1510 t4
= m_data
[m_bitfld_load
+ 1];
1512 t4
= make_ssa_name (m_limb_type
);
1513 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1515 edge_true_true
? bitfld
: m_data
[m_bitfld_load
],
1516 edge_true_false
, UNKNOWN_LOCATION
);
1517 add_phi_arg (phi
, m_data
[m_bitfld_load
+ 2],
1518 edge_false
, UNKNOWN_LOCATION
);
1520 add_phi_arg (phi
, m_data
[m_bitfld_load
], edge_true_true
,
1522 if (save_cast_conditional
)
1523 for (basic_block bb
= gsi_bb (m_gsi
);;)
1525 edge e1
= single_succ_edge (bb
);
1526 edge e2
= find_edge (e1
->dest
, m_bb
), e3
;
1527 tree t5
= ((e2
&& !save_first
) ? m_data
[m_bitfld_load
+ 1]
1528 : make_ssa_name (m_limb_type
));
1529 phi
= create_phi_node (t5
, e1
->dest
);
1531 FOR_EACH_EDGE (e3
, ei
, e1
->dest
->preds
)
1532 add_phi_arg (phi
, (e3
== e1
? t4
1533 : build_zero_cst (m_limb_type
)),
1534 e3
, UNKNOWN_LOCATION
);
1540 m_data
[m_bitfld_load
] = t4
;
1541 m_data
[m_bitfld_load
+ 2] = t4
;
1544 m_cast_conditional
= save_cast_conditional
;
1545 m_first
= save_first
;
1550 if (tree_to_uhwi (idx
) < low
)
1552 t
= handle_operand (rhs1
, idx
);
1554 m_data
[save_data_cnt
+ 2]
1555 = build_int_cst (NULL_TREE
, m_data_cnt
);
1557 else if (tree_to_uhwi (idx
) < high
)
1559 t
= handle_operand (rhs1
, size_int (low
));
1561 m_data
[save_data_cnt
+ 2]
1562 = build_int_cst (NULL_TREE
, m_data_cnt
);
1563 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t
)))
1564 t
= add_cast (m_limb_type
, t
);
1565 tree ext
= NULL_TREE
;
1566 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards
)
1568 ext
= add_cast (signed_type_for (m_limb_type
), t
);
1569 tree lpm1
= build_int_cst (unsigned_type_node
,
1571 tree n
= make_ssa_name (TREE_TYPE (ext
));
1572 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1574 ext
= add_cast (m_limb_type
, n
);
1575 m_data
[save_data_cnt
+ 1] = ext
;
1580 if (TYPE_UNSIGNED (rhs_type
) && m_first
)
1582 handle_operand (rhs1
, size_zero_node
);
1583 m_data
[save_data_cnt
+ 2]
1584 = build_int_cst (NULL_TREE
, m_data_cnt
);
1587 m_data_cnt
= tree_to_uhwi (m_data
[save_data_cnt
+ 2]);
1588 if (TYPE_UNSIGNED (rhs_type
))
1589 t
= build_zero_cst (m_limb_type
);
1590 else if (m_bb
&& m_data
[save_data_cnt
])
1591 t
= m_data
[save_data_cnt
];
1593 t
= m_data
[save_data_cnt
+ 1];
1595 tree type
= limb_access_type (lhs_type
, idx
);
1596 if (!useless_type_conversion_p (type
, m_limb_type
))
1597 t
= add_cast (type
, t
);
1598 m_first
= save_first
;
1602 else if (TREE_CODE (lhs_type
) == BITINT_TYPE
1603 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1604 && INTEGRAL_TYPE_P (rhs_type
))
1606 /* Add support for 3 or more limbs filled in from normal integral
1607 type if this assert fails. If no target chooses limb mode smaller
1608 than half of largest supported normal integral type, this will not
1610 gcc_assert (TYPE_PRECISION (rhs_type
) <= 2 * limb_prec
);
1611 tree r1
= NULL_TREE
, r2
= NULL_TREE
, rext
= NULL_TREE
;
1614 gimple_stmt_iterator save_gsi
= m_gsi
;
1616 if (gsi_end_p (m_gsi
))
1617 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1620 if (TREE_CODE (rhs_type
) == BITINT_TYPE
1621 && bitint_precision_kind (rhs_type
) == bitint_prec_middle
)
1623 tree type
= NULL_TREE
;
1624 rhs1
= maybe_cast_middle_bitint (&m_gsi
, rhs1
, type
);
1625 rhs_type
= TREE_TYPE (rhs1
);
1628 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
1629 r1
= add_cast (m_limb_type
, rhs1
);
1630 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1632 g
= gimple_build_assign (make_ssa_name (rhs_type
),
1634 build_int_cst (unsigned_type_node
,
1637 r2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1639 if (TYPE_UNSIGNED (rhs_type
))
1640 rext
= build_zero_cst (m_limb_type
);
1643 rext
= add_cast (signed_type_for (m_limb_type
), r2
? r2
: r1
);
1644 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rext
)),
1646 build_int_cst (unsigned_type_node
,
1649 rext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1652 if (gsi_end_p (m_init_gsi
))
1653 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1655 gsi_prev (&m_init_gsi
);
1659 if (m_upwards_2limb
)
1664 prepare_data_in_out (r1
, idx
, &out1
, rext
);
1665 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1667 prepare_data_in_out (r2
, idx
, &out2
, rext
);
1670 m_data
[m_data_cnt
+ 1] = t
;
1673 m_data
[m_data_cnt
+ 1] = rext
;
1674 m_data
.safe_push (rext
);
1675 t
= m_data
[m_data_cnt
];
1677 else if (!tree_fits_uhwi_p (idx
))
1678 t
= m_data
[m_data_cnt
+ 1];
1681 tree type
= limb_access_type (lhs_type
, idx
);
1682 t
= m_data
[m_data_cnt
+ 2];
1683 if (!useless_type_conversion_p (type
, m_limb_type
))
1684 t
= add_cast (type
, t
);
1691 m_data
.safe_push (r1
);
1692 m_data
.safe_push (r2
);
1693 m_data
.safe_push (rext
);
1695 if (tree_fits_uhwi_p (idx
))
1697 tree type
= limb_access_type (lhs_type
, idx
);
1698 if (integer_zerop (idx
))
1699 t
= m_data
[m_data_cnt
];
1700 else if (TYPE_PRECISION (rhs_type
) > limb_prec
1701 && integer_onep (idx
))
1702 t
= m_data
[m_data_cnt
+ 1];
1704 t
= m_data
[m_data_cnt
+ 2];
1705 if (!useless_type_conversion_p (type
, m_limb_type
))
1706 t
= add_cast (type
, t
);
1710 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
1711 NULL_TREE
, NULL_TREE
);
1712 edge e2
, e3
, e4
= NULL
;
1713 if_then (g
, profile_probability::likely (), e2
, e3
);
1714 if (m_data
[m_data_cnt
+ 1])
1716 g
= gimple_build_cond (EQ_EXPR
, idx
, size_one_node
,
1717 NULL_TREE
, NULL_TREE
);
1719 edge e5
= split_block (gsi_bb (m_gsi
), g
);
1720 e4
= make_edge (e5
->src
, e2
->dest
, EDGE_TRUE_VALUE
);
1721 e2
= find_edge (e5
->dest
, e2
->dest
);
1722 e4
->probability
= profile_probability::unlikely ();
1723 e5
->flags
= EDGE_FALSE_VALUE
;
1724 e5
->probability
= e4
->probability
.invert ();
1726 m_gsi
= gsi_after_labels (e2
->dest
);
1727 t
= make_ssa_name (m_limb_type
);
1728 gphi
*phi
= create_phi_node (t
, e2
->dest
);
1729 add_phi_arg (phi
, m_data
[m_data_cnt
+ 2], e2
, UNKNOWN_LOCATION
);
1730 add_phi_arg (phi
, m_data
[m_data_cnt
], e3
, UNKNOWN_LOCATION
);
1732 add_phi_arg (phi
, m_data
[m_data_cnt
+ 1], e4
, UNKNOWN_LOCATION
);
1739 /* Helper function for handle_stmt method, handle a BIT_FIELD_REF. */
1742 bitint_large_huge::handle_bit_field_ref (tree op
, tree idx
)
1744 if (tree_fits_uhwi_p (idx
))
1747 m_data
.safe_push (NULL
);
1749 unsigned HOST_WIDE_INT sz
= tree_to_uhwi (TYPE_SIZE (m_limb_type
));
1750 tree bfr
= build3 (BIT_FIELD_REF
, m_limb_type
,
1751 TREE_OPERAND (op
, 0),
1752 TYPE_SIZE (m_limb_type
),
1753 size_binop (PLUS_EXPR
, TREE_OPERAND (op
, 2),
1754 bitsize_int (tree_to_uhwi (idx
) * sz
)));
1755 tree r
= make_ssa_name (m_limb_type
);
1756 gimple
*g
= gimple_build_assign (r
, bfr
);
1758 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
1759 if (!useless_type_conversion_p (type
, m_limb_type
))
1760 r
= add_cast (type
, r
);
1766 unsigned HOST_WIDE_INT sz
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op
)));
1769 if (bitwise_mode_for_size (sz
).exists (&mode
)
1770 && known_eq (GET_MODE_BITSIZE (mode
), sz
))
1771 type
= bitwise_type_for_mode (mode
);
1775 type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op
, 0)));
1777 if (TYPE_ALIGN (type
) < TYPE_ALIGN (TREE_TYPE (op
)))
1778 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op
)));
1779 var
= create_tmp_var (type
);
1780 TREE_ADDRESSABLE (var
) = 1;
1782 if (mode
!= VOIDmode
)
1784 bfr
= build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (op
, 0),
1785 TYPE_SIZE (type
), TREE_OPERAND (op
, 2));
1786 g
= gimple_build_assign (make_ssa_name (type
),
1787 BIT_FIELD_REF
, bfr
);
1788 gimple_set_location (g
, m_loc
);
1789 gsi_insert_after (&m_init_gsi
, g
, GSI_NEW_STMT
);
1790 bfr
= gimple_assign_lhs (g
);
1793 bfr
= TREE_OPERAND (op
, 0);
1794 g
= gimple_build_assign (var
, bfr
);
1795 gimple_set_location (g
, m_loc
);
1796 gsi_insert_after (&m_init_gsi
, g
, GSI_NEW_STMT
);
1797 if (mode
== VOIDmode
)
1799 unsigned HOST_WIDE_INT nelts
1800 = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op
))), limb_prec
);
1801 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
1802 var
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
),
1803 build_int_cst (build_pointer_type (type
),
1804 tree_to_uhwi (TREE_OPERAND (op
, 2))
1807 m_data
.safe_push (var
);
1810 var
= unshare_expr (m_data
[m_data_cnt
]);
1812 var
= limb_access (TREE_TYPE (op
), var
, idx
, false);
1813 tree r
= make_ssa_name (m_limb_type
);
1814 gimple
*g
= gimple_build_assign (r
, var
);
1819 /* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
1820 is an older EH edge, and except for virtual PHIs duplicate the
1821 PHI argument from the EH_EDGE to the new EH edge. */
1824 add_eh_edge (basic_block src
, edge eh_edge
)
1826 edge e
= make_edge (src
, eh_edge
->dest
, EDGE_EH
);
1827 e
->probability
= profile_probability::very_unlikely ();
1828 for (gphi_iterator gsi
= gsi_start_phis (eh_edge
->dest
);
1829 !gsi_end_p (gsi
); gsi_next (&gsi
))
1831 gphi
*phi
= gsi
.phi ();
1832 tree lhs
= gimple_phi_result (phi
);
1833 if (virtual_operand_p (lhs
))
1835 const phi_arg_d
*arg
= gimple_phi_arg (phi
, eh_edge
->dest_idx
);
1836 add_phi_arg (phi
, arg
->def
, e
, arg
->locus
);
1840 /* Helper function for handle_stmt method, handle a load from memory. */
1843 bitint_large_huge::handle_load (gimple
*stmt
, tree idx
)
1845 tree rhs1
= gimple_assign_rhs1 (stmt
);
1846 tree rhs_type
= TREE_TYPE (rhs1
);
1847 bool eh
= stmt_ends_bb_p (stmt
);
1848 edge eh_edge
= NULL
;
1854 basic_block bb
= gimple_bb (stmt
);
1856 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
1857 if (eh_edge
->flags
& EDGE_EH
)
1861 if (TREE_CODE (rhs1
) == COMPONENT_REF
1862 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
1864 tree fld
= TREE_OPERAND (rhs1
, 1);
1865 /* For little-endian, we can allow as inputs bit-fields
1866 which start at a limb boundary. */
1867 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
1868 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
1869 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % limb_prec
) == 0)
1871 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1872 handle it normally for now. */
1873 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
1875 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
1876 poly_int64 bitoffset
;
1877 poly_uint64 field_offset
, repr_offset
;
1878 bool var_field_off
= false;
1879 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
1880 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
1881 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
1885 var_field_off
= true;
1887 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
1888 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
1889 tree nrhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1890 TREE_OPERAND (rhs1
, 0), repr
,
1891 var_field_off
? TREE_OPERAND (rhs1
, 2) : NULL_TREE
);
1892 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
1893 unsigned bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
1894 unsigned bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
1899 gimple_stmt_iterator save_gsi
= m_gsi
;
1901 if (gsi_end_p (m_gsi
))
1902 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1905 tree t
= limb_access (NULL_TREE
, nrhs1
, size_int (bo_idx
), true);
1906 tree iv
= make_ssa_name (m_limb_type
);
1907 g
= gimple_build_assign (iv
, t
);
1911 maybe_duplicate_eh_stmt (g
, stmt
);
1914 edge e
= split_block (gsi_bb (m_gsi
), g
);
1915 add_eh_edge (e
->src
, eh_edge
);
1916 m_gsi
= gsi_after_labels (e
->dest
);
1917 if (gsi_bb (save_gsi
) == e
->src
)
1919 if (gsi_end_p (save_gsi
))
1920 save_gsi
= gsi_end_bb (e
->dest
);
1922 save_gsi
= gsi_for_stmt (gsi_stmt (save_gsi
));
1924 if (m_preheader_bb
== e
->src
)
1925 m_preheader_bb
= e
->dest
;
1929 if (gsi_end_p (m_init_gsi
))
1930 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1932 gsi_prev (&m_init_gsi
);
1935 prepare_data_in_out (iv
, idx
, &out
);
1936 out
= m_data
[m_data_cnt
];
1937 m_data
.safe_push (out
);
1941 m_data
.safe_push (NULL_TREE
);
1942 m_data
.safe_push (NULL_TREE
);
1943 m_data
.safe_push (NULL_TREE
);
1947 tree nidx0
= NULL_TREE
, nidx1
;
1948 tree iv
= m_data
[m_data_cnt
];
1949 if (m_cast_conditional
&& iv
)
1951 gcc_assert (!m_bitfld_load
);
1952 m_bitfld_load
= m_data_cnt
;
1954 if (tree_fits_uhwi_p (idx
))
1956 unsigned prec
= TYPE_PRECISION (rhs_type
);
1957 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
1958 gcc_assert (i
* limb_prec
< prec
);
1959 nidx1
= size_int (i
+ bo_idx
+ 1);
1960 if ((i
+ 1) * limb_prec
> prec
)
1963 if (prec
+ bo_bit
<= (unsigned) limb_prec
)
1967 nidx0
= size_int (i
+ bo_idx
);
1977 nidx0
= make_ssa_name (sizetype
);
1978 g
= gimple_build_assign (nidx0
, PLUS_EXPR
, idx
,
1983 nidx1
= make_ssa_name (sizetype
);
1984 g
= gimple_build_assign (nidx1
, PLUS_EXPR
, idx
,
1985 size_int (bo_idx
+ 1));
1989 tree iv2
= NULL_TREE
;
1992 tree t
= limb_access (NULL_TREE
, nrhs1
, nidx0
, true);
1993 iv
= make_ssa_name (m_limb_type
);
1994 g
= gimple_build_assign (iv
, t
);
2000 bool conditional
= m_var_msb
&& !tree_fits_uhwi_p (idx
);
2001 unsigned prec
= TYPE_PRECISION (rhs_type
);
2004 if ((prec
% limb_prec
) == 0
2005 || ((prec
% limb_prec
) + bo_bit
> (unsigned) limb_prec
))
2006 conditional
= false;
2008 edge edge_true
= NULL
, edge_false
= NULL
;
2011 g
= gimple_build_cond (NE_EXPR
, idx
,
2012 size_int (prec
/ limb_prec
),
2013 NULL_TREE
, NULL_TREE
);
2014 if_then (g
, profile_probability::likely (),
2015 edge_true
, edge_false
);
2017 tree t
= limb_access (NULL_TREE
, nrhs1
, nidx1
, true);
2021 && !tree_fits_uhwi_p (idx
))
2022 iv2
= m_data
[m_data_cnt
+ 1];
2024 iv2
= make_ssa_name (m_limb_type
);
2025 g
= gimple_build_assign (iv2
, t
);
2029 maybe_duplicate_eh_stmt (g
, stmt
);
2032 edge e
= split_block (gsi_bb (m_gsi
), g
);
2033 m_gsi
= gsi_after_labels (e
->dest
);
2034 add_eh_edge (e
->src
, eh_edge
);
2039 tree iv3
= make_ssa_name (m_limb_type
);
2041 edge_true
= find_edge (gsi_bb (m_gsi
), edge_false
->dest
);
2042 gphi
*phi
= create_phi_node (iv3
, edge_true
->dest
);
2043 add_phi_arg (phi
, iv2
, edge_true
, UNKNOWN_LOCATION
);
2044 add_phi_arg (phi
, build_zero_cst (m_limb_type
),
2045 edge_false
, UNKNOWN_LOCATION
);
2046 m_gsi
= gsi_after_labels (edge_true
->dest
);
2050 g
= gimple_build_assign (make_ssa_name (m_limb_type
), RSHIFT_EXPR
,
2051 iv
, build_int_cst (unsigned_type_node
, bo_bit
));
2053 iv
= gimple_assign_lhs (g
);
2056 g
= gimple_build_assign (make_ssa_name (m_limb_type
), LSHIFT_EXPR
,
2057 iv2
, build_int_cst (unsigned_type_node
,
2058 limb_prec
- bo_bit
));
2060 g
= gimple_build_assign (make_ssa_name (m_limb_type
), BIT_IOR_EXPR
,
2061 gimple_assign_lhs (g
), iv
);
2063 iv
= gimple_assign_lhs (g
);
2064 if (m_data
[m_data_cnt
])
2065 m_data
[m_data_cnt
] = iv2
;
2067 if (tree_fits_uhwi_p (idx
))
2069 tree atype
= limb_access_type (rhs_type
, idx
);
2070 if (!useless_type_conversion_p (atype
, TREE_TYPE (iv
)))
2071 iv
= add_cast (atype
, iv
);
2078 /* Use write_p = true for loads with EH edges to make
2079 sure limb_access doesn't add a cast as separate
2080 statement after it. */
2081 rhs1
= limb_access (rhs_type
, rhs1
, idx
, eh
);
2082 tree ret
= make_ssa_name (TREE_TYPE (rhs1
));
2083 g
= gimple_build_assign (ret
, rhs1
);
2087 maybe_duplicate_eh_stmt (g
, stmt
);
2090 edge e
= split_block (gsi_bb (m_gsi
), g
);
2091 m_gsi
= gsi_after_labels (e
->dest
);
2092 add_eh_edge (e
->src
, eh_edge
);
2094 if (tree_fits_uhwi_p (idx
))
2096 tree atype
= limb_access_type (rhs_type
, idx
);
2097 if (!useless_type_conversion_p (atype
, TREE_TYPE (rhs1
)))
2098 ret
= add_cast (atype
, ret
);
2104 /* Return a limb IDX from a mergeable statement STMT. */
2107 bitint_large_huge::handle_stmt (gimple
*stmt
, tree idx
)
2109 tree lhs
, rhs1
, rhs2
= NULL_TREE
;
2111 switch (gimple_code (stmt
))
2114 if (gimple_assign_load_p (stmt
))
2115 return handle_load (stmt
, idx
);
2116 switch (gimple_assign_rhs_code (stmt
))
2121 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
2124 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2125 lhs
= make_ssa_name (TREE_TYPE (rhs1
));
2126 g
= gimple_build_assign (lhs
, gimple_assign_rhs_code (stmt
),
2132 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2133 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
2134 return handle_plus_minus (gimple_assign_rhs_code (stmt
),
2137 rhs2
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2138 rhs1
= build_zero_cst (TREE_TYPE (rhs2
));
2139 return handle_plus_minus (MINUS_EXPR
, rhs1
, rhs2
, idx
);
2141 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt
),
2143 gimple_assign_rhs2 (stmt
), idx
);
2146 return handle_operand (gimple_assign_rhs1 (stmt
), idx
);
2148 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
2149 gimple_assign_rhs1 (stmt
), idx
);
2150 case VIEW_CONVERT_EXPR
:
2151 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
2152 TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0),
2155 return handle_bit_field_ref (gimple_assign_rhs1 (stmt
), idx
);
2166 /* Return minimum precision of OP at STMT.
2167 Positive value is minimum precision above which all bits
2168 are zero, negative means all bits above negation of the
2169 value are copies of the sign bit. */
2172 range_to_prec (tree op
, gimple
*stmt
)
2176 tree type
= TREE_TYPE (op
);
2177 unsigned int prec
= TYPE_PRECISION (type
);
2180 || !get_range_query (cfun
)->range_of_expr (r
, op
, stmt
)
2181 || r
.undefined_p ())
2183 if (TYPE_UNSIGNED (type
))
2186 return MIN ((int) -prec
, -2);
2189 if (!TYPE_UNSIGNED (TREE_TYPE (op
)))
2191 w
= r
.lower_bound ();
2194 int min_prec1
= wi::min_precision (w
, SIGNED
);
2195 w
= r
.upper_bound ();
2196 int min_prec2
= wi::min_precision (w
, SIGNED
);
2197 int min_prec
= MAX (min_prec1
, min_prec2
);
2198 return MIN (-min_prec
, -2);
2202 w
= r
.upper_bound ();
2203 int min_prec
= wi::min_precision (w
, UNSIGNED
);
2204 return MAX (min_prec
, 1);
2207 /* Return address of the first limb of OP and write into *PREC
2208 its precision. If positive, the operand is zero extended
2209 from that precision, if it is negative, the operand is sign-extended
2210 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2211 otherwise *PREC_STORED is prec from the innermost call without
2212 range optimizations. */
2215 bitint_large_huge::handle_operand_addr (tree op
, gimple
*stmt
,
2216 int *prec_stored
, int *prec
)
2219 location_t loc_save
= m_loc
;
2220 if ((TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
2221 || bitint_precision_kind (TREE_TYPE (op
)) < bitint_prec_large
)
2222 && TREE_CODE (op
) != INTEGER_CST
)
2225 *prec
= range_to_prec (op
, stmt
);
2226 bitint_prec_kind kind
= bitint_prec_small
;
2227 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op
)));
2228 if (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
)
2229 kind
= bitint_precision_kind (TREE_TYPE (op
));
2230 if (kind
== bitint_prec_middle
)
2232 tree type
= NULL_TREE
;
2233 op
= maybe_cast_middle_bitint (&m_gsi
, op
, type
);
2235 tree op_type
= TREE_TYPE (op
);
2236 unsigned HOST_WIDE_INT nelts
2237 = CEIL (TYPE_PRECISION (op_type
), limb_prec
);
2238 /* Add support for 3 or more limbs filled in from normal
2239 integral type if this assert fails. If no target chooses
2240 limb mode smaller than half of largest supported normal
2241 integral type, this will not be needed. */
2242 gcc_assert (nelts
<= 2);
2244 *prec_stored
= (TYPE_UNSIGNED (op_type
)
2245 ? TYPE_PRECISION (op_type
)
2246 : -TYPE_PRECISION (op_type
));
2247 if (*prec
<= limb_prec
&& *prec
>= -limb_prec
)
2252 if (TYPE_UNSIGNED (op_type
))
2254 if (*prec_stored
> limb_prec
)
2255 *prec_stored
= limb_prec
;
2257 else if (*prec_stored
< -limb_prec
)
2258 *prec_stored
= -limb_prec
;
2261 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
2262 tree var
= create_tmp_var (atype
);
2264 if (!useless_type_conversion_p (m_limb_type
, op_type
))
2265 t1
= add_cast (m_limb_type
, t1
);
2266 tree v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_zero_node
,
2267 NULL_TREE
, NULL_TREE
);
2268 gimple
*g
= gimple_build_assign (v
, t1
);
2272 tree lp
= build_int_cst (unsigned_type_node
, limb_prec
);
2273 g
= gimple_build_assign (make_ssa_name (op_type
),
2274 RSHIFT_EXPR
, op
, lp
);
2276 tree t2
= gimple_assign_lhs (g
);
2277 t2
= add_cast (m_limb_type
, t2
);
2278 v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_one_node
,
2279 NULL_TREE
, NULL_TREE
);
2280 g
= gimple_build_assign (v
, t2
);
2283 tree ret
= build_fold_addr_expr (var
);
2284 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2286 tree clobber
= build_clobber (atype
, CLOBBER_STORAGE_END
);
2287 g
= gimple_build_assign (var
, clobber
);
2288 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2293 switch (TREE_CODE (op
))
2297 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
2299 gimple
*g
= SSA_NAME_DEF_STMT (op
);
2301 m_loc
= gimple_location (g
);
2302 if (gimple_assign_load_p (g
))
2304 *prec
= range_to_prec (op
, NULL
);
2306 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2307 ? TYPE_PRECISION (TREE_TYPE (op
))
2308 : -TYPE_PRECISION (TREE_TYPE (op
)));
2309 ret
= build_fold_addr_expr (gimple_assign_rhs1 (g
));
2310 ret
= force_gimple_operand_gsi (&m_gsi
, ret
, true,
2311 NULL_TREE
, true, GSI_SAME_STMT
);
2313 else if (gimple_code (g
) == GIMPLE_NOP
)
2315 *prec
= TYPE_UNSIGNED (TREE_TYPE (op
)) ? limb_prec
: -limb_prec
;
2317 *prec_stored
= *prec
;
2318 tree var
= create_tmp_var (m_limb_type
);
2319 TREE_ADDRESSABLE (var
) = 1;
2320 ret
= build_fold_addr_expr (var
);
2321 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2323 tree clobber
= build_clobber (m_limb_type
,
2324 CLOBBER_STORAGE_END
);
2325 g
= gimple_build_assign (var
, clobber
);
2326 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2331 gcc_assert (gimple_assign_cast_p (g
));
2332 tree rhs1
= gimple_assign_rhs1 (g
);
2333 bitint_prec_kind kind
= bitint_prec_small
;
2334 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
2335 rhs1
= TREE_OPERAND (rhs1
, 0);
2336 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)));
2337 if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
)
2338 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2339 if (kind
>= bitint_prec_large
)
2341 tree lhs_type
= TREE_TYPE (op
);
2342 tree rhs_type
= TREE_TYPE (rhs1
);
2343 int prec_stored_val
= 0;
2344 ret
= handle_operand_addr (rhs1
, g
, &prec_stored_val
, prec
);
2345 if (TYPE_PRECISION (lhs_type
) > TYPE_PRECISION (rhs_type
))
2347 if (TYPE_UNSIGNED (lhs_type
)
2348 && !TYPE_UNSIGNED (rhs_type
))
2349 gcc_assert (*prec
>= 0 || prec_stored
== NULL
);
2353 if (*prec
> 0 && *prec
< TYPE_PRECISION (lhs_type
))
2355 else if (TYPE_UNSIGNED (lhs_type
))
2357 gcc_assert (*prec
> 0
2358 || prec_stored_val
> 0
2359 || (-prec_stored_val
2360 >= TYPE_PRECISION (lhs_type
)));
2361 *prec
= TYPE_PRECISION (lhs_type
);
2363 else if (*prec
< 0 && -*prec
< TYPE_PRECISION (lhs_type
))
2366 *prec
= -TYPE_PRECISION (lhs_type
);
2381 int p
= var_to_partition (m_map
, op
);
2382 gcc_assert (m_vars
[p
] != NULL_TREE
);
2383 *prec
= range_to_prec (op
, stmt
);
2385 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2386 ? TYPE_PRECISION (TREE_TYPE (op
))
2387 : -TYPE_PRECISION (TREE_TYPE (op
)));
2388 return build_fold_addr_expr (m_vars
[p
]);
2391 unsigned int min_prec
, mp
;
2393 w
= wi::to_wide (op
);
2394 if (tree_int_cst_sgn (op
) >= 0)
2396 min_prec
= wi::min_precision (w
, UNSIGNED
);
2397 *prec
= MAX (min_prec
, 1);
2401 min_prec
= wi::min_precision (w
, SIGNED
);
2402 *prec
= MIN ((int) -min_prec
, -2);
2404 mp
= CEIL (min_prec
, limb_prec
) * limb_prec
;
2407 if (mp
>= (unsigned) TYPE_PRECISION (TREE_TYPE (op
))
2408 && (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
2409 || TYPE_PRECISION (TREE_TYPE (op
)) <= limb_prec
))
2410 type
= TREE_TYPE (op
);
2412 type
= build_bitint_type (mp
, 1);
2413 if (TREE_CODE (type
) != BITINT_TYPE
2414 || bitint_precision_kind (type
) == bitint_prec_small
)
2416 if (TYPE_PRECISION (type
) <= limb_prec
)
2420 while (bitint_precision_kind (mp
) == bitint_prec_small
)
2422 /* This case is for targets which e.g. have 64-bit
2423 limb but categorize up to 128-bits _BitInts as
2424 small. We could use type of m_limb_type[2] and
2425 similar instead to save space. */
2426 type
= build_bitint_type (mp
, 1);
2431 if (tree_int_cst_sgn (op
) >= 0)
2432 *prec_stored
= MAX (TYPE_PRECISION (type
), 1);
2434 *prec_stored
= MIN ((int) -TYPE_PRECISION (type
), -2);
2436 op
= tree_output_constant_def (fold_convert (type
, op
));
2437 return build_fold_addr_expr (op
);
2443 /* Helper function, create a loop before the current location,
2444 start with sizetype INIT value from the preheader edge. Return
2445 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2446 from the latch edge. */
2449 bitint_large_huge::create_loop (tree init
, tree
*idx_next
)
2451 if (!gsi_end_p (m_gsi
))
2454 m_gsi
= gsi_last_bb (gsi_bb (m_gsi
));
2455 edge e1
= split_block (gsi_bb (m_gsi
), gsi_stmt (m_gsi
));
2456 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
2457 edge e3
= make_edge (e1
->dest
, e1
->dest
, EDGE_TRUE_VALUE
);
2458 e3
->probability
= profile_probability::very_unlikely ();
2459 e2
->flags
= EDGE_FALSE_VALUE
;
2460 e2
->probability
= e3
->probability
.invert ();
2461 tree idx
= make_ssa_name (sizetype
);
2462 gphi
*phi
= create_phi_node (idx
, e1
->dest
);
2463 add_phi_arg (phi
, init
, e1
, UNKNOWN_LOCATION
);
2464 *idx_next
= make_ssa_name (sizetype
);
2465 add_phi_arg (phi
, *idx_next
, e3
, UNKNOWN_LOCATION
);
2466 m_gsi
= gsi_after_labels (e1
->dest
);
2468 m_preheader_bb
= e1
->src
;
2469 class loop
*loop
= alloc_loop ();
2470 loop
->header
= e1
->dest
;
2471 add_loop (loop
, e1
->src
->loop_father
);
2475 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2476 lowered using iteration from the least significant limb up to the most
2477 significant limb. For large _BitInt it is emitted as straight line code
2478 before current location, for huge _BitInt as a loop handling two limbs
2479 at once, followed by handling up to limbs in straight line code (at most
2480 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2481 comparisons, in that case CMP_CODE should be the comparison code and
2482 CMP_OP1/CMP_OP2 the comparison operands. */
2485 bitint_large_huge::lower_mergeable_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2486 tree cmp_op1
, tree cmp_op2
)
2488 bool eq_p
= cmp_code
!= ERROR_MARK
;
2491 type
= TREE_TYPE (cmp_op1
);
2493 type
= TREE_TYPE (gimple_assign_lhs (stmt
));
2494 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2495 bitint_prec_kind kind
= bitint_precision_kind (type
);
2496 gcc_assert (kind
>= bitint_prec_large
);
2498 tree lhs
= gimple_get_lhs (stmt
);
2499 tree rhs1
, lhs_type
= lhs
? TREE_TYPE (lhs
) : NULL_TREE
;
2501 && TREE_CODE (lhs
) == SSA_NAME
2502 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
2503 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
2505 int p
= var_to_partition (m_map
, lhs
);
2506 gcc_assert (m_vars
[p
] != NULL_TREE
);
2507 m_lhs
= lhs
= m_vars
[p
];
2509 unsigned cnt
, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
2511 tree ext
= NULL_TREE
, store_operand
= NULL_TREE
;
2513 basic_block eh_pad
= NULL
;
2514 tree nlhs
= NULL_TREE
;
2515 unsigned HOST_WIDE_INT bo_idx
= 0;
2516 unsigned HOST_WIDE_INT bo_bit
= 0;
2517 tree bf_cur
= NULL_TREE
, bf_next
= NULL_TREE
;
2518 if (gimple_store_p (stmt
))
2520 store_operand
= gimple_assign_rhs1 (stmt
);
2521 eh
= stmt_ends_bb_p (stmt
);
2526 basic_block bb
= gimple_bb (stmt
);
2528 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2529 if (e
->flags
& EDGE_EH
)
2535 if (TREE_CODE (lhs
) == COMPONENT_REF
2536 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
2538 tree fld
= TREE_OPERAND (lhs
, 1);
2539 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
2540 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
2541 poly_int64 bitoffset
;
2542 poly_uint64 field_offset
, repr_offset
;
2543 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
2547 bool var_field_off
= false;
2548 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
2549 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
2550 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
2554 var_field_off
= true;
2556 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
2557 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
2558 nlhs
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
2559 TREE_OPERAND (lhs
, 0), repr
,
2561 ? TREE_OPERAND (lhs
, 2) : NULL_TREE
);
2562 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
2563 bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
2564 bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
2569 && TREE_CODE (store_operand
) == SSA_NAME
2571 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (store_operand
)))
2572 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand
)))
2573 || gimple_assign_cast_p (stmt
))
2575 rhs1
= gimple_assign_rhs1 (store_operand
2576 ? SSA_NAME_DEF_STMT (store_operand
)
2578 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
2579 rhs1
= TREE_OPERAND (rhs1
, 0);
2580 /* Optimize mergeable ops ending with widening cast to _BitInt
2581 (or followed by store). We can lower just the limbs of the
2582 cast operand and widen afterwards. */
2583 if (TREE_CODE (rhs1
) == SSA_NAME
2585 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
2586 && TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
2587 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
2588 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1
)),
2589 limb_prec
) < CEIL (prec
, limb_prec
)
2590 || (kind
== bitint_prec_huge
2591 && TYPE_PRECISION (TREE_TYPE (rhs1
)) < prec
)))
2593 store_operand
= rhs1
;
2594 prec
= TYPE_PRECISION (TREE_TYPE (rhs1
));
2595 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2596 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2600 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
2601 if (kind
== bitint_prec_large
)
2602 cnt
= CEIL (prec
, limb_prec
);
2605 rem
= (prec
% (2 * limb_prec
));
2606 end
= (prec
- rem
) / limb_prec
;
2607 cnt
= 2 + CEIL (rem
, limb_prec
);
2608 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
2611 basic_block edge_bb
= NULL
;
2614 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2616 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2618 if (kind
== bitint_prec_large
)
2619 m_gsi
= gsi_end_bb (edge_bb
);
2622 m_after_stmt
= stmt
;
2623 if (kind
!= bitint_prec_large
)
2624 m_upwards_2limb
= end
;
2628 = (prec
!= (unsigned) TYPE_PRECISION (type
)
2629 && (CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
)
2630 > CEIL (prec
, limb_prec
)));
2632 for (unsigned i
= 0; i
< cnt
; i
++)
2635 if (kind
== bitint_prec_large
)
2638 idx
= size_int (end
+ (i
> 2));
2641 rhs1
= handle_operand (cmp_op1
, idx
);
2642 tree rhs2
= handle_operand (cmp_op2
, idx
);
2643 g
= gimple_build_cond (NE_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2645 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2646 e1
->flags
= EDGE_FALSE_VALUE
;
2647 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2648 e1
->probability
= profile_probability::unlikely ();
2649 e2
->probability
= e1
->probability
.invert ();
2651 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2652 m_gsi
= gsi_after_labels (e1
->dest
);
2657 rhs1
= handle_operand (store_operand
, idx
);
2659 rhs1
= handle_stmt (stmt
, idx
);
2660 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
2661 rhs1
= add_cast (m_limb_type
, rhs1
);
2662 if (sext
&& i
== cnt
- 1)
2667 if (tree_fits_uhwi_p (idx
))
2668 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2671 nidx
= make_ssa_name (sizetype
);
2672 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2678 basic_block new_bb
= NULL
;
2679 /* Handle stores into bit-fields. */
2685 if (kind
!= bitint_prec_large
)
2687 prepare_data_in_out (build_zero_cst (m_limb_type
),
2689 bf_next
= m_data
.pop ();
2690 bf_cur
= m_data
.pop ();
2691 g
= gimple_build_cond (EQ_EXPR
, idx
, size_zero_node
,
2692 NULL_TREE
, NULL_TREE
);
2694 if_then_else (g
, profile_probability::unlikely (),
2699 = build_nonstandard_integer_type (limb_prec
- bo_bit
, 1);
2700 tree bfr
= build_bit_field_ref (ftype
, unshare_expr (nlhs
),
2702 bo_idx
* limb_prec
+ bo_bit
);
2703 tree t
= add_cast (ftype
, rhs1
);
2704 g
= gimple_build_assign (bfr
, t
);
2708 maybe_duplicate_eh_stmt (g
, stmt
);
2711 edge e
= split_block (gsi_bb (m_gsi
), g
);
2712 m_gsi
= gsi_after_labels (e
->dest
);
2713 add_eh_edge (e
->src
,
2714 find_edge (gimple_bb (stmt
), eh_pad
));
2717 if (kind
== bitint_prec_large
)
2723 m_gsi
= gsi_after_labels (e2
->src
);
2727 tree t1
= make_ssa_name (m_limb_type
);
2728 tree t2
= make_ssa_name (m_limb_type
);
2729 tree t3
= make_ssa_name (m_limb_type
);
2730 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2731 build_int_cst (unsigned_type_node
,
2732 limb_prec
- bo_bit
));
2734 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, rhs1
,
2735 build_int_cst (unsigned_type_node
,
2739 g
= gimple_build_assign (t3
, BIT_IOR_EXPR
, t1
, t2
);
2742 if (bf_next
&& i
== 1)
2744 g
= gimple_build_assign (bf_next
, bf_cur
);
2751 /* Handle bit-field access to partial last limb if needed. */
2755 && tree_fits_uhwi_p (idx
))
2757 unsigned int tprec
= TYPE_PRECISION (type
);
2758 unsigned int rprec
= (tprec
- 1) % limb_prec
+ 1;
2759 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2762 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2764 = build_bit_field_ref (ftype
, unshare_expr (nlhs
),
2766 (bo_idx
+ tprec
/ limb_prec
)
2768 tree t
= add_cast (ftype
, rhs1
);
2769 g
= gimple_build_assign (bfr
, t
);
2773 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2776 /* Otherwise, stores to any other lhs. */
2779 tree l
= limb_access (nlhs
? NULL_TREE
: lhs_type
,
2780 nlhs
? nlhs
: lhs
, nidx
, true);
2781 g
= gimple_build_assign (l
, rhs1
);
2786 maybe_duplicate_eh_stmt (g
, stmt
);
2789 edge e
= split_block (gsi_bb (m_gsi
), g
);
2790 m_gsi
= gsi_after_labels (e
->dest
);
2791 add_eh_edge (e
->src
,
2792 find_edge (gimple_bb (stmt
), eh_pad
));
2796 m_gsi
= gsi_after_labels (new_bb
);
2800 if (kind
== bitint_prec_huge
&& i
<= 1)
2804 idx
= make_ssa_name (sizetype
);
2805 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
2811 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
2814 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2815 NULL_TREE
, NULL_TREE
);
2818 m_gsi
= gsi_after_labels (edge_bb
);
2820 m_gsi
= gsi_for_stmt (stmt
);
2830 ext
= add_cast (signed_type_for (m_limb_type
), ext
);
2831 tree lpm1
= build_int_cst (unsigned_type_node
,
2833 tree n
= make_ssa_name (TREE_TYPE (ext
));
2834 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
2836 ext
= add_cast (m_limb_type
, n
);
2839 ext
= build_zero_cst (m_limb_type
);
2840 kind
= bitint_precision_kind (type
);
2841 unsigned start
= CEIL (prec
, limb_prec
);
2842 prec
= TYPE_PRECISION (type
);
2843 idx
= idx_first
= idx_next
= NULL_TREE
;
2844 if (prec
<= (start
+ 2 + (bo_bit
!= 0)) * limb_prec
)
2845 kind
= bitint_prec_large
;
2846 if (kind
== bitint_prec_large
)
2847 cnt
= CEIL (prec
, limb_prec
) - start
;
2850 rem
= prec
% limb_prec
;
2851 end
= (prec
- rem
) / limb_prec
;
2852 cnt
= (bo_bit
!= 0) + 1 + (rem
!= 0);
2854 for (unsigned i
= 0; i
< cnt
; i
++)
2856 if (kind
== bitint_prec_large
|| (i
== 0 && bo_bit
!= 0))
2857 idx
= size_int (start
+ i
);
2858 else if (i
== cnt
- 1 && (rem
!= 0))
2859 idx
= size_int (end
);
2860 else if (i
== (bo_bit
!= 0))
2861 idx
= create_loop (size_int (start
+ i
), &idx_next
);
2863 if (bf_cur
!= NULL_TREE
&& bf_cur
!= ext
)
2865 tree t1
= make_ssa_name (m_limb_type
);
2866 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2867 build_int_cst (unsigned_type_node
,
2868 limb_prec
- bo_bit
));
2870 if (integer_zerop (ext
))
2874 tree t2
= make_ssa_name (m_limb_type
);
2875 rhs1
= make_ssa_name (m_limb_type
);
2876 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, ext
,
2877 build_int_cst (unsigned_type_node
,
2880 g
= gimple_build_assign (rhs1
, BIT_IOR_EXPR
, t1
, t2
);
2888 if (tree_fits_uhwi_p (idx
))
2889 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2892 nidx
= make_ssa_name (sizetype
);
2893 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2899 /* Handle bit-field access to partial last limb if needed. */
2900 if (nlhs
&& i
== cnt
- 1)
2902 unsigned int tprec
= TYPE_PRECISION (type
);
2903 unsigned int rprec
= (tprec
- 1) % limb_prec
+ 1;
2904 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2907 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2909 = build_bit_field_ref (ftype
, unshare_expr (nlhs
),
2911 (bo_idx
+ tprec
/ limb_prec
)
2913 tree t
= add_cast (ftype
, rhs1
);
2914 g
= gimple_build_assign (bfr
, t
);
2918 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2921 /* Otherwise, stores to any other lhs. */
2924 tree l
= limb_access (nlhs
? NULL_TREE
: lhs_type
,
2925 nlhs
? nlhs
: lhs
, nidx
, true);
2926 g
= gimple_build_assign (l
, rhs1
);
2931 maybe_duplicate_eh_stmt (g
, stmt
);
2934 edge e
= split_block (gsi_bb (m_gsi
), g
);
2935 m_gsi
= gsi_after_labels (e
->dest
);
2936 add_eh_edge (e
->src
, find_edge (gimple_bb (stmt
), eh_pad
));
2939 if (kind
== bitint_prec_huge
&& i
== (bo_bit
!= 0))
2941 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
2944 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2945 NULL_TREE
, NULL_TREE
);
2947 m_gsi
= gsi_for_stmt (stmt
);
2952 if (bf_cur
!= NULL_TREE
)
2954 unsigned int tprec
= TYPE_PRECISION (type
);
2955 unsigned int rprec
= (tprec
+ bo_bit
) % limb_prec
;
2956 tree ftype
= build_nonstandard_integer_type (rprec
, 1);
2957 tree bfr
= build_bit_field_ref (ftype
, unshare_expr (nlhs
),
2959 (bo_idx
+ (tprec
+ bo_bit
) / limb_prec
)
2964 rhs1
= make_ssa_name (TREE_TYPE (rhs1
));
2965 g
= gimple_build_assign (rhs1
, RSHIFT_EXPR
, bf_cur
,
2966 build_int_cst (unsigned_type_node
,
2967 limb_prec
- bo_bit
));
2970 rhs1
= add_cast (ftype
, rhs1
);
2971 g
= gimple_build_assign (bfr
, rhs1
);
2975 maybe_duplicate_eh_stmt (g
, stmt
);
2978 edge e
= split_block (gsi_bb (m_gsi
), g
);
2979 m_gsi
= gsi_after_labels (e
->dest
);
2980 add_eh_edge (e
->src
, find_edge (gimple_bb (stmt
), eh_pad
));
2985 if (gimple_store_p (stmt
))
2987 unlink_stmt_vdef (stmt
);
2988 release_ssa_name (gimple_vdef (stmt
));
2989 gsi_remove (&m_gsi
, true);
2993 lhs
= make_ssa_name (boolean_type_node
);
2994 basic_block bb
= gimple_bb (stmt
);
2995 gphi
*phi
= create_phi_node (lhs
, bb
);
2996 edge e
= find_edge (gsi_bb (m_gsi
), bb
);
2997 unsigned int n
= EDGE_COUNT (bb
->preds
);
2998 for (unsigned int i
= 0; i
< n
; i
++)
3000 edge e2
= EDGE_PRED (bb
, i
);
3001 add_phi_arg (phi
, e
== e2
? boolean_true_node
: boolean_false_node
,
3002 e2
, UNKNOWN_LOCATION
);
3004 cmp_code
= cmp_code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3011 /* Handle a large/huge _BitInt comparison statement STMT other than
3012 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
3013 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
3014 lowered by iteration from the most significant limb downwards to
3015 the least significant one, for large _BitInt in straight line code,
3016 otherwise with most significant limb handled in
3017 straight line code followed by a loop handling one limb at a time.
3018 Comparisons with unsigned huge _BitInt with precisions which are
3019 multiples of limb precision can use just the loop and don't need to
3020 handle most significant limb before the loop. The loop or straight
3021 line code jumps to final basic block if a particular pair of limbs
3025 bitint_large_huge::lower_comparison_stmt (gimple
*stmt
, tree_code
&cmp_code
,
3026 tree cmp_op1
, tree cmp_op2
)
3028 tree type
= TREE_TYPE (cmp_op1
);
3029 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
3030 bitint_prec_kind kind
= bitint_precision_kind (type
);
3031 gcc_assert (kind
>= bitint_prec_large
);
3033 if (!TYPE_UNSIGNED (type
)
3034 && integer_zerop (cmp_op2
)
3035 && (cmp_code
== GE_EXPR
|| cmp_code
== LT_EXPR
))
3037 unsigned end
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
) - 1;
3038 tree idx
= size_int (end
);
3040 tree rhs1
= handle_operand (cmp_op1
, idx
);
3041 if (TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
3043 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
3044 rhs1
= add_cast (stype
, rhs1
);
3046 tree lhs
= make_ssa_name (boolean_type_node
);
3047 g
= gimple_build_assign (lhs
, cmp_code
, rhs1
,
3048 build_zero_cst (TREE_TYPE (rhs1
)));
3054 unsigned cnt
, rem
= 0, end
= 0;
3055 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
;
3056 if (kind
== bitint_prec_large
)
3057 cnt
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
);
3060 rem
= ((unsigned) TYPE_PRECISION (type
) % limb_prec
);
3061 if (rem
== 0 && !TYPE_UNSIGNED (type
))
3063 end
= ((unsigned) TYPE_PRECISION (type
) - rem
) / limb_prec
;
3064 cnt
= 1 + (rem
!= 0);
3067 basic_block edge_bb
= NULL
;
3068 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3070 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
3072 m_gsi
= gsi_end_bb (edge_bb
);
3074 edge
*edges
= XALLOCAVEC (edge
, cnt
* 2);
3075 for (unsigned i
= 0; i
< cnt
; i
++)
3078 if (kind
== bitint_prec_large
)
3079 idx
= size_int (cnt
- i
- 1);
3080 else if (i
== cnt
- 1)
3081 idx
= create_loop (size_int (end
- 1), &idx_next
);
3083 idx
= size_int (end
);
3084 tree rhs1
= handle_operand (cmp_op1
, idx
);
3085 tree rhs2
= handle_operand (cmp_op2
, idx
);
3087 && !TYPE_UNSIGNED (type
)
3088 && TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
3090 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
3091 rhs1
= add_cast (stype
, rhs1
);
3092 rhs2
= add_cast (stype
, rhs2
);
3094 g
= gimple_build_cond (GT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
3096 edge e1
= split_block (gsi_bb (m_gsi
), g
);
3097 e1
->flags
= EDGE_FALSE_VALUE
;
3098 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
3099 e1
->probability
= profile_probability::likely ();
3100 e2
->probability
= e1
->probability
.invert ();
3102 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
3103 m_gsi
= gsi_after_labels (e1
->dest
);
3105 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
3107 e1
= split_block (gsi_bb (m_gsi
), g
);
3108 e1
->flags
= EDGE_FALSE_VALUE
;
3109 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
3110 e1
->probability
= profile_probability::unlikely ();
3111 e2
->probability
= e1
->probability
.invert ();
3112 m_gsi
= gsi_after_labels (e1
->dest
);
3113 edges
[2 * i
+ 1] = e2
;
3115 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
3117 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3119 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
3120 NULL_TREE
, NULL_TREE
);
3122 edge true_edge
, false_edge
;
3123 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
3124 &true_edge
, &false_edge
);
3125 m_gsi
= gsi_after_labels (false_edge
->dest
);
3130 tree lhs
= make_ssa_name (boolean_type_node
);
3131 basic_block bb
= gimple_bb (stmt
);
3132 gphi
*phi
= create_phi_node (lhs
, bb
);
3133 for (unsigned int i
= 0; i
< cnt
* 2; i
++)
3135 tree val
= ((cmp_code
== GT_EXPR
|| cmp_code
== GE_EXPR
)
3136 ^ (i
& 1)) ? boolean_true_node
: boolean_false_node
;
3137 add_phi_arg (phi
, val
, edges
[i
], UNKNOWN_LOCATION
);
3139 add_phi_arg (phi
, (cmp_code
== GE_EXPR
|| cmp_code
== LE_EXPR
)
3140 ? boolean_true_node
: boolean_false_node
,
3141 find_edge (gsi_bb (m_gsi
), bb
), UNKNOWN_LOCATION
);
3146 /* Lower large/huge _BitInt left and right shift except for left
3147 shift by < limb_prec constant. */
3150 bitint_large_huge::lower_shift_stmt (tree obj
, gimple
*stmt
)
3152 tree rhs1
= gimple_assign_rhs1 (stmt
);
3153 tree lhs
= gimple_assign_lhs (stmt
);
3154 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3155 tree type
= TREE_TYPE (rhs1
);
3156 gimple
*final_stmt
= gsi_stmt (m_gsi
);
3157 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3158 && bitint_precision_kind (type
) >= bitint_prec_large
);
3159 int prec
= TYPE_PRECISION (type
);
3160 tree n
= gimple_assign_rhs2 (stmt
), n1
, n2
, n3
, n4
;
3162 if (obj
== NULL_TREE
)
3164 int part
= var_to_partition (m_map
, lhs
);
3165 gcc_assert (m_vars
[part
] != NULL_TREE
);
3168 /* Preparation code common for both left and right shifts.
3169 unsigned n1 = n % limb_prec;
3170 size_t n2 = n / limb_prec;
3171 size_t n3 = n1 != 0;
3172 unsigned n4 = (limb_prec - n1) % limb_prec;
3173 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
3174 if (TREE_CODE (n
) == INTEGER_CST
)
3176 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
3177 n1
= int_const_binop (TRUNC_MOD_EXPR
, n
, lp
);
3178 n2
= fold_convert (sizetype
, int_const_binop (TRUNC_DIV_EXPR
, n
, lp
));
3179 n3
= size_int (!integer_zerop (n1
));
3180 n4
= int_const_binop (TRUNC_MOD_EXPR
,
3181 int_const_binop (MINUS_EXPR
, lp
, n1
), lp
);
3185 n1
= make_ssa_name (TREE_TYPE (n
));
3186 n2
= make_ssa_name (sizetype
);
3187 n3
= make_ssa_name (sizetype
);
3188 n4
= make_ssa_name (TREE_TYPE (n
));
3189 if (pow2p_hwi (limb_prec
))
3191 tree lpm1
= build_int_cst (TREE_TYPE (n
), limb_prec
- 1);
3192 g
= gimple_build_assign (n1
, BIT_AND_EXPR
, n
, lpm1
);
3194 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
3196 ? n2
: make_ssa_name (TREE_TYPE (n
)),
3198 build_int_cst (TREE_TYPE (n
),
3199 exact_log2 (limb_prec
)));
3201 if (gimple_assign_lhs (g
) != n2
)
3203 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
3206 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
3209 g
= gimple_build_assign (n4
, BIT_AND_EXPR
, gimple_assign_lhs (g
),
3215 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
3216 g
= gimple_build_assign (n1
, TRUNC_MOD_EXPR
, n
, lp
);
3218 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
3220 ? n2
: make_ssa_name (TREE_TYPE (n
)),
3221 TRUNC_DIV_EXPR
, n
, lp
);
3223 if (gimple_assign_lhs (g
) != n2
)
3225 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
3228 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
3229 MINUS_EXPR
, lp
, n1
);
3231 g
= gimple_build_assign (n4
, TRUNC_MOD_EXPR
, gimple_assign_lhs (g
),
3235 g
= gimple_build_assign (make_ssa_name (boolean_type_node
), NE_EXPR
, n1
,
3236 build_zero_cst (TREE_TYPE (n
)));
3238 g
= gimple_build_assign (n3
, NOP_EXPR
, gimple_assign_lhs (g
));
3241 tree p
= build_int_cst (sizetype
,
3242 prec
/ limb_prec
- (prec
% limb_prec
== 0));
3243 if (rhs_code
== RSHIFT_EXPR
)
3248 unsigned n1 = n % limb_prec;
3249 size_t n2 = n / limb_prec;
3250 size_t n3 = n1 != 0;
3251 unsigned n4 = (limb_prec - n1) % limb_prec;
3253 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3254 int signed_p = (typeof (src) -1) < 0;
3255 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3256 ? p : p - n3); ++idx)
3257 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3259 if (prec % limb_prec == 0)
3262 ext = ((signed limb_type) (src[p] << (limb_prec
3263 - (prec % limb_prec))))
3264 >> (limb_prec - (prec % limb_prec));
3266 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3267 if (!signed_p && (prec % limb_prec == 0))
3269 else if (idx < prec / 64)
3271 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3277 dst[idx] = ((signed limb_type) ext) >> n1;
3278 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3282 dst[idx] = ext >> n1;
3285 for (++idx; idx <= p; ++idx)
3288 if (TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3290 else if (TREE_CODE (n3
) == INTEGER_CST
)
3291 pmn3
= int_const_binop (MINUS_EXPR
, p
, n3
);
3294 pmn3
= make_ssa_name (sizetype
);
3295 g
= gimple_build_assign (pmn3
, MINUS_EXPR
, p
, n3
);
3298 g
= gimple_build_cond (LT_EXPR
, n2
, pmn3
, NULL_TREE
, NULL_TREE
);
3299 edge edge_true
, edge_false
;
3300 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3302 tree idx
= create_loop (n2
, &idx_next
);
3303 tree idxmn2
= make_ssa_name (sizetype
);
3304 tree idxpn3
= make_ssa_name (sizetype
);
3305 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3307 g
= gimple_build_assign (idxpn3
, PLUS_EXPR
, idx
, n3
);
3310 tree t1
= handle_operand (rhs1
, idx
);
3312 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3313 RSHIFT_EXPR
, t1
, n1
);
3315 t1
= gimple_assign_lhs (g
);
3316 if (!integer_zerop (n3
))
3319 tree t2
= handle_operand (rhs1
, idxpn3
);
3320 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3321 LSHIFT_EXPR
, t2
, n4
);
3323 t2
= gimple_assign_lhs (g
);
3324 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3325 BIT_IOR_EXPR
, t1
, t2
);
3327 t1
= gimple_assign_lhs (g
);
3329 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3330 g
= gimple_build_assign (l
, t1
);
3332 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3334 g
= gimple_build_cond (LT_EXPR
, idx_next
, pmn3
, NULL_TREE
, NULL_TREE
);
3336 idx
= make_ssa_name (sizetype
);
3337 m_gsi
= gsi_for_stmt (final_stmt
);
3338 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3339 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3340 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3341 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3342 add_phi_arg (phi
, n2
, edge_false
, UNKNOWN_LOCATION
);
3343 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3345 tree ms
= handle_operand (rhs1
, p
);
3347 if (!types_compatible_p (TREE_TYPE (ms
), m_limb_type
))
3348 ext
= add_cast (m_limb_type
, ms
);
3349 if (!(TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3350 && !integer_zerop (n3
))
3352 g
= gimple_build_cond (LT_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3353 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3355 t1
= handle_operand (rhs1
, idx
);
3356 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3357 RSHIFT_EXPR
, t1
, n1
);
3359 t1
= gimple_assign_lhs (g
);
3360 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3361 LSHIFT_EXPR
, ext
, n4
);
3363 tree t2
= gimple_assign_lhs (g
);
3364 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3365 BIT_IOR_EXPR
, t1
, t2
);
3367 t1
= gimple_assign_lhs (g
);
3368 idxmn2
= make_ssa_name (sizetype
);
3369 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3371 l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3372 g
= gimple_build_assign (l
, t1
);
3374 idx_next
= make_ssa_name (sizetype
);
3375 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3377 m_gsi
= gsi_for_stmt (final_stmt
);
3378 tree nidx
= make_ssa_name (sizetype
);
3379 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3380 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3381 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3382 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3383 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3384 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3387 g
= gimple_build_assign (make_ssa_name (sizetype
), MINUS_EXPR
, idx
, n2
);
3389 idx
= gimple_assign_lhs (g
);
3391 if (!TYPE_UNSIGNED (type
))
3392 sext
= add_cast (signed_type_for (m_limb_type
), ext
);
3393 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3394 RSHIFT_EXPR
, sext
, n1
);
3396 t1
= gimple_assign_lhs (g
);
3397 if (!TYPE_UNSIGNED (type
))
3399 t1
= add_cast (m_limb_type
, t1
);
3400 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3402 build_int_cst (TREE_TYPE (n
),
3405 ext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3408 ext
= build_zero_cst (m_limb_type
);
3409 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3410 g
= gimple_build_assign (l
, t1
);
3412 g
= gimple_build_assign (make_ssa_name (sizetype
), PLUS_EXPR
, idx
,
3415 idx
= gimple_assign_lhs (g
);
3416 g
= gimple_build_cond (LE_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3417 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3418 idx
= create_loop (idx
, &idx_next
);
3419 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3420 g
= gimple_build_assign (l
, ext
);
3422 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3424 g
= gimple_build_cond (LE_EXPR
, idx_next
, p
, NULL_TREE
, NULL_TREE
);
3432 unsigned n1 = n % limb_prec;
3433 size_t n2 = n / limb_prec;
3434 size_t n3 = n1 != 0;
3435 unsigned n4 = (limb_prec - n1) % limb_prec;
3437 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3438 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3439 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3442 dst[idx] = src[idx - n2] << n1;
3445 for (; (ssize_t) idx >= 0; --idx)
3448 if (TREE_CODE (n2
) == INTEGER_CST
&& TREE_CODE (n3
) == INTEGER_CST
)
3449 n2pn3
= int_const_binop (PLUS_EXPR
, n2
, n3
);
3452 n2pn3
= make_ssa_name (sizetype
);
3453 g
= gimple_build_assign (n2pn3
, PLUS_EXPR
, n2
, n3
);
3456 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3457 idx even to access the most significant partial limb. */
3459 if (integer_zerop (n3
))
3460 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3461 counts. Emit if (true) condition that can be optimized later. */
3462 g
= gimple_build_cond (NE_EXPR
, boolean_true_node
, boolean_false_node
,
3463 NULL_TREE
, NULL_TREE
);
3465 g
= gimple_build_cond (LE_EXPR
, n2pn3
, p
, NULL_TREE
, NULL_TREE
);
3466 edge edge_true
, edge_false
;
3467 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3469 tree idx
= create_loop (p
, &idx_next
);
3470 tree idxmn2
= make_ssa_name (sizetype
);
3471 tree idxmn2mn3
= make_ssa_name (sizetype
);
3472 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3474 g
= gimple_build_assign (idxmn2mn3
, MINUS_EXPR
, idxmn2
, n3
);
3477 tree t1
= handle_operand (rhs1
, idxmn2
);
3479 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3480 LSHIFT_EXPR
, t1
, n1
);
3482 t1
= gimple_assign_lhs (g
);
3483 if (!integer_zerop (n3
))
3486 tree t2
= handle_operand (rhs1
, idxmn2mn3
);
3487 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3488 RSHIFT_EXPR
, t2
, n4
);
3490 t2
= gimple_assign_lhs (g
);
3491 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3492 BIT_IOR_EXPR
, t1
, t2
);
3494 t1
= gimple_assign_lhs (g
);
3496 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3497 g
= gimple_build_assign (l
, t1
);
3499 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3501 tree sn2pn3
= add_cast (ssizetype
, n2pn3
);
3502 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
), sn2pn3
,
3503 NULL_TREE
, NULL_TREE
);
3505 idx
= make_ssa_name (sizetype
);
3506 m_gsi
= gsi_for_stmt (final_stmt
);
3507 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3508 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3509 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3510 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3511 add_phi_arg (phi
, p
, edge_false
, UNKNOWN_LOCATION
);
3512 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3514 if (!integer_zerop (n3
))
3516 g
= gimple_build_cond (NE_EXPR
, n3
, size_zero_node
,
3517 NULL_TREE
, NULL_TREE
);
3518 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3519 idxmn2
= make_ssa_name (sizetype
);
3520 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3523 t1
= handle_operand (rhs1
, idxmn2
);
3524 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3525 LSHIFT_EXPR
, t1
, n1
);
3527 t1
= gimple_assign_lhs (g
);
3528 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3529 g
= gimple_build_assign (l
, t1
);
3531 idx_next
= make_ssa_name (sizetype
);
3532 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3534 m_gsi
= gsi_for_stmt (final_stmt
);
3535 tree nidx
= make_ssa_name (sizetype
);
3536 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3537 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3538 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3539 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3540 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3541 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3544 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx
),
3545 ssize_int (0), NULL_TREE
, NULL_TREE
);
3546 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3547 idx
= create_loop (idx
, &idx_next
);
3548 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3549 g
= gimple_build_assign (l
, build_zero_cst (m_limb_type
));
3551 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3553 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
),
3554 ssize_int (0), NULL_TREE
, NULL_TREE
);
3559 /* Lower large/huge _BitInt multiplication or division. */
// NOTE(review): this chunk is extraction-garbled -- each original source line
// is split over several physical lines and some lines are missing entirely
// (gaps in the fused original line numbers, e.g. 3563, 3588-3591, 3596-3597:
// the opening brace, the `switch (rhs_code)` header and `break;`s are not
// visible here).  Comments below document only what the visible code shows.
//
// Lowers a large/huge _BitInt MULT_EXPR / TRUNC_DIV_EXPR / TRUNC_MOD_EXPR
// assignment STMT by replacing it with a call to the IFN_MULBITINT or
// IFN_DIVMODBITINT internal function.  OBJ, if non-NULL, is the object the
// result should be stored into; operands are passed by address together
// with their (possibly range-narrowed) bit precisions.
3562 bitint_large_huge::lower_muldiv_stmt (tree obj
, gimple
*stmt
)
3564 tree rhs1
= gimple_assign_rhs1 (stmt
);
3565 tree rhs2
= gimple_assign_rhs2 (stmt
);
3566 tree lhs
= gimple_assign_lhs (stmt
);
3567 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3568 tree type
= TREE_TYPE (rhs1
);
// Only large/huge _BitInt operands should reach this lowering path.
3569 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3570 && bitint_precision_kind (type
) >= bitint_prec_large
);
3571 int prec
= TYPE_PRECISION (type
), prec1
, prec2
;
// handle_operand_addr returns the operand's address and fills in the
// minimum precision actually needed for it (negative when signed).
3572 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec1
);
3573 rhs2
= handle_operand_addr (rhs2
, stmt
, NULL
, &prec2
);
// No destination object supplied: use the coalesced partition variable
// for LHS (asserted to exist) -- TODO confirm: the assignment of
// m_vars[part] to obj is on a line dropped by extraction.
3574 if (obj
== NULL_TREE
)
3576 int part
= var_to_partition (m_map
, lhs
);
3577 gcc_assert (m_vars
[part
] != NULL_TREE
);
3579 lhs
= build_fold_addr_expr (obj
);
// Otherwise pass the address of the caller-provided OBJ, gimplified so it
// is a valid call argument.
3583 lhs
= build_fold_addr_expr (obj
);
3584 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3585 NULL_TREE
, true, GSI_SAME_STMT
);
// SImode integer type used for the precision arguments of the internal fns.
3587 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
// (Missing here: `switch (rhs_code)` / `case MULT_EXPR:` header lines.)
// Multiplication: result, result-precision, and both operands with precisions.
3592 g
= gimple_build_call_internal (IFN_MULBITINT
, 6,
3593 lhs
, build_int_cst (sitype
, prec
),
3594 rhs1
, build_int_cst (sitype
, prec1
),
3595 rhs2
, build_int_cst (sitype
, prec2
));
// Division: quotient goes to LHS, remainder pointer/precision are 0.
3598 case TRUNC_DIV_EXPR
:
3599 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8,
3600 lhs
, build_int_cst (sitype
, prec
),
3602 build_int_cst (sitype
, 0),
3603 rhs1
, build_int_cst (sitype
, prec1
),
3604 rhs2
, build_int_cst (sitype
, prec2
));
// The libgcc call cannot throw unless the original stmt could (EH).
3605 if (!stmt_ends_bb_p (stmt
))
3606 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
// Modulo: remainder goes to LHS, quotient pointer/precision are null/0.
3609 case TRUNC_MOD_EXPR
:
3610 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8, null_pointer_node
,
3611 build_int_cst (sitype
, 0),
3612 lhs
, build_int_cst (sitype
, prec
),
3613 rhs1
, build_int_cst (sitype
, prec1
),
3614 rhs2
, build_int_cst (sitype
, prec2
));
3615 if (!stmt_ends_bb_p (stmt
))
3616 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
// If the original statement ended its basic block (could throw), the
// replacement call must reproduce the EH edge: duplicate the EH info,
// find the EH successor edge of the old block, split after the call and
// attach the EH edge to the new block.
3622 if (stmt_ends_bb_p (stmt
))
3624 maybe_duplicate_eh_stmt (g
, stmt
);
3627 basic_block bb
= gimple_bb (stmt
);
3629 FOR_EACH_EDGE (e1
, ei
, bb
->succs
)
3630 if (e1
->flags
& EDGE_EH
)
3634 edge e2
= split_block (gsi_bb (m_gsi
), g
);
3635 m_gsi
= gsi_after_labels (e2
->dest
);
3636 add_eh_edge (e2
->src
, e1
);
3641 /* Lower large/huge _BitInt conversion to/from floating point. */
// NOTE(review): extraction-garbled block; original lines are fragmented and
// some are missing (gaps in fused line numbers, e.g. 3645, 3650, 3655,
// 3686-3691).  Comments document only the visible logic.
//
// Lowers FIX_TRUNC_EXPR (float -> _BitInt) to an IFN_FLOATTOBITINT call
// storing through the address of OBJ / the partition var, and the reverse
// conversion (_BitInt -> float) to an IFN_BITINTTOFLOAT call whose result
// is assigned to LHS directly.
3644 bitint_large_huge::lower_float_conv_stmt (tree obj
, gimple
*stmt
)
3646 tree rhs1
= gimple_assign_rhs1 (stmt
);
3647 tree lhs
= gimple_assign_lhs (stmt
);
3648 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
// SImode type used for the precision argument of the internal functions.
3649 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
// Float -> _BitInt: the _BitInt result is written through a pointer.
3651 if (rhs_code
== FIX_TRUNC_EXPR
)
3653 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
// Signed results are presumably encoded as negative precision --
// TODO confirm: the `prec = -prec;` line was dropped by extraction.
3654 if (!TYPE_UNSIGNED (TREE_TYPE (lhs
)))
// No destination supplied: use the coalesced partition variable for LHS.
3656 if (obj
== NULL_TREE
)
3658 int part
= var_to_partition (m_map
, lhs
);
3659 gcc_assert (m_vars
[part
] != NULL_TREE
);
3661 lhs
= build_fold_addr_expr (obj
);
3665 lhs
= build_fold_addr_expr (obj
);
3666 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3667 NULL_TREE
, true, GSI_SAME_STMT
);
3669 scalar_mode from_mode
3670 = as_a
<scalar_mode
> (TYPE_MODE (TREE_TYPE (rhs1
)));
3672 /* IEEE single is a full superset of both IEEE half and
3673 bfloat formats, convert to float first and then to _BitInt
3674 to avoid the need of another 2 library routines. */
3675 if ((REAL_MODE_FORMAT (from_mode
) == &arm_bfloat_half_format
3676 || REAL_MODE_FORMAT (from_mode
) == &ieee_half_format
)
3677 && REAL_MODE_FORMAT (SFmode
) == &ieee_single_format
)
3679 tree type
= lang_hooks
.types
.type_for_mode (SFmode
, 0);
3681 rhs1
= add_cast (type
, rhs1
);
// Emit the float->_BitInt library call: destination, precision, source.
3684 g
= gimple_build_call_internal (IFN_FLOATTOBITINT
, 3,
3685 lhs
, build_int_cst (sitype
, prec
),
// _BitInt -> float path: pass the _BitInt operand by address with its
// precision and assign the call's scalar result to LHS.
3692 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec
);
3693 g
= gimple_build_call_internal (IFN_BITINTTOFLOAT
, 2,
3694 rhs1
, build_int_cst (sitype
, prec
));
3695 gimple_call_set_lhs (g
, lhs
);
3696 if (!stmt_ends_bb_p (stmt
))
3697 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
// Replace the original conversion statement with the call in place.
3698 gsi_replace (&m_gsi
, g
, true);
3702 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3703 If check_zero is true, caller wants to check if all bits in [start, end)
3704 are zero, otherwise if bits in [start, end) are either all zero or
3705 all ones. L is the limb with index LIMB, START and END are measured
// NOTE(review): extraction-garbled block; the remainder of the above
// comment, the signature's trailing parameters (limb/check_zero) and
// several statement lines are missing (gaps in fused line numbers).
// The visible logic extracts/normalizes the bits of limb L that fall in
// the bit range [START, END) so the caller can compare them against zero
// (or all-ones) to detect arithmetic overflow.
3709 bitint_large_huge::arith_overflow_extract_bits (unsigned int start
,
3710 unsigned int end
, tree l
,
// Index of the limb containing the first / last bit of the range.
3714 unsigned startlimb
= start
/ limb_prec
;
3715 unsigned endlimb
= (end
- 1) / limb_prec
;
// Case 1: the range is limb-aligned at both ends -- whole limbs compare
// directly; no masking or shifting needed (dropped lines presumably
// return L unchanged here -- TODO confirm).
3718 if ((start
% limb_prec
) == 0 && (end
% limb_prec
) == 0)
// Case 2: range within a single limb -- mask out bits [start%lp, end).
3720 if (startlimb
== endlimb
&& limb
== startlimb
)
3724 wide_int w
= wi::shifted_mask (start
% limb_prec
,
3725 end
- start
, false, limb_prec
);
3726 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3728 wide_int_to_tree (m_limb_type
, w
));
3730 return gimple_assign_lhs (g
);
3732 unsigned int shift
= start
% limb_prec
;
// Range starts and ends in this same limb but END is not limb-aligned:
// shift the upper unused bits out to the left, then arithmetic-shift
// right (via signed cast) so the extracted field is sign-extended.
3733 if ((end
% limb_prec
) != 0)
3735 unsigned int lshift
= (-end
) % limb_prec
;
3737 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3739 build_int_cst (unsigned_type_node
,
3742 l
= gimple_assign_lhs (g
);
3744 l
= add_cast (signed_type_for (m_limb_type
), l
);
3745 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3747 build_int_cst (unsigned_type_node
, shift
));
3749 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
// Case 3: this limb only contains the start of the range -- shift right
// by start%limb_prec (signed shift when !check_zero, presumably, so sign
// bits propagate -- TODO confirm, surrounding lines dropped).
3751 else if (limb
== startlimb
)
3753 if ((start
% limb_prec
) == 0)
3756 l
= add_cast (signed_type_for (m_limb_type
), l
);
3757 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3759 build_int_cst (unsigned_type_node
,
3760 start
% limb_prec
));
3762 l
= gimple_assign_lhs (g
);
3764 l
= add_cast (m_limb_type
, l
);
// Case 4: this limb only contains the end of the range.
3767 else if (limb
== endlimb
)
3769 if ((end
% limb_prec
) == 0)
// check_zero variant: simply mask off the bits above END.
3773 wide_int w
= wi::mask (end
% limb_prec
, false, limb_prec
);
3774 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3776 wide_int_to_tree (m_limb_type
, w
));
3778 return gimple_assign_lhs (g
);
// all-zeros-or-all-ones variant: shift left to drop bits above END, then
// arithmetic shift right by the same amount to sign-extend the field.
3780 unsigned int shift
= (-end
) % limb_prec
;
3781 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3783 build_int_cst (unsigned_type_node
, shift
));
3785 l
= add_cast (signed_type_for (m_limb_type
), gimple_assign_lhs (g
));
3786 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3788 build_int_cst (unsigned_type_node
, shift
));
3790 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
3795 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3796 result including overflow flag into the right locations. */
// NOTE(review): extraction-garbled block; fragments of original lines with
// fused line numbers, several lines dropped (gaps in the numbering).
// Stores the computed result VAR (or OBJ) of a checked arithmetic op and
// its overflow flag OVF into LHS / the memory object, replacing STMT.
// ORIG_OBJ distinguishes a caller-provided destination from an internal
// temporary; CODE is the arithmetic operation (for the ubsan diagnostic).
3799 bitint_large_huge::finish_arith_overflow (tree var
, tree obj
, tree type
,
3800 tree ovf
, tree lhs
, tree orig_obj
,
3801 gimple
*stmt
, tree_code code
)
// Small/middle result: reassemble a scalar value from 1-2 limbs of VAR
// and build a COMPLEX_EXPR <result, overflow> replacing STMT.
3805 if (obj
== NULL_TREE
3806 && (TREE_CODE (type
) != BITINT_TYPE
3807 || bitint_precision_kind (type
) < bitint_prec_large
))
3809 /* Add support for 3 or more limbs filled in from normal integral
3810 type if this assert fails. If no target chooses limb mode smaller
3811 than half of largest supported normal integral type, this will not
3813 gcc_assert (TYPE_PRECISION (type
) <= 2 * limb_prec
);
3814 tree lhs_type
= type
;
// Middle _BitInt is represented in an equivalent INTEGER_TYPE.
3815 if (TREE_CODE (type
) == BITINT_TYPE
3816 && bitint_precision_kind (type
) == bitint_prec_middle
)
3817 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (type
),
3818 TYPE_UNSIGNED (type
));
// Load limb 0 of the result.
3819 tree r1
= limb_access (NULL_TREE
, var
, size_int (0), true);
3820 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r1
);
3822 r1
= gimple_assign_lhs (g
);
3823 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
3824 r1
= add_cast (lhs_type
, r1
);
// Two-limb result: load limb 1, shift it up by limb_prec and OR in.
3825 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
3827 tree r2
= limb_access (NULL_TREE
, var
, size_int (1), true);
3828 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r2
);
3830 r2
= gimple_assign_lhs (g
);
3831 r2
= add_cast (lhs_type
, r2
);
3832 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
3833 build_int_cst (unsigned_type_node
,
3836 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
3837 gimple_assign_lhs (g
));
3839 r1
= gimple_assign_lhs (g
);
3841 if (lhs_type
!= type
)
3842 r1
= add_cast (type
, r1
);
// Both COMPLEX_EXPR operands must have the element type.
3843 ovf
= add_cast (lhs_type
, ovf
);
3844 if (lhs_type
!= type
)
3845 ovf
= add_cast (type
, ovf
);
3846 g
= gimple_build_assign (lhs
, COMPLEX_EXPR
, r1
, ovf
);
3847 m_gsi
= gsi_for_stmt (stmt
);
3848 gsi_replace (&m_gsi
, g
, true);
// Large/huge result stored in memory: copy VAR's limbs into OBJ via an
// array-typed aggregate assignment (MEM_REF or VIEW_CONVERT_EXPR).
3852 unsigned HOST_WIDE_INT nelts
= 0;
3853 tree atype
= NULL_TREE
;
3856 nelts
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
3857 if (orig_obj
== NULL_TREE
)
3859 atype
= build_array_type_nelts (m_limb_type
, nelts
);
3865 if (orig_obj
== NULL_TREE
)
3867 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (obj
)));
3868 v1
= build2 (MEM_REF
, atype
,
3869 build_fold_addr_expr (unshare_expr (obj
)), zero
);
3871 else if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
3872 v1
= build1 (VIEW_CONVERT_EXPR
, atype
, unshare_expr (obj
));
3874 v1
= unshare_expr (obj
);
3875 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (var
)));
3876 v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), zero
);
3877 g
= gimple_build_assign (v1
, v2
);
// Internal temporary destination: store the overflow flag as an extra
// limb right after the value, then zero the remaining tail limbs.
3880 if (orig_obj
== NULL_TREE
&& obj
)
3882 ovf
= add_cast (m_limb_type
, ovf
);
3883 tree l
= limb_access (NULL_TREE
, obj
, size_int (nelts
), true);
3884 g
= gimple_build_assign (l
, ovf
);
3888 atype
= build_array_type_nelts (m_limb_type
, nelts
- 1);
3889 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (obj
)),
3890 (nelts
+ 1) * m_limb_size
);
3891 tree v1
= build2 (MEM_REF
, atype
,
3892 build_fold_addr_expr (unshare_expr (obj
)),
3894 g
= gimple_build_assign (v1
, build_zero_cst (atype
));
// LHS is _Complex: rewrite each IMAGPART_EXPR use of LHS so that the
// realpart consumer reads OBJ and the imagpart consumer gets OVF.
3898 else if (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
)
3900 imm_use_iterator ui
;
3901 use_operand_p use_p
;
3902 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
3904 g
= USE_STMT (use_p
);
3905 if (!is_gimple_assign (g
)
3906 || gimple_assign_rhs_code (g
) != IMAGPART_EXPR
)
3908 tree lhs2
= gimple_assign_lhs (g
);
3910 single_imm_use (lhs2
, &use_p
, &use_stmt
);
3911 lhs2
= gimple_assign_lhs (use_stmt
);
3912 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
3913 if (useless_type_conversion_p (TREE_TYPE (lhs2
), TREE_TYPE (ovf
)))
3914 g
= gimple_build_assign (lhs2
, ovf
);
3916 g
= gimple_build_assign (lhs2
, NOP_EXPR
, ovf
);
3917 gsi_replace (&gsi
, g
, true);
3918 if (gsi_stmt (m_gsi
) == use_stmt
)
3919 m_gsi
= gsi_for_stmt (g
);
// ubsan-style checks (no complex LHS): if OVF may be true, branch to an
// unlikely block calling the sanitizer overflow diagnostic builtin.
3923 else if (ovf
!= boolean_false_node
)
3925 g
= gimple_build_cond (NE_EXPR
, ovf
, boolean_false_node
,
3926 NULL_TREE
, NULL_TREE
);
3927 edge edge_true
, edge_false
;
3928 if_then (g
, profile_probability::very_unlikely (),
3929 edge_true
, edge_false
);
3930 tree zero
= build_zero_cst (TREE_TYPE (lhs
));
3931 tree fn
= ubsan_build_overflow_builtin (code
, m_loc
,
3934 force_gimple_operand_gsi (&m_gsi
, fn
, true, NULL_TREE
,
3935 true, GSI_SAME_STMT
);
3936 m_gsi
= gsi_after_labels (edge_true
->dest
);
// The temporary VAR is dead now; clobber it to help stack slot sharing.
3941 tree clobber
= build_clobber (TREE_TYPE (var
), CLOBBER_STORAGE_END
);
3942 g
= gimple_build_assign (var
, clobber
);
3943 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
3947 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3948 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3949 argument 1 precision PREC1 and minimum precision for the result
3950 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
// NOTE(review): extraction-garbled block; several lines are missing (gaps
// in the fused original line numbers, e.g. the signature's opening and
// most assignments to *end).  Decides, from the operand/result precisions,
// which bit range [*START, *END) of the infinite-precision result must be
// inspected to detect overflow, and whether those bits must be all zero
// (*CHECK_ZERO) or merely all-equal (sign copies).  Returns
// boolean_false_node when overflow is statically impossible, otherwise
// NULL_TREE (runtime check needed) -- TODO confirm return values on the
// dropped lines.
3953 arith_overflow (tree_code code
, tree type
, int prec
, int prec0
, int prec1
,
3954 int prec2
, unsigned *start
, unsigned *end
, bool *check_zero
)
3959 /* Ignore this special rule for subtraction, even if both
3960 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3961 in infinite precision. */
3962 if (code
!= MINUS_EXPR
&& prec0
>= 0 && prec1
>= 0)
3964 /* Result in [0, prec2) is unsigned, if prec > prec2,
3965 all bits above it will be zero. */
3966 if ((prec
- !TYPE_UNSIGNED (type
)) >= prec2
)
3967 return boolean_false_node
;
3970 /* ovf if any of bits in [start, end) is non-zero. */
3971 *start
= prec
- !TYPE_UNSIGNED (type
);
3975 else if (TYPE_UNSIGNED (type
))
3977 /* If result in [0, prec2) is signed and if prec > prec2,
3978 all bits above it will be sign bit copies. */
3981 /* ovf if bit prec - 1 is non-zero. */
3987 /* ovf if any of bits in [start, end) is non-zero. */
3992 else if (prec
>= prec2
)
3993 return boolean_false_node
;
3996 /* ovf if [start, end) bits aren't all zeros or all ones. */
// Signed result narrower than the minimum precision: the checked bits
// must be uniform sign copies rather than strictly zero.
3999 *check_zero
= false;
4004 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
4005 argument or return type _Complex large/huge _BitInt. */
// NOTE(review): extraction-garbled block -- original lines are fragmented
// across physical lines with the original line numbers fused in, and many
// lines are dropped outright (gaps in the numbering).  The control-flow
// structure (braces, some else branches, loop closers) is therefore not
// fully visible; comments below annotate only what the visible code shows.
//
// Lowers IFN_{ADD,SUB}_OVERFLOW / IFN_UBSAN_CHECK_{ADD,SUB} on large/huge
// _BitInt: computes the sum/difference limb by limb (straight-line for
// "large", a 2-limbs-per-iteration loop for "huge"), extracts the bits of
// the infinite-precision result that decide overflow (via arith_overflow /
// arith_overflow_extract_bits), stores the truncated result, and finally
// hands result + overflow flag to finish_arith_overflow.
4008 bitint_large_huge::lower_addsub_overflow (tree obj
, gimple
*stmt
)
4010 tree arg0
= gimple_call_arg (stmt
, 0);
4011 tree arg1
= gimple_call_arg (stmt
, 1);
4012 tree lhs
= gimple_call_lhs (stmt
);
// If the call has no LHS it is dead -- just remove it.
4017 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4018 gsi_remove (&gsi
, true);
4021 gimple
*final_stmt
= gsi_stmt (m_gsi
);
4022 tree type
= TREE_TYPE (lhs
);
4023 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4024 type
= TREE_TYPE (type
);
4025 int prec
= TYPE_PRECISION (type
);
// Minimum precisions of the arguments, negative when a signed range.
4026 int prec0
= range_to_prec (arg0
, stmt
);
4027 int prec1
= range_to_prec (arg1
, stmt
);
4028 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
4029 the be minimum unsigned precision of any possible operation's
4030 result, otherwise it is minimum signed precision.
4032 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
4033 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
4034 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
4035 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
4036 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
4037 8 + 8 [0, 0x1fe] 9 UNSIGNED
4038 8 + 10 [0, 0x4fe] 11 UNSIGNED
4039 -8 + -8 [-0x100, 0xfe] 9 SIGNED
4040 -8 + -10 [-0x280, 0x27e] 11 SIGNED
4041 8 + -8 [-0x80, 0x17e] 10 SIGNED
4042 8 + -10 [-0x200, 0x2fe] 11 SIGNED
4043 10 + -8 [-0x80, 0x47e] 12 SIGNED
4044 8 - 8 [-0xff, 0xff] 9 SIGNED
4045 8 - 10 [-0x3ff, 0xff] 11 SIGNED
4046 10 - 8 [-0xff, 0x3ff] 11 SIGNED
4047 -8 - -8 [-0xff, 0xff] 9 SIGNED
4048 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
4049 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
4050 8 - -8 [-0x7f, 0x17f] 10 SIGNED
4051 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
4052 10 - -8 [-0x7f, 0x47f] 12 SIGNED
4053 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
4054 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
4055 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
4056 int prec2
= MAX (prec0
< 0 ? -prec0
: prec0
,
4057 prec1
< 0 ? -prec1
: prec1
);
4058 /* If operands are either both signed or both unsigned,
4059 we need just one additional bit. */
4060 prec2
= (((prec0
< 0) == (prec1
< 0)
4061 /* If one operand is signed and one unsigned and
4062 the signed one has larger precision, we need
4063 just one extra bit, otherwise two. */
4064 || (prec0
< 0 ? (prec2
== -prec0
&& prec2
!= prec1
)
4065 : (prec2
== -prec1
&& prec2
!= prec0
)))
4066 ? prec2
+ 1 : prec2
+ 2);
// prec3: how many bits of the result actually need to be computed.
4067 int prec3
= MAX (prec0
< 0 ? -prec0
: prec0
,
4068 prec1
< 0 ? -prec1
: prec1
);
4069 prec3
= MAX (prec3
, prec
);
4070 tree var
= NULL_TREE
;
4071 tree orig_obj
= obj
;
// Without a destination object, reuse the coalesced partition variable
// for LHS when the result is a large/huge _BitInt tracked in m_names.
4072 if (obj
== NULL_TREE
4073 && TREE_CODE (type
) == BITINT_TYPE
4074 && bitint_precision_kind (type
) >= bitint_prec_large
4076 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4078 int part
= var_to_partition (m_map
, lhs
);
4079 gcc_assert (m_vars
[part
] != NULL_TREE
);
4081 if (TREE_TYPE (lhs
) == type
)
// Otherwise materialize a limb-array temporary VAR to hold the result.
4084 if (TREE_CODE (type
) != BITINT_TYPE
4085 || bitint_precision_kind (type
) < bitint_prec_large
)
4087 unsigned HOST_WIDE_INT nelts
= CEIL (prec
, limb_prec
);
4088 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4089 var
= create_tmp_var (atype
);
// Map the internal function to the arithmetic tree code.
4092 enum tree_code code
;
4093 switch (gimple_call_internal_fn (stmt
))
4095 case IFN_ADD_OVERFLOW
:
4096 case IFN_UBSAN_CHECK_ADD
:
4099 case IFN_SUB_OVERFLOW
:
4100 case IFN_UBSAN_CHECK_SUB
:
4106 unsigned start
, end
;
// Which bits of the infinite-precision result decide overflow.
4108 tree ovf
= arith_overflow (code
, type
, prec
, prec0
, prec1
, prec2
,
4109 &start
, &end
, &check_zero
);
4111 unsigned startlimb
, endlimb
;
4119 startlimb
= start
/ limb_prec
;
4120 endlimb
= (end
- 1) / limb_prec
;
// prec4: bits to iterate over -- full prec when overflow is statically
// known, otherwise prec3.
4123 int prec4
= ovf
!= NULL_TREE
? prec
: prec3
;
4124 bitint_prec_kind kind
= bitint_precision_kind (prec4
);
4125 unsigned cnt
, rem
= 0, fin
= 0;
4126 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
// last_ovf: an extra trailing iteration is needed to look at the carry
// limb beyond prec3.
4127 bool last_ovf
= (ovf
== NULL_TREE
4128 && CEIL (prec2
, limb_prec
) > CEIL (prec3
, limb_prec
));
// large: straight-line code over all limbs; huge: loop handling two
// limbs per iteration (fin iterations), plus rem leftover limbs.
4129 if (kind
!= bitint_prec_huge
)
4130 cnt
= CEIL (prec4
, limb_prec
) + last_ovf
;
4133 rem
= (prec4
% (2 * limb_prec
));
4134 fin
= (prec4
- rem
) / limb_prec
;
4135 cnt
= 2 + CEIL (rem
, limb_prec
) + last_ovf
;
4136 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4139 if (kind
== bitint_prec_huge
)
4140 m_upwards_2limb
= fin
;
// Widen narrower arguments to a common _BitInt precision prec5 so
// handle_cast can produce limbs with proper sign/zero extension.
4143 tree type0
= TREE_TYPE (arg0
);
4144 tree type1
= TREE_TYPE (arg1
);
4146 if (bitint_precision_kind (prec5
) < bitint_prec_large
)
4147 prec5
= MAX (TYPE_PRECISION (type0
), TYPE_PRECISION (type1
));
4148 if (TYPE_PRECISION (type0
) < prec5
)
4150 type0
= build_bitint_type (prec5
, TYPE_UNSIGNED (type0
));
4151 if (TREE_CODE (arg0
) == INTEGER_CST
)
4152 arg0
= fold_convert (type0
, arg0
);
4154 if (TYPE_PRECISION (type1
) < prec5
)
4156 type1
= build_bitint_type (prec5
, TYPE_UNSIGNED (type1
));
4157 if (TREE_CODE (arg1
) == INTEGER_CST
)
4158 arg1
= fold_convert (type1
, arg1
);
4160 unsigned int data_cnt
= 0;
4161 tree last_rhs1
= NULL_TREE
, last_rhs2
= NULL_TREE
;
// cmp: value the extracted overflow bits are compared against (0, or the
// propagated sign-copy limb when !check_zero).
4162 tree cmp
= build_zero_cst (m_limb_type
);
4163 unsigned prec_limbs
= CEIL ((unsigned) prec
, limb_prec
);
4164 tree ovf_out
= NULL_TREE
, cmp_out
= NULL_TREE
;
// Main per-limb loop: i indexes straight-line steps (large) or the
// post-loop leftover limbs (huge, where idx is the loop IV for i <= 1).
4165 for (unsigned i
= 0; i
< cnt
; i
++)
4169 if (kind
!= bitint_prec_huge
)
4172 idx
= size_int (fin
+ i
- 2);
// Regular iteration: fetch the idx-th limb of both operands (through
// handle_cast when the argument type was widened).
4173 if (!last_ovf
|| i
< cnt
- 1)
4175 if (type0
!= TREE_TYPE (arg0
))
4176 rhs1
= handle_cast (type0
, arg0
, idx
);
4178 rhs1
= handle_operand (arg0
, idx
);
4179 if (type1
!= TREE_TYPE (arg1
))
4180 rhs2
= handle_cast (type1
, arg1
, idx
);
4182 rhs2
= handle_operand (arg1
, idx
);
4184 data_cnt
= m_data_cnt
;
4185 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4186 rhs1
= add_cast (m_limb_type
, rhs1
);
4187 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs2
)))
4188 rhs2
= add_cast (m_limb_type
, rhs2
);
// Extra trailing iteration (last_ovf): operands' limbs are exhausted;
// use 0 for unsigned args, or a sign-extension limb derived by
// arithmetic right shift of the previous limb for signed args.
4194 m_data_cnt
= data_cnt
;
4195 if (TYPE_UNSIGNED (type0
))
4196 rhs1
= build_zero_cst (m_limb_type
);
4199 rhs1
= add_cast (signed_type_for (m_limb_type
), last_rhs1
);
4200 if (TREE_CODE (rhs1
) == INTEGER_CST
)
4201 rhs1
= build_int_cst (m_limb_type
,
4202 tree_int_cst_sgn (rhs1
) < 0 ? -1 : 0);
4205 tree lpm1
= build_int_cst (unsigned_type_node
,
4207 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
4208 RSHIFT_EXPR
, rhs1
, lpm1
);
4210 rhs1
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
4213 if (TYPE_UNSIGNED (type1
))
4214 rhs2
= build_zero_cst (m_limb_type
);
4217 rhs2
= add_cast (signed_type_for (m_limb_type
), last_rhs2
);
4218 if (TREE_CODE (rhs2
) == INTEGER_CST
)
4219 rhs2
= build_int_cst (m_limb_type
,
4220 tree_int_cst_sgn (rhs2
) < 0 ? -1 : 0);
4223 tree lpm1
= build_int_cst (unsigned_type_node
,
4225 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2
)),
4226 RSHIFT_EXPR
, rhs2
, lpm1
);
4228 rhs2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
// The actual limb addition/subtraction with carry/borrow handling.
4232 tree rhs
= handle_plus_minus (code
, rhs1
, rhs2
, idx
);
// Overflow detection: when idx is a known constant limb in
// [startlimb, endlimb], extract the decisive bits and OR the per-limb
// overflow predicate into ovf.
4233 if (ovf
!= boolean_false_node
)
4235 if (tree_fits_uhwi_p (idx
))
4237 unsigned limb
= tree_to_uhwi (idx
);
4238 if (limb
>= startlimb
&& limb
<= endlimb
)
4240 tree l
= arith_overflow_extract_bits (start
, end
, rhs
,
4242 tree this_ovf
= make_ssa_name (boolean_type_node
);
// !check_zero: bits must be all-zeros or all-ones; detect via
// the "l + 1 > 1" (unsigned) trick on the sign-extended field.
4243 if (ovf
== NULL_TREE
&& !check_zero
)
4246 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4248 build_int_cst (m_limb_type
, 1));
4250 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4251 gimple_assign_lhs (g
),
4252 build_int_cst (m_limb_type
, 1));
4255 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4257 if (ovf
== NULL_TREE
)
4261 tree b
= make_ssa_name (boolean_type_node
);
4262 g
= gimple_build_assign (b
, BIT_IOR_EXPR
, ovf
, this_ovf
);
// Huge case: idx is the loop IV, so the overflow check needs runtime
// comparisons against startlimb; ovf/cmp are carried through the loop
// via prepare_data_in_out PHIs.
4268 else if (startlimb
< fin
)
4270 if (m_first
&& startlimb
+ 2 < fin
)
4273 ovf
= prepare_data_in_out (boolean_false_node
, idx
, &data_out
);
4274 ovf_out
= m_data
.pop ();
4278 cmp
= prepare_data_in_out (cmp
, idx
, &data_out
);
4279 cmp_out
= m_data
.pop ();
4283 if (i
!= 0 || startlimb
!= fin
- 1)
// One comparison suffices when the loop parity pins which of the two
// limbs per iteration can be startlimb.
4286 bool single_comparison
4287 = (startlimb
+ 2 >= fin
|| (startlimb
& 1) != (i
& 1));
4288 if (!single_comparison
)
4291 if (!check_zero
&& (start
% limb_prec
) == 0)
4292 single_comparison
= true;
4294 else if ((startlimb
& 1) == (i
& 1))
4298 g
= gimple_build_cond (cmp_code
, idx
, size_int (startlimb
),
4299 NULL_TREE
, NULL_TREE
);
4300 edge edge_true_true
, edge_true_false
, edge_false
;
4302 if (!single_comparison
)
4303 g2
= gimple_build_cond (NE_EXPR
, idx
,
4304 size_int (startlimb
), NULL_TREE
,
4306 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4307 profile_probability::likely (),
4308 edge_true_true
, edge_true_false
,
4310 unsigned tidx
= startlimb
+ (cmp_code
== GT_EXPR
);
4311 tree l
= arith_overflow_extract_bits (start
, end
, rhs
, tidx
,
4313 tree this_ovf
= make_ssa_name (boolean_type_node
);
4314 if (cmp_code
!= GT_EXPR
&& !check_zero
)
4316 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4318 build_int_cst (m_limb_type
, 1));
4320 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4321 gimple_assign_lhs (g
),
4322 build_int_cst (m_limb_type
, 1));
4325 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4327 if (cmp_code
== GT_EXPR
)
4329 tree t
= make_ssa_name (boolean_type_node
);
4330 g
= gimple_build_assign (t
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4334 tree this_ovf2
= NULL_TREE
;
4335 if (!single_comparison
)
4337 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4338 tree t
= make_ssa_name (boolean_type_node
);
4339 g
= gimple_build_assign (t
, NE_EXPR
, rhs
, cmp
);
4341 this_ovf2
= make_ssa_name (boolean_type_node
);
4342 g
= gimple_build_assign (this_ovf2
, BIT_IOR_EXPR
,
4346 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
// Merge the per-branch ovf/cmp values with PHIs at the join block.
4348 if (i
== 1 && ovf_out
)
4351 t
= make_ssa_name (boolean_type_node
);
4352 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
4353 add_phi_arg (phi
, this_ovf
, edge_true_false
,
4355 add_phi_arg (phi
, ovf
? ovf
4356 : boolean_false_node
, edge_false
,
4359 add_phi_arg (phi
, this_ovf2
, edge_true_true
,
4362 if (!check_zero
&& cmp_code
!= GT_EXPR
)
4364 t
= cmp_out
? cmp_out
: make_ssa_name (m_limb_type
);
4365 phi
= create_phi_node (t
, edge_true_false
->dest
);
4366 add_phi_arg (phi
, l
, edge_true_false
, UNKNOWN_LOCATION
);
4367 add_phi_arg (phi
, cmp
, edge_false
, UNKNOWN_LOCATION
);
4369 add_phi_arg (phi
, cmp
, edge_true_true
,
// Store the computed limb into the destination, but only limbs below
// prec_limbs; the final partial limb may need a narrowing cast.
4379 if (tree_fits_uhwi_p (idx
) && tree_to_uhwi (idx
) >= prec_limbs
)
4381 else if (!tree_fits_uhwi_p (idx
)
4382 && (unsigned) prec
< (fin
- (i
== 0)) * limb_prec
)
4384 bool single_comparison
4385 = (((unsigned) prec
% limb_prec
) == 0
4386 || prec_limbs
+ 1 >= fin
4387 || (prec_limbs
& 1) == (i
& 1));
4388 g
= gimple_build_cond (LE_EXPR
, idx
, size_int (prec_limbs
- 1),
4389 NULL_TREE
, NULL_TREE
);
4391 if (!single_comparison
)
4392 g2
= gimple_build_cond (EQ_EXPR
, idx
,
4393 size_int (prec_limbs
- 1),
4394 NULL_TREE
, NULL_TREE
);
4395 edge edge_true_true
, edge_true_false
, edge_false
;
4396 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4397 profile_probability::unlikely (),
4398 edge_true_true
, edge_true_false
,
4400 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4401 g
= gimple_build_assign (l
, rhs
);
4403 if (!single_comparison
)
4405 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4406 tree plm1idx
= size_int (prec_limbs
- 1);
4407 tree plm1type
= limb_access_type (type
, plm1idx
);
4408 l
= limb_access (type
, var
? var
: obj
, plm1idx
, true);
4409 if (!useless_type_conversion_p (plm1type
, TREE_TYPE (rhs
)))
4410 rhs
= add_cast (plm1type
, rhs
);
4411 if (!useless_type_conversion_p (TREE_TYPE (l
),
4413 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4414 g
= gimple_build_assign (l
, rhs
);
4417 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4421 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4422 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4423 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4424 g
= gimple_build_assign (l
, rhs
);
// Huge case, loop body iterations (i <= 1): advance idx / idx_next and
// close the loop with the `idx_next != fin` back-edge condition.
4429 if (kind
== bitint_prec_huge
&& i
<= 1)
4433 idx
= make_ssa_name (sizetype
);
4434 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4440 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4443 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4444 NULL_TREE
, NULL_TREE
);
4446 m_gsi
= gsi_for_stmt (final_stmt
);
// Store result + overflow flag into their final locations.
4452 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
4455 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4456 argument or return type _Complex large/huge _BitInt. */
4459 bitint_large_huge::lower_mul_overflow (tree obj
, gimple
*stmt
)
4461 tree arg0
= gimple_call_arg (stmt
, 0);
4462 tree arg1
= gimple_call_arg (stmt
, 1);
4463 tree lhs
= gimple_call_lhs (stmt
);
4466 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4467 gsi_remove (&gsi
, true);
4470 gimple
*final_stmt
= gsi_stmt (m_gsi
);
4471 tree type
= TREE_TYPE (lhs
);
4472 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4473 type
= TREE_TYPE (type
);
4474 int prec
= TYPE_PRECISION (type
), prec0
, prec1
;
4475 arg0
= handle_operand_addr (arg0
, stmt
, NULL
, &prec0
);
4476 arg1
= handle_operand_addr (arg1
, stmt
, NULL
, &prec1
);
4477 int prec2
= ((prec0
< 0 ? -prec0
: prec0
)
4478 + (prec1
< 0 ? -prec1
: prec1
));
4479 if (prec0
== 1 || prec1
== 1)
4481 tree var
= NULL_TREE
;
4482 tree orig_obj
= obj
;
4483 bool force_var
= false;
4484 if (obj
== NULL_TREE
4485 && TREE_CODE (type
) == BITINT_TYPE
4486 && bitint_precision_kind (type
) >= bitint_prec_large
4488 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4490 int part
= var_to_partition (m_map
, lhs
);
4491 gcc_assert (m_vars
[part
] != NULL_TREE
);
4493 if (TREE_TYPE (lhs
) == type
)
4496 else if (obj
!= NULL_TREE
&& DECL_P (obj
))
4498 for (int i
= 0; i
< 2; ++i
)
4500 tree arg
= i
? arg1
: arg0
;
4501 if (TREE_CODE (arg
) == ADDR_EXPR
)
4502 arg
= TREE_OPERAND (arg
, 0);
4503 if (get_base_address (arg
) == obj
)
4510 if (obj
== NULL_TREE
4512 || TREE_CODE (type
) != BITINT_TYPE
4513 || bitint_precision_kind (type
) < bitint_prec_large
4514 || prec2
> (CEIL (prec
, limb_prec
) * limb_prec
* (orig_obj
? 1 : 2)))
4516 unsigned HOST_WIDE_INT nelts
= CEIL (MAX (prec
, prec2
), limb_prec
);
4517 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4518 var
= create_tmp_var (atype
);
4520 tree addr
= build_fold_addr_expr (var
? var
: obj
);
4521 addr
= force_gimple_operand_gsi (&m_gsi
, addr
, true,
4522 NULL_TREE
, true, GSI_SAME_STMT
);
4523 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
4525 = gimple_build_call_internal (IFN_MULBITINT
, 6,
4526 addr
, build_int_cst (sitype
,
4528 arg0
, build_int_cst (sitype
, prec0
),
4529 arg1
, build_int_cst (sitype
, prec1
));
4532 unsigned start
, end
;
4534 tree ovf
= arith_overflow (MULT_EXPR
, type
, prec
, prec0
, prec1
, prec2
,
4535 &start
, &end
, &check_zero
);
4536 if (ovf
== NULL_TREE
)
4538 unsigned startlimb
= start
/ limb_prec
;
4539 unsigned endlimb
= (end
- 1) / limb_prec
;
4541 bool use_loop
= false;
4542 if (startlimb
== endlimb
)
4544 else if (startlimb
+ 1 == endlimb
)
4546 else if ((end
% limb_prec
) == 0)
4554 use_loop
= startlimb
+ 2 < endlimb
;
4558 tree l
= limb_access (NULL_TREE
, var
? var
: obj
,
4559 size_int (startlimb
), true);
4560 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4562 l
= arith_overflow_extract_bits (start
, end
, gimple_assign_lhs (g
),
4563 startlimb
, check_zero
);
4564 ovf
= make_ssa_name (boolean_type_node
);
4566 g
= gimple_build_assign (ovf
, NE_EXPR
, l
,
4567 build_zero_cst (m_limb_type
));
4570 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4572 build_int_cst (m_limb_type
, 1));
4574 g
= gimple_build_assign (ovf
, GT_EXPR
, gimple_assign_lhs (g
),
4575 build_int_cst (m_limb_type
, 1));
4581 basic_block edge_bb
= NULL
;
4582 gimple_stmt_iterator gsi
= m_gsi
;
4584 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4586 m_gsi
= gsi_end_bb (edge_bb
);
4588 tree cmp
= build_zero_cst (m_limb_type
);
4589 for (unsigned i
= 0; i
< cnt
; i
++)
4591 tree idx
, idx_next
= NULL_TREE
;
4593 idx
= size_int (startlimb
);
4595 idx
= size_int (endlimb
);
4597 idx
= create_loop (size_int (startlimb
+ 1), &idx_next
);
4599 idx
= size_int (startlimb
+ 1);
4600 tree l
= limb_access (NULL_TREE
, var
? var
: obj
, idx
, true);
4601 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4603 l
= gimple_assign_lhs (g
);
4604 if (i
== 0 || i
== 2)
4605 l
= arith_overflow_extract_bits (start
, end
, l
,
4608 if (i
== 0 && !check_zero
)
4611 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4613 build_int_cst (m_limb_type
, 1));
4615 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4616 build_int_cst (m_limb_type
, 1),
4617 NULL_TREE
, NULL_TREE
);
4620 g
= gimple_build_cond (NE_EXPR
, l
, cmp
, NULL_TREE
, NULL_TREE
);
4622 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4623 e1
->flags
= EDGE_FALSE_VALUE
;
4624 edge e2
= make_edge (e1
->src
, gimple_bb (final_stmt
),
4626 e1
->probability
= profile_probability::likely ();
4627 e2
->probability
= e1
->probability
.invert ();
4629 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4630 m_gsi
= gsi_after_labels (e1
->dest
);
4631 if (i
== 1 && use_loop
)
4633 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4636 g
= gimple_build_cond (NE_EXPR
, idx_next
,
4637 size_int (endlimb
+ (cnt
== 2)),
4638 NULL_TREE
, NULL_TREE
);
4640 edge true_edge
, false_edge
;
4641 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4644 m_gsi
= gsi_after_labels (false_edge
->dest
);
4649 ovf
= make_ssa_name (boolean_type_node
);
4650 basic_block bb
= gimple_bb (final_stmt
);
4651 gphi
*phi
= create_phi_node (ovf
, bb
);
4652 edge e1
= find_edge (gsi_bb (m_gsi
), bb
);
4654 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4656 tree val
= e
== e1
? boolean_false_node
: boolean_true_node
;
4657 add_phi_arg (phi
, val
, e
, UNKNOWN_LOCATION
);
4659 m_gsi
= gsi_for_stmt (final_stmt
);
4663 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, MULT_EXPR
);
4666 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4667 .{ADD,SUB,MUL}_OVERFLOW call. */
4670 bitint_large_huge::lower_cplxpart_stmt (tree obj
, gimple
*stmt
)
4672 tree rhs1
= gimple_assign_rhs1 (stmt
);
4673 rhs1
= TREE_OPERAND (rhs1
, 0);
4674 if (obj
== NULL_TREE
)
4676 int part
= var_to_partition (m_map
, gimple_assign_lhs (stmt
));
4677 gcc_assert (m_vars
[part
] != NULL_TREE
);
4680 if (TREE_CODE (rhs1
) == SSA_NAME
4682 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
4684 lower_call (obj
, SSA_NAME_DEF_STMT (rhs1
));
4687 int part
= var_to_partition (m_map
, rhs1
);
4688 gcc_assert (m_vars
[part
] != NULL_TREE
);
4689 tree var
= m_vars
[part
];
4690 unsigned HOST_WIDE_INT nelts
4691 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
4692 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4693 if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
4694 obj
= build1 (VIEW_CONVERT_EXPR
, atype
, obj
);
4695 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (var
)),
4696 gimple_assign_rhs_code (stmt
) == REALPART_EXPR
4697 ? 0 : nelts
* m_limb_size
);
4698 tree v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), off
);
4699 gimple
*g
= gimple_build_assign (obj
, v2
);
4703 /* Lower COMPLEX_EXPR stmt. */
4706 bitint_large_huge::lower_complexexpr_stmt (gimple
*stmt
)
4708 tree lhs
= gimple_assign_lhs (stmt
);
4709 tree rhs1
= gimple_assign_rhs1 (stmt
);
4710 tree rhs2
= gimple_assign_rhs2 (stmt
);
4711 int part
= var_to_partition (m_map
, lhs
);
4712 gcc_assert (m_vars
[part
] != NULL_TREE
);
4714 unsigned HOST_WIDE_INT nelts
4715 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1
))) / limb_prec
;
4716 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4717 tree zero
= build_zero_cst (build_pointer_type (TREE_TYPE (lhs
)));
4718 tree v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), zero
);
4720 if (TREE_CODE (rhs1
) == SSA_NAME
)
4722 part
= var_to_partition (m_map
, rhs1
);
4723 gcc_assert (m_vars
[part
] != NULL_TREE
);
4726 else if (integer_zerop (rhs1
))
4727 v2
= build_zero_cst (atype
);
4729 v2
= tree_output_constant_def (rhs1
);
4730 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4731 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4732 gimple
*g
= gimple_build_assign (v1
, v2
);
4734 tree off
= fold_convert (build_pointer_type (TREE_TYPE (lhs
)),
4735 TYPE_SIZE_UNIT (atype
));
4736 v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), off
);
4737 if (TREE_CODE (rhs2
) == SSA_NAME
)
4739 part
= var_to_partition (m_map
, rhs2
);
4740 gcc_assert (m_vars
[part
] != NULL_TREE
);
4743 else if (integer_zerop (rhs2
))
4744 v2
= build_zero_cst (atype
);
4746 v2
= tree_output_constant_def (rhs2
);
4747 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4748 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4749 g
= gimple_build_assign (v1
, v2
);
4753 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4757 bitint_large_huge::lower_bit_query (gimple
*stmt
)
4759 tree arg0
= gimple_call_arg (stmt
, 0);
4760 tree arg1
= (gimple_call_num_args (stmt
) == 2
4761 ? gimple_call_arg (stmt
, 1) : NULL_TREE
);
4762 tree lhs
= gimple_call_lhs (stmt
);
4767 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4768 gsi_remove (&gsi
, true);
4771 tree type
= TREE_TYPE (arg0
);
4772 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
4773 bitint_prec_kind kind
= bitint_precision_kind (type
);
4774 gcc_assert (kind
>= bitint_prec_large
);
4775 enum internal_fn ifn
= gimple_call_internal_fn (stmt
);
4776 enum built_in_function fcode
= END_BUILTINS
;
4777 gcc_assert (TYPE_PRECISION (unsigned_type_node
) == limb_prec
4778 || TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
4779 || TYPE_PRECISION (long_long_unsigned_type_node
) == limb_prec
);
4783 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4784 fcode
= BUILT_IN_CLZ
;
4785 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4786 fcode
= BUILT_IN_CLZL
;
4788 fcode
= BUILT_IN_CLZLL
;
4791 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4792 we don't add the addend at the end. */
4793 arg1
= integer_zero_node
;
4796 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4797 fcode
= BUILT_IN_CTZ
;
4798 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4799 fcode
= BUILT_IN_CTZL
;
4801 fcode
= BUILT_IN_CTZLL
;
4805 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4806 fcode
= BUILT_IN_CLRSB
;
4807 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4808 fcode
= BUILT_IN_CLRSBL
;
4810 fcode
= BUILT_IN_CLRSBLL
;
4813 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4814 fcode
= BUILT_IN_PARITY
;
4815 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4816 fcode
= BUILT_IN_PARITYL
;
4818 fcode
= BUILT_IN_PARITYLL
;
4822 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4823 fcode
= BUILT_IN_POPCOUNT
;
4824 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4825 fcode
= BUILT_IN_POPCOUNTL
;
4827 fcode
= BUILT_IN_POPCOUNTLL
;
4833 tree fndecl
= builtin_decl_explicit (fcode
), res
= NULL_TREE
;
4834 unsigned cnt
= 0, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
4835 struct bq_details
{ edge e
; tree val
, addend
; } *bqp
= NULL
;
4836 basic_block edge_bb
= NULL
;
4839 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
4840 if (kind
== bitint_prec_large
)
4841 cnt
= CEIL (prec
, limb_prec
);
4844 rem
= (prec
% (2 * limb_prec
));
4845 end
= (prec
- rem
) / limb_prec
;
4846 cnt
= 2 + CEIL (rem
, limb_prec
);
4847 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4850 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4852 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4854 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4856 if (kind
== bitint_prec_large
)
4857 m_gsi
= gsi_end_bb (edge_bb
);
4858 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4861 m_after_stmt
= stmt
;
4862 if (kind
!= bitint_prec_large
)
4863 m_upwards_2limb
= end
;
4865 for (unsigned i
= 0; i
< cnt
; i
++)
4868 if (kind
== bitint_prec_large
)
4871 idx
= size_int (end
+ (i
> 2));
4873 tree rhs1
= handle_operand (arg0
, idx
);
4874 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4876 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4877 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4878 rhs1
= add_cast (m_limb_type
, rhs1
);
4882 if (ifn
== IFN_PARITY
)
4883 in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
4884 else if (ifn
== IFN_FFS
)
4885 in
= prepare_data_in_out (integer_one_node
, idx
, &out
);
4887 in
= prepare_data_in_out (integer_zero_node
, idx
, &out
);
4893 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4894 build_zero_cst (m_limb_type
),
4895 NULL_TREE
, NULL_TREE
);
4898 e1
= split_block (gsi_bb (m_gsi
), g
);
4899 e1
->flags
= EDGE_FALSE_VALUE
;
4900 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4901 e1
->probability
= profile_probability::unlikely ();
4902 e2
->probability
= e1
->probability
.invert ();
4904 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4905 m_gsi
= gsi_after_labels (e1
->dest
);
4908 if (tree_fits_uhwi_p (idx
))
4910 = build_int_cst (integer_type_node
,
4911 tree_to_uhwi (idx
) * limb_prec
4912 + (ifn
== IFN_FFS
));
4919 res
= make_ssa_name (integer_type_node
);
4920 g
= gimple_build_assign (res
, PLUS_EXPR
, in
,
4921 build_int_cst (integer_type_node
,
4924 m_data
[m_data_cnt
] = res
;
4928 if (!integer_zerop (in
))
4930 if (kind
== bitint_prec_huge
&& i
== 1)
4933 res
= make_ssa_name (m_limb_type
);
4934 g
= gimple_build_assign (res
, BIT_XOR_EXPR
, in
, rhs1
);
4939 m_data
[m_data_cnt
] = res
;
4942 g
= gimple_build_call (fndecl
, 1, rhs1
);
4943 tem
= make_ssa_name (integer_type_node
);
4944 gimple_call_set_lhs (g
, tem
);
4946 if (!integer_zerop (in
))
4948 if (kind
== bitint_prec_huge
&& i
== 1)
4951 res
= make_ssa_name (integer_type_node
);
4952 g
= gimple_build_assign (res
, PLUS_EXPR
, in
, tem
);
4957 m_data
[m_data_cnt
] = res
;
4964 if (kind
== bitint_prec_huge
&& i
<= 1)
4968 idx
= make_ssa_name (sizetype
);
4969 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4975 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4978 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
4979 NULL_TREE
, NULL_TREE
);
4981 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4982 m_gsi
= gsi_after_labels (edge_bb
);
4984 m_gsi
= gsi_for_stmt (stmt
);
4992 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
, first
= NULL_TREE
;
4994 if (kind
== bitint_prec_large
)
4995 cnt
= CEIL (prec
, limb_prec
);
4998 rem
= prec
% limb_prec
;
4999 if (rem
== 0 && (!TYPE_UNSIGNED (type
) || ifn
== IFN_CLRSB
))
5001 end
= (prec
- rem
) / limb_prec
;
5002 cnt
= 1 + (rem
!= 0);
5003 if (ifn
== IFN_CLRSB
)
5007 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
5009 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
5011 m_gsi
= gsi_end_bb (edge_bb
);
5014 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
5017 gsi
= gsi_for_stmt (stmt
);
5019 e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
5021 bqp
= XALLOCAVEC (struct bq_details
, 2 * cnt
);
5024 for (unsigned i
= 0; i
< cnt
; i
++)
5027 if (kind
== bitint_prec_large
)
5028 idx
= size_int (cnt
- i
- 1);
5029 else if (i
== cnt
- 1)
5030 idx
= create_loop (size_int (end
- 1), &idx_next
);
5032 idx
= size_int (end
);
5034 tree rhs1
= handle_operand (arg0
, idx
);
5035 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
5037 if (ifn
== IFN_CLZ
&& !TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
5038 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
5039 else if (ifn
== IFN_CLRSB
&& TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
5040 rhs1
= add_cast (signed_type_for (TREE_TYPE (rhs1
)), rhs1
);
5041 rhs1
= add_cast (m_limb_type
, rhs1
);
5046 g
= gimple_build_cond (NE_EXPR
, rhs1
,
5047 build_zero_cst (m_limb_type
),
5048 NULL_TREE
, NULL_TREE
);
5050 edge e1
= split_block (gsi_bb (m_gsi
), g
);
5051 e1
->flags
= EDGE_FALSE_VALUE
;
5052 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
5053 e1
->probability
= profile_probability::unlikely ();
5054 e2
->probability
= e1
->probability
.invert ();
5056 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
5057 m_gsi
= gsi_after_labels (e1
->dest
);
5066 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
5068 build_int_cst (m_limb_type
, 1));
5070 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
5071 build_int_cst (m_limb_type
, 1),
5072 NULL_TREE
, NULL_TREE
);
5077 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
5078 BIT_XOR_EXPR
, rhs1
, first
);
5080 tree stype
= signed_type_for (m_limb_type
);
5081 g
= gimple_build_cond (LT_EXPR
,
5083 gimple_assign_lhs (g
)),
5084 build_zero_cst (stype
),
5085 NULL_TREE
, NULL_TREE
);
5087 edge e1
= split_block (gsi_bb (m_gsi
), g
);
5088 e1
->flags
= EDGE_FALSE_VALUE
;
5089 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
),
5091 e1
->probability
= profile_probability::unlikely ();
5092 e2
->probability
= e1
->probability
.invert ();
5094 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
,
5096 m_gsi
= gsi_after_labels (e1
->dest
);
5098 g
= gimple_build_cond (NE_EXPR
, rhs1
, first
,
5099 NULL_TREE
, NULL_TREE
);
5102 edge e1
= split_block (gsi_bb (m_gsi
), g
);
5103 e1
->flags
= EDGE_FALSE_VALUE
;
5104 edge e2
= make_edge (e1
->src
, edge_bb
, EDGE_TRUE_VALUE
);
5105 e1
->probability
= profile_probability::unlikely ();
5106 e2
->probability
= e1
->probability
.invert ();
5108 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
5109 m_gsi
= gsi_after_labels (e1
->dest
);
5110 bqp
[2 * i
+ 1].e
= e2
;
5113 if (tree_fits_uhwi_p (idx
))
5115 = build_int_cst (integer_type_node
,
5117 - (((int) tree_to_uhwi (idx
) + 1)
5118 * limb_prec
) - sub_one
);
5122 in
= build_int_cst (integer_type_node
, rem
- sub_one
);
5124 in
= prepare_data_in_out (in
, idx
, &out
);
5125 out
= m_data
[m_data_cnt
+ 1];
5127 g
= gimple_build_assign (out
, PLUS_EXPR
, in
,
5128 build_int_cst (integer_type_node
,
5131 m_data
[m_data_cnt
] = out
;
5135 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
5137 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
5140 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
5141 NULL_TREE
, NULL_TREE
);
5143 edge true_edge
, false_edge
;
5144 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
5145 &true_edge
, &false_edge
);
5146 m_gsi
= gsi_after_labels (false_edge
->dest
);
5156 gphi
*phi1
, *phi2
, *phi3
;
5158 bb
= gsi_bb (m_gsi
);
5159 remove_edge (find_edge (bb
, gimple_bb (stmt
)));
5160 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
5162 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
5164 for (unsigned i
= 0; i
< cnt
; i
++)
5166 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[i
].e
, UNKNOWN_LOCATION
);
5167 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[i
].e
, UNKNOWN_LOCATION
);
5169 if (arg1
== NULL_TREE
)
5171 g
= gimple_build_builtin_unreachable (m_loc
);
5174 m_gsi
= gsi_for_stmt (stmt
);
5175 g
= gimple_build_call (fndecl
, 1, gimple_phi_result (phi1
));
5176 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
5178 if (arg1
== NULL_TREE
)
5179 g
= gimple_build_assign (lhs
, PLUS_EXPR
,
5180 gimple_phi_result (phi2
),
5181 gimple_call_lhs (g
));
5184 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5185 PLUS_EXPR
, gimple_phi_result (phi2
),
5186 gimple_call_lhs (g
));
5188 edge e1
= split_block (gimple_bb (stmt
), g
);
5189 edge e2
= make_edge (bb
, e1
->dest
, EDGE_FALLTHRU
);
5190 e2
->probability
= profile_probability::always ();
5191 set_immediate_dominator (CDI_DOMINATORS
, e1
->dest
,
5192 get_immediate_dominator (CDI_DOMINATORS
,
5194 phi3
= create_phi_node (make_ssa_name (integer_type_node
), e1
->dest
);
5195 add_phi_arg (phi3
, gimple_assign_lhs (g
), e1
, UNKNOWN_LOCATION
);
5196 add_phi_arg (phi3
, arg1
, e2
, UNKNOWN_LOCATION
);
5197 m_gsi
= gsi_for_stmt (stmt
);
5198 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
5200 gsi_replace (&m_gsi
, g
, true);
5203 bb
= gsi_bb (m_gsi
);
5204 remove_edge (find_edge (bb
, edge_bb
));
5206 e
= make_edge (bb
, gimple_bb (stmt
), EDGE_FALLTHRU
);
5207 e
->probability
= profile_probability::always ();
5208 set_immediate_dominator (CDI_DOMINATORS
, gimple_bb (stmt
),
5209 get_immediate_dominator (CDI_DOMINATORS
,
5211 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
5213 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
5215 phi3
= create_phi_node (make_ssa_name (integer_type_node
),
5217 for (unsigned i
= 0; i
< cnt
; i
++)
5219 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[2 * i
+ 1].e
, UNKNOWN_LOCATION
);
5220 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[2 * i
+ 1].e
,
5222 tree a
= bqp
[i
].addend
;
5223 if (i
&& kind
== bitint_prec_large
)
5224 a
= int_const_binop (PLUS_EXPR
, a
, integer_minus_one_node
);
5226 add_phi_arg (phi3
, a
, bqp
[2 * i
].e
, UNKNOWN_LOCATION
);
5228 add_phi_arg (phi3
, build_int_cst (integer_type_node
, prec
- 1), e
,
5230 m_gsi
= gsi_after_labels (edge_bb
);
5231 g
= gimple_build_call (fndecl
, 1,
5232 add_cast (signed_type_for (m_limb_type
),
5233 gimple_phi_result (phi1
)));
5234 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
5236 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5237 PLUS_EXPR
, gimple_call_lhs (g
),
5238 gimple_phi_result (phi2
));
5240 if (kind
!= bitint_prec_large
)
5242 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5243 PLUS_EXPR
, gimple_assign_lhs (g
),
5247 add_phi_arg (phi3
, gimple_assign_lhs (g
),
5248 find_edge (edge_bb
, gimple_bb (stmt
)), UNKNOWN_LOCATION
);
5249 m_gsi
= gsi_for_stmt (stmt
);
5250 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
5251 gsi_replace (&m_gsi
, g
, true);
5254 g
= gimple_build_call (fndecl
, 1, res
);
5255 gimple_call_set_lhs (g
, lhs
);
5256 gsi_replace (&m_gsi
, g
, true);
5259 g
= gimple_build_assign (lhs
, res
);
5260 gsi_replace (&m_gsi
, g
, true);
5267 /* Lower a call statement with one or more large/huge _BitInt
5268 arguments or large/huge _BitInt return value. */
5271 bitint_large_huge::lower_call (tree obj
, gimple
*stmt
)
5273 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
5274 unsigned int nargs
= gimple_call_num_args (stmt
);
5275 if (gimple_call_internal_p (stmt
))
5276 switch (gimple_call_internal_fn (stmt
))
5278 case IFN_ADD_OVERFLOW
:
5279 case IFN_SUB_OVERFLOW
:
5280 case IFN_UBSAN_CHECK_ADD
:
5281 case IFN_UBSAN_CHECK_SUB
:
5282 lower_addsub_overflow (obj
, stmt
);
5284 case IFN_MUL_OVERFLOW
:
5285 case IFN_UBSAN_CHECK_MUL
:
5286 lower_mul_overflow (obj
, stmt
);
5294 lower_bit_query (stmt
);
5299 bool returns_twice
= (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
) != 0;
5300 for (unsigned int i
= 0; i
< nargs
; ++i
)
5302 tree arg
= gimple_call_arg (stmt
, i
);
5303 if (TREE_CODE (arg
) != SSA_NAME
5304 || TREE_CODE (TREE_TYPE (arg
)) != BITINT_TYPE
5305 || bitint_precision_kind (TREE_TYPE (arg
)) <= bitint_prec_middle
)
5307 if (SSA_NAME_IS_DEFAULT_DEF (arg
)
5308 && (!SSA_NAME_VAR (arg
) || VAR_P (SSA_NAME_VAR (arg
))))
5310 tree var
= create_tmp_reg (TREE_TYPE (arg
));
5311 arg
= get_or_create_ssa_default_def (cfun
, var
);
5315 int p
= var_to_partition (m_map
, arg
);
5317 gcc_assert (v
!= NULL_TREE
);
5318 if (!types_compatible_p (TREE_TYPE (arg
), TREE_TYPE (v
)))
5319 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (arg
), v
);
5320 arg
= make_ssa_name (TREE_TYPE (arg
));
5321 gimple
*g
= gimple_build_assign (arg
, v
);
5322 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5325 m_returns_twice_calls
.safe_push (stmt
);
5326 returns_twice
= false;
5329 gimple_call_set_arg (stmt
, i
, arg
);
5330 if (m_preserved
== NULL
)
5331 m_preserved
= BITMAP_ALLOC (NULL
);
5332 bitmap_set_bit (m_preserved
, SSA_NAME_VERSION (arg
));
5334 tree lhs
= gimple_call_lhs (stmt
);
5336 && TREE_CODE (lhs
) == SSA_NAME
5337 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5338 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5340 int p
= var_to_partition (m_map
, lhs
);
5342 gcc_assert (v
!= NULL_TREE
);
5343 if (!types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (v
)))
5344 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
), v
);
5345 gimple_call_set_lhs (stmt
, v
);
5346 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5351 /* Lower __asm STMT which involves large/huge _BitInt values. */
5354 bitint_large_huge::lower_asm (gimple
*stmt
)
5356 gasm
*g
= as_a
<gasm
*> (stmt
);
5357 unsigned noutputs
= gimple_asm_noutputs (g
);
5358 unsigned ninputs
= gimple_asm_ninputs (g
);
5360 for (unsigned i
= 0; i
< noutputs
; ++i
)
5362 tree t
= gimple_asm_output_op (g
, i
);
5363 tree s
= TREE_VALUE (t
);
5364 if (TREE_CODE (s
) == SSA_NAME
5365 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5366 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5368 int part
= var_to_partition (m_map
, s
);
5369 gcc_assert (m_vars
[part
] != NULL_TREE
);
5370 TREE_VALUE (t
) = m_vars
[part
];
5373 for (unsigned i
= 0; i
< ninputs
; ++i
)
5375 tree t
= gimple_asm_input_op (g
, i
);
5376 tree s
= TREE_VALUE (t
);
5377 if (TREE_CODE (s
) == SSA_NAME
5378 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5379 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5381 if (SSA_NAME_IS_DEFAULT_DEF (s
)
5382 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
5384 TREE_VALUE (t
) = create_tmp_var (TREE_TYPE (s
), "bitint");
5385 mark_addressable (TREE_VALUE (t
));
5389 int part
= var_to_partition (m_map
, s
);
5390 gcc_assert (m_vars
[part
] != NULL_TREE
);
5391 TREE_VALUE (t
) = m_vars
[part
];
5398 /* Lower statement STMT which involves large/huge _BitInt values
5399 into code accessing individual limbs. */
5402 bitint_large_huge::lower_stmt (gimple
*stmt
)
5406 m_data
.truncate (0);
5408 m_gsi
= gsi_for_stmt (stmt
);
5409 m_after_stmt
= NULL
;
5412 gsi_prev (&m_init_gsi
);
5413 m_preheader_bb
= NULL
;
5414 m_upwards_2limb
= 0;
5417 m_cast_conditional
= false;
5419 m_loc
= gimple_location (stmt
);
5420 if (is_gimple_call (stmt
))
5422 lower_call (NULL_TREE
, stmt
);
5425 if (gimple_code (stmt
) == GIMPLE_ASM
)
5430 tree lhs
= NULL_TREE
, cmp_op1
= NULL_TREE
, cmp_op2
= NULL_TREE
;
5431 tree_code cmp_code
= comparison_op (stmt
, &cmp_op1
, &cmp_op2
);
5432 bool eq_p
= (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
);
5433 bool mergeable_cast_p
= false;
5434 bool final_cast_p
= false;
5435 if (gimple_assign_cast_p (stmt
))
5437 lhs
= gimple_assign_lhs (stmt
);
5438 tree rhs1
= gimple_assign_rhs1 (stmt
);
5439 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
5440 rhs1
= TREE_OPERAND (rhs1
, 0);
5441 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5442 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5443 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)))
5444 mergeable_cast_p
= true;
5445 else if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
5446 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
5447 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
5448 || POINTER_TYPE_P (TREE_TYPE (lhs
))
5449 || gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
))
5451 final_cast_p
= true;
5452 if (((TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
5453 && TYPE_PRECISION (TREE_TYPE (lhs
)) > MAX_FIXED_MODE_SIZE
)
5454 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
5455 && !POINTER_TYPE_P (TREE_TYPE (lhs
))))
5456 && gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
)
5458 /* Handle VIEW_CONVERT_EXPRs to not generally supported
5459 huge INTEGER_TYPEs like uint256_t or uint512_t. These
5460 are usually emitted from memcpy folding and backends
5461 support moves with them but that is usually it.
5462 Similarly handle VCEs to vector/complex types etc. */
5463 gcc_assert (TREE_CODE (rhs1
) == SSA_NAME
);
5464 if (SSA_NAME_IS_DEFAULT_DEF (rhs1
)
5465 && (!SSA_NAME_VAR (rhs1
) || VAR_P (SSA_NAME_VAR (rhs1
))))
5467 tree var
= create_tmp_reg (TREE_TYPE (lhs
));
5468 rhs1
= get_or_create_ssa_default_def (cfun
, var
);
5469 gimple_assign_set_rhs1 (stmt
, rhs1
);
5470 gimple_assign_set_rhs_code (stmt
, SSA_NAME
);
5472 else if (m_names
== NULL
5473 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
5475 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5476 gcc_assert (gimple_assign_load_p (g
));
5477 tree mem
= gimple_assign_rhs1 (g
);
5478 tree ltype
= TREE_TYPE (lhs
);
5479 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (mem
));
5480 if (as
!= TYPE_ADDR_SPACE (ltype
))
5482 = build_qualified_type (ltype
,
5484 | ENCODE_QUAL_ADDR_SPACE (as
));
5485 rhs1
= build1 (VIEW_CONVERT_EXPR
, ltype
, unshare_expr (mem
));
5486 gimple_assign_set_rhs1 (stmt
, rhs1
);
5490 int part
= var_to_partition (m_map
, rhs1
);
5491 gcc_assert (m_vars
[part
] != NULL_TREE
);
5492 rhs1
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
),
5494 gimple_assign_set_rhs1 (stmt
, rhs1
);
5499 if (TREE_CODE (rhs1
) == SSA_NAME
5501 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5503 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5504 if (is_gimple_assign (g
)
5505 && gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
5507 tree rhs2
= TREE_OPERAND (gimple_assign_rhs1 (g
), 0);
5508 if (TREE_CODE (rhs2
) == SSA_NAME
5510 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs2
))))
5512 g
= SSA_NAME_DEF_STMT (rhs2
);
5513 int ovf
= optimizable_arith_overflow (g
);
5515 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5516 and IMAGPART_EXPR uses, where the latter is cast to
5517 non-_BitInt, it will be optimized when handling
5518 the REALPART_EXPR. */
5522 lower_call (NULL_TREE
, g
);
5529 else if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5530 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5531 && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
5532 && !POINTER_TYPE_P (TREE_TYPE (rhs1
))
5533 && gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
)
5535 int part
= var_to_partition (m_map
, lhs
);
5536 gcc_assert (m_vars
[part
] != NULL_TREE
);
5537 lhs
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs1
), m_vars
[part
]);
5538 insert_before (gimple_build_assign (lhs
, rhs1
));
5542 if (gimple_store_p (stmt
))
5544 tree rhs1
= gimple_assign_rhs1 (stmt
);
5545 if (TREE_CODE (rhs1
) == SSA_NAME
5547 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5549 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5550 m_loc
= gimple_location (g
);
5551 lhs
= gimple_assign_lhs (stmt
);
5552 if (is_gimple_assign (g
) && !mergeable_op (g
))
5553 switch (gimple_assign_rhs_code (g
))
5557 lower_shift_stmt (lhs
, g
);
5559 m_gsi
= gsi_for_stmt (stmt
);
5560 unlink_stmt_vdef (stmt
);
5561 release_ssa_name (gimple_vdef (stmt
));
5562 gsi_remove (&m_gsi
, true);
5565 case TRUNC_DIV_EXPR
:
5566 case TRUNC_MOD_EXPR
:
5567 lower_muldiv_stmt (lhs
, g
);
5569 case FIX_TRUNC_EXPR
:
5570 lower_float_conv_stmt (lhs
, g
);
5574 lower_cplxpart_stmt (lhs
, g
);
5576 case VIEW_CONVERT_EXPR
:
5578 tree rhs1
= gimple_assign_rhs1 (g
);
5579 rhs1
= TREE_OPERAND (rhs1
, 0);
5580 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
5581 && !POINTER_TYPE_P (TREE_TYPE (rhs1
)))
5583 tree ltype
= TREE_TYPE (rhs1
);
5584 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (lhs
));
5586 = build_qualified_type (ltype
,
5587 TYPE_QUALS (TREE_TYPE (lhs
))
5588 | ENCODE_QUAL_ADDR_SPACE (as
));
5589 lhs
= build1 (VIEW_CONVERT_EXPR
, ltype
, lhs
);
5590 gimple_assign_set_lhs (stmt
, lhs
);
5591 gimple_assign_set_rhs1 (stmt
, rhs1
);
5592 gimple_assign_set_rhs_code (stmt
, TREE_CODE (rhs1
));
5601 else if (optimizable_arith_overflow (g
) == 3)
5603 lower_call (lhs
, g
);
5606 m_loc
= gimple_location (stmt
);
5609 if (mergeable_op (stmt
)
5610 || gimple_store_p (stmt
)
5611 || gimple_assign_load_p (stmt
)
5613 || mergeable_cast_p
)
5615 lhs
= lower_mergeable_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5619 else if (cmp_code
!= ERROR_MARK
)
5620 lhs
= lower_comparison_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5621 if (cmp_code
!= ERROR_MARK
)
5623 if (gimple_code (stmt
) == GIMPLE_COND
)
5625 gcond
*cstmt
= as_a
<gcond
*> (stmt
);
5626 gimple_cond_set_lhs (cstmt
, lhs
);
5627 gimple_cond_set_rhs (cstmt
, boolean_false_node
);
5628 gimple_cond_set_code (cstmt
, cmp_code
);
5632 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
5634 tree cond
= build2 (cmp_code
, boolean_type_node
, lhs
,
5635 boolean_false_node
);
5636 gimple_assign_set_rhs1 (stmt
, cond
);
5637 lhs
= gimple_assign_lhs (stmt
);
5638 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5639 || (bitint_precision_kind (TREE_TYPE (lhs
))
5640 <= bitint_prec_middle
));
5644 gimple_assign_set_rhs1 (stmt
, lhs
);
5645 gimple_assign_set_rhs2 (stmt
, boolean_false_node
);
5646 gimple_assign_set_rhs_code (stmt
, cmp_code
);
5652 tree lhs_type
= TREE_TYPE (lhs
);
5653 /* Add support for 3 or more limbs filled in from normal integral
5654 type if this assert fails. If no target chooses limb mode smaller
5655 than half of largest supported normal integral type, this will not
5657 gcc_assert (TYPE_PRECISION (lhs_type
) <= 2 * limb_prec
);
5659 if ((TREE_CODE (lhs_type
) == BITINT_TYPE
5660 && bitint_precision_kind (lhs_type
) == bitint_prec_middle
)
5661 || POINTER_TYPE_P (lhs_type
))
5662 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (lhs_type
),
5663 TYPE_UNSIGNED (lhs_type
));
5665 tree rhs1
= gimple_assign_rhs1 (stmt
);
5666 tree r1
= handle_operand (rhs1
, size_int (0));
5667 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
5668 r1
= add_cast (lhs_type
, r1
);
5669 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
5673 tree r2
= handle_operand (rhs1
, size_int (1));
5674 r2
= add_cast (lhs_type
, r2
);
5675 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
5676 build_int_cst (unsigned_type_node
,
5679 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
5680 gimple_assign_lhs (g
));
5682 r1
= gimple_assign_lhs (g
);
5684 if (lhs_type
!= TREE_TYPE (lhs
))
5685 g
= gimple_build_assign (lhs
, NOP_EXPR
, r1
);
5687 g
= gimple_build_assign (lhs
, r1
);
5688 gsi_replace (&m_gsi
, g
, true);
5691 if (is_gimple_assign (stmt
))
5692 switch (gimple_assign_rhs_code (stmt
))
5696 lower_shift_stmt (NULL_TREE
, stmt
);
5699 case TRUNC_DIV_EXPR
:
5700 case TRUNC_MOD_EXPR
:
5701 lower_muldiv_stmt (NULL_TREE
, stmt
);
5703 case FIX_TRUNC_EXPR
:
5705 lower_float_conv_stmt (NULL_TREE
, stmt
);
5709 lower_cplxpart_stmt (NULL_TREE
, stmt
);
5712 lower_complexexpr_stmt (stmt
);
5720 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5721 the desired memory state. */
5724 vuse_eq (ao_ref
*, tree vuse1
, void *data
)
5726 tree vuse2
= (tree
) data
;
5733 /* Return true if STMT uses a library function and needs to take
5734 address of its inputs. We need to avoid bit-fields in those
5735 cases. Similarly, we need to avoid overlap between destination
5736 and source limb arrays. */
5739 stmt_needs_operand_addr (gimple
*stmt
)
5741 if (is_gimple_assign (stmt
))
5742 switch (gimple_assign_rhs_code (stmt
))
5745 case TRUNC_DIV_EXPR
:
5746 case TRUNC_MOD_EXPR
:
5752 else if (gimple_call_internal_p (stmt
, IFN_MUL_OVERFLOW
)
5753 || gimple_call_internal_p (stmt
, IFN_UBSAN_CHECK_MUL
))
5758 /* Dominator walker used to discover which large/huge _BitInt
5759 loads could be sunk into all their uses. */
5761 class bitint_dom_walker
: public dom_walker
5764 bitint_dom_walker (bitmap names
, bitmap loads
)
5765 : dom_walker (CDI_DOMINATORS
), m_names (names
), m_loads (loads
) {}
5767 edge
before_dom_children (basic_block
) final override
;
5770 bitmap m_names
, m_loads
;
5774 bitint_dom_walker::before_dom_children (basic_block bb
)
5776 gphi
*phi
= get_virtual_phi (bb
);
5779 vop
= gimple_phi_result (phi
);
5780 else if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5783 vop
= (tree
) get_immediate_dominator (CDI_DOMINATORS
, bb
)->aux
;
5785 auto_vec
<tree
, 16> worklist
;
5786 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5787 !gsi_end_p (gsi
); gsi_next (&gsi
))
5789 gimple
*stmt
= gsi_stmt (gsi
);
5790 if (is_gimple_debug (stmt
))
5793 if (!vop
&& gimple_vuse (stmt
))
5794 vop
= gimple_vuse (stmt
);
5797 if (gimple_vdef (stmt
))
5798 vop
= gimple_vdef (stmt
);
5800 tree lhs
= gimple_get_lhs (stmt
);
5802 && TREE_CODE (lhs
) == SSA_NAME
5803 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5804 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5805 && !bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
5806 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5807 it means it will be handled in a loop or straight line code
5808 at the location of its (ultimate) immediate use, so for
5809 vop checking purposes check these only at the ultimate
5814 use_operand_p use_p
;
5815 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, oi
, SSA_OP_USE
)
5817 tree s
= USE_FROM_PTR (use_p
);
5818 if (TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5819 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5820 worklist
.safe_push (s
);
5823 bool needs_operand_addr
= stmt_needs_operand_addr (stmt
);
5824 while (worklist
.length () > 0)
5826 tree s
= worklist
.pop ();
5828 if (!bitmap_bit_p (m_names
, SSA_NAME_VERSION (s
)))
5830 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5831 needs_operand_addr
|= stmt_needs_operand_addr (g
);
5832 FOR_EACH_SSA_USE_OPERAND (use_p
, g
, oi
, SSA_OP_USE
)
5834 tree s2
= USE_FROM_PTR (use_p
);
5835 if (TREE_CODE (TREE_TYPE (s2
)) == BITINT_TYPE
5836 && (bitint_precision_kind (TREE_TYPE (s2
))
5837 >= bitint_prec_large
))
5838 worklist
.safe_push (s2
);
5842 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5843 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5845 tree rhs
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5846 if (TREE_CODE (rhs
) == SSA_NAME
5847 && bitmap_bit_p (m_loads
, SSA_NAME_VERSION (rhs
)))
5852 else if (!bitmap_bit_p (m_loads
, SSA_NAME_VERSION (s
)))
5855 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5856 tree rhs1
= gimple_assign_rhs1 (g
);
5857 if (needs_operand_addr
5858 && TREE_CODE (rhs1
) == COMPONENT_REF
5859 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
5861 tree fld
= TREE_OPERAND (rhs1
, 1);
5862 /* For little-endian, we can allow as inputs bit-fields
5863 which start at a limb boundary. */
5864 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
5865 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
))
5866 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
5871 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5877 ao_ref_init (&ref
, rhs1
);
5878 tree lvop
= gimple_vuse (g
);
5879 unsigned limit
= 64;
5882 && is_gimple_assign (stmt
)
5883 && gimple_store_p (stmt
)
5884 && (needs_operand_addr
5885 || !operand_equal_p (lhs
, gimple_assign_rhs1 (g
), 0)))
5888 && walk_non_aliased_vuses (&ref
, vuse
, false, vuse_eq
,
5889 NULL
, NULL
, limit
, lvop
) == NULL
)
5890 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5894 bb
->aux
= (void *) vop
;
5900 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5901 build_ssa_conflict_graph.
5902 The differences are:
5903 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5904 2) for large/huge _BitInt multiplication/division/modulo process def
5905 only after processing uses rather than before to make uses conflict
5907 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5908 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5909 the final statement. */
5912 build_bitint_stmt_ssa_conflicts (gimple
*stmt
, live_track
*live
,
5913 ssa_conflicts
*graph
, bitmap names
,
5914 void (*def
) (live_track
*, tree
,
5916 void (*use
) (live_track
*, tree
))
5918 bool muldiv_p
= false;
5919 tree lhs
= NULL_TREE
;
5920 if (is_gimple_assign (stmt
))
5922 lhs
= gimple_assign_lhs (stmt
);
5923 if (TREE_CODE (lhs
) == SSA_NAME
)
5925 tree type
= TREE_TYPE (lhs
);
5926 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5927 type
= TREE_TYPE (type
);
5928 if (TREE_CODE (type
) == BITINT_TYPE
5929 && bitint_precision_kind (type
) >= bitint_prec_large
)
5931 if (!bitmap_bit_p (names
, SSA_NAME_VERSION (lhs
)))
5933 switch (gimple_assign_rhs_code (stmt
))
5936 case TRUNC_DIV_EXPR
:
5937 case TRUNC_MOD_EXPR
:
5950 /* For stmts with more than one SSA_NAME definition pretend all the
5951 SSA_NAME outputs but the first one are live at this point, so
5952 that conflicts are added in between all those even when they are
5953 actually not really live after the asm, because expansion might
5954 copy those into pseudos after the asm and if multiple outputs
5955 share the same partition, it might overwrite those that should
5957 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5961 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5967 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5968 def (live
, var
, graph
);
5971 auto_vec
<tree
, 16> worklist
;
5972 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_USE
)
5974 tree type
= TREE_TYPE (var
);
5975 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5976 type
= TREE_TYPE (type
);
5977 if (TREE_CODE (type
) == BITINT_TYPE
5978 && bitint_precision_kind (type
) >= bitint_prec_large
)
5980 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5983 worklist
.safe_push (var
);
5987 while (worklist
.length () > 0)
5989 tree s
= worklist
.pop ();
5990 FOR_EACH_SSA_TREE_OPERAND (var
, SSA_NAME_DEF_STMT (s
), iter
, SSA_OP_USE
)
5992 tree type
= TREE_TYPE (var
);
5993 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5994 type
= TREE_TYPE (type
);
5995 if (TREE_CODE (type
) == BITINT_TYPE
5996 && bitint_precision_kind (type
) >= bitint_prec_large
)
5998 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
6001 worklist
.safe_push (var
);
6007 def (live
, lhs
, graph
);
6010 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
6011 return the largest bitint_prec_kind of them, otherwise return
6012 bitint_prec_small. */
6014 static bitint_prec_kind
6015 arith_overflow_arg_kind (gimple
*stmt
)
6017 bitint_prec_kind ret
= bitint_prec_small
;
6018 if (is_gimple_call (stmt
) && gimple_call_internal_p (stmt
))
6019 switch (gimple_call_internal_fn (stmt
))
6021 case IFN_ADD_OVERFLOW
:
6022 case IFN_SUB_OVERFLOW
:
6023 case IFN_MUL_OVERFLOW
:
6024 for (int i
= 0; i
< 2; ++i
)
6026 tree a
= gimple_call_arg (stmt
, i
);
6027 if (TREE_CODE (a
) == INTEGER_CST
6028 && TREE_CODE (TREE_TYPE (a
)) == BITINT_TYPE
)
6030 bitint_prec_kind kind
= bitint_precision_kind (TREE_TYPE (a
));
6031 ret
= MAX (ret
, kind
);
6041 /* Entry point for _BitInt(N) operation lowering during optimization. */
6044 gimple_lower_bitint (void)
6046 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
6050 for (i
= 0; i
< num_ssa_names
; ++i
)
6052 tree s
= ssa_name (i
);
6055 tree type
= TREE_TYPE (s
);
6056 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6058 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
6059 != bitint_prec_small
)
6061 type
= TREE_TYPE (type
);
6063 if (TREE_CODE (type
) == BITINT_TYPE
6064 && bitint_precision_kind (type
) != bitint_prec_small
)
6066 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6067 into memory. Such functions could have no large/huge SSA_NAMEs. */
6068 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6070 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6071 if (is_gimple_assign (g
) && gimple_store_p (g
))
6073 tree t
= gimple_assign_rhs1 (g
);
6074 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6075 && (bitint_precision_kind (TREE_TYPE (t
))
6076 >= bitint_prec_large
))
6080 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6081 to floating point types need to be rewritten. */
6082 else if (SCALAR_FLOAT_TYPE_P (type
))
6084 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6085 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
6087 tree t
= gimple_assign_rhs1 (g
);
6088 if (TREE_CODE (t
) == INTEGER_CST
6089 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6090 && (bitint_precision_kind (TREE_TYPE (t
))
6091 != bitint_prec_small
))
6096 if (i
== num_ssa_names
)
6100 auto_vec
<gimple
*, 4> switch_statements
;
6101 FOR_EACH_BB_FN (bb
, cfun
)
6103 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
6105 tree idx
= gimple_switch_index (swtch
);
6106 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
6107 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
6111 group_case_labels_stmt (swtch
);
6112 if (gimple_switch_num_labels (swtch
) == 1)
6114 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
6115 gimple_stmt_iterator gsi
= gsi_for_stmt (swtch
);
6116 gsi_remove (&gsi
, true);
6119 switch_statements
.safe_push (swtch
);
6123 if (!switch_statements
.is_empty ())
6125 bool expanded
= false;
6129 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
6131 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
6132 tree_switch_conversion::switch_decision_tree
dt (swtch
);
6133 expanded
|= dt
.analyze_switch_statement ();
6138 free_dominance_info (CDI_DOMINATORS
);
6139 free_dominance_info (CDI_POST_DOMINATORS
);
6140 mark_virtual_operands_for_renaming (cfun
);
6141 cleanup_tree_cfg (TODO_update_ssa
);
6145 struct bitint_large_huge large_huge
;
6146 bool has_large_huge_parm_result
= false;
6147 bool has_large_huge
= false;
6148 unsigned int ret
= 0, first_large_huge
= ~0U;
6149 bool edge_insertions
= false;
6150 for (; i
< num_ssa_names
; ++i
)
6152 tree s
= ssa_name (i
);
6155 tree type
= TREE_TYPE (s
);
6156 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6158 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
6159 >= bitint_prec_large
)
6160 has_large_huge
= true;
6161 type
= TREE_TYPE (type
);
6163 if (TREE_CODE (type
) == BITINT_TYPE
6164 && bitint_precision_kind (type
) >= bitint_prec_large
)
6166 if (first_large_huge
== ~0U)
6167 first_large_huge
= i
;
6168 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
6169 gimple_stmt_iterator gsi
;
6171 /* Unoptimize certain constructs to simpler alternatives to
6172 avoid having to lower all of them. */
6173 if (is_gimple_assign (stmt
) && gimple_bb (stmt
))
6174 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
6179 case TRUNC_DIV_EXPR
:
6180 case TRUNC_MOD_EXPR
:
6181 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
))
6183 location_t loc
= gimple_location (stmt
);
6184 gsi
= gsi_for_stmt (stmt
);
6185 tree rhs1
= gimple_assign_rhs1 (stmt
);
6186 tree rhs2
= gimple_assign_rhs2 (stmt
);
6187 /* For multiplication and division with (ab)
6188 lhs and one or both operands force the operands
6189 into new SSA_NAMEs to avoid coalescing failures. */
6190 if (TREE_CODE (rhs1
) == SSA_NAME
6191 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
))
6193 first_large_huge
= 0;
6194 tree t
= make_ssa_name (TREE_TYPE (rhs1
));
6195 g
= gimple_build_assign (t
, SSA_NAME
, rhs1
);
6196 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6197 gimple_set_location (g
, loc
);
6198 gimple_assign_set_rhs1 (stmt
, t
);
6201 gimple_assign_set_rhs2 (stmt
, t
);
6206 if (TREE_CODE (rhs2
) == SSA_NAME
6207 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2
))
6209 first_large_huge
= 0;
6210 tree t
= make_ssa_name (TREE_TYPE (rhs2
));
6211 g
= gimple_build_assign (t
, SSA_NAME
, rhs2
);
6212 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6213 gimple_set_location (g
, loc
);
6214 gimple_assign_set_rhs2 (stmt
, t
);
6222 first_large_huge
= 0;
6223 location_t loc
= gimple_location (stmt
);
6224 gsi
= gsi_for_stmt (stmt
);
6225 tree rhs1
= gimple_assign_rhs1 (stmt
);
6226 tree type
= TREE_TYPE (rhs1
);
6227 tree n
= gimple_assign_rhs2 (stmt
), m
;
6228 tree p
= build_int_cst (TREE_TYPE (n
),
6229 TYPE_PRECISION (type
));
6230 if (TREE_CODE (n
) == INTEGER_CST
)
6231 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
6234 m
= make_ssa_name (TREE_TYPE (n
));
6235 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
6236 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6237 gimple_set_location (g
, loc
);
6239 if (!TYPE_UNSIGNED (type
))
6241 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
6243 if (TREE_CODE (rhs1
) == INTEGER_CST
)
6244 rhs1
= fold_convert (utype
, rhs1
);
6247 tree t
= make_ssa_name (type
);
6248 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
6249 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6250 gimple_set_location (g
, loc
);
6253 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6254 rhs_code
== LROTATE_EXPR
6255 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
6257 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6258 gimple_set_location (g
, loc
);
6259 tree op1
= gimple_assign_lhs (g
);
6260 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6261 rhs_code
== LROTATE_EXPR
6262 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
6264 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6265 gimple_set_location (g
, loc
);
6266 tree op2
= gimple_assign_lhs (g
);
6267 tree lhs
= gimple_assign_lhs (stmt
);
6268 if (!TYPE_UNSIGNED (type
))
6270 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
6271 BIT_IOR_EXPR
, op1
, op2
);
6272 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6273 gimple_set_location (g
, loc
);
6274 g
= gimple_build_assign (lhs
, NOP_EXPR
,
6275 gimple_assign_lhs (g
));
6278 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
6279 gsi_replace (&gsi
, g
, true);
6280 gimple_set_location (g
, loc
);
6288 first_large_huge
= 0;
6289 gsi
= gsi_for_stmt (stmt
);
6290 tree lhs
= gimple_assign_lhs (stmt
);
6291 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
6292 location_t loc
= gimple_location (stmt
);
6293 if (rhs_code
== ABS_EXPR
)
6294 g
= gimple_build_cond (LT_EXPR
, rhs1
,
6295 build_zero_cst (TREE_TYPE (rhs1
)),
6296 NULL_TREE
, NULL_TREE
);
6297 else if (rhs_code
== ABSU_EXPR
)
6299 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
6300 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
6301 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6302 gimple_set_location (g
, loc
);
6303 g
= gimple_build_cond (LT_EXPR
, rhs1
,
6304 build_zero_cst (TREE_TYPE (rhs1
)),
6305 NULL_TREE
, NULL_TREE
);
6308 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
6310 rhs2
= gimple_assign_rhs2 (stmt
);
6311 if (TREE_CODE (rhs1
) == INTEGER_CST
)
6312 std::swap (rhs1
, rhs2
);
6313 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
6314 NULL_TREE
, NULL_TREE
);
6315 if (rhs_code
== MAX_EXPR
)
6316 std::swap (rhs1
, rhs2
);
6320 g
= gimple_build_cond (NE_EXPR
, rhs1
,
6321 build_zero_cst (TREE_TYPE (rhs1
)),
6322 NULL_TREE
, NULL_TREE
);
6323 rhs1
= gimple_assign_rhs2 (stmt
);
6324 rhs2
= gimple_assign_rhs3 (stmt
);
6326 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6327 gimple_set_location (g
, loc
);
6328 edge e1
= split_block (gsi_bb (gsi
), g
);
6329 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
6330 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
6331 e3
->probability
= profile_probability::even ();
6332 e1
->flags
= EDGE_TRUE_VALUE
;
6333 e1
->probability
= e3
->probability
.invert ();
6334 if (dom_info_available_p (CDI_DOMINATORS
))
6335 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
6336 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
6338 gsi
= gsi_after_labels (e1
->dest
);
6339 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
6341 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
6342 gimple_set_location (g
, loc
);
6343 rhs2
= gimple_assign_lhs (g
);
6344 std::swap (rhs1
, rhs2
);
6346 gsi
= gsi_for_stmt (stmt
);
6347 gsi_remove (&gsi
, true);
6348 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
6349 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
6350 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
6354 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6355 into memory. Such functions could have no large/huge SSA_NAMEs. */
6356 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6358 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6359 if (is_gimple_assign (g
) && gimple_store_p (g
))
6361 tree t
= gimple_assign_rhs1 (g
);
6362 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6363 && (bitint_precision_kind (TREE_TYPE (t
))
6364 >= bitint_prec_large
))
6365 has_large_huge
= true;
6368 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6369 to floating point types need to be rewritten. */
6370 else if (SCALAR_FLOAT_TYPE_P (type
))
6372 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6373 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
6375 tree t
= gimple_assign_rhs1 (g
);
6376 if (TREE_CODE (t
) == INTEGER_CST
6377 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6378 && (bitint_precision_kind (TREE_TYPE (t
))
6379 >= bitint_prec_large
))
6380 has_large_huge
= true;
6384 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
6386 tree s
= ssa_name (i
);
6389 tree type
= TREE_TYPE (s
);
6390 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6391 type
= TREE_TYPE (type
);
6392 if (TREE_CODE (type
) == BITINT_TYPE
6393 && bitint_precision_kind (type
) >= bitint_prec_large
)
6395 use_operand_p use_p
;
6397 has_large_huge
= true;
6399 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
6401 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6402 the same bb and could be handled in the same loop with the
6405 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6406 && single_imm_use (s
, &use_p
, &use_stmt
)
6407 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
6409 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
6411 if (mergeable_op (use_stmt
))
6413 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
6414 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
6416 if (gimple_assign_cast_p (use_stmt
))
6418 tree lhs
= gimple_assign_lhs (use_stmt
);
6419 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6420 /* Don't merge with VIEW_CONVERT_EXPRs to
6421 huge INTEGER_TYPEs used sometimes in memcpy
6423 && (TREE_CODE (TREE_TYPE (lhs
)) != INTEGER_TYPE
6424 || (TYPE_PRECISION (TREE_TYPE (lhs
))
6425 <= MAX_FIXED_MODE_SIZE
)))
6428 else if (gimple_store_p (use_stmt
)
6429 && is_gimple_assign (use_stmt
)
6430 && !gimple_has_volatile_ops (use_stmt
)
6431 && !stmt_ends_bb_p (use_stmt
))
6434 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
6436 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6437 if (TREE_CODE (rhs1
) == VIEW_CONVERT_EXPR
)
6439 rhs1
= TREE_OPERAND (rhs1
, 0);
6440 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6441 && !POINTER_TYPE_P (TREE_TYPE (rhs1
))
6442 && gimple_store_p (use_stmt
))
6445 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6446 && ((is_gimple_assign (use_stmt
)
6447 && (gimple_assign_rhs_code (use_stmt
)
6449 || gimple_code (use_stmt
) == GIMPLE_COND
)
6450 && (!gimple_store_p (use_stmt
)
6451 || (is_gimple_assign (use_stmt
)
6452 && !gimple_has_volatile_ops (use_stmt
)
6453 && !stmt_ends_bb_p (use_stmt
)))
6454 && (TREE_CODE (rhs1
) != SSA_NAME
6455 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
6457 if (is_gimple_assign (use_stmt
))
6458 switch (gimple_assign_rhs_code (use_stmt
))
6460 case TRUNC_DIV_EXPR
:
6461 case TRUNC_MOD_EXPR
:
6463 /* For division, modulo and casts to floating
6464 point, avoid representing unsigned operands
6465 using negative prec if they were sign-extended
6466 from narrower precision. */
6467 if (TYPE_UNSIGNED (TREE_TYPE (s
))
6468 && !TYPE_UNSIGNED (TREE_TYPE (rhs1
))
6469 && (TYPE_PRECISION (TREE_TYPE (s
))
6470 > TYPE_PRECISION (TREE_TYPE (rhs1
))))
6474 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6475 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6476 < bitint_prec_large
))
6478 /* Uses which use handle_operand_addr can't
6479 deal with nested casts. */
6480 if (TREE_CODE (rhs1
) == SSA_NAME
6481 && gimple_assign_cast_p
6482 (SSA_NAME_DEF_STMT (rhs1
))
6483 && has_single_use (rhs1
)
6484 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6485 == gimple_bb (SSA_NAME_DEF_STMT (s
))))
6488 case VIEW_CONVERT_EXPR
:
6490 tree lhs
= gimple_assign_lhs (use_stmt
);
6491 /* Don't merge with VIEW_CONVERT_EXPRs to
6492 non-integral types. */
6493 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
6495 /* Don't merge with VIEW_CONVERT_EXPRs to
6496 huge INTEGER_TYPEs used sometimes in memcpy
6498 if (TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
6499 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6500 > MAX_FIXED_MODE_SIZE
))
6507 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6508 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6509 < bitint_prec_large
))
6511 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6512 >= TYPE_PRECISION (TREE_TYPE (s
)))
6513 && mergeable_op (use_stmt
))
6515 /* Prevent merging a widening non-mergeable cast
6516 on result of some narrower mergeable op
6517 together with later mergeable operations. E.g.
6518 result of _BitInt(223) addition shouldn't be
6519 sign-extended to _BitInt(513) and have another
6520 _BitInt(513) added to it, as handle_plus_minus
6521 with its PHI node handling inside of handle_cast
6522 will not work correctly. An exception is if
6523 use_stmt is a store, this is handled directly
6524 in lower_mergeable_stmt. */
6525 if (TREE_CODE (rhs1
) != SSA_NAME
6526 || !has_single_use (rhs1
)
6527 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6528 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6529 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
6530 || gimple_store_p (use_stmt
))
6532 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6533 < TYPE_PRECISION (TREE_TYPE (s
)))
6534 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
6536 /* Another exception is if the widening cast is
6537 from mergeable same precision cast from something
6540 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
6541 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
6542 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
6543 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
6545 if (TREE_CODE (rhs2
) != SSA_NAME
6546 || !has_single_use (rhs2
)
6547 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
6548 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6549 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
6555 if (is_gimple_assign (SSA_NAME_DEF_STMT (s
)))
6556 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s
)))
6560 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6561 rhs1
= TREE_OPERAND (rhs1
, 0);
6562 if (TREE_CODE (rhs1
) == SSA_NAME
)
6564 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
6565 if (optimizable_arith_overflow (g
))
6573 case TRUNC_DIV_EXPR
:
6574 case TRUNC_MOD_EXPR
:
6575 case FIX_TRUNC_EXPR
:
6577 if (gimple_store_p (use_stmt
)
6578 && is_gimple_assign (use_stmt
)
6579 && !gimple_has_volatile_ops (use_stmt
)
6580 && !stmt_ends_bb_p (use_stmt
))
6582 tree lhs
= gimple_assign_lhs (use_stmt
);
6583 /* As multiply/division passes address of the lhs
6584 to library function and that assumes it can extend
6585 it to whole number of limbs, avoid merging those
6586 with bit-field stores. Don't allow it for
6587 shifts etc. either, so that the bit-field store
6588 handling doesn't have to be done everywhere. */
6589 if (TREE_CODE (lhs
) == COMPONENT_REF
6590 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6600 /* Also ignore uninitialized uses. */
6601 if (SSA_NAME_IS_DEFAULT_DEF (s
)
6602 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
6606 if (!large_huge
.m_names
)
6607 large_huge
.m_names
= BITMAP_ALLOC (NULL
);
6608 bitmap_set_bit (large_huge
.m_names
, SSA_NAME_VERSION (s
));
6609 if (has_single_use (s
))
6611 if (!large_huge
.m_single_use_names
)
6612 large_huge
.m_single_use_names
= BITMAP_ALLOC (NULL
);
6613 bitmap_set_bit (large_huge
.m_single_use_names
,
6614 SSA_NAME_VERSION (s
));
6616 if (SSA_NAME_VAR (s
)
6617 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6618 && SSA_NAME_IS_DEFAULT_DEF (s
))
6619 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6620 has_large_huge_parm_result
= true;
6622 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6623 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s
))
6624 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s
))
6625 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6627 use_operand_p use_p
;
6628 imm_use_iterator iter
;
6629 bool optimizable_load
= true;
6630 FOR_EACH_IMM_USE_FAST (use_p
, iter
, s
)
6632 gimple
*use_stmt
= USE_STMT (use_p
);
6633 if (is_gimple_debug (use_stmt
))
6635 if (gimple_code (use_stmt
) == GIMPLE_PHI
6636 || is_gimple_call (use_stmt
)
6637 || gimple_code (use_stmt
) == GIMPLE_ASM
)
6639 optimizable_load
= false;
6645 FOR_EACH_SSA_USE_OPERAND (use_p
, SSA_NAME_DEF_STMT (s
),
6648 tree s2
= USE_FROM_PTR (use_p
);
6649 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2
))
6651 optimizable_load
= false;
6656 if (optimizable_load
&& !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6658 if (!large_huge
.m_loads
)
6659 large_huge
.m_loads
= BITMAP_ALLOC (NULL
);
6660 bitmap_set_bit (large_huge
.m_loads
, SSA_NAME_VERSION (s
));
6664 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6665 into memory. Such functions could have no large/huge SSA_NAMEs. */
6666 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6668 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6669 if (is_gimple_assign (g
) && gimple_store_p (g
))
6671 tree t
= gimple_assign_rhs1 (g
);
6672 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6673 && bitint_precision_kind (TREE_TYPE (t
)) >= bitint_prec_large
)
6674 has_large_huge
= true;
6679 if (large_huge
.m_names
|| has_large_huge
)
6681 ret
= TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
6682 calculate_dominance_info (CDI_DOMINATORS
);
6684 enable_ranger (cfun
);
6685 if (large_huge
.m_loads
)
6687 basic_block entry
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
6689 bitint_dom_walker (large_huge
.m_names
,
6690 large_huge
.m_loads
).walk (entry
);
6691 bitmap_and_compl_into (large_huge
.m_names
, large_huge
.m_loads
);
6692 clear_aux_for_blocks ();
6693 BITMAP_FREE (large_huge
.m_loads
);
6695 large_huge
.m_limb_type
= build_nonstandard_integer_type (limb_prec
, 1);
6696 large_huge
.m_limb_size
6697 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge
.m_limb_type
));
6699 if (large_huge
.m_names
)
6702 = init_var_map (num_ssa_names
, NULL
, large_huge
.m_names
);
6703 coalesce_ssa_name (large_huge
.m_map
);
6704 partition_view_normal (large_huge
.m_map
);
6705 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6707 fprintf (dump_file
, "After Coalescing:\n");
6708 dump_var_map (dump_file
, large_huge
.m_map
);
6711 = XCNEWVEC (tree
, num_var_partitions (large_huge
.m_map
));
6713 if (has_large_huge_parm_result
)
6714 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6716 tree s
= ssa_name (i
);
6717 if (SSA_NAME_VAR (s
)
6718 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6719 && SSA_NAME_IS_DEFAULT_DEF (s
))
6720 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6722 int p
= var_to_partition (large_huge
.m_map
, s
);
6723 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6725 large_huge
.m_vars
[p
] = SSA_NAME_VAR (s
);
6726 mark_addressable (SSA_NAME_VAR (s
));
6730 tree atype
= NULL_TREE
;
6731 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6732 fprintf (dump_file
, "Mapping SSA_NAMEs to decls:\n");
6733 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6735 tree s
= ssa_name (i
);
6736 int p
= var_to_partition (large_huge
.m_map
, s
);
6737 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6739 if (atype
== NULL_TREE
6740 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6741 TYPE_SIZE (TREE_TYPE (s
))))
6743 unsigned HOST_WIDE_INT nelts
6744 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s
))) / limb_prec
;
6745 atype
= build_array_type_nelts (large_huge
.m_limb_type
,
6748 large_huge
.m_vars
[p
] = create_tmp_var (atype
, "bitint");
6749 mark_addressable (large_huge
.m_vars
[p
]);
6751 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6753 print_generic_expr (dump_file
, s
, TDF_SLIM
);
6754 fprintf (dump_file
, " -> ");
6755 print_generic_expr (dump_file
, large_huge
.m_vars
[p
], TDF_SLIM
);
6756 fprintf (dump_file
, "\n");
6761 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
6763 gimple_stmt_iterator prev
;
6764 for (gimple_stmt_iterator gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);
6770 gimple
*stmt
= gsi_stmt (gsi
);
6771 if (is_gimple_debug (stmt
))
6773 bitint_prec_kind kind
= bitint_prec_small
;
6775 FOR_EACH_SSA_TREE_OPERAND (t
, stmt
, iter
, SSA_OP_ALL_OPERANDS
)
6776 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6778 bitint_prec_kind this_kind
6779 = bitint_precision_kind (TREE_TYPE (t
));
6780 kind
= MAX (kind
, this_kind
);
6782 if (is_gimple_assign (stmt
) && gimple_store_p (stmt
))
6784 t
= gimple_assign_rhs1 (stmt
);
6785 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6787 bitint_prec_kind this_kind
6788 = bitint_precision_kind (TREE_TYPE (t
));
6789 kind
= MAX (kind
, this_kind
);
6792 if (is_gimple_assign (stmt
)
6793 && gimple_assign_rhs_code (stmt
) == FLOAT_EXPR
)
6795 t
= gimple_assign_rhs1 (stmt
);
6796 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6797 && TREE_CODE (t
) == INTEGER_CST
)
6799 bitint_prec_kind this_kind
6800 = bitint_precision_kind (TREE_TYPE (t
));
6801 kind
= MAX (kind
, this_kind
);
6804 if (is_gimple_call (stmt
))
6806 t
= gimple_call_lhs (stmt
);
6807 if (t
&& TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
)
6809 bitint_prec_kind this_kind
= arith_overflow_arg_kind (stmt
);
6810 kind
= MAX (kind
, this_kind
);
6811 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == BITINT_TYPE
)
6814 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t
)));
6815 kind
= MAX (kind
, this_kind
);
6819 if (kind
== bitint_prec_small
)
6821 switch (gimple_code (stmt
))
6824 /* For now. We'll need to handle some internal functions and
6825 perhaps some builtins. */
6826 if (kind
== bitint_prec_middle
)
6830 if (kind
== bitint_prec_middle
)
6836 if (gimple_clobber_p (stmt
))
6838 if (kind
>= bitint_prec_large
)
6840 if (gimple_assign_single_p (stmt
))
6841 /* No need to lower copies, loads or stores. */
6843 if (gimple_assign_cast_p (stmt
))
6845 tree lhs
= gimple_assign_lhs (stmt
);
6846 tree rhs
= gimple_assign_rhs1 (stmt
);
6847 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6848 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6849 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6850 == TYPE_PRECISION (TREE_TYPE (rhs
))))
6851 /* No need to lower casts to same precision. */
6859 if (kind
== bitint_prec_middle
)
6861 tree type
= NULL_TREE
;
6862 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6863 with the same precision and back. */
6864 unsigned int nops
= gimple_num_ops (stmt
);
6865 for (unsigned int i
= is_gimple_assign (stmt
) ? 1 : 0;
6867 if (tree op
= gimple_op (stmt
, i
))
6869 tree nop
= maybe_cast_middle_bitint (&gsi
, op
, type
);
6871 gimple_set_op (stmt
, i
, nop
);
6872 else if (COMPARISON_CLASS_P (op
))
6874 TREE_OPERAND (op
, 0)
6875 = maybe_cast_middle_bitint (&gsi
,
6876 TREE_OPERAND (op
, 0),
6878 TREE_OPERAND (op
, 1)
6879 = maybe_cast_middle_bitint (&gsi
,
6880 TREE_OPERAND (op
, 1),
6883 else if (TREE_CODE (op
) == CASE_LABEL_EXPR
)
6886 = maybe_cast_middle_bitint (&gsi
, CASE_LOW (op
),
6889 = maybe_cast_middle_bitint (&gsi
, CASE_HIGH (op
),
6893 if (tree lhs
= gimple_get_lhs (stmt
))
6894 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
6895 && (bitint_precision_kind (TREE_TYPE (lhs
))
6896 == bitint_prec_middle
))
6898 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
6899 int uns
= TYPE_UNSIGNED (TREE_TYPE (lhs
));
6900 type
= build_nonstandard_integer_type (prec
, uns
);
6901 tree lhs2
= make_ssa_name (type
);
6902 gimple_set_lhs (stmt
, lhs2
);
6903 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, lhs2
);
6904 if (stmt_ends_bb_p (stmt
))
6906 edge e
= find_fallthru_edge (gsi_bb (gsi
)->succs
);
6907 gsi_insert_on_edge_immediate (e
, g
);
6910 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
6916 if (tree lhs
= gimple_get_lhs (stmt
))
6917 if (TREE_CODE (lhs
) == SSA_NAME
)
6919 tree type
= TREE_TYPE (lhs
);
6920 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6921 type
= TREE_TYPE (type
);
6922 if (TREE_CODE (type
) == BITINT_TYPE
6923 && bitint_precision_kind (type
) >= bitint_prec_large
6924 && (large_huge
.m_names
== NULL
6925 || !bitmap_bit_p (large_huge
.m_names
,
6926 SSA_NAME_VERSION (lhs
))))
6930 large_huge
.lower_stmt (stmt
);
6933 tree atype
= NULL_TREE
;
6934 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6937 gphi
*phi
= gsi
.phi ();
6938 tree lhs
= gimple_phi_result (phi
);
6939 if (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
6940 || bitint_precision_kind (TREE_TYPE (lhs
)) < bitint_prec_large
)
6942 int p1
= var_to_partition (large_huge
.m_map
, lhs
);
6943 gcc_assert (large_huge
.m_vars
[p1
] != NULL_TREE
);
6944 tree v1
= large_huge
.m_vars
[p1
];
6945 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
6947 tree arg
= gimple_phi_arg_def (phi
, i
);
6948 edge e
= gimple_phi_arg_edge (phi
, i
);
6950 switch (TREE_CODE (arg
))
6953 if (integer_zerop (arg
) && VAR_P (v1
))
6955 tree zero
= build_zero_cst (TREE_TYPE (v1
));
6956 g
= gimple_build_assign (v1
, zero
);
6957 gsi_insert_on_edge (e
, g
);
6958 edge_insertions
= true;
6962 unsigned int min_prec
, prec
, rem
;
6964 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
6965 rem
= prec
% (2 * limb_prec
);
6966 min_prec
= bitint_min_cst_precision (arg
, ext
);
6967 if (min_prec
> prec
- rem
- 2 * limb_prec
6968 && min_prec
> (unsigned) limb_prec
)
6969 /* Constant which has enough significant bits that it
6970 isn't worth trying to save .rodata space by extending
6971 from smaller number. */
6974 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
6977 else if (min_prec
== prec
)
6978 c
= tree_output_constant_def (arg
);
6979 else if (min_prec
== (unsigned) limb_prec
)
6980 c
= fold_convert (large_huge
.m_limb_type
, arg
);
6983 tree ctype
= build_bitint_type (min_prec
, 1);
6984 c
= tree_output_constant_def (fold_convert (ctype
, arg
));
6988 if (VAR_P (v1
) && min_prec
== prec
)
6990 tree v2
= build1 (VIEW_CONVERT_EXPR
,
6992 g
= gimple_build_assign (v1
, v2
);
6993 gsi_insert_on_edge (e
, g
);
6994 edge_insertions
= true;
6997 if (TREE_CODE (TREE_TYPE (c
)) == INTEGER_TYPE
)
6998 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
7003 unsigned HOST_WIDE_INT nelts
7004 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c
)))
7007 = build_array_type_nelts (large_huge
.m_limb_type
,
7009 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
7011 build1 (VIEW_CONVERT_EXPR
,
7014 gsi_insert_on_edge (e
, g
);
7018 unsigned HOST_WIDE_INT nelts
7019 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1
)))
7020 - min_prec
) / limb_prec
;
7022 = build_array_type_nelts (large_huge
.m_limb_type
,
7024 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
7027 off
= fold_convert (ptype
,
7028 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
7030 off
= build_zero_cst (ptype
);
7031 tree vd
= build2 (MEM_REF
, vtype
,
7032 build_fold_addr_expr (v1
), off
);
7033 g
= gimple_build_assign (vd
, build_zero_cst (vtype
));
7040 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
7042 = fold_convert (ptype
,
7043 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
7044 vd
= build2 (MEM_REF
, large_huge
.m_limb_type
,
7045 build_fold_addr_expr (v1
), off
);
7047 vd
= build_fold_addr_expr (vd
);
7048 unsigned HOST_WIDE_INT nbytes
7049 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1
)));
7052 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c
)));
7053 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
7054 g
= gimple_build_call (fn
, 3, vd
,
7055 integer_minus_one_node
,
7056 build_int_cst (sizetype
,
7059 gsi_insert_on_edge (e
, g
);
7060 edge_insertions
= true;
7065 if (gimple_code (SSA_NAME_DEF_STMT (arg
)) == GIMPLE_NOP
)
7067 if (large_huge
.m_names
== NULL
7068 || !bitmap_bit_p (large_huge
.m_names
,
7069 SSA_NAME_VERSION (arg
)))
7072 int p2
= var_to_partition (large_huge
.m_map
, arg
);
7075 gcc_assert (large_huge
.m_vars
[p2
] != NULL_TREE
);
7076 tree v2
= large_huge
.m_vars
[p2
];
7077 if (VAR_P (v1
) && VAR_P (v2
))
7078 g
= gimple_build_assign (v1
, v2
);
7079 else if (VAR_P (v1
))
7080 g
= gimple_build_assign (v1
, build1 (VIEW_CONVERT_EXPR
,
7081 TREE_TYPE (v1
), v2
));
7082 else if (VAR_P (v2
))
7083 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
7084 TREE_TYPE (v2
), v1
), v2
);
7087 if (atype
== NULL_TREE
7088 || !tree_int_cst_equal (TYPE_SIZE (atype
),
7089 TYPE_SIZE (TREE_TYPE (lhs
))))
7091 unsigned HOST_WIDE_INT nelts
7092 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))
7095 = build_array_type_nelts (large_huge
.m_limb_type
,
7098 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
7100 build1 (VIEW_CONVERT_EXPR
,
7103 gsi_insert_on_edge (e
, g
);
7104 edge_insertions
= true;
7111 if (large_huge
.m_names
|| has_large_huge
)
7114 for (i
= 0; i
< num_ssa_names
; ++i
)
7116 tree s
= ssa_name (i
);
7119 tree type
= TREE_TYPE (s
);
7120 if (TREE_CODE (type
) == COMPLEX_TYPE
)
7121 type
= TREE_TYPE (type
);
7122 if (TREE_CODE (type
) == BITINT_TYPE
7123 && bitint_precision_kind (type
) >= bitint_prec_large
)
7125 if (large_huge
.m_preserved
7126 && bitmap_bit_p (large_huge
.m_preserved
,
7127 SSA_NAME_VERSION (s
)))
7129 gimple
*g
= SSA_NAME_DEF_STMT (s
);
7130 if (gimple_code (g
) == GIMPLE_NOP
)
7132 if (SSA_NAME_VAR (s
))
7133 set_ssa_default_def (cfun
, SSA_NAME_VAR (s
), NULL_TREE
);
7134 release_ssa_name (s
);
7137 if (gimple_bb (g
) == NULL
)
7139 release_ssa_name (s
);
7142 if (gimple_code (g
) != GIMPLE_ASM
)
7144 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7145 bool save_vta
= flag_var_tracking_assignments
;
7146 flag_var_tracking_assignments
= false;
7147 gsi_remove (&gsi
, true);
7148 flag_var_tracking_assignments
= save_vta
;
7151 nop
= gimple_build_nop ();
7152 SSA_NAME_DEF_STMT (s
) = nop
;
7153 release_ssa_name (s
);
7157 disable_ranger (cfun
);
7160 if (edge_insertions
)
7161 gsi_commit_edge_inserts ();
7163 /* Fix up arguments of ECF_RETURNS_TWICE calls. Those were temporarily
7164 inserted before the call, but that is invalid IL, so move them to the
7165 right place and add corresponding PHIs. */
7166 if (!large_huge
.m_returns_twice_calls
.is_empty ())
7168 auto_vec
<gimple
*, 16> arg_stmts
;
7169 while (!large_huge
.m_returns_twice_calls
.is_empty ())
7171 gimple
*stmt
= large_huge
.m_returns_twice_calls
.pop ();
7172 gimple_stmt_iterator gsi
= gsi_after_labels (gimple_bb (stmt
));
7173 while (gsi_stmt (gsi
) != stmt
)
7175 arg_stmts
.safe_push (gsi_stmt (gsi
));
7176 gsi_remove (&gsi
, false);
7179 basic_block bb
= NULL
;
7180 edge e
= NULL
, ead
= NULL
;
7181 FOR_EACH_VEC_ELT (arg_stmts
, i
, g
)
7183 gsi_safe_insert_before (&gsi
, g
);
7186 bb
= gimple_bb (stmt
);
7187 gcc_checking_assert (EDGE_COUNT (bb
->preds
) == 2);
7188 e
= EDGE_PRED (bb
, 0);
7189 ead
= EDGE_PRED (bb
, 1);
7190 if ((ead
->flags
& EDGE_ABNORMAL
) == 0)
7192 gcc_checking_assert ((e
->flags
& EDGE_ABNORMAL
) == 0
7193 && (ead
->flags
& EDGE_ABNORMAL
));
7195 tree lhs
= gimple_assign_lhs (g
);
7197 gphi
*phi
= create_phi_node (copy_ssa_name (arg
), bb
);
7198 add_phi_arg (phi
, arg
, e
, UNKNOWN_LOCATION
);
7199 tree var
= create_tmp_reg (TREE_TYPE (arg
));
7200 suppress_warning (var
, OPT_Wuninitialized
);
7201 arg
= get_or_create_ssa_default_def (cfun
, var
);
7202 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg
) = 1;
7203 add_phi_arg (phi
, arg
, ead
, UNKNOWN_LOCATION
);
7204 arg
= gimple_phi_result (phi
);
7205 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg
) = 1;
7206 imm_use_iterator iter
;
7208 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
7210 if (use_stmt
== phi
)
7212 gcc_checking_assert (use_stmt
== stmt
);
7213 use_operand_p use_p
;
7214 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
7215 SET_USE (use_p
, arg
);
7219 arg_stmts
.truncate (0);
/* Pass descriptor for the main _BitInt lowering pass, run as part of
   the normal optimization pipeline.  Requires SSA form and provides
   PROP_gimple_lbitint so RTL expansion knows lowering has happened.  */

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Gimple pass that lowers large/huge _BitInt operations to operations
   on individual limbs; the actual work is done in gimple_lower_bitint.  */

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }
}; // class pass_lower_bitint
/* Entry point for the pass manager: construct a fresh instance of the
   bitint lowering pass.  */

gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}
/* Pass descriptor for the -O0 variant of _BitInt lowering.  Unlike the
   optimizing pass it only requires a CFG (not SSA), since at -O0 the
   normal SSA-based pipeline may not have run.  */

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* -O0 fallback variant of the _BitInt lowering pass.  It gates on the
   function not already having PROP_gimple_lbitint, so lowering is
   guaranteed to run exactly once before RTL expansion.  */

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }
}; // class pass_lower_bitint_O0
/* Entry point for the pass manager: construct a fresh instance of the
   -O0 bitint lowering pass.  */

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}