gcc/gimple-lower-bitint.cc
/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */
enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
        huge_min_prec = 4 * limb_prec;
      else
        huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
        mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
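
/* Illustrative example, not part of the original sources: on a target
   with 64-bit limbs and MAX_FIXED_MODE_SIZE of 128 (e.g. x86_64 with
   TImode), the boundaries computed above work out as
     small:  prec <= 64
     middle: 65 .. 128
     large:  129 .. 255
     huge:   prec >= 256
   because 4 * limb_prec (256) >= MAX_FIXED_MODE_SIZE, huge_min_prec
   ends up as 4 * limb_prec.  */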

/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
         in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
        {
          ext = -1;
          return min_prec2;
        }
    }
  return min_prec;
}
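
/* Illustrative example, not part of the original sources: for an
   unsigned _BitInt(256) constant with value 2^256 - 2, the unsigned
   minimum precision is 256, but viewed as the signed value -2 the
   minimum precision is 2, so the function returns 1 (one bit below
   the sign bit) with EXT set to -1, i.e. all the upper bits are
   copies of a set bit.  */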

namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
        return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}

/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
        tree cnt = gimple_assign_rhs2 (stmt);
        if (tree_fits_uhwi_p (cnt)
            && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
          return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
        tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
        tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
            && TREE_CODE (lhs_type) == BITINT_TYPE
            && TREE_CODE (rhs_type) == BITINT_TYPE
            && bitint_precision_kind (lhs_type) >= bitint_prec_large
            && bitint_precision_kind (rhs_type) >= bitint_prec_large
            && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
                == CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
          {
            if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
              return true;
            if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
              return true;
            if (bitint_precision_kind (lhs_type) == bitint_prec_large)
              return true;
          }
        break;
      }
    default:
      break;
    }
  return false;
}
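
/* Illustrative example, not part of the original sources: in
     unsigned _BitInt(512) d = (a + b) - c;
   the addition, the subtraction and the operand accesses are all
   mergeable, so the whole chain can be lowered into one loop over the
   limbs.  Left shifts by constants smaller than limb_prec and casts
   between types with the same number of limbs stay mergeable as well,
   because each result limb depends only on the current and the
   previous source limb.  */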

/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
          || gimple_bb (use_stmt) != gimple_bb (stmt)
          || !gimple_store_p (use_stmt)
          || !is_gimple_assign (use_stmt)
          || gimple_has_volatile_ops (use_stmt)
          || stmt_ends_bb_p (use_stmt))
        return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
        continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
        return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
        {
          if ((seen & 1) != 0)
            return 0;
          seen |= 1;
          realpart = g;
        }
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
        {
          if ((seen & 2) != 0)
            return 0;
          seen |= 2;

          use_operand_p use2_p;
          gimple *use_stmt;
          tree lhs2 = gimple_assign_lhs (g);
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
            return 0;
          if (!single_imm_use (lhs2, &use2_p, &use_stmt)
              || gimple_bb (use_stmt) != gimple_bb (stmt)
              || !gimple_assign_cast_p (use_stmt))
            return 0;

          lhs2 = gimple_assign_lhs (use_stmt);
          if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
              || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
            return 0;
          cast = use_stmt;
        }
      else
        return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
         if both appear, the lowering wants to emit all the code
         at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
            return 0;
          if (gsi_stmt (gsi) == stmt)
            return 2;
          /* If realpart is too far from stmt, punt as well.
             Usually it will appear right after it.  */
          if (++cnt == 32)
            return 0;
        }
      while (1);
    }
  return 1;
}
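
/* Illustrative example, not part of the original sources: the shape
   this function looks for is the usual expansion of
     unsigned _BitInt(256) r;
     bool ovf = __builtin_add_overflow (a, b, &r);
   i.e. roughly
     _1 = .ADD_OVERFLOW (a_2(D), b_3(D));
     _4 = REALPART_EXPR <_1>;
     r = _4;
     _5 = IMAGPART_EXPR <_1>;
     ovf_6 = (_Bool) _5;
   where the REALPART_EXPR use is optional and the IMAGPART_EXPR
   result has a single use casting it to a non-_BitInt integral
   type.  */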

/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
          || TREE_CODE_CLASS (code) == tcc_binary)
        op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}

/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
                             profile_probability, profile_probability,
                             edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
                                    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
                              tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};

bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}

/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
                                           TYPE_UNSIGNED (type));
}
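
/* Illustrative example, not part of the original sources: with 64-bit
   limbs, limbs 0 to 4 of a _BitInt(383) are accessed as the full
   64-bit m_limb_type, while limb 5 covers only bits 320 to 382 and is
   accessed as a 63-bit nonstandard integer type.  */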

/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
                ? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
                    build_fold_addr_expr (var),
                    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
        = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
                  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
                              build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
                                             tree_to_uhwi (idx)
                                             * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
          || !useless_type_conversion_p (m_limb_type,
                                         TREE_TYPE (TREE_TYPE (var))))
        {
          unsigned HOST_WIDE_INT nelts
            = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
          tree atype = build_array_type_nelts (m_limb_type, nelts);
          var = build1 (VIEW_CONVERT_EXPR, atype, var);
        }
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
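
/* Illustrative example, not part of the original sources: for a
   variable VAR of type _BitInt(256) with 64-bit limbs and constant
   IDX 2, this builds roughly
     MEM <unsigned long> [(unsigned long *)&var + 16B]
   i.e. a MEM_REF at byte offset IDX * m_limb_size, while for
   non-constant IDX the access becomes an ARRAY_REF into VAR viewed
   as an array of 4 limbs.  */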

/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
                            edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}

/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
  new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
                                 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}

/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     |  /  \
     | /    \
     |new_bb1 new_bb2
     | |     /
     \ |    /
      \|   /
       \  /
        \/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
                                         profile_probability prob1,
                                         profile_probability prob2,
                                         edge &edge_true_true,
                                         edge &edge_true_false,
                                         edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}

/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
          || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
        {
          if (SSA_NAME_IS_DEFAULT_DEF (op))
            {
              if (m_first)
                {
                  tree v = create_tmp_reg (m_limb_type);
                  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
                    {
                      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
                      DECL_SOURCE_LOCATION (v)
                        = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
                    }
                  v = get_or_create_ssa_default_def (cfun, v);
                  m_data.safe_push (v);
                }
              tree ret = m_data[m_data_cnt];
              m_data_cnt++;
              if (tree_fits_uhwi_p (idx))
                {
                  tree type = limb_access_type (TREE_TYPE (op), idx);
                  ret = add_cast (type, ret);
                }
              return ret;
            }
          location_t loc_save = m_loc;
          m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
          tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
          m_loc = loc_save;
          return ret;
        }
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
          && m_single_use_names
          && m_vars[p] != m_lhs
          && m_after_stmt
          && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
        {
          tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
                                        CLOBBER_STORAGE_END);
          g = gimple_build_assign (m_vars[p], clobber);
          gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
          gsi_insert_after (&gsi, g, GSI_SAME_STMT);
        }
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
        {
          tree c, type = limb_access_type (TREE_TYPE (op), idx);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (limb_prec != HOST_BITS_PER_WIDE_INT)
            {
              wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
                                       TYPE_SIGN (TREE_TYPE (op)));
              c = wide_int_to_tree (type,
                                    wide_int::from (w, TYPE_PRECISION (type),
                                                    UNSIGNED));
            }
          else if (i >= TREE_INT_CST_EXT_NUNITS (op))
            c = build_int_cst (type,
                               tree_int_cst_sgn (op) < 0 ? -1 : 0);
          else
            c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
          m_data_cnt += 2;
          return c;
        }
      if (m_first
          || (m_data[m_data_cnt] == NULL_TREE
              && m_data[m_data_cnt + 1] == NULL_TREE))
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
          int ext;
          unsigned min_prec = bitint_min_cst_precision (op, ext);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (integer_zerop (op))
            {
              tree c = build_zero_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (integer_all_onesp (op))
            {
              tree c = build_all_ones_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
            {
              /* Single limb constant.  Use a phi with that limb from
                 the preheader edge and 0 or -1 constant from the other edge
                 and for the second limb in the loop.  */
              tree out;
              gcc_assert (m_first);
              m_data.pop ();
              m_data.pop ();
              prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
                                   build_int_cst (m_limb_type, ext));
            }
          else if (min_prec > prec - rem - 2 * limb_prec)
            {
              /* Constant which has enough significant bits that it isn't
                 worth trying to save .rodata space by extending from smaller
                 number.  */
              tree type;
              if (m_var_msb)
                type = TREE_TYPE (op);
              else
                /* If we have a guarantee the most significant partial limb
                   (if any) will be only accessed through handle_operand
                   with INTEGER_CST idx, we don't need to include the partial
                   limb in .rodata.  */
                type = build_bitint_type (prec - rem, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = NULL_TREE;
            }
          else if (m_upwards_2limb)
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.  */
              min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
              tree type = build_bitint_type (min_prec, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              tree idx2 = make_ssa_name (sizetype);
              g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
              insert_before (g);
              g = gimple_build_cond (LT_EXPR, idx,
                                     size_int (min_prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              edge edge_true, edge_false;
              if_then (g, (min_prec >= (prec - rem) / 2
                           ? profile_probability::likely ()
                           : profile_probability::unlikely ()),
                       edge_true, edge_false);
              tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
              insert_before (g);
              c1 = gimple_assign_lhs (g);
              tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
              insert_before (g);
              c2 = gimple_assign_lhs (g);
              tree c3 = build_int_cst (m_limb_type, ext);
              m_gsi = gsi_after_labels (edge_true->dest);
              m_data[m_data_cnt] = make_ssa_name (m_limb_type);
              m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
              gphi *phi = create_phi_node (m_data[m_data_cnt],
                                           edge_true->dest);
              add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
              phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
              add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
            }
          else
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.
                 Version for loops with random access to the limbs or
                 downwards loops.  */
              min_prec = CEIL (min_prec, limb_prec) * limb_prec;
              tree c;
              if (min_prec <= (unsigned) limb_prec)
                c = fold_convert (m_limb_type, op);
              else
                {
                  tree type = build_bitint_type (min_prec, 1);
                  c = tree_output_constant_def (fold_convert (type, op));
                }
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = integer_type_node;
            }
          t = m_data[m_data_cnt];
          if (m_data[m_data_cnt + 1] == NULL_TREE)
            {
              t = limb_access (TREE_TYPE (op), t, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
              insert_before (g);
              t = gimple_assign_lhs (g);
            }
        }
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
        {
          t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
          g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
          insert_before (g);
          t = gimple_assign_lhs (g);
        }
      else
        t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
          int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
          tree c = m_data[m_data_cnt];
          unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
          g = gimple_build_cond (LT_EXPR, idx,
                                 size_int (min_prec / limb_prec),
                                 NULL_TREE, NULL_TREE);
          edge edge_true, edge_false;
          if_then (g, (min_prec >= (prec - rem) / 2
                       ? profile_probability::likely ()
                       : profile_probability::unlikely ()),
                   edge_true, edge_false);
          if (min_prec > (unsigned) limb_prec)
            {
              c = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
              insert_before (g);
              c = gimple_assign_lhs (g);
            }
          tree c2 = build_int_cst (m_limb_type, ext);
          m_gsi = gsi_after_labels (edge_true->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true->dest);
          add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
          add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
        }
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
                                        tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
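
/* Illustrative sketch, not part of the original sources: when IDX is
   non-constant, i.e. inside the loop over the limbs, the PHI created
   above in the loop header M_BB has the form
     in_1 = PHI <val_2(preheader), out_3(latch)>
   and the caller defines out_3 in the loop body (e.g. the carry in
   handle_plus_minus), so the value flows from one iteration to the
   next.  */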

/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}

/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
                                      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
                     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        {
          if (!TYPE_UNSIGNED (rhs1_type))
            {
              tree type = unsigned_type_for (rhs1_type);
              rhs1 = add_cast (type, rhs1);
              rhs2 = add_cast (type, rhs2);
            }
          rhs1 = add_cast (m_limb_type, rhs1);
          rhs2 = add_cast (m_limb_type, rhs2);
        }
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_UADDC : IFN_USUBC,
                                      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
                                      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
        {
          rhs1 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs1, REALPART_EXPR,
                                   build1 (REALPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          rhs2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs2, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          lhs = make_ssa_name (ctype);
          g = gimple_build_call_internal (code == PLUS_EXPR
                                          ? IFN_ADD_OVERFLOW
                                          : IFN_SUB_OVERFLOW,
                                          2, rhs1, data_in);
          gimple_call_set_lhs (g, lhs);
          insert_before (g);
          data_in = make_ssa_name (m_limb_type);
          g = gimple_build_assign (data_in, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
          insert_before (g);
        }
      else
        {
          g = gimple_build_assign (data_out, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
        }
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
                           build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
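
/* Illustrative sketch, not part of the original sources: on targets
   providing the uaddc<mode>5 optab, one iteration of a _BitInt
   addition loop is emitted roughly as
     _1 = .UADDC (a_limb, b_limb, carry_in);
     sum = REALPART_EXPR <_1>;
     carry_out = IMAGPART_EXPR <_1>;
   with carry_out feeding the next iteration's carry_in through the
   PHI built by prepare_data_in_out; without the optab the same effect
   is achieved by chaining two .ADD_OVERFLOW calls.  */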

/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
                               build_int_cst (unsigned_type_node,
                                              limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
        {
          rhs1 = lhs;
          lhs = make_ssa_name (rhs1_type);
          g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
          insert_before (g);
        }
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
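
/* Illustrative example, not part of the original sources: for a shift
   count of 3 with 64-bit limbs, each iteration computes roughly
     res_limb = (cur_limb << 3) | (prev_limb >> 61);
   where prev_limb is the previous iteration's source limb carried
   through the data_in/data_out pair.  */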

/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
          /* If lhs has bigger precision than rhs, we can use
             the simple case only if there is a guarantee that
             the most significant limb is handled in straight
             line code.  If m_var_msb (on left shifts) or
             if m_upwards_2limb * limb_prec is equal to
             lhs precision that is not the case.  */
          || (!m_var_msb
              && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
                  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
              && (!m_upwards_2limb
                  || (m_upwards_2limb * limb_prec
                      < TYPE_PRECISION (lhs_type)))))
        {
          rhs1 = handle_operand (rhs1, idx);
          if (tree_fits_uhwi_p (idx))
            {
              tree type = limb_access_type (lhs_type, idx);
              if (!types_compatible_p (type, TREE_TYPE (rhs1)))
                rhs1 = add_cast (type, rhs1);
            }
          return rhs1;
        }
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
        {
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          if (TYPE_UNSIGNED (rhs_type))
            /* No need to keep state between iterations.  */
            ;
          else if (m_upwards && !m_upwards_2limb)
            /* We need to keep state between iterations, but
               not within any loop, everything is straight line
               code with only increasing indexes.  */
            ;
          else if (!m_upwards_2limb)
            {
              unsigned save_data_cnt = m_data_cnt;
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              m_data_cnt = save_data_cnt + 3;
              t = handle_operand (rhs1, size_int (low));
              m_first = false;
              m_data[save_data_cnt + 2]
                = build_int_cst (NULL_TREE, m_data_cnt);
              m_data_cnt = save_data_cnt;
              t = add_cast (signed_type_for (m_limb_type), t);
              tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
              tree n = make_ssa_name (TREE_TYPE (t));
              g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
              insert_before (g);
              m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
              m_init_gsi = m_gsi;
              if (gsi_end_p (m_init_gsi))
                m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
              else
                gsi_prev (&m_init_gsi);
              m_gsi = save_gsi;
            }
          else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
            /* We need to keep state between iterations, but
               fortunately not within the loop, only afterwards.  */
            ;
          else
            {
              tree out;
              m_data.truncate (m_data_cnt);
              prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
              m_data.safe_push (NULL_TREE);
            }
        }

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
        {
          if (m_upwards_2limb
              && (m_upwards_2limb * limb_prec
                  <= ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type))))
            {
              rhs1 = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              m_first = save_first;
              return rhs1;
            }
          bool single_comparison
            = low == high || (m_upwards_2limb && (low & 1) == m_first);
          g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
                                 idx, size_int (low), NULL_TREE, NULL_TREE);
          edge edge_true_true, edge_true_false, edge_false;
          if_then_if_then_else (g, (single_comparison ? NULL
                                    : gimple_build_cond (EQ_EXPR, idx,
                                                         size_int (low),
                                                         NULL_TREE,
                                                         NULL_TREE)),
                                profile_probability::likely (),
                                profile_probability::unlikely (),
                                edge_true_true, edge_true_false, edge_false);
          bool save_cast_conditional = m_cast_conditional;
          m_cast_conditional = true;
          m_bitfld_load = 0;
          tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
          if (m_first)
            m_data[save_data_cnt + 2]
              = build_int_cst (NULL_TREE, m_data_cnt);
          tree ext = NULL_TREE;
          tree bitfld = NULL_TREE;
          if (!single_comparison)
            {
              m_gsi = gsi_after_labels (edge_true_true->src);
              m_first = false;
              m_data_cnt = save_data_cnt + 3;
              if (m_bitfld_load)
                {
                  bitfld = m_data[m_bitfld_load];
                  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
                  m_bitfld_load = 0;
                }
              t2 = handle_operand (rhs1, size_int (low));
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
                t2 = add_cast (m_limb_type, t2);
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t2);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                }
            }
          tree t3;
          if (TYPE_UNSIGNED (rhs_type))
            t3 = build_zero_cst (m_limb_type);
          else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
            t3 = m_data[save_data_cnt];
          else
            t3 = m_data[save_data_cnt + 1];
          m_gsi = gsi_after_labels (edge_true_false->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true_false->dest);
          add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
          add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
          if (edge_true_true)
            add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
          if (ext)
            {
              tree t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[save_data_cnt], edge_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
              if (!save_cast_conditional)
                {
                  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
                  insert_before (g);
                }
              else
                for (basic_block bb = gsi_bb (m_gsi);;)
                  {
                    edge e1 = single_succ_edge (bb);
                    edge e2 = find_edge (e1->dest, m_bb), e3;
                    tree t5 = (e2 ? m_data[save_data_cnt + 1]
                               : make_ssa_name (m_limb_type));
                    phi = create_phi_node (t5, e1->dest);
                    edge_iterator ei;
                    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
                      add_phi_arg (phi, (e3 == e1 ? t4
                                         : build_zero_cst (m_limb_type)),
                                   e3, UNKNOWN_LOCATION);
                    if (e2)
                      break;
                    t4 = t5;
                    bb = e1->dest;
                  }
            }
          if (m_bitfld_load)
            {
              tree t4;
              if (!m_first)
                t4 = m_data[m_bitfld_load + 1];
              else
                t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi,
                           edge_true_true ? bitfld : m_data[m_bitfld_load],
                           edge_true_false, UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[m_bitfld_load + 2],
                           edge_false, UNKNOWN_LOCATION);
              if (edge_true_true)
                add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
                             UNKNOWN_LOCATION);
              m_data[m_bitfld_load] = t4;
              m_data[m_bitfld_load + 2] = t4;
              m_bitfld_load = 0;
            }
          m_cast_conditional = save_cast_conditional;
          m_first = save_first;
          return t;
        }
      else
        {
          if (tree_to_uhwi (idx) < low)
            {
              t = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
            }
          else if (tree_to_uhwi (idx) < high)
            {
              t = handle_operand (rhs1, size_int (low));
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
                t = add_cast (m_limb_type, t);
              tree ext = NULL_TREE;
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                  m_data[save_data_cnt + 1] = ext;
                }
            }
          else
            {
              if (TYPE_UNSIGNED (rhs_type) && m_first)
                {
                  handle_operand (rhs1, size_zero_node);
                  m_data[save_data_cnt + 2]
                    = build_int_cst (NULL_TREE, m_data_cnt);
                }
              else
                m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
              if (TYPE_UNSIGNED (rhs_type))
                t = build_zero_cst (m_limb_type);
              else if (m_bb && m_data[save_data_cnt])
                t = m_data[save_data_cnt];
              else
                t = m_data[save_data_cnt + 1];
            }
          tree type = limb_access_type (lhs_type, idx);
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_first = save_first;
          return t;
        }
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
           && bitint_precision_kind (lhs_type) >= bitint_prec_large
           && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
         type if this assert fails.  If no target chooses limb mode smaller
         than half of largest supported normal integral type, this will not
         be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
        {
          gimple_stmt_iterator save_gsi = m_gsi;
          m_gsi = m_init_gsi;
          if (gsi_end_p (m_gsi))
            m_gsi = gsi_after_labels (gsi_bb (m_gsi));
          else
            gsi_next (&m_gsi);
          if (TREE_CODE (rhs_type) == BITINT_TYPE
              && bitint_precision_kind (rhs_type) == bitint_prec_middle)
            {
              tree type = NULL_TREE;
              rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
              rhs_type = TREE_TYPE (rhs1);
            }
          r1 = rhs1;
          if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
            r1 = add_cast (m_limb_type, rhs1);
          if (TYPE_PRECISION (rhs_type) > limb_prec)
            {
              g = gimple_build_assign (make_ssa_name (rhs_type),
                                       RSHIFT_EXPR, rhs1,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec));
              insert_before (g);
              r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          if (TYPE_UNSIGNED (rhs_type))
            rext = build_zero_cst (m_limb_type);
          else
            {
              rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
                                       RSHIFT_EXPR, rext,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec - 1));
              insert_before (g);
              rext = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          m_init_gsi = m_gsi;
          if (gsi_end_p (m_init_gsi))
            m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
          else
            gsi_prev (&m_init_gsi);
          m_gsi = save_gsi;
        }
      tree t;
      if (m_upwards_2limb)
        {
          if (m_first)
            {
              tree out1, out2;
              prepare_data_in_out (r1, idx, &out1, rext);
              if (TYPE_PRECISION (rhs_type) > limb_prec)
                {
                  prepare_data_in_out (r2, idx, &out2, rext);
                  m_data.pop ();
                  t = m_data.pop ();
                  m_data[m_data_cnt + 1] = t;
                }
              else
                m_data[m_data_cnt + 1] = rext;
              m_data.safe_push (rext);
              t = m_data[m_data_cnt];
            }
          else if (!tree_fits_uhwi_p (idx))
            t = m_data[m_data_cnt + 1];
          else
            {
              tree type = limb_access_type (lhs_type, idx);
              t = m_data[m_data_cnt + 2];
              if (!useless_type_conversion_p (type, m_limb_type))
                t = add_cast (type, t);
            }
          m_data_cnt += 3;
          return t;
        }
      else if (m_first)
        {
          m_data.safe_push (r1);
          m_data.safe_push (r2);
          m_data.safe_push (rext);
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree type = limb_access_type (lhs_type, idx);
          if (integer_zerop (idx))
            t = m_data[m_data_cnt];
          else if (TYPE_PRECISION (rhs_type) > limb_prec
                   && integer_onep (idx))
            t = m_data[m_data_cnt + 1];
          else
            t = m_data[m_data_cnt + 2];
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_data_cnt += 3;
          return t;
        }
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
                             NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
        {
          g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
                                 NULL_TREE, NULL_TREE);
          insert_before (g);
          edge e5 = split_block (gsi_bb (m_gsi), g);
          e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
          e2 = find_edge (e5->dest, e2->dest);
          e4->probability = profile_probability::unlikely ();
          e5->flags = EDGE_FALSE_VALUE;
          e5->probability = e4->probability.invert ();
        }
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
        add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
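
/* Illustrative example, not part of the original sources: casting a
   signed _BitInt(128) value to _BitInt(256) with 64-bit limbs copies
   source limbs 0 and 1 and fills limbs 2 and 3 with the extension
   computed as (signed limb) source_limb_1 >> 63 cast back to the limb
   type, i.e. 0 for non-negative and all ones for negative values.  */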
1668 /* Helper function for handle_stmt method, handle a load from memory. */
1670 tree
1671 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1673 tree rhs1 = gimple_assign_rhs1 (stmt);
1674 tree rhs_type = TREE_TYPE (rhs1);
1675 bool eh = stmt_ends_bb_p (stmt);
1676 edge eh_edge = NULL;
1677 gimple *g;
1679 if (eh)
1681 edge_iterator ei;
1682 basic_block bb = gimple_bb (stmt);
1684 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1685 if (eh_edge->flags & EDGE_EH)
1686 break;
1689 if (TREE_CODE (rhs1) == COMPONENT_REF
1690 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1692 tree fld = TREE_OPERAND (rhs1, 1);
1693 /* For little-endian, we can allow as inputs bit-fields
1694 which start at a limb boundary. */
1695 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1696 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1697 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1698 goto normal_load;
1699 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1700 handle it normally for now. */
1701 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1702 goto normal_load;
1703 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1704 poly_int64 bitoffset;
1705 poly_uint64 field_offset, repr_offset;
1706 bool var_field_off = false;
1707 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1708 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1709 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1710 else
1712 bitoffset = 0;
1713 var_field_off = true;
1715 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1716 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1717 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1718 TREE_OPERAND (rhs1, 0), repr,
1719 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1720 HOST_WIDE_INT bo = bitoffset.to_constant ();
1721 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1722 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1723 if (m_first)
1725 if (m_upwards)
1727 gimple_stmt_iterator save_gsi = m_gsi;
1728 m_gsi = m_init_gsi;
1729 if (gsi_end_p (m_gsi))
1730 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1731 else
1732 gsi_next (&m_gsi);
1733 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1734 tree iv = make_ssa_name (m_limb_type);
1735 g = gimple_build_assign (iv, t);
1736 insert_before (g);
1737 if (eh)
1739 maybe_duplicate_eh_stmt (g, stmt);
1740 if (eh_edge)
1742 edge e = split_block (gsi_bb (m_gsi), g);
1743 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1744 = profile_probability::very_unlikely ();
1745 m_gsi = gsi_after_labels (e->dest);
1746 if (gsi_bb (save_gsi) == e->src)
1748 if (gsi_end_p (save_gsi))
1749 save_gsi = gsi_end_bb (e->dest);
1750 else
1751 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1753 if (m_preheader_bb == e->src)
1754 m_preheader_bb = e->dest;
1757 m_init_gsi = m_gsi;
1758 if (gsi_end_p (m_init_gsi))
1759 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1760 else
1761 gsi_prev (&m_init_gsi);
1762 m_gsi = save_gsi;
1763 tree out;
1764 prepare_data_in_out (iv, idx, &out);
1765 out = m_data[m_data_cnt];
1766 m_data.safe_push (out);
1768 else
1770 m_data.safe_push (NULL_TREE);
1771 m_data.safe_push (NULL_TREE);
1772 m_data.safe_push (NULL_TREE);
1776 tree nidx0 = NULL_TREE, nidx1;
1777 tree iv = m_data[m_data_cnt];
1778 if (m_cast_conditional && iv)
1780 gcc_assert (!m_bitfld_load);
1781 m_bitfld_load = m_data_cnt;
1783 if (tree_fits_uhwi_p (idx))
1785 unsigned prec = TYPE_PRECISION (rhs_type);
1786 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1787 gcc_assert (i * limb_prec < prec);
1788 nidx1 = size_int (i + bo_idx + 1);
1789 if ((i + 1) * limb_prec > prec)
1791 prec %= limb_prec;
1792 if (prec + bo_bit <= (unsigned) limb_prec)
1793 nidx1 = NULL_TREE;
1795 if (!iv)
1796 nidx0 = size_int (i + bo_idx);
1798 else
1800 if (!iv)
1802 if (bo_idx == 0)
1803 nidx0 = idx;
1804 else
1806 nidx0 = make_ssa_name (sizetype);
1807 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1808 size_int (bo_idx));
1809 insert_before (g);
1812 nidx1 = make_ssa_name (sizetype);
1813 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1814 size_int (bo_idx + 1));
1815 insert_before (g);
1818 tree iv2 = NULL_TREE;
1819 if (nidx0)
1821 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1822 iv = make_ssa_name (m_limb_type);
1823 g = gimple_build_assign (iv, t);
1824 insert_before (g);
1825 gcc_assert (!eh);
1827 if (nidx1)
1829 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1830 unsigned prec = TYPE_PRECISION (rhs_type);
1831 if (conditional)
1833 if ((prec % limb_prec) == 0
1834 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1835 conditional = false;
1837 edge edge_true = NULL, edge_false = NULL;
1838 if (conditional)
1840 g = gimple_build_cond (NE_EXPR, idx,
1841 size_int (prec / limb_prec),
1842 NULL_TREE, NULL_TREE);
1843 if_then (g, profile_probability::likely (),
1844 edge_true, edge_false);
1846 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1847 if (m_upwards_2limb
1848 && !m_first
1849 && !m_bitfld_load
1850 && !tree_fits_uhwi_p (idx))
1851 iv2 = m_data[m_data_cnt + 1];
1852 else
1853 iv2 = make_ssa_name (m_limb_type);
1854 g = gimple_build_assign (iv2, t);
1855 insert_before (g);
1856 if (eh)
1858 maybe_duplicate_eh_stmt (g, stmt);
1859 if (eh_edge)
1861 edge e = split_block (gsi_bb (m_gsi), g);
1862 m_gsi = gsi_after_labels (e->dest);
1863 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1864 = profile_probability::very_unlikely ();
1867 if (conditional)
1869 tree iv3 = make_ssa_name (m_limb_type);
1870 if (eh)
1871 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1872 gphi *phi = create_phi_node (iv3, edge_true->dest);
1873 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1874 add_phi_arg (phi, build_zero_cst (m_limb_type),
1875 edge_false, UNKNOWN_LOCATION);
1876 m_gsi = gsi_after_labels (edge_true->dest);
1879 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1880 iv, build_int_cst (unsigned_type_node, bo_bit));
1881 insert_before (g);
1882 iv = gimple_assign_lhs (g);
1883 if (iv2)
1885 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1886 iv2, build_int_cst (unsigned_type_node,
1887 limb_prec - bo_bit));
1888 insert_before (g);
1889 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1890 gimple_assign_lhs (g), iv);
1891 insert_before (g);
1892 iv = gimple_assign_lhs (g);
1893 if (m_data[m_data_cnt])
1894 m_data[m_data_cnt] = iv2;
1896 if (tree_fits_uhwi_p (idx))
1898 tree atype = limb_access_type (rhs_type, idx);
1899 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1900 iv = add_cast (atype, iv);
1902 m_data_cnt += 3;
1903 return iv;
1906 normal_load:
1907   /* Use write_p = true for loads with EH edges to make
1908      sure limb_access doesn't add a cast as a separate
1909      statement after it.  */
1910 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1911 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1912 g = gimple_build_assign (ret, rhs1);
1913 insert_before (g);
1914 if (eh)
1916 maybe_duplicate_eh_stmt (g, stmt);
1917 if (eh_edge)
1919 edge e = split_block (gsi_bb (m_gsi), g);
1920 m_gsi = gsi_after_labels (e->dest);
1921 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1922 = profile_probability::very_unlikely ();
1924 if (tree_fits_uhwi_p (idx))
1926 tree atype = limb_access_type (rhs_type, idx);
1927 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1928 ret = add_cast (atype, ret);
1931 return ret;
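/* A sketch (illustrative pseudocode, not the emitted IL) of what the
   bit-field path above produces for limb IDX when the field starts
   BO_BIT bits into representative limb BO_IDX and bo_bit != 0:
     iv = repr[bo_idx + idx];	      // or reused from the previous limb
     iv2 = repr[bo_idx + idx + 1];    // only when more bits are needed
     limb = (iv >> bo_bit) | (iv2 << (limb_prec - bo_bit));  */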
1934 /* Return limb IDX of the result of the mergeable statement STMT.  */
1936 tree
1937 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1939 tree lhs, rhs1, rhs2 = NULL_TREE;
1940 gimple *g;
1941 switch (gimple_code (stmt))
1943 case GIMPLE_ASSIGN:
1944 if (gimple_assign_load_p (stmt))
1945 return handle_load (stmt, idx);
1946 switch (gimple_assign_rhs_code (stmt))
1948 case BIT_AND_EXPR:
1949 case BIT_IOR_EXPR:
1950 case BIT_XOR_EXPR:
1951 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1952 /* FALLTHRU */
1953 case BIT_NOT_EXPR:
1954 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1955 lhs = make_ssa_name (TREE_TYPE (rhs1));
1956 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1957 rhs1, rhs2);
1958 insert_before (g);
1959 return lhs;
1960 case PLUS_EXPR:
1961 case MINUS_EXPR:
1962 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1963 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1964 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1965 rhs1, rhs2, idx);
1966 case NEGATE_EXPR:
1967 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1968 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1969 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1970 case LSHIFT_EXPR:
1971 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1972 idx),
1973 gimple_assign_rhs2 (stmt), idx);
1974 case SSA_NAME:
1975 case INTEGER_CST:
1976 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1977 CASE_CONVERT:
1978 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1979 gimple_assign_rhs1 (stmt), idx);
1980 case VIEW_CONVERT_EXPR:
1981 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1982 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
1983 idx);
1984 default:
1985 break;
1987 break;
1988 default:
1989 break;
1991 gcc_unreachable ();
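/* E.g. with 64-bit limbs, lowering c = a + b for _BitInt(256) operands
   invokes handle_stmt with idx 0 .. 3; each invocation emits the
   addition of one limb pair, with carry propagation between the limbs
   handled inside handle_plus_minus.  */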
1994 /* Return minimum precision of OP at STMT.
1995    A positive value is the minimum precision above which all bits
1996    are zero; a negative value means all bits above the negation of
1997    the value are copies of the sign bit.  */
1999 static int
2000 range_to_prec (tree op, gimple *stmt)
2002 int_range_max r;
2003 wide_int w;
2004 tree type = TREE_TYPE (op);
2005 unsigned int prec = TYPE_PRECISION (type);
2007 if (!optimize
2008 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2009 || r.undefined_p ())
2011 if (TYPE_UNSIGNED (type))
2012 return prec;
2013 else
2014 return MIN ((int) -prec, -2);
2017 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2019 w = r.lower_bound ();
2020 if (wi::neg_p (w))
2022 int min_prec1 = wi::min_precision (w, SIGNED);
2023 w = r.upper_bound ();
2024 int min_prec2 = wi::min_precision (w, SIGNED);
2025 int min_prec = MAX (min_prec1, min_prec2);
2026 return MIN (-min_prec, -2);
2030 w = r.upper_bound ();
2031 int min_prec = wi::min_precision (w, UNSIGNED);
2032 return MAX (min_prec, 1);
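/* E.g. if the range query proves an unsigned OP is in [0, 1000], the
   result is 10 (all bits above the low 10 are zero); if a signed OP is
   in [-5, 5], the result is -4 (all bits above the low 4 are copies of
   the sign bit).  */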
2035 /* Return the address of the first limb of OP and write into *PREC
2036    its precision.  If positive, the operand is zero-extended
2037    from that precision; if negative, the operand is sign-extended
2038    from -*PREC.  If PREC_STORED is NULL, this is the toplevel call,
2039    otherwise *PREC_STORED is the prec from the innermost call without
2040    range optimizations.  */
2042 tree
2043 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2044 int *prec_stored, int *prec)
2046 wide_int w;
2047 location_t loc_save = m_loc;
2048 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2049 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2050 && TREE_CODE (op) != INTEGER_CST)
2052 do_int:
2053 *prec = range_to_prec (op, stmt);
2054 bitint_prec_kind kind = bitint_prec_small;
2055 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2056 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2057 kind = bitint_precision_kind (TREE_TYPE (op));
2058 if (kind == bitint_prec_middle)
2060 tree type = NULL_TREE;
2061 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2063 tree op_type = TREE_TYPE (op);
2064 unsigned HOST_WIDE_INT nelts
2065 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2066       /* Add support for 3 or more limbs filled in from a normal
2067 	 integral type if this assert fails.  If no target chooses
2068 	 a limb mode smaller than half of the largest supported normal
2069 	 integral type, this will not be needed.  */
2070 gcc_assert (nelts <= 2);
2071 if (prec_stored)
2072 *prec_stored = (TYPE_UNSIGNED (op_type)
2073 ? TYPE_PRECISION (op_type)
2074 : -TYPE_PRECISION (op_type));
2075 if (*prec <= limb_prec && *prec >= -limb_prec)
2077 nelts = 1;
2078 if (prec_stored)
2080 if (TYPE_UNSIGNED (op_type))
2082 if (*prec_stored > limb_prec)
2083 *prec_stored = limb_prec;
2085 else if (*prec_stored < -limb_prec)
2086 *prec_stored = -limb_prec;
2089 tree atype = build_array_type_nelts (m_limb_type, nelts);
2090 tree var = create_tmp_var (atype);
2091 tree t1 = op;
2092 if (!useless_type_conversion_p (m_limb_type, op_type))
2093 t1 = add_cast (m_limb_type, t1);
2094 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2095 NULL_TREE, NULL_TREE);
2096 gimple *g = gimple_build_assign (v, t1);
2097 insert_before (g);
2098 if (nelts > 1)
2100 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2101 g = gimple_build_assign (make_ssa_name (op_type),
2102 RSHIFT_EXPR, op, lp);
2103 insert_before (g);
2104 tree t2 = gimple_assign_lhs (g);
2105 t2 = add_cast (m_limb_type, t2);
2106 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2107 NULL_TREE, NULL_TREE);
2108 g = gimple_build_assign (v, t2);
2109 insert_before (g);
2111 tree ret = build_fold_addr_expr (var);
2112 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2114 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2115 g = gimple_build_assign (var, clobber);
2116 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2118 m_loc = loc_save;
2119 return ret;
2121 switch (TREE_CODE (op))
2123 case SSA_NAME:
2124 if (m_names == NULL
2125 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2127 gimple *g = SSA_NAME_DEF_STMT (op);
2128 tree ret;
2129 m_loc = gimple_location (g);
2130 if (gimple_assign_load_p (g))
2132 *prec = range_to_prec (op, NULL);
2133 if (prec_stored)
2134 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2135 ? TYPE_PRECISION (TREE_TYPE (op))
2136 : -TYPE_PRECISION (TREE_TYPE (op)));
2137 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2138 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2139 NULL_TREE, true, GSI_SAME_STMT);
2141 else if (gimple_code (g) == GIMPLE_NOP)
2143 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2144 if (prec_stored)
2145 *prec_stored = *prec;
2146 tree var = create_tmp_var (m_limb_type);
2147 TREE_ADDRESSABLE (var) = 1;
2148 ret = build_fold_addr_expr (var);
2149 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2151 tree clobber = build_clobber (m_limb_type,
2152 CLOBBER_STORAGE_END);
2153 g = gimple_build_assign (var, clobber);
2154 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2157 else
2159 gcc_assert (gimple_assign_cast_p (g));
2160 tree rhs1 = gimple_assign_rhs1 (g);
2161 bitint_prec_kind kind = bitint_prec_small;
2162 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2163 rhs1 = TREE_OPERAND (rhs1, 0);
2164 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2165 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2166 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2167 if (kind >= bitint_prec_large)
2169 tree lhs_type = TREE_TYPE (op);
2170 tree rhs_type = TREE_TYPE (rhs1);
2171 int prec_stored_val = 0;
2172 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2173 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2175 if (TYPE_UNSIGNED (lhs_type)
2176 && !TYPE_UNSIGNED (rhs_type))
2177 gcc_assert (*prec >= 0 || prec_stored == NULL);
2179 else
2181 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2183 else if (TYPE_UNSIGNED (lhs_type))
2185 gcc_assert (*prec > 0
2186 || prec_stored_val > 0
2187 || (-prec_stored_val
2188 >= TYPE_PRECISION (lhs_type)));
2189 *prec = TYPE_PRECISION (lhs_type);
2191 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2193 else
2194 *prec = -TYPE_PRECISION (lhs_type);
2197 else
2199 op = rhs1;
2200 stmt = g;
2201 goto do_int;
2204 m_loc = loc_save;
2205 return ret;
2207 else
2209 int p = var_to_partition (m_map, op);
2210 gcc_assert (m_vars[p] != NULL_TREE);
2211 *prec = range_to_prec (op, stmt);
2212 if (prec_stored)
2213 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2214 ? TYPE_PRECISION (TREE_TYPE (op))
2215 : -TYPE_PRECISION (TREE_TYPE (op)));
2216 return build_fold_addr_expr (m_vars[p]);
2218 case INTEGER_CST:
2219 unsigned int min_prec, mp;
2220 tree type;
2221 w = wi::to_wide (op);
2222 if (tree_int_cst_sgn (op) >= 0)
2224 min_prec = wi::min_precision (w, UNSIGNED);
2225 *prec = MAX (min_prec, 1);
2227 else
2229 min_prec = wi::min_precision (w, SIGNED);
2230 *prec = MIN ((int) -min_prec, -2);
2232 mp = CEIL (min_prec, limb_prec) * limb_prec;
2233 if (mp == 0)
2234 mp = 1;
2235 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2236 && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2237 || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2238 type = TREE_TYPE (op);
2239 else
2240 type = build_bitint_type (mp, 1);
2241 if (TREE_CODE (type) != BITINT_TYPE
2242 || bitint_precision_kind (type) == bitint_prec_small)
2244 if (TYPE_PRECISION (type) <= limb_prec)
2245 type = m_limb_type;
2246 else
2248 while (bitint_precision_kind (mp) == bitint_prec_small)
2249 mp += limb_prec;
2250 	      /* This case is for targets which e.g. have a 64-bit
2251 		 limb but categorize _BitInts up to 128 bits as
2252 		 small.  We could use a type like m_limb_type[2] or
2253 		 similar instead to save space.  */
2254 type = build_bitint_type (mp, 1);
2257 if (prec_stored)
2259 if (tree_int_cst_sgn (op) >= 0)
2260 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2261 else
2262 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2264 op = tree_output_constant_def (fold_convert (type, op));
2265 return build_fold_addr_expr (op);
2266 default:
2267 gcc_unreachable ();
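/* E.g. for an INTEGER_CST operand 5 of a huge _BitInt operation, the
   INTEGER_CST case above emits no runtime code at all: the constant is
   forced into the constant pool via tree_output_constant_def, its
   address is returned and *PREC is set to 3 (5 is zero-extended from
   3 bits).  */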
2271 /* Helper function: create a loop before the current location,
2272    starting with the sizetype INIT value from the preheader edge.  Return
2273    the PHI result and set *IDX_NEXT to the SSA_NAME it creates and uses
2274    on the latch edge.  */
2276 tree
2277 bitint_large_huge::create_loop (tree init, tree *idx_next)
2279 if (!gsi_end_p (m_gsi))
2280 gsi_prev (&m_gsi);
2281 else
2282 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2283 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2284 edge e2 = split_block (e1->dest, (gimple *) NULL);
2285 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2286 e3->probability = profile_probability::very_unlikely ();
2287 e2->flags = EDGE_FALSE_VALUE;
2288 e2->probability = e3->probability.invert ();
2289 tree idx = make_ssa_name (sizetype);
2290 gphi *phi = create_phi_node (idx, e1->dest);
2291 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2292 *idx_next = make_ssa_name (sizetype);
2293 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2294 m_gsi = gsi_after_labels (e1->dest);
2295 m_bb = e1->dest;
2296 m_preheader_bb = e1->src;
2297 class loop *loop = alloc_loop ();
2298 loop->header = e1->dest;
2299 add_loop (loop, e1->src->loop_father);
2300 return idx;
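/* Conceptually the created control flow is (the exit condition is
   emitted later by the caller):
     idx = INIT;
     loop:
       ... statements inserted at m_gsi ...
       idx = *IDX_NEXT;
       if (<caller's condition>) goto loop;  */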
2303 /* Lower a large/huge _BitInt mergeable or similar statement STMT which can
2304    be lowered using iteration from the least significant limb up to the most
2305    significant limb.  For large _BitInt it is emitted as straight line code
2306    before the current location, for huge _BitInt as a loop handling two limbs
2307    at once, followed by handling up to two limbs in straight line code (at
2308    most one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
2309    comparisons, in that case CMP_CODE should be the comparison code and
2310    CMP_OP1/CMP_OP2 the comparison operands.  */
2312 tree
2313 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2314 tree cmp_op1, tree cmp_op2)
2316 bool eq_p = cmp_code != ERROR_MARK;
2317 tree type;
2318 if (eq_p)
2319 type = TREE_TYPE (cmp_op1);
2320 else
2321 type = TREE_TYPE (gimple_assign_lhs (stmt));
2322 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2323 bitint_prec_kind kind = bitint_precision_kind (type);
2324 gcc_assert (kind >= bitint_prec_large);
2325 gimple *g;
2326 tree lhs = gimple_get_lhs (stmt);
2327 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2328 if (lhs
2329 && TREE_CODE (lhs) == SSA_NAME
2330 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2331 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2333 int p = var_to_partition (m_map, lhs);
2334 gcc_assert (m_vars[p] != NULL_TREE);
2335 m_lhs = lhs = m_vars[p];
2337 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2338 bool sext = false;
2339 tree ext = NULL_TREE, store_operand = NULL_TREE;
2340 bool eh = false;
2341 basic_block eh_pad = NULL;
2342 tree nlhs = NULL_TREE;
2343 unsigned HOST_WIDE_INT bo_idx = 0;
2344 unsigned HOST_WIDE_INT bo_bit = 0;
2345 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2346 if (gimple_store_p (stmt))
2348 store_operand = gimple_assign_rhs1 (stmt);
2349 eh = stmt_ends_bb_p (stmt);
2350 if (eh)
2352 edge e;
2353 edge_iterator ei;
2354 basic_block bb = gimple_bb (stmt);
2356 FOR_EACH_EDGE (e, ei, bb->succs)
2357 if (e->flags & EDGE_EH)
2359 eh_pad = e->dest;
2360 break;
2363 if (TREE_CODE (lhs) == COMPONENT_REF
2364 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2366 tree fld = TREE_OPERAND (lhs, 1);
2367 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2368 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2369 poly_int64 bitoffset;
2370 poly_uint64 field_offset, repr_offset;
2371 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2372 nlhs = lhs;
2373 else
2375 bool var_field_off = false;
2376 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2377 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2378 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2379 else
2381 bitoffset = 0;
2382 var_field_off = true;
2384 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2385 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2386 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2387 TREE_OPERAND (lhs, 0), repr,
2388 var_field_off
2389 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2390 HOST_WIDE_INT bo = bitoffset.to_constant ();
2391 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2392 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2396 if ((store_operand
2397 && TREE_CODE (store_operand) == SSA_NAME
2398 && (m_names == NULL
2399 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2400 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2401 || gimple_assign_cast_p (stmt))
2403 rhs1 = gimple_assign_rhs1 (store_operand
2404 ? SSA_NAME_DEF_STMT (store_operand)
2405 : stmt);
2406 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2407 rhs1 = TREE_OPERAND (rhs1, 0);
2408       /* Optimize mergeable ops ending with a widening cast to _BitInt
2409 	 (or followed by a store).  We can lower just the limbs of the
2410 	 cast operand and widen afterwards.  */
2411 if (TREE_CODE (rhs1) == SSA_NAME
2412 && (m_names == NULL
2413 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2414 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2415 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2416 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2417 limb_prec) < CEIL (prec, limb_prec)
2418 || (kind == bitint_prec_huge
2419 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2421 store_operand = rhs1;
2422 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2423 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2424 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2425 sext = true;
2428 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2429 if (kind == bitint_prec_large)
2430 cnt = CEIL (prec, limb_prec);
2431 else
2433 rem = (prec % (2 * limb_prec));
2434 end = (prec - rem) / limb_prec;
2435 cnt = 2 + CEIL (rem, limb_prec);
2436 idx = idx_first = create_loop (size_zero_node, &idx_next);
2439 basic_block edge_bb = NULL;
2440 if (eq_p)
2442 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2443 gsi_prev (&gsi);
2444 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2445 edge_bb = e->src;
2446 if (kind == bitint_prec_large)
2447 m_gsi = gsi_end_bb (edge_bb);
2449 else
2450 m_after_stmt = stmt;
2451 if (kind != bitint_prec_large)
2452 m_upwards_2limb = end;
2453 m_upwards = true;
2455 bool separate_ext
2456 = (prec != (unsigned) TYPE_PRECISION (type)
2457 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2458 > CEIL (prec, limb_prec)));
2460 for (unsigned i = 0; i < cnt; i++)
2462 m_data_cnt = 0;
2463 if (kind == bitint_prec_large)
2464 idx = size_int (i);
2465 else if (i >= 2)
2466 idx = size_int (end + (i > 2));
2467 if (eq_p)
2469 rhs1 = handle_operand (cmp_op1, idx);
2470 tree rhs2 = handle_operand (cmp_op2, idx);
2471 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2472 insert_before (g);
2473 edge e1 = split_block (gsi_bb (m_gsi), g);
2474 e1->flags = EDGE_FALSE_VALUE;
2475 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2476 e1->probability = profile_probability::unlikely ();
2477 e2->probability = e1->probability.invert ();
2478 if (i == 0)
2479 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2480 m_gsi = gsi_after_labels (e1->dest);
2482 else
2484 if (store_operand)
2485 rhs1 = handle_operand (store_operand, idx);
2486 else
2487 rhs1 = handle_stmt (stmt, idx);
2488 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2489 rhs1 = add_cast (m_limb_type, rhs1);
2490 if (sext && i == cnt - 1)
2491 ext = rhs1;
2492 tree nidx = idx;
2493 if (bo_idx)
2495 if (tree_fits_uhwi_p (idx))
2496 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2497 else
2499 nidx = make_ssa_name (sizetype);
2500 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2501 size_int (bo_idx));
2502 insert_before (g);
2505 bool done = false;
2506 basic_block new_bb = NULL;
2507 /* Handle stores into bit-fields. */
2508 if (bo_bit)
2510 if (i == 0)
2512 edge e2 = NULL;
2513 if (kind != bitint_prec_large)
2515 prepare_data_in_out (build_zero_cst (m_limb_type),
2516 idx, &bf_next);
2517 bf_next = m_data.pop ();
2518 bf_cur = m_data.pop ();
2519 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2520 NULL_TREE, NULL_TREE);
2521 edge edge_true;
2522 if_then_else (g, profile_probability::unlikely (),
2523 edge_true, e2);
2524 new_bb = e2->dest;
2526 tree ftype
2527 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2528 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2529 bitsize_int (limb_prec - bo_bit),
2530 bitsize_int (bo_idx * limb_prec + bo_bit));
2531 tree t = add_cast (ftype, rhs1);
2532 g = gimple_build_assign (bfr, t);
2533 insert_before (g);
2534 if (eh)
2536 maybe_duplicate_eh_stmt (g, stmt);
2537 if (eh_pad)
2539 edge e = split_block (gsi_bb (m_gsi), g);
2540 m_gsi = gsi_after_labels (e->dest);
2541 make_edge (e->src, eh_pad, EDGE_EH)->probability
2542 = profile_probability::very_unlikely ();
2545 if (kind == bitint_prec_large)
2547 bf_cur = rhs1;
2548 done = true;
2550 else if (e2)
2551 m_gsi = gsi_after_labels (e2->src);
2553 if (!done)
2555 tree t1 = make_ssa_name (m_limb_type);
2556 tree t2 = make_ssa_name (m_limb_type);
2557 tree t3 = make_ssa_name (m_limb_type);
2558 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2559 build_int_cst (unsigned_type_node,
2560 limb_prec - bo_bit));
2561 insert_before (g);
2562 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2563 build_int_cst (unsigned_type_node,
2564 bo_bit));
2565 insert_before (g);
2566 bf_cur = rhs1;
2567 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2568 insert_before (g);
2569 rhs1 = t3;
2570 if (bf_next && i == 1)
2572 g = gimple_build_assign (bf_next, bf_cur);
2573 insert_before (g);
2577 if (!done)
2579 /* Handle bit-field access to partial last limb if needed. */
2580 if (nlhs
2581 && i == cnt - 1
2582 && !separate_ext
2583 && tree_fits_uhwi_p (idx))
2585 unsigned int tprec = TYPE_PRECISION (type);
2586 unsigned int rprec = tprec % limb_prec;
2587 if (rprec + bo_bit < (unsigned) limb_prec)
2589 tree ftype
2590 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2591 tree bfr = build3 (BIT_FIELD_REF, ftype,
2592 unshare_expr (nlhs),
2593 bitsize_int (rprec + bo_bit),
2594 bitsize_int ((bo_idx
2595 + tprec / limb_prec)
2596 * limb_prec));
2597 tree t = add_cast (ftype, rhs1);
2598 g = gimple_build_assign (bfr, t);
2599 done = true;
2600 bf_cur = NULL_TREE;
2602 else if (rprec + bo_bit == (unsigned) limb_prec)
2603 bf_cur = NULL_TREE;
2605 /* Otherwise, stores to any other lhs. */
2606 if (!done)
2608 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2609 nidx, true);
2610 g = gimple_build_assign (l, rhs1);
2612 insert_before (g);
2613 if (eh)
2615 maybe_duplicate_eh_stmt (g, stmt);
2616 if (eh_pad)
2618 edge e = split_block (gsi_bb (m_gsi), g);
2619 m_gsi = gsi_after_labels (e->dest);
2620 make_edge (e->src, eh_pad, EDGE_EH)->probability
2621 = profile_probability::very_unlikely ();
2624 if (new_bb)
2625 m_gsi = gsi_after_labels (new_bb);
2628 m_first = false;
2629 if (kind == bitint_prec_huge && i <= 1)
2631 if (i == 0)
2633 idx = make_ssa_name (sizetype);
2634 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2635 size_one_node);
2636 insert_before (g);
2638 else
2640 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2641 size_int (2));
2642 insert_before (g);
2643 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2644 NULL_TREE, NULL_TREE);
2645 insert_before (g);
2646 if (eq_p)
2647 m_gsi = gsi_after_labels (edge_bb);
2648 else
2649 m_gsi = gsi_for_stmt (stmt);
2650 m_bb = NULL;
2655 if (separate_ext)
2657 if (sext)
2659 ext = add_cast (signed_type_for (m_limb_type), ext);
2660 tree lpm1 = build_int_cst (unsigned_type_node,
2661 limb_prec - 1);
2662 tree n = make_ssa_name (TREE_TYPE (ext));
2663 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2664 insert_before (g);
2665 ext = add_cast (m_limb_type, n);
2667 else
2668 ext = build_zero_cst (m_limb_type);
2669 kind = bitint_precision_kind (type);
2670 unsigned start = CEIL (prec, limb_prec);
2671 prec = TYPE_PRECISION (type);
2672 idx = idx_first = idx_next = NULL_TREE;
2673 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2674 kind = bitint_prec_large;
2675 if (kind == bitint_prec_large)
2676 cnt = CEIL (prec, limb_prec) - start;
2677 else
2679 rem = prec % limb_prec;
2680 end = (prec - rem) / limb_prec;
2681 cnt = (bo_bit != 0) + 1 + (rem != 0);
2683 for (unsigned i = 0; i < cnt; i++)
2685 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2686 idx = size_int (start + i);
2687 else if (i == cnt - 1 && (rem != 0))
2688 idx = size_int (end);
2689 else if (i == (bo_bit != 0))
2690 idx = create_loop (size_int (start + i), &idx_next);
2691 rhs1 = ext;
2692 if (bf_cur != NULL_TREE && bf_cur != ext)
2694 tree t1 = make_ssa_name (m_limb_type);
2695 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2696 build_int_cst (unsigned_type_node,
2697 limb_prec - bo_bit));
2698 insert_before (g);
2699 if (integer_zerop (ext))
2700 rhs1 = t1;
2701 else
2703 tree t2 = make_ssa_name (m_limb_type);
2704 rhs1 = make_ssa_name (m_limb_type);
2705 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2706 build_int_cst (unsigned_type_node,
2707 bo_bit));
2708 insert_before (g);
2709 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2710 insert_before (g);
2712 bf_cur = ext;
2714 tree nidx = idx;
2715 if (bo_idx)
2717 if (tree_fits_uhwi_p (idx))
2718 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2719 else
2721 nidx = make_ssa_name (sizetype);
2722 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2723 size_int (bo_idx));
2724 insert_before (g);
2727 bool done = false;
2728 /* Handle bit-field access to partial last limb if needed. */
2729 if (nlhs && i == cnt - 1)
2731 unsigned int tprec = TYPE_PRECISION (type);
2732 unsigned int rprec = tprec % limb_prec;
2733 if (rprec + bo_bit < (unsigned) limb_prec)
2735 tree ftype
2736 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2737 tree bfr = build3 (BIT_FIELD_REF, ftype,
2738 unshare_expr (nlhs),
2739 bitsize_int (rprec + bo_bit),
2740 bitsize_int ((bo_idx + tprec / limb_prec)
2741 * limb_prec));
2742 tree t = add_cast (ftype, rhs1);
2743 g = gimple_build_assign (bfr, t);
2744 done = true;
2745 bf_cur = NULL_TREE;
2747 else if (rprec + bo_bit == (unsigned) limb_prec)
2748 bf_cur = NULL_TREE;
2750 /* Otherwise, stores to any other lhs. */
2751 if (!done)
2753 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2754 g = gimple_build_assign (l, rhs1);
2756 insert_before (g);
2757 if (eh)
2759 maybe_duplicate_eh_stmt (g, stmt);
2760 if (eh_pad)
2762 edge e = split_block (gsi_bb (m_gsi), g);
2763 m_gsi = gsi_after_labels (e->dest);
2764 make_edge (e->src, eh_pad, EDGE_EH)->probability
2765 = profile_probability::very_unlikely ();
2768 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2770 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2771 size_one_node);
2772 insert_before (g);
2773 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2774 NULL_TREE, NULL_TREE);
2775 insert_before (g);
2776 m_gsi = gsi_for_stmt (stmt);
2777 m_bb = NULL;
2781 if (bf_cur != NULL_TREE)
2783 unsigned int tprec = TYPE_PRECISION (type);
2784 unsigned int rprec = tprec % limb_prec;
2785 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2786 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2787 bitsize_int (rprec + bo_bit),
2788 bitsize_int ((bo_idx + tprec / limb_prec)
2789 * limb_prec));
2790 rhs1 = bf_cur;
2791 if (bf_cur != ext)
2793 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2794 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2795 build_int_cst (unsigned_type_node,
2796 limb_prec - bo_bit));
2797 insert_before (g);
2799 rhs1 = add_cast (ftype, rhs1);
2800 g = gimple_build_assign (bfr, rhs1);
2801 insert_before (g);
2802 if (eh)
2804 maybe_duplicate_eh_stmt (g, stmt);
2805 if (eh_pad)
2807 edge e = split_block (gsi_bb (m_gsi), g);
2808 m_gsi = gsi_after_labels (e->dest);
2809 make_edge (e->src, eh_pad, EDGE_EH)->probability
2810 = profile_probability::very_unlikely ();
2815 if (gimple_store_p (stmt))
2817 unlink_stmt_vdef (stmt);
2818 release_ssa_name (gimple_vdef (stmt));
2819 gsi_remove (&m_gsi, true);
2821 if (eq_p)
2823 lhs = make_ssa_name (boolean_type_node);
2824 basic_block bb = gimple_bb (stmt);
2825 gphi *phi = create_phi_node (lhs, bb);
2826 edge e = find_edge (gsi_bb (m_gsi), bb);
2827 unsigned int n = EDGE_COUNT (bb->preds);
2828 for (unsigned int i = 0; i < n; i++)
2830 edge e2 = EDGE_PRED (bb, i);
2831 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2832 e2, UNKNOWN_LOCATION);
2834 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2835 return lhs;
2837 else
2838 return NULL_TREE;
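/* Illustrative shape of the huge-kind lowering above, assuming a plain
   store and no bit-field (pseudocode, not the emitted IL):
     for (idx = 0; idx < end; idx += 2)
       {
         lhs[idx] = <limb idx of rhs>;
         lhs[idx + 1] = <limb idx + 1 of rhs>;
       }
     lhs[end] = <limb end of rhs>;	       // if rem != 0
     lhs[end + 1] = <limb end + 1 of rhs>;     // if rem > limb_prec  */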
2841 /* Handle a large/huge _BitInt comparison statement STMT other than
2842    EQ_EXPR/NE_EXPR.  The meaning of CMP_CODE, CMP_OP1 and CMP_OP2 is as in
2843    lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR comparisons are
2844    lowered by iteration from the most significant limb downwards to
2845    the least significant one, for large _BitInt in straight line code,
2846    otherwise with the most significant limb handled in
2847    straight line code followed by a loop handling one limb at a time.
2848    Comparisons with unsigned huge _BitInt with precisions which are
2849    multiples of the limb precision can use just the loop and don't need to
2850    handle the most significant limb before the loop.  The loop or straight
2851    line code jumps to the final basic block if a particular pair of limbs
2852    is not equal.  */
2854 tree
2855 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2856 tree cmp_op1, tree cmp_op2)
2858 tree type = TREE_TYPE (cmp_op1);
2859 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2860 bitint_prec_kind kind = bitint_precision_kind (type);
2861 gcc_assert (kind >= bitint_prec_large);
2862 gimple *g;
2863 if (!TYPE_UNSIGNED (type)
2864 && integer_zerop (cmp_op2)
2865 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2867 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2868 tree idx = size_int (end);
2869 m_data_cnt = 0;
2870 tree rhs1 = handle_operand (cmp_op1, idx);
2871 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2873 tree stype = signed_type_for (TREE_TYPE (rhs1));
2874 rhs1 = add_cast (stype, rhs1);
2876 tree lhs = make_ssa_name (boolean_type_node);
2877 g = gimple_build_assign (lhs, cmp_code, rhs1,
2878 build_zero_cst (TREE_TYPE (rhs1)));
2879 insert_before (g);
2880 cmp_code = NE_EXPR;
2881 return lhs;
2884 unsigned cnt, rem = 0, end = 0;
2885 tree idx = NULL_TREE, idx_next = NULL_TREE;
2886 if (kind == bitint_prec_large)
2887 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2888 else
2890 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2891 if (rem == 0 && !TYPE_UNSIGNED (type))
2892 rem = limb_prec;
2893 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2894 cnt = 1 + (rem != 0);
2897 basic_block edge_bb = NULL;
2898 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2899 gsi_prev (&gsi);
2900 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2901 edge_bb = e->src;
2902 m_gsi = gsi_end_bb (edge_bb);
2904 edge *edges = XALLOCAVEC (edge, cnt * 2);
2905 for (unsigned i = 0; i < cnt; i++)
2907 m_data_cnt = 0;
2908 if (kind == bitint_prec_large)
2909 idx = size_int (cnt - i - 1);
2910 else if (i == cnt - 1)
2911 idx = create_loop (size_int (end - 1), &idx_next);
2912 else
2913 idx = size_int (end);
2914 tree rhs1 = handle_operand (cmp_op1, idx);
2915 tree rhs2 = handle_operand (cmp_op2, idx);
2916 if (i == 0
2917 && !TYPE_UNSIGNED (type)
2918 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2920 tree stype = signed_type_for (TREE_TYPE (rhs1));
2921 rhs1 = add_cast (stype, rhs1);
2922 rhs2 = add_cast (stype, rhs2);
2924 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2925 insert_before (g);
2926 edge e1 = split_block (gsi_bb (m_gsi), g);
2927 e1->flags = EDGE_FALSE_VALUE;
2928 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2929 e1->probability = profile_probability::likely ();
2930 e2->probability = e1->probability.invert ();
2931 if (i == 0)
2932 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2933 m_gsi = gsi_after_labels (e1->dest);
2934 edges[2 * i] = e2;
2935 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2936 insert_before (g);
2937 e1 = split_block (gsi_bb (m_gsi), g);
2938 e1->flags = EDGE_FALSE_VALUE;
2939 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2940 e1->probability = profile_probability::unlikely ();
2941 e2->probability = e1->probability.invert ();
2942 m_gsi = gsi_after_labels (e1->dest);
2943 edges[2 * i + 1] = e2;
2944 m_first = false;
2945 if (kind == bitint_prec_huge && i == cnt - 1)
2947 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2948 insert_before (g);
2949 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2950 NULL_TREE, NULL_TREE);
2951 insert_before (g);
2952 edge true_edge, false_edge;
2953 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2954 &true_edge, &false_edge);
2955 m_gsi = gsi_after_labels (false_edge->dest);
2956 m_bb = NULL;
2960 tree lhs = make_ssa_name (boolean_type_node);
2961 basic_block bb = gimple_bb (stmt);
2962 gphi *phi = create_phi_node (lhs, bb);
2963 for (unsigned int i = 0; i < cnt * 2; i++)
2965 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2966 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2967 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2969 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2970 ? boolean_true_node : boolean_false_node,
2971 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2972 cmp_code = NE_EXPR;
2973 return lhs;
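/* Pseudocode sketch of the limb-wise comparison emitted above; for a
   signed type the most significant pair is compared as signed:
     if (a[idx] > b[idx]) goto result_known;   // GT side
     if (a[idx] < b[idx]) goto result_known;   // LT side
   and otherwise fall through to the next less significant limb; if all
   pairs compare equal, the operands are equal.  */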
2976 /* Lower large/huge _BitInt left and right shifts except for left
2977    shifts by a constant < limb_prec.  */
2979 void
2980 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2982 tree rhs1 = gimple_assign_rhs1 (stmt);
2983 tree lhs = gimple_assign_lhs (stmt);
2984 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2985 tree type = TREE_TYPE (rhs1);
2986 gimple *final_stmt = gsi_stmt (m_gsi);
2987 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2988 && bitint_precision_kind (type) >= bitint_prec_large);
2989 int prec = TYPE_PRECISION (type);
2990 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2991 gimple *g;
2992 if (obj == NULL_TREE)
2994 int part = var_to_partition (m_map, lhs);
2995 gcc_assert (m_vars[part] != NULL_TREE);
2996 obj = m_vars[part];
2998 /* Preparation code common for both left and right shifts.
2999 unsigned n1 = n % limb_prec;
3000 size_t n2 = n / limb_prec;
3001 size_t n3 = n1 != 0;
3002 unsigned n4 = (limb_prec - n1) % limb_prec;
3003      (for a power-of-2 limb_prec, n4 can be -n1 & (limb_prec - 1)).  */
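/* E.g. with limb_prec == 64 and n == 100 the above yields
   n1 == 36, n2 == 1, n3 == 1 and n4 == 28.  */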
3004 if (TREE_CODE (n) == INTEGER_CST)
3006 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3007 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3008 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3009 n3 = size_int (!integer_zerop (n1));
3010 n4 = int_const_binop (TRUNC_MOD_EXPR,
3011 int_const_binop (MINUS_EXPR, lp, n1), lp);
3013 else
3015 n1 = make_ssa_name (TREE_TYPE (n));
3016 n2 = make_ssa_name (sizetype);
3017 n3 = make_ssa_name (sizetype);
3018 n4 = make_ssa_name (TREE_TYPE (n));
3019 if (pow2p_hwi (limb_prec))
3021 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3022 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3023 insert_before (g);
3024 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3025 TREE_TYPE (n))
3026 ? n2 : make_ssa_name (TREE_TYPE (n)),
3027 RSHIFT_EXPR, n,
3028 build_int_cst (TREE_TYPE (n),
3029 exact_log2 (limb_prec)));
3030 insert_before (g);
3031 if (gimple_assign_lhs (g) != n2)
3033 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3034 insert_before (g);
3036 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3037 NEGATE_EXPR, n1);
3038 insert_before (g);
3039 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3040 lpm1);
3041 insert_before (g);
3043 else
3045 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3046 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3047 insert_before (g);
3048 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3049 TREE_TYPE (n))
3050 ? n2 : make_ssa_name (TREE_TYPE (n)),
3051 TRUNC_DIV_EXPR, n, lp);
3052 insert_before (g);
3053 if (gimple_assign_lhs (g) != n2)
3055 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3056 insert_before (g);
3058 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3059 MINUS_EXPR, lp, n1);
3060 insert_before (g);
3061 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3062 lp);
3063 insert_before (g);
3065 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3066 build_zero_cst (TREE_TYPE (n)));
3067 insert_before (g);
3068 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3069 insert_before (g);
3071 tree p = build_int_cst (sizetype,
3072 prec / limb_prec - (prec % limb_prec == 0));
3073 if (rhs_code == RSHIFT_EXPR)
3075 /* Lower
3076 dst = src >> n;
3078 unsigned n1 = n % limb_prec;
3079 size_t n2 = n / limb_prec;
3080 size_t n3 = n1 != 0;
3081 unsigned n4 = (limb_prec - n1) % limb_prec;
3082 size_t idx;
3083 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3084 int signed_p = (typeof (src) -1) < 0;
3085 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3086 ? p : p - n3); ++idx)
3087 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3088 limb_type ext;
3089 if (prec % limb_prec == 0)
3090 ext = src[p];
3091 else if (signed_p)
3092 ext = ((signed limb_type) (src[p] << (limb_prec
3093 - (prec % limb_prec))))
3094 >> (limb_prec - (prec % limb_prec));
3095 else
3096 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3097 	 if (!signed_p && (prec % limb_prec == 0))
3098 	   ;
3099 	 else if (idx < prec / limb_prec)
3101 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3102 ++idx;
3104 idx -= n2;
3105 if (signed_p)
3107 dst[idx] = ((signed limb_type) ext) >> n1;
3108 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3110 else
3112 dst[idx] = ext >> n1;
3113 ext = 0;
3115 for (++idx; idx <= p; ++idx)
3116 dst[idx] = ext; */
3117 tree pmn3;
3118 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3119 pmn3 = p;
3120 else if (TREE_CODE (n3) == INTEGER_CST)
3121 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3122 else
3124 pmn3 = make_ssa_name (sizetype);
3125 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3126 insert_before (g);
3128 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3129 edge edge_true, edge_false;
3130 if_then (g, profile_probability::likely (), edge_true, edge_false);
3131 tree idx_next;
3132 tree idx = create_loop (n2, &idx_next);
3133 tree idxmn2 = make_ssa_name (sizetype);
3134 tree idxpn3 = make_ssa_name (sizetype);
3135 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3136 insert_before (g);
3137 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3138 insert_before (g);
3139 m_data_cnt = 0;
3140 tree t1 = handle_operand (rhs1, idx);
3141 m_first = false;
3142 g = gimple_build_assign (make_ssa_name (m_limb_type),
3143 RSHIFT_EXPR, t1, n1);
3144 insert_before (g);
3145 t1 = gimple_assign_lhs (g);
3146 if (!integer_zerop (n3))
3148 m_data_cnt = 0;
3149 tree t2 = handle_operand (rhs1, idxpn3);
3150 g = gimple_build_assign (make_ssa_name (m_limb_type),
3151 LSHIFT_EXPR, t2, n4);
3152 insert_before (g);
3153 t2 = gimple_assign_lhs (g);
3154 g = gimple_build_assign (make_ssa_name (m_limb_type),
3155 BIT_IOR_EXPR, t1, t2);
3156 insert_before (g);
3157 t1 = gimple_assign_lhs (g);
3159 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3160 g = gimple_build_assign (l, t1);
3161 insert_before (g);
3162 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3163 insert_before (g);
3164 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3165 insert_before (g);
3166 idx = make_ssa_name (sizetype);
3167 m_gsi = gsi_for_stmt (final_stmt);
3168 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3169 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3170 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3171 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3172 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3173 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3174 m_data_cnt = 0;
3175 tree ms = handle_operand (rhs1, p);
3176 tree ext = ms;
3177 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3178 ext = add_cast (m_limb_type, ms);
3179 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3180 && !integer_zerop (n3))
3182 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3183 if_then (g, profile_probability::likely (), edge_true, edge_false);
3184 m_data_cnt = 0;
3185 t1 = handle_operand (rhs1, idx);
3186 g = gimple_build_assign (make_ssa_name (m_limb_type),
3187 RSHIFT_EXPR, t1, n1);
3188 insert_before (g);
3189 t1 = gimple_assign_lhs (g);
3190 g = gimple_build_assign (make_ssa_name (m_limb_type),
3191 LSHIFT_EXPR, ext, n4);
3192 insert_before (g);
3193 tree t2 = gimple_assign_lhs (g);
3194 g = gimple_build_assign (make_ssa_name (m_limb_type),
3195 BIT_IOR_EXPR, t1, t2);
3196 insert_before (g);
3197 t1 = gimple_assign_lhs (g);
3198 idxmn2 = make_ssa_name (sizetype);
3199 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3200 insert_before (g);
3201 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3202 g = gimple_build_assign (l, t1);
3203 insert_before (g);
3204 idx_next = make_ssa_name (sizetype);
3205 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3206 insert_before (g);
3207 m_gsi = gsi_for_stmt (final_stmt);
3208 tree nidx = make_ssa_name (sizetype);
3209 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3210 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3211 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3212 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3213 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3214 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3215 idx = nidx;
3217 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3218 insert_before (g);
3219 idx = gimple_assign_lhs (g);
3220 tree sext = ext;
3221 if (!TYPE_UNSIGNED (type))
3222 sext = add_cast (signed_type_for (m_limb_type), ext);
3223 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3224 RSHIFT_EXPR, sext, n1);
3225 insert_before (g);
3226 t1 = gimple_assign_lhs (g);
3227 if (!TYPE_UNSIGNED (type))
3229 t1 = add_cast (m_limb_type, t1);
3230 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3231 RSHIFT_EXPR, sext,
3232 build_int_cst (TREE_TYPE (n),
3233 limb_prec - 1));
3234 insert_before (g);
3235 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3237 else
3238 ext = build_zero_cst (m_limb_type);
3239 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3240 g = gimple_build_assign (l, t1);
3241 insert_before (g);
3242 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3243 size_one_node);
3244 insert_before (g);
3245 idx = gimple_assign_lhs (g);
3246 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3247 if_then (g, profile_probability::likely (), edge_true, edge_false);
3248 idx = create_loop (idx, &idx_next);
3249 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3250 g = gimple_build_assign (l, ext);
3251 insert_before (g);
3252 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3253 insert_before (g);
3254 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3255 insert_before (g);
3257 else
3259 /* Lower
3260 dst = src << n;
3262 unsigned n1 = n % limb_prec;
3263 size_t n2 = n / limb_prec;
3264 size_t n3 = n1 != 0;
3265 unsigned n4 = (limb_prec - n1) % limb_prec;
3266 size_t idx;
3267 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3268 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3269 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3270 if (n1)
3272 dst[idx] = src[idx - n2] << n1;
3273 --idx;
3275 for (; (ssize_t) idx >= 0; --idx)
3276 dst[idx] = 0; */
3277 tree n2pn3;
3278 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3279 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3280 else
3282 n2pn3 = make_ssa_name (sizetype);
3283 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3284 insert_before (g);
3286 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3287 idx even to access the most significant partial limb. */
3288 m_var_msb = true;
3289 if (integer_zerop (n3))
3290 	/* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3291 	   counts.  Emit an if (true) condition that can be optimized away later.  */
3292 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3293 NULL_TREE, NULL_TREE);
3294 else
3295 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3296 edge edge_true, edge_false;
3297 if_then (g, profile_probability::likely (), edge_true, edge_false);
3298 tree idx_next;
3299 tree idx = create_loop (p, &idx_next);
3300 tree idxmn2 = make_ssa_name (sizetype);
3301 tree idxmn2mn3 = make_ssa_name (sizetype);
3302 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3303 insert_before (g);
3304 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3305 insert_before (g);
3306 m_data_cnt = 0;
3307 tree t1 = handle_operand (rhs1, idxmn2);
3308 m_first = false;
3309 g = gimple_build_assign (make_ssa_name (m_limb_type),
3310 LSHIFT_EXPR, t1, n1);
3311 insert_before (g);
3312 t1 = gimple_assign_lhs (g);
3313 if (!integer_zerop (n3))
3315 m_data_cnt = 0;
3316 tree t2 = handle_operand (rhs1, idxmn2mn3);
3317 g = gimple_build_assign (make_ssa_name (m_limb_type),
3318 RSHIFT_EXPR, t2, n4);
3319 insert_before (g);
3320 t2 = gimple_assign_lhs (g);
3321 g = gimple_build_assign (make_ssa_name (m_limb_type),
3322 BIT_IOR_EXPR, t1, t2);
3323 insert_before (g);
3324 t1 = gimple_assign_lhs (g);
3326 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3327 g = gimple_build_assign (l, t1);
3328 insert_before (g);
3329 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3330 insert_before (g);
3331 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3332 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3333 NULL_TREE, NULL_TREE);
3334 insert_before (g);
3335 idx = make_ssa_name (sizetype);
3336 m_gsi = gsi_for_stmt (final_stmt);
3337 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3338 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3339 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3340 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3341 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3342 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3343 m_data_cnt = 0;
3344 if (!integer_zerop (n3))
3346 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3347 NULL_TREE, NULL_TREE);
3348 if_then (g, profile_probability::likely (), edge_true, edge_false);
3349 idxmn2 = make_ssa_name (sizetype);
3350 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3351 insert_before (g);
3352 m_data_cnt = 0;
3353 t1 = handle_operand (rhs1, idxmn2);
3354 g = gimple_build_assign (make_ssa_name (m_limb_type),
3355 LSHIFT_EXPR, t1, n1);
3356 insert_before (g);
3357 t1 = gimple_assign_lhs (g);
3358 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3359 g = gimple_build_assign (l, t1);
3360 insert_before (g);
3361 idx_next = make_ssa_name (sizetype);
3362 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3363 insert_before (g);
3364 m_gsi = gsi_for_stmt (final_stmt);
3365 tree nidx = make_ssa_name (sizetype);
3366 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3367 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3368 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3369 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3370 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3371 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3372 idx = nidx;
3374 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3375 ssize_int (0), NULL_TREE, NULL_TREE);
3376 if_then (g, profile_probability::likely (), edge_true, edge_false);
3377 idx = create_loop (idx, &idx_next);
3378 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3379 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3380 insert_before (g);
3381 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3382 insert_before (g);
3383 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3384 ssize_int (0), NULL_TREE, NULL_TREE);
3385 insert_before (g);
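/* E.g. a _BitInt(256) left shift by 100 with 64-bit limbs (n1 == 36,
   n2 == 1, n3 == 1, n4 == 28) follows the pseudocode above as:
     dst[3] = (src[2] << 36) | (src[1] >> 28);
     dst[2] = (src[1] << 36) | (src[0] >> 28);
     dst[1] = src[0] << 36;
     dst[0] = 0;  */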
3389 /* Lower large/huge _BitInt multiplication, division or modulo.  */
3391 void
3392 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3394 tree rhs1 = gimple_assign_rhs1 (stmt);
3395 tree rhs2 = gimple_assign_rhs2 (stmt);
3396 tree lhs = gimple_assign_lhs (stmt);
3397 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3398 tree type = TREE_TYPE (rhs1);
3399 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3400 && bitint_precision_kind (type) >= bitint_prec_large);
3401 int prec = TYPE_PRECISION (type), prec1, prec2;
3402 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3403 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3404 if (obj == NULL_TREE)
3406 int part = var_to_partition (m_map, lhs);
3407 gcc_assert (m_vars[part] != NULL_TREE);
3408 obj = m_vars[part];
3409 lhs = build_fold_addr_expr (obj);
3411 else
3413 lhs = build_fold_addr_expr (obj);
3414 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3415 NULL_TREE, true, GSI_SAME_STMT);
3417 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3418 gimple *g;
3419 switch (rhs_code)
3421 case MULT_EXPR:
3422 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3423 lhs, build_int_cst (sitype, prec),
3424 rhs1, build_int_cst (sitype, prec1),
3425 rhs2, build_int_cst (sitype, prec2));
3426 insert_before (g);
3427 break;
3428 case TRUNC_DIV_EXPR:
3429 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3430 lhs, build_int_cst (sitype, prec),
3431 null_pointer_node,
3432 build_int_cst (sitype, 0),
3433 rhs1, build_int_cst (sitype, prec1),
3434 rhs2, build_int_cst (sitype, prec2));
3435 if (!stmt_ends_bb_p (stmt))
3436 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3437 insert_before (g);
3438 break;
3439 case TRUNC_MOD_EXPR:
3440 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3441 build_int_cst (sitype, 0),
3442 lhs, build_int_cst (sitype, prec),
3443 rhs1, build_int_cst (sitype, prec1),
3444 rhs2, build_int_cst (sitype, prec2));
3445 if (!stmt_ends_bb_p (stmt))
3446 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3447 insert_before (g);
3448 break;
3449 default:
3450 gcc_unreachable ();
3452 if (stmt_ends_bb_p (stmt))
3454 maybe_duplicate_eh_stmt (g, stmt);
3455 edge e1;
3456 edge_iterator ei;
3457 basic_block bb = gimple_bb (stmt);
3459 FOR_EACH_EDGE (e1, ei, bb->succs)
3460 if (e1->flags & EDGE_EH)
3461 break;
3462 if (e1)
3464 edge e2 = split_block (gsi_bb (m_gsi), g);
3465 m_gsi = gsi_after_labels (e2->dest);
3466 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3467 = profile_probability::very_unlikely ();
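/* E.g. a _BitInt(256) multiplication thus becomes a single internal
   call .MULBITINT (&obj, 256, &op1, prec1, &op2, prec2), which is
   later expanded as a call to the corresponding libgcc helper
   (__mulbitint3, assuming the target's libgcc provides the _BitInt
   support routines).  */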
3472 /* Lower large/huge _BitInt conversion to/from floating point. */
3474 void
3475 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3477 tree rhs1 = gimple_assign_rhs1 (stmt);
3478 tree lhs = gimple_assign_lhs (stmt);
3479 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3480 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3481 gimple *g;
3482 if (rhs_code == FIX_TRUNC_EXPR)
3484 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3485 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3486 prec = -prec;
3487 if (obj == NULL_TREE)
3489 int part = var_to_partition (m_map, lhs);
3490 gcc_assert (m_vars[part] != NULL_TREE);
3491 obj = m_vars[part];
3492 lhs = build_fold_addr_expr (obj);
3494 else
3496 lhs = build_fold_addr_expr (obj);
3497 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3498 NULL_TREE, true, GSI_SAME_STMT);
3500 scalar_mode from_mode
3501 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3502 #ifdef HAVE_SFmode
3503       /* IEEE single is a full superset of both the IEEE half and
3504 	 bfloat formats, so convert to float first and then to _BitInt
3505 	 to avoid the need for another 2 library routines.  */
3506 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3507 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3508 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3510 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3511 if (type)
3512 rhs1 = add_cast (type, rhs1);
3514 #endif
3515 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3516 lhs, build_int_cst (sitype, prec),
3517 rhs1);
3518 insert_before (g);
3520 else
3522 int prec;
3523 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3524 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3525 rhs1, build_int_cst (sitype, prec));
3526 gimple_call_set_lhs (g, lhs);
3527 if (!stmt_ends_bb_p (stmt))
3528 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3529 gsi_replace (&m_gsi, g, true);
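/* E.g. for double d = (double) x with a huge _BitInt x this emits
   .BITINTTOFLOAT (addr, prec), where addr is whatever
   handle_operand_addr returned for x, with the original lhs as the
   call result; the FIX_TRUNC_EXPR direction instead stores the result
   through the passed address via .FLOATTOBITINT.  */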
3533 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3534    If CHECK_ZERO is true, the caller wants to check whether all bits
3535    in [START, END) are zero, otherwise whether the bits in [START, END)
3536    are either all zero or all ones.  L is the limb with index LIMB; START
3537    and END are measured in bits.  */
3539 tree
3540 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3541 unsigned int end, tree l,
3542 unsigned int limb,
3543 bool check_zero)
3545 unsigned startlimb = start / limb_prec;
3546 unsigned endlimb = (end - 1) / limb_prec;
3547 gimple *g;
3549 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3550 return l;
3551 if (startlimb == endlimb && limb == startlimb)
3553 if (check_zero)
3555 wide_int w = wi::shifted_mask (start % limb_prec,
3556 end - start, false, limb_prec);
3557 g = gimple_build_assign (make_ssa_name (m_limb_type),
3558 BIT_AND_EXPR, l,
3559 wide_int_to_tree (m_limb_type, w));
3560 insert_before (g);
3561 return gimple_assign_lhs (g);
3563 unsigned int shift = start % limb_prec;
3564 if ((end % limb_prec) != 0)
3566 unsigned int lshift = (-end) % limb_prec;
3567 shift += lshift;
3568 g = gimple_build_assign (make_ssa_name (m_limb_type),
3569 LSHIFT_EXPR, l,
3570 build_int_cst (unsigned_type_node,
3571 lshift));
3572 insert_before (g);
3573 l = gimple_assign_lhs (g);
3575 l = add_cast (signed_type_for (m_limb_type), l);
3576 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3577 RSHIFT_EXPR, l,
3578 build_int_cst (unsigned_type_node, shift));
3579 insert_before (g);
3580 return add_cast (m_limb_type, gimple_assign_lhs (g));
3582 else if (limb == startlimb)
3584 if ((start % limb_prec) == 0)
3585 return l;
3586 if (!check_zero)
3587 l = add_cast (signed_type_for (m_limb_type), l);
3588 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3589 RSHIFT_EXPR, l,
3590 build_int_cst (unsigned_type_node,
3591 start % limb_prec));
3592 insert_before (g);
3593 l = gimple_assign_lhs (g);
3594 if (!check_zero)
3595 l = add_cast (m_limb_type, l);
3596 return l;
3598 else if (limb == endlimb)
3600 if ((end % limb_prec) == 0)
3601 return l;
3602 if (check_zero)
3604 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3605 g = gimple_build_assign (make_ssa_name (m_limb_type),
3606 BIT_AND_EXPR, l,
3607 wide_int_to_tree (m_limb_type, w));
3608 insert_before (g);
3609 return gimple_assign_lhs (g);
3611 unsigned int shift = (-end) % limb_prec;
3612 g = gimple_build_assign (make_ssa_name (m_limb_type),
3613 LSHIFT_EXPR, l,
3614 build_int_cst (unsigned_type_node, shift));
3615 insert_before (g);
3616 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3617 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3618 RSHIFT_EXPR, l,
3619 build_int_cst (unsigned_type_node, shift));
3620 insert_before (g);
3621 return add_cast (m_limb_type, gimple_assign_lhs (g));
3623 return l;
3626 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3627 the result, including the overflow flag, into the right locations. */
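/* Layout sketch (editorial addition): when the result object OBJ covers
   the whole _Complex value (ORIG_OBJ == NULL_TREE), e.g. for
   _Complex signed _BitInt(256) with 64-bit limbs OBJ spans 8 limbs: the
   numeric result is copied into limbs 0-3, the overflow flag is widened
   to a limb and stored into limb 4, and limbs 5-7 are cleared.  */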
3629 void
3630 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3631 tree ovf, tree lhs, tree orig_obj,
3632 gimple *stmt, tree_code code)
3634 gimple *g;
3636 if (obj == NULL_TREE
3637 && (TREE_CODE (type) != BITINT_TYPE
3638 || bitint_precision_kind (type) < bitint_prec_large))
3640 /* Add support for 3 or more limbs filled in from a normal integral
3641 type if this assert fails. If no target chooses a limb mode smaller
3642 than half of the largest supported normal integral type, this will
3643 not be needed. */
3644 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3645 tree lhs_type = type;
3646 if (TREE_CODE (type) == BITINT_TYPE
3647 && bitint_precision_kind (type) == bitint_prec_middle)
3648 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3649 TYPE_UNSIGNED (type));
3650 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3651 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3652 insert_before (g);
3653 r1 = gimple_assign_lhs (g);
3654 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3655 r1 = add_cast (lhs_type, r1);
3656 if (TYPE_PRECISION (lhs_type) > limb_prec)
3658 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3659 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3660 insert_before (g);
3661 r2 = gimple_assign_lhs (g);
3662 r2 = add_cast (lhs_type, r2);
3663 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3664 build_int_cst (unsigned_type_node,
3665 limb_prec));
3666 insert_before (g);
3667 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3668 gimple_assign_lhs (g));
3669 insert_before (g);
3670 r1 = gimple_assign_lhs (g);
3672 if (lhs_type != type)
3673 r1 = add_cast (type, r1);
3674 ovf = add_cast (lhs_type, ovf);
3675 if (lhs_type != type)
3676 ovf = add_cast (type, ovf);
3677 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3678 m_gsi = gsi_for_stmt (stmt);
3679 gsi_replace (&m_gsi, g, true);
3681 else
3683 unsigned HOST_WIDE_INT nelts = 0;
3684 tree atype = NULL_TREE;
3685 if (obj)
3687 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3688 if (orig_obj == NULL_TREE)
3689 nelts >>= 1;
3690 atype = build_array_type_nelts (m_limb_type, nelts);
3692 if (var && obj)
3694 tree v1, v2;
3695 tree zero;
3696 if (orig_obj == NULL_TREE)
3698 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3699 v1 = build2 (MEM_REF, atype,
3700 build_fold_addr_expr (unshare_expr (obj)), zero);
3702 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3703 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3704 else
3705 v1 = unshare_expr (obj);
3706 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3707 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3708 g = gimple_build_assign (v1, v2);
3709 insert_before (g);
3711 if (orig_obj == NULL_TREE && obj)
3713 ovf = add_cast (m_limb_type, ovf);
3714 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3715 g = gimple_build_assign (l, ovf);
3716 insert_before (g);
3717 if (nelts > 1)
3719 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3720 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3721 (nelts + 1) * m_limb_size);
3722 tree v1 = build2 (MEM_REF, atype,
3723 build_fold_addr_expr (unshare_expr (obj)),
3724 off);
3725 g = gimple_build_assign (v1, build_zero_cst (atype));
3726 insert_before (g);
3729 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3731 imm_use_iterator ui;
3732 use_operand_p use_p;
3733 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3735 g = USE_STMT (use_p);
3736 if (!is_gimple_assign (g)
3737 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3738 continue;
3739 tree lhs2 = gimple_assign_lhs (g);
3740 gimple *use_stmt;
3741 single_imm_use (lhs2, &use_p, &use_stmt);
3742 lhs2 = gimple_assign_lhs (use_stmt);
3743 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3744 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3745 g = gimple_build_assign (lhs2, ovf);
3746 else
3747 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3748 gsi_replace (&gsi, g, true);
3749 if (gsi_stmt (m_gsi) == use_stmt)
3750 m_gsi = gsi_for_stmt (g);
3751 break;
3754 else if (ovf != boolean_false_node)
3756 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3757 NULL_TREE, NULL_TREE);
3758 edge edge_true, edge_false;
3759 if_then (g, profile_probability::very_unlikely (),
3760 edge_true, edge_false);
3761 tree zero = build_zero_cst (TREE_TYPE (lhs));
3762 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3763 TREE_TYPE (lhs),
3764 zero, zero, NULL);
3765 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3766 true, GSI_SAME_STMT);
3767 m_gsi = gsi_after_labels (edge_true->dest);
3770 if (var)
3772 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3773 g = gimple_build_assign (var, clobber);
3774 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3778 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3779 Given the precision of the result TYPE (PREC), the precisions of
3780 argument 0 (PREC0) and argument 1 (PREC1), and the minimum precision
3781 for the result (PREC2), compute *START, *END, *CHECK_ZERO and return OVF. */
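/* Worked examples (editorial addition):
   - PLUS_EXPR into signed _BitInt(256) (PREC 256) with PREC0 == 8 and
     PREC1 == 8 has PREC2 == 9; since 256 - 1 >= 9 the addition can never
     overflow and boolean_false_node is returned.
   - PLUS_EXPR into unsigned _BitInt(256) with PREC0 == 8 and PREC1 == -8
     has PREC2 == 10; the infinite precision result can be negative, so
     overflow happens iff its sign bit is set: *START 255, *END 256,
     *CHECK_ZERO true.  */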
3783 static tree
3784 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3785 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3787 *start = 0;
3788 *end = 0;
3789 *check_zero = true;
3790 /* Ignore this special rule for subtraction: even if both
3791 prec0 >= 0 and prec1 >= 0, their difference can be negative
3792 in infinite precision. */
3793 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3795 /* Result in [0, prec2) is unsigned; if prec > prec2,
3796 all bits above it will be zero. */
3797 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3798 return boolean_false_node;
3799 else
3801 /* ovf if any of bits in [start, end) is non-zero. */
3802 *start = prec - !TYPE_UNSIGNED (type);
3803 *end = prec2;
3806 else if (TYPE_UNSIGNED (type))
3808 /* If result in [0, prec2) is signed and if prec > prec2,
3809 all bits above it will be sign bit copies. */
3810 if (prec >= prec2)
3812 /* ovf if bit prec - 1 is non-zero. */
3813 *start = prec - 1;
3814 *end = prec;
3816 else
3818 /* ovf if any of bits in [start, end) is non-zero. */
3819 *start = prec;
3820 *end = prec2;
3823 else if (prec >= prec2)
3824 return boolean_false_node;
3825 else
3827 /* ovf if [start, end) bits aren't all zeros or all ones. */
3828 *start = prec - 1;
3829 *end = prec2;
3830 *check_zero = false;
3832 return NULL_TREE;
3835 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3836 argument, or whose return type is a _Complex large/huge _BitInt. */
3838 void
3839 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3841 tree arg0 = gimple_call_arg (stmt, 0);
3842 tree arg1 = gimple_call_arg (stmt, 1);
3843 tree lhs = gimple_call_lhs (stmt);
3844 gimple *g;
3846 if (!lhs)
3848 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3849 gsi_remove (&gsi, true);
3850 return;
3852 gimple *final_stmt = gsi_stmt (m_gsi);
3853 tree type = TREE_TYPE (lhs);
3854 if (TREE_CODE (type) == COMPLEX_TYPE)
3855 type = TREE_TYPE (type);
3856 int prec = TYPE_PRECISION (type);
3857 int prec0 = range_to_prec (arg0, stmt);
3858 int prec1 = range_to_prec (arg1, stmt);
3859 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3860 the minimum unsigned precision of any possible operation's
3861 result, otherwise it is the minimum signed precision.
3862 Some examples:
3863 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3864 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3865 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3866 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3867 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3868 8 + 8 [0, 0x1fe] 9 UNSIGNED
3869 8 + 10 [0, 0x4fe] 11 UNSIGNED
3870 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3871 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3872 8 + -8 [-0x80, 0x17e] 10 SIGNED
3873 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3874 10 + -8 [-0x80, 0x47e] 12 SIGNED
3875 8 - 8 [-0xff, 0xff] 9 SIGNED
3876 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3877 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3878 -8 - -8 [-0xff, 0xff] 9 SIGNED
3879 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3880 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3881 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3882 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3883 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3884 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3885 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3886 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3887 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3888 prec1 < 0 ? -prec1 : prec1);
3889 /* If operands are either both signed or both unsigned,
3890 we need just one additional bit. */
3891 prec2 = (((prec0 < 0) == (prec1 < 0)
3892 /* If one operand is signed and one unsigned and
3893 the signed one has larger precision, we need
3894 just one extra bit, otherwise two. */
3895 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3896 : (prec2 == -prec1 && prec2 != prec0)))
3897 ? prec2 + 1 : prec2 + 2);
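/* Cross-check against the table above (editorial addition):
   PREC0 == 8, PREC1 == -10 starts with PREC2 == 10; the signed operand
   has the larger precision, so only one extra bit is needed and PREC2
   becomes 11. PREC0 == 8, PREC1 == -8 starts with PREC2 == 8 and needs
   two extra bits, giving 10.  */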
3898 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3899 prec1 < 0 ? -prec1 : prec1);
3900 prec3 = MAX (prec3, prec);
3901 tree var = NULL_TREE;
3902 tree orig_obj = obj;
3903 if (obj == NULL_TREE
3904 && TREE_CODE (type) == BITINT_TYPE
3905 && bitint_precision_kind (type) >= bitint_prec_large
3906 && m_names
3907 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3909 int part = var_to_partition (m_map, lhs);
3910 gcc_assert (m_vars[part] != NULL_TREE);
3911 obj = m_vars[part];
3912 if (TREE_TYPE (lhs) == type)
3913 orig_obj = obj;
3915 if (TREE_CODE (type) != BITINT_TYPE
3916 || bitint_precision_kind (type) < bitint_prec_large)
3918 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3919 tree atype = build_array_type_nelts (m_limb_type, nelts);
3920 var = create_tmp_var (atype);
3923 enum tree_code code;
3924 switch (gimple_call_internal_fn (stmt))
3926 case IFN_ADD_OVERFLOW:
3927 case IFN_UBSAN_CHECK_ADD:
3928 code = PLUS_EXPR;
3929 break;
3930 case IFN_SUB_OVERFLOW:
3931 case IFN_UBSAN_CHECK_SUB:
3932 code = MINUS_EXPR;
3933 break;
3934 default:
3935 gcc_unreachable ();
3937 unsigned start, end;
3938 bool check_zero;
3939 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3940 &start, &end, &check_zero);
3942 unsigned startlimb, endlimb;
3943 if (ovf)
3945 startlimb = ~0U;
3946 endlimb = ~0U;
3948 else
3950 startlimb = start / limb_prec;
3951 endlimb = (end - 1) / limb_prec;
3954 int prec4 = ovf != NULL_TREE ? prec : prec3;
3955 bitint_prec_kind kind = bitint_precision_kind (prec4);
3956 unsigned cnt, rem = 0, fin = 0;
3957 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3958 bool last_ovf = (ovf == NULL_TREE
3959 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3960 if (kind != bitint_prec_huge)
3961 cnt = CEIL (prec4, limb_prec) + last_ovf;
3962 else
3964 rem = (prec4 % (2 * limb_prec));
3965 fin = (prec4 - rem) / limb_prec;
3966 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3967 idx = idx_first = create_loop (size_zero_node, &idx_next);
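/* Shape sketch (editorial addition): for the huge kind with
   limb_prec == 64 and prec4 == 319, REM == 319 % 128 == 63,
   FIN == (319 - 63) / 64 == 4 and CNT == 3 + LAST_OVF: iterations 0 and
   1 emit the body of a runtime loop that walks limbs 0-3 two at a time,
   and the remaining iteration(s) handle limb 4 (and the extra
   overflow-only limb, if any) as straight line code.  */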
3970 if (kind == bitint_prec_huge)
3971 m_upwards_2limb = fin;
3972 m_upwards = true;
3974 tree type0 = TREE_TYPE (arg0);
3975 tree type1 = TREE_TYPE (arg1);
3976 int prec5 = prec3;
3977 if (bitint_precision_kind (prec5) < bitint_prec_large)
3978 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3979 if (TYPE_PRECISION (type0) < prec5)
3981 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3982 if (TREE_CODE (arg0) == INTEGER_CST)
3983 arg0 = fold_convert (type0, arg0);
3985 if (TYPE_PRECISION (type1) < prec5)
3987 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3988 if (TREE_CODE (arg1) == INTEGER_CST)
3989 arg1 = fold_convert (type1, arg1);
3991 unsigned int data_cnt = 0;
3992 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3993 tree cmp = build_zero_cst (m_limb_type);
3994 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3995 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3996 for (unsigned i = 0; i < cnt; i++)
3998 m_data_cnt = 0;
3999 tree rhs1, rhs2;
4000 if (kind != bitint_prec_huge)
4001 idx = size_int (i);
4002 else if (i >= 2)
4003 idx = size_int (fin + (i > 2));
4004 if (!last_ovf || i < cnt - 1)
4006 if (type0 != TREE_TYPE (arg0))
4007 rhs1 = handle_cast (type0, arg0, idx);
4008 else
4009 rhs1 = handle_operand (arg0, idx);
4010 if (type1 != TREE_TYPE (arg1))
4011 rhs2 = handle_cast (type1, arg1, idx);
4012 else
4013 rhs2 = handle_operand (arg1, idx);
4014 if (i == 0)
4015 data_cnt = m_data_cnt;
4016 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4017 rhs1 = add_cast (m_limb_type, rhs1);
4018 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4019 rhs2 = add_cast (m_limb_type, rhs2);
4020 last_rhs1 = rhs1;
4021 last_rhs2 = rhs2;
4023 else
4025 m_data_cnt = data_cnt;
4026 if (TYPE_UNSIGNED (type0))
4027 rhs1 = build_zero_cst (m_limb_type);
4028 else
4030 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4031 if (TREE_CODE (rhs1) == INTEGER_CST)
4032 rhs1 = build_int_cst (m_limb_type,
4033 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4034 else
4036 tree lpm1 = build_int_cst (unsigned_type_node,
4037 limb_prec - 1);
4038 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4039 RSHIFT_EXPR, rhs1, lpm1);
4040 insert_before (g);
4041 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4044 if (TYPE_UNSIGNED (type1))
4045 rhs2 = build_zero_cst (m_limb_type);
4046 else
4048 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4049 if (TREE_CODE (rhs2) == INTEGER_CST)
4050 rhs2 = build_int_cst (m_limb_type,
4051 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4052 else
4054 tree lpm1 = build_int_cst (unsigned_type_node,
4055 limb_prec - 1);
4056 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4057 RSHIFT_EXPR, rhs2, lpm1);
4058 insert_before (g);
4059 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4063 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4064 if (ovf != boolean_false_node)
4066 if (tree_fits_uhwi_p (idx))
4068 unsigned limb = tree_to_uhwi (idx);
4069 if (limb >= startlimb && limb <= endlimb)
4071 tree l = arith_overflow_extract_bits (start, end, rhs,
4072 limb, check_zero);
4073 tree this_ovf = make_ssa_name (boolean_type_node);
4074 if (ovf == NULL_TREE && !check_zero)
4076 cmp = l;
4077 g = gimple_build_assign (make_ssa_name (m_limb_type),
4078 PLUS_EXPR, l,
4079 build_int_cst (m_limb_type, 1));
4080 insert_before (g);
4081 g = gimple_build_assign (this_ovf, GT_EXPR,
4082 gimple_assign_lhs (g),
4083 build_int_cst (m_limb_type, 1));
4085 else
4086 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4087 insert_before (g);
4088 if (ovf == NULL_TREE)
4089 ovf = this_ovf;
4090 else
4092 tree b = make_ssa_name (boolean_type_node);
4093 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4094 insert_before (g);
4095 ovf = b;
4099 else if (startlimb < fin)
4101 if (m_first && startlimb + 2 < fin)
4103 tree data_out;
4104 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4105 ovf_out = m_data.pop ();
4106 m_data.pop ();
4107 if (!check_zero)
4109 cmp = prepare_data_in_out (cmp, idx, &data_out);
4110 cmp_out = m_data.pop ();
4111 m_data.pop ();
4114 if (i != 0 || startlimb != fin - 1)
4116 tree_code cmp_code;
4117 bool single_comparison
4118 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4119 if (!single_comparison)
4121 cmp_code = GE_EXPR;
4122 if (!check_zero && (start % limb_prec) == 0)
4123 single_comparison = true;
4125 else if ((startlimb & 1) == (i & 1))
4126 cmp_code = EQ_EXPR;
4127 else
4128 cmp_code = GT_EXPR;
4129 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4130 NULL_TREE, NULL_TREE);
4131 edge edge_true_true, edge_true_false, edge_false;
4132 gimple *g2 = NULL;
4133 if (!single_comparison)
4134 g2 = gimple_build_cond (NE_EXPR, idx,
4135 size_int (startlimb), NULL_TREE,
4136 NULL_TREE);
4137 if_then_if_then_else (g, g2, profile_probability::likely (),
4138 profile_probability::likely (),
4139 edge_true_true, edge_true_false,
4140 edge_false);
4141 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4142 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4143 check_zero);
4144 tree this_ovf = make_ssa_name (boolean_type_node);
4145 if (cmp_code != GT_EXPR && !check_zero)
4147 g = gimple_build_assign (make_ssa_name (m_limb_type),
4148 PLUS_EXPR, l,
4149 build_int_cst (m_limb_type, 1));
4150 insert_before (g);
4151 g = gimple_build_assign (this_ovf, GT_EXPR,
4152 gimple_assign_lhs (g),
4153 build_int_cst (m_limb_type, 1));
4155 else
4156 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4157 insert_before (g);
4158 if (cmp_code == GT_EXPR)
4160 tree t = make_ssa_name (boolean_type_node);
4161 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4162 insert_before (g);
4163 this_ovf = t;
4165 tree this_ovf2 = NULL_TREE;
4166 if (!single_comparison)
4168 m_gsi = gsi_after_labels (edge_true_true->src);
4169 tree t = make_ssa_name (boolean_type_node);
4170 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4171 insert_before (g);
4172 this_ovf2 = make_ssa_name (boolean_type_node);
4173 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4174 ovf, t);
4175 insert_before (g);
4177 m_gsi = gsi_after_labels (edge_true_false->dest);
4178 tree t;
4179 if (i == 1 && ovf_out)
4180 t = ovf_out;
4181 else
4182 t = make_ssa_name (boolean_type_node);
4183 gphi *phi = create_phi_node (t, edge_true_false->dest);
4184 add_phi_arg (phi, this_ovf, edge_true_false,
4185 UNKNOWN_LOCATION);
4186 add_phi_arg (phi, ovf ? ovf
4187 : boolean_false_node, edge_false,
4188 UNKNOWN_LOCATION);
4189 if (edge_true_true)
4190 add_phi_arg (phi, this_ovf2, edge_true_true,
4191 UNKNOWN_LOCATION);
4192 ovf = t;
4193 if (!check_zero && cmp_code != GT_EXPR)
4195 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4196 phi = create_phi_node (t, edge_true_false->dest);
4197 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4198 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4199 if (edge_true_true)
4200 add_phi_arg (phi, cmp, edge_true_true,
4201 UNKNOWN_LOCATION);
4202 cmp = t;
4208 if (var || obj)
4210 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4212 else if (!tree_fits_uhwi_p (idx)
4213 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4215 bool single_comparison
4216 = (((unsigned) prec % limb_prec) == 0
4217 || prec_limbs + 1 >= fin
4218 || (prec_limbs & 1) == (i & 1));
4219 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4220 NULL_TREE, NULL_TREE);
4221 gimple *g2 = NULL;
4222 if (!single_comparison)
4223 g2 = gimple_build_cond (LT_EXPR, idx,
4224 size_int (prec_limbs - 1),
4225 NULL_TREE, NULL_TREE);
4226 edge edge_true_true, edge_true_false, edge_false;
4227 if_then_if_then_else (g, g2, profile_probability::likely (),
4228 profile_probability::likely (),
4229 edge_true_true, edge_true_false,
4230 edge_false);
4231 tree l = limb_access (type, var ? var : obj, idx, true);
4232 g = gimple_build_assign (l, rhs);
4233 insert_before (g);
4234 if (!single_comparison)
4236 m_gsi = gsi_after_labels (edge_true_true->src);
4237 l = limb_access (type, var ? var : obj,
4238 size_int (prec_limbs - 1), true);
4239 if (!useless_type_conversion_p (TREE_TYPE (l),
4240 TREE_TYPE (rhs)))
4241 rhs = add_cast (TREE_TYPE (l), rhs);
4242 g = gimple_build_assign (l, rhs);
4243 insert_before (g);
4245 m_gsi = gsi_after_labels (edge_true_false->dest);
4247 else
4249 tree l = limb_access (type, var ? var : obj, idx, true);
4250 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4251 rhs = add_cast (TREE_TYPE (l), rhs);
4252 g = gimple_build_assign (l, rhs);
4253 insert_before (g);
4256 m_first = false;
4257 if (kind == bitint_prec_huge && i <= 1)
4259 if (i == 0)
4261 idx = make_ssa_name (sizetype);
4262 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4263 size_one_node);
4264 insert_before (g);
4266 else
4268 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4269 size_int (2));
4270 insert_before (g);
4271 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4272 NULL_TREE, NULL_TREE);
4273 insert_before (g);
4274 m_gsi = gsi_for_stmt (final_stmt);
4275 m_bb = NULL;
4280 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4283 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4284 argument, or whose return type is a _Complex large/huge _BitInt. */
4286 void
4287 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4289 tree arg0 = gimple_call_arg (stmt, 0);
4290 tree arg1 = gimple_call_arg (stmt, 1);
4291 tree lhs = gimple_call_lhs (stmt);
4292 if (!lhs)
4294 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4295 gsi_remove (&gsi, true);
4296 return;
4298 gimple *final_stmt = gsi_stmt (m_gsi);
4299 tree type = TREE_TYPE (lhs);
4300 if (TREE_CODE (type) == COMPLEX_TYPE)
4301 type = TREE_TYPE (type);
4302 int prec = TYPE_PRECISION (type), prec0, prec1;
4303 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4304 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4305 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4306 + (prec1 < 0 ? -prec1 : prec1));
4307 if (prec0 == 1 || prec1 == 1)
4308 --prec2;
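/* Editorial note: the product of a PREC0-bit and a PREC1-bit value
   needs at most PREC0 + PREC1 bits, e.g. 8 x 10 unsigned bits give
   PREC2 == 18. When one operand is a single unsigned bit with value
   0 or 1 (PREC 1), the product never exceeds the other operand, hence
   the decrement.  */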
4309 tree var = NULL_TREE;
4310 tree orig_obj = obj;
4311 bool force_var = false;
4312 if (obj == NULL_TREE
4313 && TREE_CODE (type) == BITINT_TYPE
4314 && bitint_precision_kind (type) >= bitint_prec_large
4315 && m_names
4316 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4318 int part = var_to_partition (m_map, lhs);
4319 gcc_assert (m_vars[part] != NULL_TREE);
4320 obj = m_vars[part];
4321 if (TREE_TYPE (lhs) == type)
4322 orig_obj = obj;
4324 else if (obj != NULL_TREE && DECL_P (obj))
4326 for (int i = 0; i < 2; ++i)
4328 tree arg = i ? arg1 : arg0;
4329 if (TREE_CODE (arg) == ADDR_EXPR)
4330 arg = TREE_OPERAND (arg, 0);
4331 if (get_base_address (arg) == obj)
4333 force_var = true;
4334 break;
4338 if (obj == NULL_TREE
4339 || force_var
4340 || TREE_CODE (type) != BITINT_TYPE
4341 || bitint_precision_kind (type) < bitint_prec_large
4342 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4344 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4345 tree atype = build_array_type_nelts (m_limb_type, nelts);
4346 var = create_tmp_var (atype);
4348 tree addr = build_fold_addr_expr (var ? var : obj);
4349 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4350 NULL_TREE, true, GSI_SAME_STMT);
4351 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4352 gimple *g
4353 = gimple_build_call_internal (IFN_MULBITINT, 6,
4354 addr, build_int_cst (sitype,
4355 MAX (prec2, prec)),
4356 arg0, build_int_cst (sitype, prec0),
4357 arg1, build_int_cst (sitype, prec1));
4358 insert_before (g);
4360 unsigned start, end;
4361 bool check_zero;
4362 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4363 &start, &end, &check_zero);
4364 if (ovf == NULL_TREE)
4366 unsigned startlimb = start / limb_prec;
4367 unsigned endlimb = (end - 1) / limb_prec;
4368 unsigned cnt;
4369 bool use_loop = false;
4370 if (startlimb == endlimb)
4371 cnt = 1;
4372 else if (startlimb + 1 == endlimb)
4373 cnt = 2;
4374 else if ((end % limb_prec) == 0)
4376 cnt = 2;
4377 use_loop = true;
4379 else
4381 cnt = 3;
4382 use_loop = startlimb + 2 < endlimb;
4384 if (cnt == 1)
4386 tree l = limb_access (NULL_TREE, var ? var : obj,
4387 size_int (startlimb), true);
4388 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4389 insert_before (g);
4390 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4391 startlimb, check_zero);
4392 ovf = make_ssa_name (boolean_type_node);
4393 if (check_zero)
4394 g = gimple_build_assign (ovf, NE_EXPR, l,
4395 build_zero_cst (m_limb_type));
4396 else
4398 g = gimple_build_assign (make_ssa_name (m_limb_type),
4399 PLUS_EXPR, l,
4400 build_int_cst (m_limb_type, 1));
4401 insert_before (g);
4402 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4403 build_int_cst (m_limb_type, 1));
4405 insert_before (g);
4407 else
4409 basic_block edge_bb = NULL;
4410 gimple_stmt_iterator gsi = m_gsi;
4411 gsi_prev (&gsi);
4412 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4413 edge_bb = e->src;
4414 m_gsi = gsi_end_bb (edge_bb);
4416 tree cmp = build_zero_cst (m_limb_type);
4417 for (unsigned i = 0; i < cnt; i++)
4419 tree idx, idx_next = NULL_TREE;
4420 if (i == 0)
4421 idx = size_int (startlimb);
4422 else if (i == 2)
4423 idx = size_int (endlimb);
4424 else if (use_loop)
4425 idx = create_loop (size_int (startlimb + 1), &idx_next);
4426 else
4427 idx = size_int (startlimb + 1);
4428 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4429 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4430 insert_before (g);
4431 l = gimple_assign_lhs (g);
4432 if (i == 0 || i == 2)
4433 l = arith_overflow_extract_bits (start, end, l,
4434 tree_to_uhwi (idx),
4435 check_zero);
4436 if (i == 0 && !check_zero)
4438 cmp = l;
4439 g = gimple_build_assign (make_ssa_name (m_limb_type),
4440 PLUS_EXPR, l,
4441 build_int_cst (m_limb_type, 1));
4442 insert_before (g);
4443 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4444 build_int_cst (m_limb_type, 1),
4445 NULL_TREE, NULL_TREE);
4447 else
4448 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4449 insert_before (g);
4450 edge e1 = split_block (gsi_bb (m_gsi), g);
4451 e1->flags = EDGE_FALSE_VALUE;
4452 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4453 EDGE_TRUE_VALUE);
4454 e1->probability = profile_probability::likely ();
4455 e2->probability = e1->probability.invert ();
4456 if (i == 0)
4457 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4458 m_gsi = gsi_after_labels (e1->dest);
4459 if (i == 1 && use_loop)
4461 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4462 size_one_node);
4463 insert_before (g);
4464 g = gimple_build_cond (NE_EXPR, idx_next,
4465 size_int (endlimb + (cnt == 1)),
4466 NULL_TREE, NULL_TREE);
4467 insert_before (g);
4468 edge true_edge, false_edge;
4469 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4470 &true_edge,
4471 &false_edge);
4472 m_gsi = gsi_after_labels (false_edge->dest);
4473 m_bb = NULL;
4477 ovf = make_ssa_name (boolean_type_node);
4478 basic_block bb = gimple_bb (final_stmt);
4479 gphi *phi = create_phi_node (ovf, bb);
4480 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4481 edge_iterator ei;
4482 FOR_EACH_EDGE (e, ei, bb->preds)
4484 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4485 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4487 m_gsi = gsi_for_stmt (final_stmt);
4491 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4494 /* Lower a REALPART_EXPR or IMAGPART_EXPR stmt extracting part of the
4495 result of a .{ADD,SUB,MUL}_OVERFLOW call. */
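/* Offset sketch (editorial addition): the real part lives at byte
   offset 0 of the underlying limb array and the imaginary part at
   nelts * m_limb_size bytes, where nelts is the number of limbs of one
   part; the lowering below simply block-copies the selected half.  */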
4497 void
4498 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4500 tree rhs1 = gimple_assign_rhs1 (stmt);
4501 rhs1 = TREE_OPERAND (rhs1, 0);
4502 if (obj == NULL_TREE)
4504 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4505 gcc_assert (m_vars[part] != NULL_TREE);
4506 obj = m_vars[part];
4508 if (TREE_CODE (rhs1) == SSA_NAME
4509 && (m_names == NULL
4510 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4512 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4513 return;
4515 int part = var_to_partition (m_map, rhs1);
4516 gcc_assert (m_vars[part] != NULL_TREE);
4517 tree var = m_vars[part];
4518 unsigned HOST_WIDE_INT nelts
4519 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4520 tree atype = build_array_type_nelts (m_limb_type, nelts);
4521 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4522 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4523 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4524 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4525 ? 0 : nelts * m_limb_size);
4526 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4527 gimple *g = gimple_build_assign (obj, v2);
4528 insert_before (g);
4531 /* Lower COMPLEX_EXPR stmt. */
4533 void
4534 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4536 tree lhs = gimple_assign_lhs (stmt);
4537 tree rhs1 = gimple_assign_rhs1 (stmt);
4538 tree rhs2 = gimple_assign_rhs2 (stmt);
4539 int part = var_to_partition (m_map, lhs);
4540 gcc_assert (m_vars[part] != NULL_TREE);
4541 lhs = m_vars[part];
4542 unsigned HOST_WIDE_INT nelts
4543 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4544 tree atype = build_array_type_nelts (m_limb_type, nelts);
4545 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4546 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4547 tree v2;
4548 if (TREE_CODE (rhs1) == SSA_NAME)
4550 part = var_to_partition (m_map, rhs1);
4551 gcc_assert (m_vars[part] != NULL_TREE);
4552 v2 = m_vars[part];
4554 else if (integer_zerop (rhs1))
4555 v2 = build_zero_cst (atype);
4556 else
4557 v2 = tree_output_constant_def (rhs1);
4558 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4559 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4560 gimple *g = gimple_build_assign (v1, v2);
4561 insert_before (g);
4562 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4563 TYPE_SIZE_UNIT (atype));
4564 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4565 if (TREE_CODE (rhs2) == SSA_NAME)
4567 part = var_to_partition (m_map, rhs2);
4568 gcc_assert (m_vars[part] != NULL_TREE);
4569 v2 = m_vars[part];
4571 else if (integer_zerop (rhs2))
4572 v2 = build_zero_cst (atype);
4573 else
4574 v2 = tree_output_constant_def (rhs2);
4575 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4576 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4577 g = gimple_build_assign (v1, v2);
4578 insert_before (g);
4581 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4582 argument. */
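/* Expansion sketch (editorial addition): with 64-bit limbs an unsigned
   _BitInt(256) x maps onto the corresponding long long builtins roughly
   as
     popcount (x) == popcountll (x[0]) + ... + popcountll (x[3]);
     parity (x)   == parityll (x[0] ^ x[1] ^ x[2] ^ x[3]);
     ctz (x)      == 64 * k + ctzll (x[k])       // k = lowest non-zero limb
     clz (x)      == 64 * (3 - k) + clzll (x[k]) // k = highest non-zero limb
   with .CTZ/.CLZ falling back to the optional second argument (or to
   __builtin_unreachable) when all limbs are zero.  */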
4584 void
4585 bitint_large_huge::lower_bit_query (gimple *stmt)
4587 tree arg0 = gimple_call_arg (stmt, 0);
4588 tree arg1 = (gimple_call_num_args (stmt) == 2
4589 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4590 tree lhs = gimple_call_lhs (stmt);
4591 gimple *g;
4593 if (!lhs)
4595 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4596 gsi_remove (&gsi, true);
4597 return;
4599 tree type = TREE_TYPE (arg0);
4600 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4601 bitint_prec_kind kind = bitint_precision_kind (type);
4602 gcc_assert (kind >= bitint_prec_large);
4603 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4604 enum built_in_function fcode = END_BUILTINS;
4605 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4606 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4607 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4608 switch (ifn)
4610 case IFN_CLZ:
4611 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4612 fcode = BUILT_IN_CLZ;
4613 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4614 fcode = BUILT_IN_CLZL;
4615 else
4616 fcode = BUILT_IN_CLZLL;
4617 break;
4618 case IFN_FFS:
4619 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood the + 1 is
4620 folded into the per-limb addend instead of being added at the end. */
4621 arg1 = integer_zero_node;
4622 /* FALLTHRU */
4623 case IFN_CTZ:
4624 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4625 fcode = BUILT_IN_CTZ;
4626 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4627 fcode = BUILT_IN_CTZL;
4628 else
4629 fcode = BUILT_IN_CTZLL;
4630 m_upwards = true;
4631 break;
4632 case IFN_CLRSB:
4633 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4634 fcode = BUILT_IN_CLRSB;
4635 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4636 fcode = BUILT_IN_CLRSBL;
4637 else
4638 fcode = BUILT_IN_CLRSBLL;
4639 break;
4640 case IFN_PARITY:
4641 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4642 fcode = BUILT_IN_PARITY;
4643 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4644 fcode = BUILT_IN_PARITYL;
4645 else
4646 fcode = BUILT_IN_PARITYLL;
4647 m_upwards = true;
4648 break;
4649 case IFN_POPCOUNT:
4650 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4651 fcode = BUILT_IN_POPCOUNT;
4652 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4653 fcode = BUILT_IN_POPCOUNTL;
4654 else
4655 fcode = BUILT_IN_POPCOUNTLL;
4656 m_upwards = true;
4657 break;
4658 default:
4659 gcc_unreachable ();
4661 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4662 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4663 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4664 basic_block edge_bb = NULL;
4665 if (m_upwards)
4667 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4668 if (kind == bitint_prec_large)
4669 cnt = CEIL (prec, limb_prec);
4670 else
4672 rem = (prec % (2 * limb_prec));
4673 end = (prec - rem) / limb_prec;
4674 cnt = 2 + CEIL (rem, limb_prec);
4675 idx = idx_first = create_loop (size_zero_node, &idx_next);
4678 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4680 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4681 gsi_prev (&gsi);
4682 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4683 edge_bb = e->src;
4684 if (kind == bitint_prec_large)
4685 m_gsi = gsi_end_bb (edge_bb);
4686 bqp = XALLOCAVEC (struct bq_details, cnt);
4688 else
4689 m_after_stmt = stmt;
4690 if (kind != bitint_prec_large)
4691 m_upwards_2limb = end;
4693 for (unsigned i = 0; i < cnt; i++)
4695 m_data_cnt = 0;
4696 if (kind == bitint_prec_large)
4697 idx = size_int (i);
4698 else if (i >= 2)
4699 idx = size_int (end + (i > 2));
4701 tree rhs1 = handle_operand (arg0, idx);
4702 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4704 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4705 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4706 rhs1 = add_cast (m_limb_type, rhs1);
4709 tree in, out, tem;
4710 if (ifn == IFN_PARITY)
4711 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4712 else if (ifn == IFN_FFS)
4713 in = prepare_data_in_out (integer_one_node, idx, &out);
4714 else
4715 in = prepare_data_in_out (integer_zero_node, idx, &out);
4717 switch (ifn)
4719 case IFN_CTZ:
4720 case IFN_FFS:
4721 g = gimple_build_cond (NE_EXPR, rhs1,
4722 build_zero_cst (m_limb_type),
4723 NULL_TREE, NULL_TREE);
4724 insert_before (g);
4725 edge e1, e2;
4726 e1 = split_block (gsi_bb (m_gsi), g);
4727 e1->flags = EDGE_FALSE_VALUE;
4728 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4729 e1->probability = profile_probability::unlikely ();
4730 e2->probability = e1->probability.invert ();
4731 if (i == 0)
4732 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4733 m_gsi = gsi_after_labels (e1->dest);
4734 bqp[i].e = e2;
4735 bqp[i].val = rhs1;
4736 if (tree_fits_uhwi_p (idx))
4737 bqp[i].addend
4738 = build_int_cst (integer_type_node,
4739 tree_to_uhwi (idx) * limb_prec
4740 + (ifn == IFN_FFS));
4741 else
4743 bqp[i].addend = in;
4744 if (i == 1)
4745 res = out;
4746 else
4747 res = make_ssa_name (integer_type_node);
4748 g = gimple_build_assign (res, PLUS_EXPR, in,
4749 build_int_cst (integer_type_node,
4750 limb_prec));
4751 insert_before (g);
4752 m_data[m_data_cnt] = res;
4754 break;
4755 case IFN_PARITY:
4756 if (!integer_zerop (in))
4758 if (kind == bitint_prec_huge && i == 1)
4759 res = out;
4760 else
4761 res = make_ssa_name (m_limb_type);
4762 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4763 insert_before (g);
4765 else
4766 res = rhs1;
4767 m_data[m_data_cnt] = res;
4768 break;
4769 case IFN_POPCOUNT:
4770 g = gimple_build_call (fndecl, 1, rhs1);
4771 tem = make_ssa_name (integer_type_node);
4772 gimple_call_set_lhs (g, tem);
4773 insert_before (g);
4774 if (!integer_zerop (in))
4776 if (kind == bitint_prec_huge && i == 1)
4777 res = out;
4778 else
4779 res = make_ssa_name (integer_type_node);
4780 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4781 insert_before (g);
4783 else
4784 res = tem;
4785 m_data[m_data_cnt] = res;
4786 break;
4787 default:
4788 gcc_unreachable ();
4791 m_first = false;
4792 if (kind == bitint_prec_huge && i <= 1)
4794 if (i == 0)
4796 idx = make_ssa_name (sizetype);
4797 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4798 size_one_node);
4799 insert_before (g);
4801 else
4803 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4804 size_int (2));
4805 insert_before (g);
4806 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4807 NULL_TREE, NULL_TREE);
4808 insert_before (g);
4809 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4810 m_gsi = gsi_after_labels (edge_bb);
4811 else
4812 m_gsi = gsi_for_stmt (stmt);
4813 m_bb = NULL;
4818 else
4820 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4821 int sub_one = 0;
4822 if (kind == bitint_prec_large)
4823 cnt = CEIL (prec, limb_prec);
4824 else
4826 rem = prec % limb_prec;
4827 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4828 rem = limb_prec;
4829 end = (prec - rem) / limb_prec;
4830 cnt = 1 + (rem != 0);
4831 if (ifn == IFN_CLRSB)
4832 sub_one = 1;
4835 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4836 gsi_prev (&gsi);
4837 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4838 edge_bb = e->src;
4839 m_gsi = gsi_end_bb (edge_bb);
4841 if (ifn == IFN_CLZ)
4842 bqp = XALLOCAVEC (struct bq_details, cnt);
4843 else
4845 gsi = gsi_for_stmt (stmt);
4846 gsi_prev (&gsi);
4847 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4848 edge_bb = e->src;
4849 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4852 for (unsigned i = 0; i < cnt; i++)
4854 m_data_cnt = 0;
4855 if (kind == bitint_prec_large)
4856 idx = size_int (cnt - i - 1);
4857 else if (i == cnt - 1)
4858 idx = create_loop (size_int (end - 1), &idx_next);
4859 else
4860 idx = size_int (end);
4862 tree rhs1 = handle_operand (arg0, idx);
4863 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4865 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4866 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4867 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4868 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4869 rhs1 = add_cast (m_limb_type, rhs1);
4872 if (ifn == IFN_CLZ)
4874 g = gimple_build_cond (NE_EXPR, rhs1,
4875 build_zero_cst (m_limb_type),
4876 NULL_TREE, NULL_TREE);
4877 insert_before (g);
4878 edge e1 = split_block (gsi_bb (m_gsi), g);
4879 e1->flags = EDGE_FALSE_VALUE;
4880 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4881 e1->probability = profile_probability::unlikely ();
4882 e2->probability = e1->probability.invert ();
4883 if (i == 0)
4884 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4885 m_gsi = gsi_after_labels (e1->dest);
4886 bqp[i].e = e2;
4887 bqp[i].val = rhs1;
4889 else
4891 if (i == 0)
4893 first = rhs1;
4894 g = gimple_build_assign (make_ssa_name (m_limb_type),
4895 PLUS_EXPR, rhs1,
4896 build_int_cst (m_limb_type, 1));
4897 insert_before (g);
4898 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4899 build_int_cst (m_limb_type, 1),
4900 NULL_TREE, NULL_TREE);
4901 insert_before (g);
4903 else
4905 g = gimple_build_assign (make_ssa_name (m_limb_type),
4906 BIT_XOR_EXPR, rhs1, first);
4907 insert_before (g);
4908 tree stype = signed_type_for (m_limb_type);
4909 g = gimple_build_cond (LT_EXPR,
4910 add_cast (stype,
4911 gimple_assign_lhs (g)),
4912 build_zero_cst (stype),
4913 NULL_TREE, NULL_TREE);
4914 insert_before (g);
4915 edge e1 = split_block (gsi_bb (m_gsi), g);
4916 e1->flags = EDGE_FALSE_VALUE;
4917 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4918 EDGE_TRUE_VALUE);
4919 e1->probability = profile_probability::unlikely ();
4920 e2->probability = e1->probability.invert ();
4921 if (i == 1)
4922 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4923 e2->src);
4924 m_gsi = gsi_after_labels (e1->dest);
4925 bqp[2 * i].e = e2;
4926 g = gimple_build_cond (NE_EXPR, rhs1, first,
4927 NULL_TREE, NULL_TREE);
4928 insert_before (g);
4930 edge e1 = split_block (gsi_bb (m_gsi), g);
4931 e1->flags = EDGE_FALSE_VALUE;
4932 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4933 e1->probability = profile_probability::unlikely ();
4934 e2->probability = e1->probability.invert ();
4935 if (i == 0)
4936 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4937 m_gsi = gsi_after_labels (e1->dest);
4938 bqp[2 * i + 1].e = e2;
4939 bqp[i].val = rhs1;
4941 if (tree_fits_uhwi_p (idx))
4942 bqp[i].addend
4943 = build_int_cst (integer_type_node,
4944 (int) prec
4945 - (((int) tree_to_uhwi (idx) + 1)
4946 * limb_prec) - sub_one);
4947 else
4949 tree in, out;
4950 in = build_int_cst (integer_type_node, rem - sub_one);
4951 m_first = true;
4952 in = prepare_data_in_out (in, idx, &out);
4953 out = m_data[m_data_cnt + 1];
4954 bqp[i].addend = in;
4955 g = gimple_build_assign (out, PLUS_EXPR, in,
4956 build_int_cst (integer_type_node,
4957 limb_prec));
4958 insert_before (g);
4959 m_data[m_data_cnt] = out;
4962 m_first = false;
4963 if (kind == bitint_prec_huge && i == cnt - 1)
4965 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4966 size_int (-1));
4967 insert_before (g);
4968 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4969 NULL_TREE, NULL_TREE);
4970 insert_before (g);
4971 edge true_edge, false_edge;
4972 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4973 &true_edge, &false_edge);
4974 m_gsi = gsi_after_labels (false_edge->dest);
4975 m_bb = NULL;
4979 switch (ifn)
4981 case IFN_CLZ:
4982 case IFN_CTZ:
4983 case IFN_FFS:
4984 gphi *phi1, *phi2, *phi3;
4985 basic_block bb;
4986 bb = gsi_bb (m_gsi);
4987 remove_edge (find_edge (bb, gimple_bb (stmt)));
4988 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4989 gimple_bb (stmt));
4990 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4991 gimple_bb (stmt));
4992 for (unsigned i = 0; i < cnt; i++)
4994 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4995 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4997 if (arg1 == NULL_TREE)
4999 g = gimple_build_builtin_unreachable (m_loc);
5000 insert_before (g);
5002 m_gsi = gsi_for_stmt (stmt);
5003 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
5004 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5005 insert_before (g);
5006 if (arg1 == NULL_TREE)
5007 g = gimple_build_assign (lhs, PLUS_EXPR,
5008 gimple_phi_result (phi2),
5009 gimple_call_lhs (g));
5010 else
5012 g = gimple_build_assign (make_ssa_name (integer_type_node),
5013 PLUS_EXPR, gimple_phi_result (phi2),
5014 gimple_call_lhs (g));
5015 insert_before (g);
5016 edge e1 = split_block (gimple_bb (stmt), g);
5017 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5018 e2->probability = profile_probability::always ();
5019 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5020 get_immediate_dominator (CDI_DOMINATORS,
5021 e1->src));
5022 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5023 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5024 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5025 m_gsi = gsi_for_stmt (stmt);
5026 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5028 gsi_replace (&m_gsi, g, true);
5029 break;
5030 case IFN_CLRSB:
5031 bb = gsi_bb (m_gsi);
5032 remove_edge (find_edge (bb, edge_bb));
5033 edge e;
5034 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5035 e->probability = profile_probability::always ();
5036 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5037 get_immediate_dominator (CDI_DOMINATORS,
5038 edge_bb));
5039 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5040 edge_bb);
5041 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5042 edge_bb);
5043 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5044 gimple_bb (stmt));
5045 for (unsigned i = 0; i < cnt; i++)
5047 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5048 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5049 UNKNOWN_LOCATION);
5050 tree a = bqp[i].addend;
5051 if (i && kind == bitint_prec_large)
5052 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5053 if (i)
5054 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5056 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5057 UNKNOWN_LOCATION);
5058 m_gsi = gsi_after_labels (edge_bb);
5059 g = gimple_build_call (fndecl, 1,
5060 add_cast (signed_type_for (m_limb_type),
5061 gimple_phi_result (phi1)));
5062 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5063 insert_before (g);
5064 g = gimple_build_assign (make_ssa_name (integer_type_node),
5065 PLUS_EXPR, gimple_call_lhs (g),
5066 gimple_phi_result (phi2));
5067 insert_before (g);
5068 if (kind != bitint_prec_large)
5070 g = gimple_build_assign (make_ssa_name (integer_type_node),
5071 PLUS_EXPR, gimple_assign_lhs (g),
5072 integer_one_node);
5073 insert_before (g);
5075 add_phi_arg (phi3, gimple_assign_lhs (g),
5076 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5077 m_gsi = gsi_for_stmt (stmt);
5078 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5079 gsi_replace (&m_gsi, g, true);
5080 break;
5081 case IFN_PARITY:
5082 g = gimple_build_call (fndecl, 1, res);
5083 gimple_call_set_lhs (g, lhs);
5084 gsi_replace (&m_gsi, g, true);
5085 break;
5086 case IFN_POPCOUNT:
5087 g = gimple_build_assign (lhs, res);
5088 gsi_replace (&m_gsi, g, true);
5089 break;
5090 default:
5091 gcc_unreachable ();
5095 /* Lower a call statement with one or more large/huge _BitInt
5096 arguments or a large/huge _BitInt return value. */
5098 void
5099 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5101 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5102 unsigned int nargs = gimple_call_num_args (stmt);
5103 if (gimple_call_internal_p (stmt))
5104 switch (gimple_call_internal_fn (stmt))
5106 case IFN_ADD_OVERFLOW:
5107 case IFN_SUB_OVERFLOW:
5108 case IFN_UBSAN_CHECK_ADD:
5109 case IFN_UBSAN_CHECK_SUB:
5110 lower_addsub_overflow (obj, stmt);
5111 return;
5112 case IFN_MUL_OVERFLOW:
5113 case IFN_UBSAN_CHECK_MUL:
5114 lower_mul_overflow (obj, stmt);
5115 return;
5116 case IFN_CLZ:
5117 case IFN_CTZ:
5118 case IFN_CLRSB:
5119 case IFN_FFS:
5120 case IFN_PARITY:
5121 case IFN_POPCOUNT:
5122 lower_bit_query (stmt);
5123 return;
5124 default:
5125 break;
5127 for (unsigned int i = 0; i < nargs; ++i)
5129 tree arg = gimple_call_arg (stmt, i);
5130 if (TREE_CODE (arg) != SSA_NAME
5131 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5132 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5133 continue;
5134 if (SSA_NAME_IS_DEFAULT_DEF (arg)
5135 && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
5137 tree var = create_tmp_reg (TREE_TYPE (arg));
5138 arg = get_or_create_ssa_default_def (cfun, var);
5140 else
5142 int p = var_to_partition (m_map, arg);
5143 tree v = m_vars[p];
5144 gcc_assert (v != NULL_TREE);
5145 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5146 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5147 arg = make_ssa_name (TREE_TYPE (arg));
5148 gimple *g = gimple_build_assign (arg, v);
5149 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5151 gimple_call_set_arg (stmt, i, arg);
5152 if (m_preserved == NULL)
5153 m_preserved = BITMAP_ALLOC (NULL);
5154 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5156 tree lhs = gimple_call_lhs (stmt);
5157 if (lhs
5158 && TREE_CODE (lhs) == SSA_NAME
5159 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5160 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5162 int p = var_to_partition (m_map, lhs);
5163 tree v = m_vars[p];
5164 gcc_assert (v != NULL_TREE);
5165 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5166 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5167 gimple_call_set_lhs (stmt, v);
5168 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5170 update_stmt (stmt);
5173 /* Lower __asm STMT which involves large/huge _BitInt values. */
5175 void
5176 bitint_large_huge::lower_asm (gimple *stmt)
5178 gasm *g = as_a <gasm *> (stmt);
5179 unsigned noutputs = gimple_asm_noutputs (g);
5180 unsigned ninputs = gimple_asm_ninputs (g);
5182 for (unsigned i = 0; i < noutputs; ++i)
5184 tree t = gimple_asm_output_op (g, i);
5185 tree s = TREE_VALUE (t);
5186 if (TREE_CODE (s) == SSA_NAME
5187 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5188 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5190 int part = var_to_partition (m_map, s);
5191 gcc_assert (m_vars[part] != NULL_TREE);
5192 TREE_VALUE (t) = m_vars[part];
5195 for (unsigned i = 0; i < ninputs; ++i)
5197 tree t = gimple_asm_input_op (g, i);
5198 tree s = TREE_VALUE (t);
5199 if (TREE_CODE (s) == SSA_NAME
5200 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5201 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5203 int part = var_to_partition (m_map, s);
5204 gcc_assert (m_vars[part] != NULL_TREE);
5205 TREE_VALUE (t) = m_vars[part];
5208 update_stmt (stmt);
5211 /* Lower statement STMT which involves large/huge _BitInt values
5212 into code accessing individual limbs. */
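/* Dispatch sketch (editorial addition): the per-statement state
   (m_first, m_data, m_gsi, ...) is reset first, then the statement is
   routed to the matching helper: calls to lower_call, asms to
   lower_asm, mergeable arithmetic/loads/stores through
   lower_mergeable_stmt, comparisons through lower_comparison_stmt, and
   the remaining codes (shifts, multiplication/division, float
   conversions, complex part handling) to their dedicated lowering
   methods.  */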
5214 void
5215 bitint_large_huge::lower_stmt (gimple *stmt)
5217 m_first = true;
5218 m_lhs = NULL_TREE;
5219 m_data.truncate (0);
5220 m_data_cnt = 0;
5221 m_gsi = gsi_for_stmt (stmt);
5222 m_after_stmt = NULL;
5223 m_bb = NULL;
5224 m_init_gsi = m_gsi;
5225 gsi_prev (&m_init_gsi);
5226 m_preheader_bb = NULL;
5227 m_upwards_2limb = 0;
5228 m_upwards = false;
5229 m_var_msb = false;
5230 m_cast_conditional = false;
5231 m_bitfld_load = 0;
5232 m_loc = gimple_location (stmt);
5233 if (is_gimple_call (stmt))
5235 lower_call (NULL_TREE, stmt);
5236 return;
5238 if (gimple_code (stmt) == GIMPLE_ASM)
5240 lower_asm (stmt);
5241 return;
5243 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5244 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5245 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5246 bool mergeable_cast_p = false;
5247 bool final_cast_p = false;
5248 if (gimple_assign_cast_p (stmt))
5250 lhs = gimple_assign_lhs (stmt);
5251 tree rhs1 = gimple_assign_rhs1 (stmt);
5252 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5253 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5254 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5255 mergeable_cast_p = true;
5256 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5257 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5258 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5260 final_cast_p = true;
5261 if (TREE_CODE (rhs1) == SSA_NAME
5262 && (m_names == NULL
5263 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5265 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5266 if (is_gimple_assign (g)
5267 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5269 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5270 if (TREE_CODE (rhs2) == SSA_NAME
5271 && (m_names == NULL
5272 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5274 g = SSA_NAME_DEF_STMT (rhs2);
5275 int ovf = optimizable_arith_overflow (g);
5276 if (ovf == 2)
5277 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5278 and IMAGPART_EXPR uses, where the latter is cast to
5279 non-_BitInt, it will be optimized when handling
5280 the REALPART_EXPR. */
5281 return;
5282 if (ovf == 1)
5284 lower_call (NULL_TREE, g);
5285 return;
5292 if (gimple_store_p (stmt))
5294 tree rhs1 = gimple_assign_rhs1 (stmt);
5295 if (TREE_CODE (rhs1) == SSA_NAME
5296 && (m_names == NULL
5297 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5299 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5300 m_loc = gimple_location (g);
5301 lhs = gimple_assign_lhs (stmt);
5302 if (is_gimple_assign (g) && !mergeable_op (g))
5303 switch (gimple_assign_rhs_code (g))
5305 case LSHIFT_EXPR:
5306 case RSHIFT_EXPR:
5307 lower_shift_stmt (lhs, g);
5308 handled:
5309 m_gsi = gsi_for_stmt (stmt);
5310 unlink_stmt_vdef (stmt);
5311 release_ssa_name (gimple_vdef (stmt));
5312 gsi_remove (&m_gsi, true);
5313 return;
5314 case MULT_EXPR:
5315 case TRUNC_DIV_EXPR:
5316 case TRUNC_MOD_EXPR:
5317 lower_muldiv_stmt (lhs, g);
5318 goto handled;
5319 case FIX_TRUNC_EXPR:
5320 lower_float_conv_stmt (lhs, g);
5321 goto handled;
5322 case REALPART_EXPR:
5323 case IMAGPART_EXPR:
5324 lower_cplxpart_stmt (lhs, g);
5325 goto handled;
5326 default:
5327 break;
5329 else if (optimizable_arith_overflow (g) == 3)
5331 lower_call (lhs, g);
5332 goto handled;
5334 m_loc = gimple_location (stmt);
5337 if (mergeable_op (stmt)
5338 || gimple_store_p (stmt)
5339 || gimple_assign_load_p (stmt)
5340 || eq_p
5341 || mergeable_cast_p)
5343 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5344 if (!eq_p)
5345 return;
5347 else if (cmp_code != ERROR_MARK)
5348 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5349 if (cmp_code != ERROR_MARK)
5351 if (gimple_code (stmt) == GIMPLE_COND)
5353 gcond *cstmt = as_a <gcond *> (stmt);
5354 gimple_cond_set_lhs (cstmt, lhs);
5355 gimple_cond_set_rhs (cstmt, boolean_false_node);
5356 gimple_cond_set_code (cstmt, cmp_code);
5357 update_stmt (stmt);
5358 return;
5360 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5362 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5363 boolean_false_node);
5364 gimple_assign_set_rhs1 (stmt, cond);
5365 lhs = gimple_assign_lhs (stmt);
5366 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5367 || (bitint_precision_kind (TREE_TYPE (lhs))
5368 <= bitint_prec_middle));
5369 update_stmt (stmt);
5370 return;
5372 gimple_assign_set_rhs1 (stmt, lhs);
5373 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5374 gimple_assign_set_rhs_code (stmt, cmp_code);
5375 update_stmt (stmt);
5376 return;
5378 if (final_cast_p)
5380 tree lhs_type = TREE_TYPE (lhs);
5381 /* Add support for 3 or more limbs filled in from a normal integral
5382 type if this assert fails. If no target chooses a limb mode smaller
5383 than half of the largest supported normal integral type, this will
5384 not be needed. */
5385 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5386 gimple *g;
5387 if (TREE_CODE (lhs_type) == BITINT_TYPE
5388 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5389 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5390 TYPE_UNSIGNED (lhs_type));
5391 m_data_cnt = 0;
5392 tree rhs1 = gimple_assign_rhs1 (stmt);
5393 tree r1 = handle_operand (rhs1, size_int (0));
5394 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5395 r1 = add_cast (lhs_type, r1);
5396 if (TYPE_PRECISION (lhs_type) > limb_prec)
5398 m_data_cnt = 0;
5399 m_first = false;
5400 tree r2 = handle_operand (rhs1, size_int (1));
5401 r2 = add_cast (lhs_type, r2);
5402 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5403 build_int_cst (unsigned_type_node,
5404 limb_prec));
5405 insert_before (g);
5406 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5407 gimple_assign_lhs (g));
5408 insert_before (g);
5409 r1 = gimple_assign_lhs (g);
5411 if (lhs_type != TREE_TYPE (lhs))
5412 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5413 else
5414 g = gimple_build_assign (lhs, r1);
5415 gsi_replace (&m_gsi, g, true);
5416 return;
5418 if (is_gimple_assign (stmt))
5419 switch (gimple_assign_rhs_code (stmt))
5421 case LSHIFT_EXPR:
5422 case RSHIFT_EXPR:
5423 lower_shift_stmt (NULL_TREE, stmt);
5424 return;
5425 case MULT_EXPR:
5426 case TRUNC_DIV_EXPR:
5427 case TRUNC_MOD_EXPR:
5428 lower_muldiv_stmt (NULL_TREE, stmt);
5429 return;
5430 case FIX_TRUNC_EXPR:
5431 case FLOAT_EXPR:
5432 lower_float_conv_stmt (NULL_TREE, stmt);
5433 return;
5434 case REALPART_EXPR:
5435 case IMAGPART_EXPR:
5436 lower_cplxpart_stmt (NULL_TREE, stmt);
5437 return;
5438 case COMPLEX_EXPR:
5439 lower_complexexpr_stmt (stmt);
5440 return;
5441 default:
5442 break;
5444 gcc_unreachable ();
/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}

/* Return true if STMT uses a library function and needs to take
   address of its inputs.  We need to avoid bit-fields in those
   cases.  Similarly, we need to avoid overlap between destination
   and source limb arrays.  */
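/* E.g. large/huge _BitInt multiplication and division are emitted as
   calls to libgcc helpers (__mulbitint3, __divmodbitint4) which are
   passed the addresses of the operand limb arrays, hence these
   restrictions.  */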
bool
stmt_needs_operand_addr (gimple *stmt)
{
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
      case FLOAT_EXPR:
        return true;
      default:
        break;
      }
  else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
           || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
    return true;
  return false;
}

/* Dominator walker used to discover which large/huge _BitInt
   loads could be sunk into all their uses.  */

class bitint_dom_walker : public dom_walker
{
public:
  bitint_dom_walker (bitmap names, bitmap loads)
    : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}

  edge before_dom_children (basic_block) final override;

private:
  bitmap m_names, m_loads;
};

edge
bitint_dom_walker::before_dom_children (basic_block bb)
{
  gphi *phi = get_virtual_phi (bb);
  tree vop;
  if (phi)
    vop = gimple_phi_result (phi);
  else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    vop = NULL_TREE;
  else
    vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;

  auto_vec<tree, 16> worklist;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
        continue;

      if (!vop && gimple_vuse (stmt))
        vop = gimple_vuse (stmt);

      tree cvop = vop;
      if (gimple_vdef (stmt))
        vop = gimple_vdef (stmt);

      tree lhs = gimple_get_lhs (stmt);
      if (lhs
          && TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
          && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
          && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
        /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
           it means it will be handled in a loop or straight line code
           at the location of its (ultimate) immediate use, so for
           vop checking purposes check these only at the ultimate
           immediate use.  */
        continue;

      ssa_op_iter oi;
      use_operand_p use_p;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
        {
          tree s = USE_FROM_PTR (use_p);
          if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
              && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
            worklist.safe_push (s);
        }

      bool needs_operand_addr = stmt_needs_operand_addr (stmt);
      while (worklist.length () > 0)
        {
          tree s = worklist.pop ();

          if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
            {
              gimple *g = SSA_NAME_DEF_STMT (s);
              needs_operand_addr |= stmt_needs_operand_addr (g);
              FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
                {
                  tree s2 = USE_FROM_PTR (use_p);
                  if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
                      && (bitint_precision_kind (TREE_TYPE (s2))
                          >= bitint_prec_large))
                    worklist.safe_push (s2);
                }
              continue;
            }

          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
              && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
            {
              tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
                s = rhs;
              else
                continue;
            }
          else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
            continue;

          gimple *g = SSA_NAME_DEF_STMT (s);
          tree rhs1 = gimple_assign_rhs1 (g);
          if (needs_operand_addr
              && TREE_CODE (rhs1) == COMPONENT_REF
              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
            {
              tree fld = TREE_OPERAND (rhs1, 1);
              /* For little-endian, we can allow as inputs bit-fields
                 which start at a limb boundary.  */
              if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
                  && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
                  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
                      % limb_prec) == 0)
                ;
              else
                {
                  bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
                  continue;
                }
            }

          ao_ref ref;
          ao_ref_init (&ref, rhs1);
          tree lvop = gimple_vuse (g);
          unsigned limit = 64;
          tree vuse = cvop;
          if (vop != cvop
              && is_gimple_assign (stmt)
              && gimple_store_p (stmt)
              && (needs_operand_addr
                  || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
            vuse = vop;
          if (vuse != lvop
              && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
                                         NULL, NULL, limit, lvop) == NULL)
            bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
        }
    }

  bb->aux = (void *) vop;
  return NULL;
}
/* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
   build_ssa_conflict_graph.
   The differences are:
   1) don't process assignments with large/huge _BitInt lhs not in NAMES
   2) for large/huge _BitInt multiplication/division/modulo process def
      only after processing uses rather than before to make uses conflict
      with the definition
   3) for large/huge _BitInt uses not in NAMES mark the uses of their
      SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
      the final statement.  */
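/* As an example of 2), for
     _BitInt(512) c = a * b;
   the lowered code passes the addresses of the limb arrays of c, a and b
   to a libgcc helper which may write destination limbs before it has read
   all source limbs, so c has to conflict with a and b rather than
   possibly share a partition with one of them.  */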
void
build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
                                 ssa_conflicts *graph, bitmap names,
                                 void (*def) (live_track *, tree,
                                              ssa_conflicts *),
                                 void (*use) (live_track *, tree))
{
  bool muldiv_p = false;
  tree lhs = NULL_TREE;
  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
          && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
        {
          if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
            return;
          switch (gimple_assign_rhs_code (stmt))
            {
            case MULT_EXPR:
            case TRUNC_DIV_EXPR:
            case TRUNC_MOD_EXPR:
              muldiv_p = true;
              /* FALLTHRU */
            default:
              break;
            }
        }
    }

  ssa_op_iter iter;
  tree var;
  if (!muldiv_p)
    {
      /* For stmts with more than one SSA_NAME definition pretend all the
         SSA_NAME outputs but the first one are live at this point, so
         that conflicts are added in between all those even when they are
         actually not really live after the asm, because expansion might
         copy those into pseudos after the asm and if multiple outputs
         share the same partition, it might overwrite those that should
         be live.  E.g.
         asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
         return a;
         See PR70593.  */
      bool first = true;
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
        if (first)
          first = false;
        else
          use (live, var);

      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
        def (live, var, graph);
    }

  auto_vec<tree, 16> worklist;
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
        && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
      {
        if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
          use (live, var);
        else
          worklist.safe_push (var);
      }

  while (worklist.length () > 0)
    {
      tree s = worklist.pop ();
      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
        if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
            && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
          {
            if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
              use (live, var);
            else
              worklist.safe_push (var);
          }
    }

  if (muldiv_p)
    def (live, lhs, graph);
}
/* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
   return the largest bitint_prec_kind of them, otherwise return
   bitint_prec_small.  */

static bitint_prec_kind
arith_overflow_arg_kind (gimple *stmt)
{
  bitint_prec_kind ret = bitint_prec_small;
  if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_MUL_OVERFLOW:
        for (int i = 0; i < 2; ++i)
          {
            tree a = gimple_call_arg (stmt, i);
            if (TREE_CODE (a) == INTEGER_CST
                && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
              {
                bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
                ret = MAX (ret, kind);
              }
          }
        break;
      default:
        break;
      }
  return ret;
}

/* Entry point for _BitInt(N) operation lowering during optimization.  */

static unsigned int
gimple_lower_bitint (void)
{
  small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
  limb_prec = 0;

  unsigned int i;
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
        continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
        {
          if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
              != bitint_prec_small)
            break;
          type = TREE_TYPE (type);
        }
      if (TREE_CODE (type) == BITINT_TYPE
          && bitint_precision_kind (type) != bitint_prec_small)
        break;
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
         into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_store_p (g))
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && (bitint_precision_kind (TREE_TYPE (t))
                      >= bitint_prec_large))
                break;
            }
        }
      /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
         to floating point types need to be rewritten.  */
      else if (SCALAR_FLOAT_TYPE_P (type))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (t) == INTEGER_CST
                  && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && (bitint_precision_kind (TREE_TYPE (t))
                      != bitint_prec_small))
                break;
            }
        }
    }
  if (i == num_ssa_names)
    return 0;

  basic_block bb;
  auto_vec<gimple *, 4> switch_statements;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
        {
          tree idx = gimple_switch_index (swtch);
          if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
              || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
            continue;

          if (optimize)
            group_case_labels_stmt (swtch);
          switch_statements.safe_push (swtch);
        }
    }

  if (!switch_statements.is_empty ())
    {
      bool expanded = false;
      gimple *stmt;
      unsigned int j;
      i = 0;
      FOR_EACH_VEC_ELT (switch_statements, j, stmt)
        {
          gswitch *swtch = as_a<gswitch *> (stmt);
          tree_switch_conversion::switch_decision_tree dt (swtch);
          expanded |= dt.analyze_switch_statement ();
        }

      if (expanded)
        {
          free_dominance_info (CDI_DOMINATORS);
          free_dominance_info (CDI_POST_DOMINATORS);
          mark_virtual_operands_for_renaming (cfun);
          cleanup_tree_cfg (TODO_update_ssa);
        }
    }

  struct bitint_large_huge large_huge;
  bool has_large_huge_parm_result = false;
  bool has_large_huge = false;
  unsigned int ret = 0, first_large_huge = ~0U;
  bool edge_insertions = false;
  for (; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
        continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
        {
          if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
              >= bitint_prec_large)
            has_large_huge = true;
          type = TREE_TYPE (type);
        }
      if (TREE_CODE (type) == BITINT_TYPE
          && bitint_precision_kind (type) >= bitint_prec_large)
        {
          if (first_large_huge == ~0U)
            first_large_huge = i;
          gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
          gimple_stmt_iterator gsi;
          tree_code rhs_code;
          /* Unoptimize certain constructs to simpler alternatives to
             avoid having to lower all of them.  */
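          /* E.g. a rotate of a large/huge _BitInt x
               lhs = x r<< n;
             is rewritten below into the equivalent
               lhs = (x << n) | (x >> (TYPE_PRECISION (type) - n));
             with a signed x first converted to the unsigned type of the
             same precision, so that only shifts and bitwise ior need to
             be lowered afterwards.  */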
          if (is_gimple_assign (stmt) && gimple_bb (stmt))
            switch (rhs_code = gimple_assign_rhs_code (stmt))
              {
              default:
                break;
              case LROTATE_EXPR:
              case RROTATE_EXPR:
                {
                  first_large_huge = 0;
                  location_t loc = gimple_location (stmt);
                  gsi = gsi_for_stmt (stmt);
                  tree rhs1 = gimple_assign_rhs1 (stmt);
                  tree type = TREE_TYPE (rhs1);
                  tree n = gimple_assign_rhs2 (stmt), m;
                  tree p = build_int_cst (TREE_TYPE (n),
                                          TYPE_PRECISION (type));
                  if (TREE_CODE (n) == INTEGER_CST)
                    m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
                  else
                    {
                      m = make_ssa_name (TREE_TYPE (n));
                      g = gimple_build_assign (m, MINUS_EXPR, p, n);
                      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                      gimple_set_location (g, loc);
                    }
                  if (!TYPE_UNSIGNED (type))
                    {
                      tree utype = build_bitint_type (TYPE_PRECISION (type),
                                                      1);
                      if (TREE_CODE (rhs1) == INTEGER_CST)
                        rhs1 = fold_convert (utype, rhs1);
                      else
                        {
                          /* Convert the signed operand to the unsigned
                             type and do the shifts on that value.  */
                          tree t = make_ssa_name (utype);
                          g = gimple_build_assign (t, NOP_EXPR, rhs1);
                          gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                          gimple_set_location (g, loc);
                          rhs1 = t;
                        }
                    }
                  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
                                           rhs_code == LROTATE_EXPR
                                           ? LSHIFT_EXPR : RSHIFT_EXPR,
                                           rhs1, n);
                  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                  gimple_set_location (g, loc);
                  tree op1 = gimple_assign_lhs (g);
                  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
                                           rhs_code == LROTATE_EXPR
                                           ? RSHIFT_EXPR : LSHIFT_EXPR,
                                           rhs1, m);
                  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                  gimple_set_location (g, loc);
                  tree op2 = gimple_assign_lhs (g);
                  tree lhs = gimple_assign_lhs (stmt);
                  if (!TYPE_UNSIGNED (type))
                    {
                      g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
                                               BIT_IOR_EXPR, op1, op2);
                      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                      gimple_set_location (g, loc);
                      g = gimple_build_assign (lhs, NOP_EXPR,
                                               gimple_assign_lhs (g));
                    }
                  else
                    g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
                  gsi_replace (&gsi, g, true);
                  gimple_set_location (g, loc);
                }
                break;
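              /* ABS_EXPR, ABSU_EXPR, MIN_EXPR, MAX_EXPR and COND_EXPR are
                 likewise rewritten into explicit control flow, e.g.
                   lhs = ABS_EXPR <x>;
                 becomes roughly
                   if (x < 0) tmp = -x;
                   lhs = PHI <x, tmp>;
                 so only the comparison, the negation and the PHI need to
                 be lowered afterwards.  */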
              case ABS_EXPR:
              case ABSU_EXPR:
              case MIN_EXPR:
              case MAX_EXPR:
              case COND_EXPR:
                first_large_huge = 0;
                gsi = gsi_for_stmt (stmt);
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
                location_t loc = gimple_location (stmt);
                if (rhs_code == ABS_EXPR)
                  g = gimple_build_cond (LT_EXPR, rhs1,
                                         build_zero_cst (TREE_TYPE (rhs1)),
                                         NULL_TREE, NULL_TREE);
                else if (rhs_code == ABSU_EXPR)
                  {
                    rhs2 = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
                    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                    gimple_set_location (g, loc);
                    g = gimple_build_cond (LT_EXPR, rhs1,
                                           build_zero_cst (TREE_TYPE (rhs1)),
                                           NULL_TREE, NULL_TREE);
                    rhs1 = rhs2;
                  }
                else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
                  {
                    rhs2 = gimple_assign_rhs2 (stmt);
                    if (TREE_CODE (rhs1) == INTEGER_CST)
                      std::swap (rhs1, rhs2);
                    g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
                                           NULL_TREE, NULL_TREE);
                    if (rhs_code == MAX_EXPR)
                      std::swap (rhs1, rhs2);
                  }
                else
                  {
                    g = gimple_build_cond (NE_EXPR, rhs1,
                                           build_zero_cst (TREE_TYPE (rhs1)),
                                           NULL_TREE, NULL_TREE);
                    rhs1 = gimple_assign_rhs2 (stmt);
                    rhs2 = gimple_assign_rhs3 (stmt);
                  }
                gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                gimple_set_location (g, loc);
                edge e1 = split_block (gsi_bb (gsi), g);
                edge e2 = split_block (e1->dest, (gimple *) NULL);
                edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
                e3->probability = profile_probability::even ();
                e1->flags = EDGE_TRUE_VALUE;
                e1->probability = e3->probability.invert ();
                if (dom_info_available_p (CDI_DOMINATORS))
                  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
                if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
                  {
                    gsi = gsi_after_labels (e1->dest);
                    g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
                                             NEGATE_EXPR, rhs1);
                    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
                    gimple_set_location (g, loc);
                    rhs2 = gimple_assign_lhs (g);
                    std::swap (rhs1, rhs2);
                  }
                gsi = gsi_for_stmt (stmt);
                gsi_remove (&gsi, true);
                gphi *phi = create_phi_node (lhs, e2->dest);
                add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
                add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
                break;
              }
        }
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
         into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_store_p (g))
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && (bitint_precision_kind (TREE_TYPE (t))
                      >= bitint_prec_large))
                has_large_huge = true;
            }
        }
      /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
         to floating point types need to be rewritten.  */
      else if (SCALAR_FLOAT_TYPE_P (type))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (t) == INTEGER_CST
                  && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && (bitint_precision_kind (TREE_TYPE (t))
                      >= bitint_prec_large))
                has_large_huge = true;
            }
        }
    }
  for (i = first_large_huge; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
        continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
        type = TREE_TYPE (type);
      if (TREE_CODE (type) == BITINT_TYPE
          && bitint_precision_kind (type) >= bitint_prec_large)
        {
          use_operand_p use_p;
          gimple *use_stmt;
          has_large_huge = true;
          if (optimize
              && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
            continue;
          /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
             the same bb and could be handled in the same loop with the
             immediate use.  */
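          /* E.g. in
               _BitInt(512) t = a + b;
               d = t - c;
             t has a single use in the same bb, so both operations can be
             lowered in one loop over the limbs without giving t a
             partition of its own.  */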
          if (optimize
              && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
              && single_imm_use (s, &use_p, &use_stmt)
              && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
            {
              if (mergeable_op (SSA_NAME_DEF_STMT (s)))
                {
                  if (mergeable_op (use_stmt))
                    continue;
                  tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
                  if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
                    continue;
                  if (gimple_assign_cast_p (use_stmt))
                    {
                      tree lhs = gimple_assign_lhs (use_stmt);
                      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
                        continue;
                    }
                  else if (gimple_store_p (use_stmt)
                           && is_gimple_assign (use_stmt)
                           && !gimple_has_volatile_ops (use_stmt)
                           && !stmt_ends_bb_p (use_stmt))
                    continue;
                }
              if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
                {
                  tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
                  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                      && ((is_gimple_assign (use_stmt)
                           && (gimple_assign_rhs_code (use_stmt)
                               != COMPLEX_EXPR))
                          || gimple_code (use_stmt) == GIMPLE_COND)
                      && (!gimple_store_p (use_stmt)
                          || (is_gimple_assign (use_stmt)
                              && !gimple_has_volatile_ops (use_stmt)
                              && !stmt_ends_bb_p (use_stmt)))
                      && (TREE_CODE (rhs1) != SSA_NAME
                          || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
                    {
                      if (is_gimple_assign (use_stmt))
                        switch (gimple_assign_rhs_code (use_stmt))
                          {
                          case TRUNC_DIV_EXPR:
                          case TRUNC_MOD_EXPR:
                          case FLOAT_EXPR:
                            /* For division, modulo and casts to floating
                               point, avoid representing unsigned operands
                               using negative prec if they were sign-extended
                               from narrower precision.  */
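                            /* "Negative prec" refers to the convention
                               used with handle_operand_addr of passing a
                               negated precision to the libgcc helpers to
                               denote a sign-extended operand.  */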
                            if (TYPE_UNSIGNED (TREE_TYPE (s))
                                && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
                                && (TYPE_PRECISION (TREE_TYPE (s))
                                    > TYPE_PRECISION (TREE_TYPE (rhs1))))
                              goto force_name;
                            /* FALLTHRU */
                          case MULT_EXPR:
                            if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
                                || (bitint_precision_kind (TREE_TYPE (rhs1))
                                    < bitint_prec_large))
                              continue;
                            /* Uses which use handle_operand_addr can't
                               deal with nested casts.  */
                            if (TREE_CODE (rhs1) == SSA_NAME
                                && gimple_assign_cast_p
                                     (SSA_NAME_DEF_STMT (rhs1))
                                && has_single_use (rhs1)
                                && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
                                    == gimple_bb (SSA_NAME_DEF_STMT (s))))
                              goto force_name;
                            break;
                          default:
                            break;
                          }
                      if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
                          || (bitint_precision_kind (TREE_TYPE (rhs1))
                              < bitint_prec_large))
                        continue;
                      if ((TYPE_PRECISION (TREE_TYPE (rhs1))
                           >= TYPE_PRECISION (TREE_TYPE (s)))
                          && mergeable_op (use_stmt))
                        continue;
                      /* Prevent merging a widening non-mergeable cast
                         on result of some narrower mergeable op
                         together with later mergeable operations.  E.g.
                         result of _BitInt(223) addition shouldn't be
                         sign-extended to _BitInt(513) and have another
                         _BitInt(513) added to it, as handle_plus_minus
                         with its PHI node handling inside of handle_cast
                         will not work correctly.  An exception is if
                         use_stmt is a store, this is handled directly
                         in lower_mergeable_stmt.  */
                      if (TREE_CODE (rhs1) != SSA_NAME
                          || !has_single_use (rhs1)
                          || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
                              != gimple_bb (SSA_NAME_DEF_STMT (s)))
                          || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
                          || gimple_store_p (use_stmt))
                        continue;
                      if ((TYPE_PRECISION (TREE_TYPE (rhs1))
                           < TYPE_PRECISION (TREE_TYPE (s)))
                          && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
                        {
                          /* Another exception is if the widening cast is
                             from mergeable same precision cast from something
                             not mergeable.  */
                          tree rhs2
                            = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
                          if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
                              && (TYPE_PRECISION (TREE_TYPE (rhs1))
                                  == TYPE_PRECISION (TREE_TYPE (rhs2))))
                            {
                              if (TREE_CODE (rhs2) != SSA_NAME
                                  || !has_single_use (rhs2)
                                  || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
                                      != gimple_bb (SSA_NAME_DEF_STMT (s)))
                                  || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
                                continue;
                            }
                        }
                    }
                }
              if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
                switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
                  {
                  case IMAGPART_EXPR:
                    {
                      tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
                      rhs1 = TREE_OPERAND (rhs1, 0);
                      if (TREE_CODE (rhs1) == SSA_NAME)
                        {
                          gimple *g = SSA_NAME_DEF_STMT (rhs1);
                          if (optimizable_arith_overflow (g))
                            continue;
                        }
                    }
                    /* FALLTHRU */
                  case LSHIFT_EXPR:
                  case RSHIFT_EXPR:
                  case MULT_EXPR:
                  case TRUNC_DIV_EXPR:
                  case TRUNC_MOD_EXPR:
                  case FIX_TRUNC_EXPR:
                  case REALPART_EXPR:
                    if (gimple_store_p (use_stmt)
                        && is_gimple_assign (use_stmt)
                        && !gimple_has_volatile_ops (use_stmt)
                        && !stmt_ends_bb_p (use_stmt))
                      {
                        tree lhs = gimple_assign_lhs (use_stmt);
                        /* As multiply/division passes address of the lhs
                           to library function and that assumes it can extend
                           it to whole number of limbs, avoid merging those
                           with bit-field stores.  Don't allow it for
                           shifts etc. either, so that the bit-field store
                           handling doesn't have to be done everywhere.  */
                        if (TREE_CODE (lhs) == COMPONENT_REF
                            && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
                          break;
                        continue;
                      }
                    break;
                  default:
                    break;
                  }
            }

          /* Also ignore uninitialized uses.  */
          if (SSA_NAME_IS_DEFAULT_DEF (s)
              && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
            continue;
        force_name:
          if (!large_huge.m_names)
            large_huge.m_names = BITMAP_ALLOC (NULL);
          bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
          if (has_single_use (s))
            {
              if (!large_huge.m_single_use_names)
                large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
              bitmap_set_bit (large_huge.m_single_use_names,
                              SSA_NAME_VERSION (s));
            }
          if (SSA_NAME_VAR (s)
              && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
                   && SSA_NAME_IS_DEFAULT_DEF (s))
                  || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
            has_large_huge_parm_result = true;
          if (optimize
              && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
              && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
              && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
              && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
            {
              use_operand_p use_p;
              imm_use_iterator iter;
              bool optimizable_load = true;
              FOR_EACH_IMM_USE_FAST (use_p, iter, s)
                {
                  gimple *use_stmt = USE_STMT (use_p);
                  if (is_gimple_debug (use_stmt))
                    continue;
                  if (gimple_code (use_stmt) == GIMPLE_PHI
                      || is_gimple_call (use_stmt)
                      || gimple_code (use_stmt) == GIMPLE_ASM)
                    {
                      optimizable_load = false;
                      break;
                    }
                }

              ssa_op_iter oi;
              FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
                                        oi, SSA_OP_USE)
                {
                  tree s2 = USE_FROM_PTR (use_p);
                  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
                    {
                      optimizable_load = false;
                      break;
                    }
                }

              if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
                {
                  if (!large_huge.m_loads)
                    large_huge.m_loads = BITMAP_ALLOC (NULL);
                  bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
                }
            }
        }
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
         into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_store_p (g))
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
                has_large_huge = true;
            }
        }
    }

  if (large_huge.m_names || has_large_huge)
    {
      ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
      calculate_dominance_info (CDI_DOMINATORS);
      if (optimize)
        enable_ranger (cfun);
      if (large_huge.m_loads)
        {
          basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
          entry->aux = NULL;
          bitint_dom_walker (large_huge.m_names,
                             large_huge.m_loads).walk (entry);
          bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
          clear_aux_for_blocks ();
          BITMAP_FREE (large_huge.m_loads);
        }
      large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
      large_huge.m_limb_size
        = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
    }
  if (large_huge.m_names)
    {
      large_huge.m_map
        = init_var_map (num_ssa_names, NULL, large_huge.m_names);
      coalesce_ssa_name (large_huge.m_map);
      partition_view_normal (large_huge.m_map);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "After Coalescing:\n");
          dump_var_map (dump_file, large_huge.m_map);
        }
      large_huge.m_vars
        = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
      bitmap_iterator bi;
      if (has_large_huge_parm_result)
        EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
          {
            tree s = ssa_name (i);
            if (SSA_NAME_VAR (s)
                && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
                     && SSA_NAME_IS_DEFAULT_DEF (s))
                    || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
              {
                int p = var_to_partition (large_huge.m_map, s);
                if (large_huge.m_vars[p] == NULL_TREE)
                  {
                    large_huge.m_vars[p] = SSA_NAME_VAR (s);
                    mark_addressable (SSA_NAME_VAR (s));
                  }
              }
          }
      tree atype = NULL_TREE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
      EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
        {
          tree s = ssa_name (i);
          int p = var_to_partition (large_huge.m_map, s);
          if (large_huge.m_vars[p] == NULL_TREE)
            {
              if (atype == NULL_TREE
                  || !tree_int_cst_equal (TYPE_SIZE (atype),
                                          TYPE_SIZE (TREE_TYPE (s))))
                {
                  unsigned HOST_WIDE_INT nelts
                    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
                  atype = build_array_type_nelts (large_huge.m_limb_type,
                                                  nelts);
                }
              large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
              mark_addressable (large_huge.m_vars[p]);
            }
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              print_generic_expr (dump_file, s, TDF_SLIM);
              fprintf (dump_file, " -> ");
              print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      gimple_stmt_iterator prev;
      for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
           gsi = prev)
        {
          prev = gsi;
          gsi_prev (&prev);
          ssa_op_iter iter;
          gimple *stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt))
            continue;
          bitint_prec_kind kind = bitint_prec_small;
          tree t;
          FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
            if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
              {
                bitint_prec_kind this_kind
                  = bitint_precision_kind (TREE_TYPE (t));
                kind = MAX (kind, this_kind);
              }
          if (is_gimple_assign (stmt) && gimple_store_p (stmt))
            {
              t = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
                {
                  bitint_prec_kind this_kind
                    = bitint_precision_kind (TREE_TYPE (t));
                  kind = MAX (kind, this_kind);
                }
            }
          if (is_gimple_assign (stmt)
              && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
            {
              t = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && TREE_CODE (t) == INTEGER_CST)
                {
                  bitint_prec_kind this_kind
                    = bitint_precision_kind (TREE_TYPE (t));
                  kind = MAX (kind, this_kind);
                }
            }
          if (is_gimple_call (stmt))
            {
              t = gimple_call_lhs (stmt);
              if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
                {
                  bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
                  kind = MAX (kind, this_kind);
                  if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
                    {
                      this_kind
                        = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
                      kind = MAX (kind, this_kind);
                    }
                }
            }
          if (kind == bitint_prec_small)
            continue;
          switch (gimple_code (stmt))
            {
            case GIMPLE_CALL:
              /* For now.  We'll need to handle some internal functions and
                 perhaps some builtins.  */
              if (kind == bitint_prec_middle)
                continue;
              break;
            case GIMPLE_ASM:
              if (kind == bitint_prec_middle)
                continue;
              break;
            case GIMPLE_RETURN:
              continue;
            case GIMPLE_ASSIGN:
              if (gimple_clobber_p (stmt))
                continue;
              if (kind >= bitint_prec_large)
                break;
              if (gimple_assign_single_p (stmt))
                /* No need to lower copies, loads or stores.  */
                continue;
              if (gimple_assign_cast_p (stmt))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree rhs = gimple_assign_rhs1 (stmt);
                  if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          == TYPE_PRECISION (TREE_TYPE (rhs))))
                    /* No need to lower casts to same precision.  */
                    continue;
                }
              break;
            default:
              break;
            }

          if (kind == bitint_prec_middle)
            {
              tree type = NULL_TREE;
              /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
                 with the same precision and back.  */
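              /* E.g. a middle _BitInt(N) addition becomes casts of both
                 operands to the N-bit INTEGER_TYPE obtained from
                 build_nonstandard_integer_type, an addition in that type
                 and a NOP_EXPR cast of the result back to _BitInt(N).  */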
              unsigned int nops = gimple_num_ops (stmt);
              for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
                   i < nops; ++i)
                if (tree op = gimple_op (stmt, i))
                  {
                    tree nop = maybe_cast_middle_bitint (&gsi, op, type);
                    if (nop != op)
                      gimple_set_op (stmt, i, nop);
                    else if (COMPARISON_CLASS_P (op))
                      {
                        TREE_OPERAND (op, 0)
                          = maybe_cast_middle_bitint (&gsi,
                                                      TREE_OPERAND (op, 0),
                                                      type);
                        TREE_OPERAND (op, 1)
                          = maybe_cast_middle_bitint (&gsi,
                                                      TREE_OPERAND (op, 1),
                                                      type);
                      }
                    else if (TREE_CODE (op) == CASE_LABEL_EXPR)
                      {
                        CASE_LOW (op)
                          = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
                                                      type);
                        CASE_HIGH (op)
                          = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
                                                      type);
                      }
                  }
              if (tree lhs = gimple_get_lhs (stmt))
                if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
                    && (bitint_precision_kind (TREE_TYPE (lhs))
                        == bitint_prec_middle))
                  {
                    int prec = TYPE_PRECISION (TREE_TYPE (lhs));
                    int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
                    type = build_nonstandard_integer_type (prec, uns);
                    tree lhs2 = make_ssa_name (type);
                    gimple_set_lhs (stmt, lhs2);
                    gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
                    if (stmt_ends_bb_p (stmt))
                      {
                        edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
                        gsi_insert_on_edge_immediate (e, g);
                      }
                    else
                      gsi_insert_after (&gsi, g, GSI_SAME_STMT);
                  }
              update_stmt (stmt);
              continue;
            }

          if (tree lhs = gimple_get_lhs (stmt))
            if (TREE_CODE (lhs) == SSA_NAME)
              {
                tree type = TREE_TYPE (lhs);
                if (TREE_CODE (type) == COMPLEX_TYPE)
                  type = TREE_TYPE (type);
                if (TREE_CODE (type) == BITINT_TYPE
                    && bitint_precision_kind (type) >= bitint_prec_large
                    && (large_huge.m_names == NULL
                        || !bitmap_bit_p (large_huge.m_names,
                                          SSA_NAME_VERSION (lhs))))
                  continue;
              }

          large_huge.lower_stmt (stmt);
        }
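      /* Large/huge _BitInt PHIs are not lowered above; instead, copies
         into the variables backing the result partitions are inserted on
         the incoming edges below, with constant arguments expanded from
         .rodata constants of minimal precision and the remaining limbs
         filled by a zero store or an all-ones memset for the extension.  */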
      tree atype = NULL_TREE;
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree lhs = gimple_phi_result (phi);
          if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
              || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
            continue;
          int p1 = var_to_partition (large_huge.m_map, lhs);
          gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
          tree v1 = large_huge.m_vars[p1];
          for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
            {
              tree arg = gimple_phi_arg_def (phi, i);
              edge e = gimple_phi_arg_edge (phi, i);
              gimple *g;
              switch (TREE_CODE (arg))
                {
                case INTEGER_CST:
                  if (integer_zerop (arg) && VAR_P (v1))
                    {
                      tree zero = build_zero_cst (TREE_TYPE (v1));
                      g = gimple_build_assign (v1, zero);
                      gsi_insert_on_edge (e, g);
                      edge_insertions = true;
                      break;
                    }
                  int ext;
                  unsigned int min_prec, prec, rem;
                  tree c;
                  prec = TYPE_PRECISION (TREE_TYPE (arg));
                  rem = prec % (2 * limb_prec);
                  min_prec = bitint_min_cst_precision (arg, ext);
                  if (min_prec > prec - rem - 2 * limb_prec
                      && min_prec > (unsigned) limb_prec)
                    /* Constant which has enough significant bits that it
                       isn't worth trying to save .rodata space by extending
                       from smaller number.  */
                    min_prec = prec;
                  else
                    min_prec = CEIL (min_prec, limb_prec) * limb_prec;
                  if (min_prec == 0)
                    c = NULL_TREE;
                  else if (min_prec == prec)
                    c = tree_output_constant_def (arg);
                  else if (min_prec == (unsigned) limb_prec)
                    c = fold_convert (large_huge.m_limb_type, arg);
                  else
                    {
                      tree ctype = build_bitint_type (min_prec, 1);
                      c = tree_output_constant_def (fold_convert (ctype, arg));
                    }
                  if (c)
                    {
                      if (VAR_P (v1) && min_prec == prec)
                        {
                          tree v2 = build1 (VIEW_CONVERT_EXPR,
                                            TREE_TYPE (v1), c);
                          g = gimple_build_assign (v1, v2);
                          gsi_insert_on_edge (e, g);
                          edge_insertions = true;
                          break;
                        }
                      if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
                        g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                         TREE_TYPE (c), v1),
                                                 c);
                      else
                        {
                          unsigned HOST_WIDE_INT nelts
                            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
                              / limb_prec;
                          tree vtype
                            = build_array_type_nelts (large_huge.m_limb_type,
                                                      nelts);
                          g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                           vtype, v1),
                                                   build1 (VIEW_CONVERT_EXPR,
                                                           vtype, c));
                        }
                      gsi_insert_on_edge (e, g);
                    }
                  if (ext == 0)
                    {
                      unsigned HOST_WIDE_INT nelts
                        = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
                           - min_prec) / limb_prec;
                      tree vtype
                        = build_array_type_nelts (large_huge.m_limb_type,
                                                  nelts);
                      tree ptype = build_pointer_type (TREE_TYPE (v1));
                      tree off;
                      if (c)
                        off = fold_convert (ptype,
                                            TYPE_SIZE_UNIT (TREE_TYPE (c)));
                      else
                        off = build_zero_cst (ptype);
                      tree vd = build2 (MEM_REF, vtype,
                                        build_fold_addr_expr (v1), off);
                      g = gimple_build_assign (vd, build_zero_cst (vtype));
                    }
                  else
                    {
                      tree vd = v1;
                      if (c)
                        {
                          tree ptype = build_pointer_type (TREE_TYPE (v1));
                          tree off
                            = fold_convert (ptype,
                                            TYPE_SIZE_UNIT (TREE_TYPE (c)));
                          vd = build2 (MEM_REF, large_huge.m_limb_type,
                                       build_fold_addr_expr (v1), off);
                        }
                      vd = build_fold_addr_expr (vd);
                      unsigned HOST_WIDE_INT nbytes
                        = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
                      if (c)
                        nbytes
                          -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
                      tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
                      g = gimple_build_call (fn, 3, vd,
                                             integer_minus_one_node,
                                             build_int_cst (sizetype,
                                                            nbytes));
                    }
                  gsi_insert_on_edge (e, g);
                  edge_insertions = true;
                  break;
                default:
                  gcc_unreachable ();
                case SSA_NAME:
                  if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
                    {
                      if (large_huge.m_names == NULL
                          || !bitmap_bit_p (large_huge.m_names,
                                            SSA_NAME_VERSION (arg)))
                        continue;
                    }
                  int p2 = var_to_partition (large_huge.m_map, arg);
                  if (p1 == p2)
                    continue;
                  gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
                  tree v2 = large_huge.m_vars[p2];
                  if (VAR_P (v1) && VAR_P (v2))
                    g = gimple_build_assign (v1, v2);
                  else if (VAR_P (v1))
                    g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
                                                         TREE_TYPE (v1), v2));
                  else if (VAR_P (v2))
                    g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                     TREE_TYPE (v2), v1), v2);
                  else
                    {
                      if (atype == NULL_TREE
                          || !tree_int_cst_equal (TYPE_SIZE (atype),
                                                  TYPE_SIZE (TREE_TYPE (lhs))))
                        {
                          unsigned HOST_WIDE_INT nelts
                            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
                              / limb_prec;
                          atype
                            = build_array_type_nelts (large_huge.m_limb_type,
                                                      nelts);
                        }
                      g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                       atype, v1),
                                               build1 (VIEW_CONVERT_EXPR,
                                                       atype, v2));
                    }
                  gsi_insert_on_edge (e, g);
                  edge_insertions = true;
                  break;
                }
            }
        }
    }
  if (large_huge.m_names || has_large_huge)
    {
      gimple *nop = NULL;
      for (i = 0; i < num_ssa_names; ++i)
        {
          tree s = ssa_name (i);
          if (s == NULL_TREE)
            continue;
          tree type = TREE_TYPE (s);
          if (TREE_CODE (type) == COMPLEX_TYPE)
            type = TREE_TYPE (type);
          if (TREE_CODE (type) == BITINT_TYPE
              && bitint_precision_kind (type) >= bitint_prec_large)
            {
              if (large_huge.m_preserved
                  && bitmap_bit_p (large_huge.m_preserved,
                                   SSA_NAME_VERSION (s)))
                continue;
              gimple *g = SSA_NAME_DEF_STMT (s);
              if (gimple_code (g) == GIMPLE_NOP)
                {
                  if (SSA_NAME_VAR (s))
                    set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
                  release_ssa_name (s);
                  continue;
                }
              if (gimple_bb (g) == NULL)
                {
                  release_ssa_name (s);
                  continue;
                }
              if (gimple_code (g) != GIMPLE_ASM)
                {
                  gimple_stmt_iterator gsi = gsi_for_stmt (g);
                  bool save_vta = flag_var_tracking_assignments;
                  flag_var_tracking_assignments = false;
                  gsi_remove (&gsi, true);
                  flag_var_tracking_assignments = save_vta;
                }
              if (nop == NULL)
                nop = gimple_build_nop ();
              SSA_NAME_DEF_STMT (s) = nop;
              release_ssa_name (s);
            }
        }
      if (optimize)
        disable_ranger (cfun);
    }

  if (edge_insertions)
    gsi_commit_edge_inserts ();

  return ret;
}
namespace {

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}

namespace {

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}