gcc/gimple-lower-bitint.cc

/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"

/* Split BITINT_TYPE precisions into 4 categories.  Small _BitInt, where
   the target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 the limb is DImode, but
   the target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
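
/* Worked example (editorial sketch, assuming x86_64-like parameters:
   limb_prec == 64 and MAX_FIXED_MODE_SIZE == 128, i.e. TImode; the
   exact boundaries are target-dependent):
     _BitInt(32)  -> bitint_prec_small   (single DImode limb)
     _BitInt(100) -> bitint_prec_middle  (casts to TImode arithmetic)
     _BitInt(200) -> bitint_prec_large   (129 .. 255 bits, straight
					  line code over 4 limbs)
     _BitInt(512) -> bitint_prec_huge    (>= 4 * limb_prec == 256 bits,
					  loops over the limbs)  */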

/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  ext = -1;
	  return min_prec2;
	}
    }
  return min_prec;
}
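
/* Worked example (editorial sketch): for a signed _BitInt(256) constant
   -5, tree_int_cst_sgn is negative so EXT becomes -1; the SIGNED
   minimum precision of -5 is 4 bits (1011), and dropping the sign bit
   yields 3, so only the low 3 bits need to be materialized and every
   limb above them is just the -1 extension.  For an unsigned constant
   with many most significant bits set, the SIGNED min_precision - 1
   path can win instead, again with EXT == -1.  */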

namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}

/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
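
/* Editorial example (illustrative only): with 64-bit limbs,
   x = (a & b) + c on _BitInt(256) operands is fully mergeable into a
   single limb loop, while x = a << 70 is not (the count is >=
   limb_prec) and is lowered via lower_shift_stmt; x = a << 3 stays
   mergeable through handle_lshift below.  */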

/* Return non-zero if STMT is a .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (the most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

static int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	  realpart = g;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	  cast = use_stmt;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
	 if both appear, the lowering wants to emit all the code
	 at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
	    return 0;
	  if (gsi_stmt (gsi) == stmt)
	    return 2;
	  /* If realpart is too far from stmt, punt as well.
	     Usually it will appear right after it.  */
	  if (++cnt == 32)
	    return 0;
	}
      while (1);
    }
  return 1;
}

/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}

/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, zero otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     it just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     the m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
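
/* Editorial sketch (illustrative, not from the original sources) of the
   m_first/m_data protocol described above, for a mergeable _BitInt(256)
   addition with 64-bit limbs: lower_mergeable_stmt generates the loop
   body once, calling handle_stmt for limb idx with m_first set (which
   pushes the carry state into m_data and creates the loop PHI) and then
   for limb idx + 1 with m_first clear (which reuses m_data[m_data_cnt]
   instead of pushing again); any full or partial limbs left after the
   loop are handled with constant idx, still sharing the same m_data
   slots.  */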

bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}

/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
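
/* Editorial example: for _BitInt(130) with 64-bit limbs, idx 0 and 1
   yield m_limb_type (a 64-bit unsigned type), while idx 2 yields a
   2-bit (130 % 64) nonstandard integer type for the partial most
   significant limb.  */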

/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}

/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}

/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
  new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}

/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     |    /  \
     |   /    \
     |new_bb1 new_bb2
     |   |    /
     \   |   /
      \  |  /
       \ | /
	\|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}

/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  g = gimple_build_cond (LT_EXPR, idx,
				 size_int (min_prec / limb_prec),
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      c = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
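
/* Editorial sketch: inside a limb loop this yields roughly

     in = PHI <val (preheader), out (latch)>

   where OUT is VAL_OUT if given, otherwise a fresh SSA_NAME the
   caller defines in the loop body (e.g. the carry produced by
   handle_plus_minus).  For constant idx no PHI is needed and VAL is
   returned directly.  */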

/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}

/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
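
/* Editorial sketch of the carry chain emitted above when the target
   provides the uaddc5/usubc5 optab:

     _1 = .UADDC (a[idx], b[idx], carry_in);
     res = REALPART_EXPR <_1>;
     carry_out = IMAGPART_EXPR <_1>;

   Without that optab the same carry is reconstructed from a pair of
   .ADD_OVERFLOW calls whose IMAGPART_EXPRs are summed, or, for partial
   limbs whose type has room for the carry in excess precision, from
   plain additions.  */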

/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
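
/* Editorial sketch: for a constant count CNT in [1, limb_prec - 1] the
   code above computes per limb

     res[idx] = (rhs1[idx] << CNT) | (rhs1[idx - 1] >> (limb_prec - CNT));

   where the previous limb travels through the DATA_IN/DATA_OUT pair
   maintained by prepare_data_in_out.  */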

/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision that is not the case.  */
	  || (!m_var_msb
	      && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
	      && (!m_upwards_2limb
		  || (m_upwards_2limb * limb_prec
		      < TYPE_PRECISION (lhs_type)))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && (m_upwards_2limb * limb_prec
		  <= ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type))))
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idx, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      if (!save_cast_conditional)
		{
		  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
		  insert_before (g);
		}
	      else
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = (e2 ? m_data[save_data_cnt + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else if (m_bb && m_data[save_data_cnt])
		t = m_data[save_data_cnt];
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1, rext);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2, rext);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
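
/* Editorial example for the widening path above: casting a signed
   _BitInt(130) to _BitInt(256) with 64-bit limbs gives
   low == (130 - 1) / 64 == 2 and high == CEIL (130, 64) == 3; limbs 0
   and 1 of the source are copied through, limb 2 combines the two
   significant bits with the sign extension computed by the arithmetic
   right shift by limb_prec - 1, and limb 3 is pure extension (0 or
   -1).  */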
1668 /* Helper function for handle_stmt method, handle a load from memory. */
1670 tree
1671 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1673 tree rhs1 = gimple_assign_rhs1 (stmt);
1674 tree rhs_type = TREE_TYPE (rhs1);
1675 bool eh = stmt_ends_bb_p (stmt);
1676 edge eh_edge = NULL;
1677 gimple *g;
1679 if (eh)
1681 edge_iterator ei;
1682 basic_block bb = gimple_bb (stmt);
1684 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1685 if (eh_edge->flags & EDGE_EH)
1686 break;
1689 if (TREE_CODE (rhs1) == COMPONENT_REF
1690 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1692 tree fld = TREE_OPERAND (rhs1, 1);
1693 /* For little-endian, we can allow as inputs bit-fields
1694 which start at a limb boundary. */
1695 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1696 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1697 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1698 goto normal_load;
1699 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1700 handle it normally for now. */
1701 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1702 goto normal_load;
1703 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1704 poly_int64 bitoffset;
1705 poly_uint64 field_offset, repr_offset;
1706 bool var_field_off = false;
1707 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1708 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1709 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1710 else
1712 bitoffset = 0;
1713 var_field_off = true;
1715 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1716 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1717 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1718 TREE_OPERAND (rhs1, 0), repr,
1719 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1720 HOST_WIDE_INT bo = bitoffset.to_constant ();
1721 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1722 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1723 if (m_first)
1725 if (m_upwards)
1727 gimple_stmt_iterator save_gsi = m_gsi;
1728 m_gsi = m_init_gsi;
1729 if (gsi_end_p (m_gsi))
1730 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1731 else
1732 gsi_next (&m_gsi);
1733 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1734 tree iv = make_ssa_name (m_limb_type);
1735 g = gimple_build_assign (iv, t);
1736 insert_before (g);
1737 if (eh)
1739 maybe_duplicate_eh_stmt (g, stmt);
1740 if (eh_edge)
1742 edge e = split_block (gsi_bb (m_gsi), g);
1743 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1744 = profile_probability::very_unlikely ();
1745 m_gsi = gsi_after_labels (e->dest);
1746 if (gsi_bb (save_gsi) == e->src)
1748 if (gsi_end_p (save_gsi))
1749 save_gsi = gsi_end_bb (e->dest);
1750 else
1751 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1753 if (m_preheader_bb == e->src)
1754 m_preheader_bb = e->dest;
1757 m_init_gsi = m_gsi;
1758 if (gsi_end_p (m_init_gsi))
1759 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1760 else
1761 gsi_prev (&m_init_gsi);
1762 m_gsi = save_gsi;
1763 tree out;
1764 prepare_data_in_out (iv, idx, &out);
1765 out = m_data[m_data_cnt];
1766 m_data.safe_push (out);
1768 else
1770 m_data.safe_push (NULL_TREE);
1771 m_data.safe_push (NULL_TREE);
1772 m_data.safe_push (NULL_TREE);
1776 tree nidx0 = NULL_TREE, nidx1;
1777 tree iv = m_data[m_data_cnt];
1778 if (m_cast_conditional && iv)
1780 gcc_assert (!m_bitfld_load);
1781 m_bitfld_load = m_data_cnt;
1783 if (tree_fits_uhwi_p (idx))
1785 unsigned prec = TYPE_PRECISION (rhs_type);
1786 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1787 gcc_assert (i * limb_prec < prec);
1788 nidx1 = size_int (i + bo_idx + 1);
1789 if ((i + 1) * limb_prec > prec)
1791 prec %= limb_prec;
1792 if (prec + bo_bit <= (unsigned) limb_prec)
1793 nidx1 = NULL_TREE;
1795 if (!iv)
1796 nidx0 = size_int (i + bo_idx);
1798 else
1800 if (!iv)
1802 if (bo_idx == 0)
1803 nidx0 = idx;
1804 else
1806 nidx0 = make_ssa_name (sizetype);
1807 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1808 size_int (bo_idx));
1809 insert_before (g);
1812 nidx1 = make_ssa_name (sizetype);
1813 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1814 size_int (bo_idx + 1));
1815 insert_before (g);
1818 tree iv2 = NULL_TREE;
1819 if (nidx0)
1821 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1822 iv = make_ssa_name (m_limb_type);
1823 g = gimple_build_assign (iv, t);
1824 insert_before (g);
1825 gcc_assert (!eh);
1827 if (nidx1)
1829 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1830 unsigned prec = TYPE_PRECISION (rhs_type);
1831 if (conditional)
1833 if ((prec % limb_prec) == 0
1834 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1835 conditional = false;
1837 edge edge_true = NULL, edge_false = NULL;
1838 if (conditional)
1840 g = gimple_build_cond (NE_EXPR, idx,
1841 size_int (prec / limb_prec),
1842 NULL_TREE, NULL_TREE);
1843 if_then (g, profile_probability::likely (),
1844 edge_true, edge_false);
1846 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1847 if (m_upwards_2limb
1848 && !m_first
1849 && !m_bitfld_load
1850 && !tree_fits_uhwi_p (idx))
1851 iv2 = m_data[m_data_cnt + 1];
1852 else
1853 iv2 = make_ssa_name (m_limb_type);
1854 g = gimple_build_assign (iv2, t);
1855 insert_before (g);
1856 if (eh)
1858 maybe_duplicate_eh_stmt (g, stmt);
1859 if (eh_edge)
1861 edge e = split_block (gsi_bb (m_gsi), g);
1862 m_gsi = gsi_after_labels (e->dest);
1863 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1864 = profile_probability::very_unlikely ();
1867 if (conditional)
1869 tree iv3 = make_ssa_name (m_limb_type);
1870 if (eh)
1871 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1872 gphi *phi = create_phi_node (iv3, edge_true->dest);
1873 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1874 add_phi_arg (phi, build_zero_cst (m_limb_type),
1875 edge_false, UNKNOWN_LOCATION);
1876 m_gsi = gsi_after_labels (edge_true->dest);
1879 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1880 iv, build_int_cst (unsigned_type_node, bo_bit));
1881 insert_before (g);
1882 iv = gimple_assign_lhs (g);
1883 if (iv2)
1885 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1886 iv2, build_int_cst (unsigned_type_node,
1887 limb_prec - bo_bit));
1888 insert_before (g);
1889 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1890 gimple_assign_lhs (g), iv);
1891 insert_before (g);
1892 iv = gimple_assign_lhs (g);
1893 if (m_data[m_data_cnt])
1894 m_data[m_data_cnt] = iv2;
1896 if (tree_fits_uhwi_p (idx))
1898 tree atype = limb_access_type (rhs_type, idx);
1899 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1900 iv = add_cast (atype, iv);
1902 m_data_cnt += 3;
1903 return iv;
1906 normal_load:
1907 /* Use write_p = true for loads with EH edges to make
1908 sure limb_access doesn't add a cast as a separate
1909 statement after it. */
1910 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1911 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1912 g = gimple_build_assign (ret, rhs1);
1913 insert_before (g);
1914 if (eh)
1916 maybe_duplicate_eh_stmt (g, stmt);
1917 if (eh_edge)
1919 edge e = split_block (gsi_bb (m_gsi), g);
1920 m_gsi = gsi_after_labels (e->dest);
1921 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1922 = profile_probability::very_unlikely ();
1924 if (tree_fits_uhwi_p (idx))
1926 tree atype = limb_access_type (rhs_type, idx);
1927 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1928 ret = add_cast (atype, ret);
1931 return ret;
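/* Illustrative sketch only (assuming 64-bit limbs and a little-endian
   target): for a load from a bit-field whose representative starts
   bo_bit = 3 bits earlier, limb IDX of the result is composed as

     iv  = repr[bo_idx + idx];
     iv2 = repr[bo_idx + idx + 1];
     res = (iv >> 3) | (iv2 << 61);

   where the second read is elided (or emitted conditionally) when the
   most significant partial limb fits into the first read.  */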
1934 /* Return a limb IDX from a mergeable statement STMT. */
1936 tree
1937 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1939 tree lhs, rhs1, rhs2 = NULL_TREE;
1940 gimple *g;
1941 switch (gimple_code (stmt))
1943 case GIMPLE_ASSIGN:
1944 if (gimple_assign_load_p (stmt))
1945 return handle_load (stmt, idx);
1946 switch (gimple_assign_rhs_code (stmt))
1948 case BIT_AND_EXPR:
1949 case BIT_IOR_EXPR:
1950 case BIT_XOR_EXPR:
1951 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1952 /* FALLTHRU */
1953 case BIT_NOT_EXPR:
1954 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1955 lhs = make_ssa_name (TREE_TYPE (rhs1));
1956 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1957 rhs1, rhs2);
1958 insert_before (g);
1959 return lhs;
1960 case PLUS_EXPR:
1961 case MINUS_EXPR:
1962 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1963 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1964 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1965 rhs1, rhs2, idx);
1966 case NEGATE_EXPR:
1967 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1968 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1969 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1970 case LSHIFT_EXPR:
1971 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1972 idx),
1973 gimple_assign_rhs2 (stmt), idx);
1974 case SSA_NAME:
1975 case INTEGER_CST:
1976 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1977 CASE_CONVERT:
1978 case VIEW_CONVERT_EXPR:
1979 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1980 gimple_assign_rhs1 (stmt), idx);
1981 default:
1982 break;
1984 break;
1985 default:
1986 break;
1988 gcc_unreachable ();
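/* A rough sketch: for a mergeable bitwise statement like c = a & b,
   handle_stmt emits for each limb index IDX simply

     c_idx = a_idx & b_idx;

   via handle_operand on both operands; PLUS_EXPR/MINUS_EXPR go through
   handle_plus_minus so the carry/borrow is propagated between limbs,
   and shifts go through handle_lshift.  */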
1991 /* Return minimum precision of OP at STMT.
1992 A positive value is the minimum precision above which all bits
1993 are zero; a negative value means all bits above the negation of
1994 the value are copies of the sign bit. */
1996 static int
1997 range_to_prec (tree op, gimple *stmt)
1999 int_range_max r;
2000 wide_int w;
2001 tree type = TREE_TYPE (op);
2002 unsigned int prec = TYPE_PRECISION (type);
2004 if (!optimize
2005 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2006 || r.undefined_p ())
2008 if (TYPE_UNSIGNED (type))
2009 return prec;
2010 else
2011 return MIN ((int) -prec, -2);
2014 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2016 w = r.lower_bound ();
2017 if (wi::neg_p (w))
2019 int min_prec1 = wi::min_precision (w, SIGNED);
2020 w = r.upper_bound ();
2021 int min_prec2 = wi::min_precision (w, SIGNED);
2022 int min_prec = MAX (min_prec1, min_prec2);
2023 return MIN (-min_prec, -2);
2027 w = r.upper_bound ();
2028 int min_prec = wi::min_precision (w, UNSIGNED);
2029 return MAX (min_prec, 1);
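/* Worked examples (illustrative): an unsigned operand with computed
   range [0, 1000] yields 10 (1000 fits in 10 bits); a signed operand
   with range [-5, 5] yields -4 (everything above bit 3 is a sign bit
   copy); without a usable range the result falls back to prec or
   MIN (-prec, -2).  */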
2032 /* Return address of the first limb of OP and write into *PREC
2033 its precision. If positive, the operand is zero-extended
2034 from that precision; if negative, the operand is sign-extended
2035 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2036 otherwise *PREC_STORED is prec from the innermost call without
2037 range optimizations. */
2039 tree
2040 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2041 int *prec_stored, int *prec)
2043 wide_int w;
2044 location_t loc_save = m_loc;
2045 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2046 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2047 && TREE_CODE (op) != INTEGER_CST)
2049 do_int:
2050 *prec = range_to_prec (op, stmt);
2051 bitint_prec_kind kind = bitint_prec_small;
2052 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2053 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2054 kind = bitint_precision_kind (TREE_TYPE (op));
2055 if (kind == bitint_prec_middle)
2057 tree type = NULL_TREE;
2058 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2060 tree op_type = TREE_TYPE (op);
2061 unsigned HOST_WIDE_INT nelts
2062 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2063 /* Add support for 3 or more limbs filled in from normal
2064 integral type if this assert fails. If no target chooses
2065 limb mode smaller than half of largest supported normal
2066 integral type, this will not be needed. */
2067 gcc_assert (nelts <= 2);
2068 if (prec_stored)
2069 *prec_stored = (TYPE_UNSIGNED (op_type)
2070 ? TYPE_PRECISION (op_type)
2071 : -TYPE_PRECISION (op_type));
2072 if (*prec <= limb_prec && *prec >= -limb_prec)
2074 nelts = 1;
2075 if (prec_stored)
2077 if (TYPE_UNSIGNED (op_type))
2079 if (*prec_stored > limb_prec)
2080 *prec_stored = limb_prec;
2082 else if (*prec_stored < -limb_prec)
2083 *prec_stored = -limb_prec;
2086 tree atype = build_array_type_nelts (m_limb_type, nelts);
2087 tree var = create_tmp_var (atype);
2088 tree t1 = op;
2089 if (!useless_type_conversion_p (m_limb_type, op_type))
2090 t1 = add_cast (m_limb_type, t1);
2091 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2092 NULL_TREE, NULL_TREE);
2093 gimple *g = gimple_build_assign (v, t1);
2094 insert_before (g);
2095 if (nelts > 1)
2097 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2098 g = gimple_build_assign (make_ssa_name (op_type),
2099 RSHIFT_EXPR, op, lp);
2100 insert_before (g);
2101 tree t2 = gimple_assign_lhs (g);
2102 t2 = add_cast (m_limb_type, t2);
2103 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2104 NULL_TREE, NULL_TREE);
2105 g = gimple_build_assign (v, t2);
2106 insert_before (g);
2108 tree ret = build_fold_addr_expr (var);
2109 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2111 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2112 g = gimple_build_assign (var, clobber);
2113 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2115 m_loc = loc_save;
2116 return ret;
2118 switch (TREE_CODE (op))
2120 case SSA_NAME:
2121 if (m_names == NULL
2122 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2124 gimple *g = SSA_NAME_DEF_STMT (op);
2125 tree ret;
2126 m_loc = gimple_location (g);
2127 if (gimple_assign_load_p (g))
2129 *prec = range_to_prec (op, NULL);
2130 if (prec_stored)
2131 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2132 ? TYPE_PRECISION (TREE_TYPE (op))
2133 : -TYPE_PRECISION (TREE_TYPE (op)));
2134 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2135 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2136 NULL_TREE, true, GSI_SAME_STMT);
2138 else if (gimple_code (g) == GIMPLE_NOP)
2140 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2141 if (prec_stored)
2142 *prec_stored = *prec;
2143 tree var = create_tmp_var (m_limb_type);
2144 TREE_ADDRESSABLE (var) = 1;
2145 ret = build_fold_addr_expr (var);
2146 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2148 tree clobber = build_clobber (m_limb_type,
2149 CLOBBER_STORAGE_END);
2150 g = gimple_build_assign (var, clobber);
2151 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2154 else
2156 gcc_assert (gimple_assign_cast_p (g));
2157 tree rhs1 = gimple_assign_rhs1 (g);
2158 bitint_prec_kind kind = bitint_prec_small;
2159 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2160 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2161 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2162 if (kind >= bitint_prec_large)
2164 tree lhs_type = TREE_TYPE (op);
2165 tree rhs_type = TREE_TYPE (rhs1);
2166 int prec_stored_val = 0;
2167 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2168 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2170 if (TYPE_UNSIGNED (lhs_type)
2171 && !TYPE_UNSIGNED (rhs_type))
2172 gcc_assert (*prec >= 0 || prec_stored == NULL);
2174 else
2176 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2178 else if (TYPE_UNSIGNED (lhs_type))
2180 gcc_assert (*prec > 0
2181 || prec_stored_val > 0
2182 || (-prec_stored_val
2183 >= TYPE_PRECISION (lhs_type)));
2184 *prec = TYPE_PRECISION (lhs_type);
2186 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2188 else
2189 *prec = -TYPE_PRECISION (lhs_type);
2192 else
2194 op = rhs1;
2195 stmt = g;
2196 goto do_int;
2199 m_loc = loc_save;
2200 return ret;
2202 else
2204 int p = var_to_partition (m_map, op);
2205 gcc_assert (m_vars[p] != NULL_TREE);
2206 *prec = range_to_prec (op, stmt);
2207 if (prec_stored)
2208 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2209 ? TYPE_PRECISION (TREE_TYPE (op))
2210 : -TYPE_PRECISION (TREE_TYPE (op)));
2211 return build_fold_addr_expr (m_vars[p]);
2213 case INTEGER_CST:
2214 unsigned int min_prec, mp;
2215 tree type;
2216 w = wi::to_wide (op);
2217 if (tree_int_cst_sgn (op) >= 0)
2219 min_prec = wi::min_precision (w, UNSIGNED);
2220 *prec = MAX (min_prec, 1);
2222 else
2224 min_prec = wi::min_precision (w, SIGNED);
2225 *prec = MIN ((int) -min_prec, -2);
2227 mp = CEIL (min_prec, limb_prec) * limb_prec;
2228 if (mp == 0)
2229 mp = 1;
2230 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2231 && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2232 || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2233 type = TREE_TYPE (op);
2234 else
2235 type = build_bitint_type (mp, 1);
2236 if (TREE_CODE (type) != BITINT_TYPE
2237 || bitint_precision_kind (type) == bitint_prec_small)
2239 if (TYPE_PRECISION (type) <= limb_prec)
2240 type = m_limb_type;
2241 else
2243 while (bitint_precision_kind (mp) == bitint_prec_small)
2244 mp += limb_prec;
2245 /* This case is for targets which e.g. have 64-bit
2246 limb but categorize up to 128-bits _BitInts as
2247 small. We could use type of m_limb_type[2] and
2248 similar instead to save space. */
2249 type = build_bitint_type (mp, 1);
2252 if (prec_stored)
2254 if (tree_int_cst_sgn (op) >= 0)
2255 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2256 else
2257 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2259 op = tree_output_constant_def (fold_convert (type, op));
2260 return build_fold_addr_expr (op);
2261 default:
2262 gcc_unreachable ();
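/* Illustrative instance of the INTEGER_CST path, assuming 64-bit limbs:
   for the constant 0x1234 used as a _BitInt(192) operand, min_prec is
   13 and mp is 64, so the constant is converted to the limb type,
   emitted into the constant pool via tree_output_constant_def and its
   address returned, with *prec set to 13.  */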
2266 /* Helper function: create a loop before the current location,
2267 starting with sizetype INIT value on the preheader edge. Return
2268 the PHI result and set *IDX_NEXT to the SSA_NAME it creates and
2269 uses on the latch edge. */
2271 tree
2272 bitint_large_huge::create_loop (tree init, tree *idx_next)
2274 if (!gsi_end_p (m_gsi))
2275 gsi_prev (&m_gsi);
2276 else
2277 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2278 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2279 edge e2 = split_block (e1->dest, (gimple *) NULL);
2280 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2281 e3->probability = profile_probability::very_unlikely ();
2282 e2->flags = EDGE_FALSE_VALUE;
2283 e2->probability = e3->probability.invert ();
2284 tree idx = make_ssa_name (sizetype);
2285 gphi *phi = create_phi_node (idx, e1->dest);
2286 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2287 *idx_next = make_ssa_name (sizetype);
2288 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2289 m_gsi = gsi_after_labels (e1->dest);
2290 m_bb = e1->dest;
2291 m_preheader_bb = e1->src;
2292 class loop *loop = alloc_loop ();
2293 loop->header = e1->dest;
2294 add_loop (loop, e1->src->loop_father);
2295 return idx;
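/* The emitted skeleton is a single-block loop, roughly (a sketch; the
   caller still has to emit IDX_NEXT and the exit condition before the
   latch edge is taken):

     <preheader>
   header:
     idx = PHI <INIT (preheader), IDX_NEXT (latch)>
     ... statements inserted at m_gsi ...
     if (<cond>) goto header; else goto <exit>;  */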
2298 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2299 lowered using iteration from the least significant limb up to the most
2300 significant limb. For large _BitInt it is emitted as straight line code
2301 before current location, for huge _BitInt as a loop handling two limbs
2302 at once, followed by handling up to 2 limbs in straight line code (at most
2303 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2304 comparisons, in that case CMP_CODE should be the comparison code and
2305 CMP_OP1/CMP_OP2 the comparison operands. */
2307 tree
2308 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2309 tree cmp_op1, tree cmp_op2)
2311 bool eq_p = cmp_code != ERROR_MARK;
2312 tree type;
2313 if (eq_p)
2314 type = TREE_TYPE (cmp_op1);
2315 else
2316 type = TREE_TYPE (gimple_assign_lhs (stmt));
2317 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2318 bitint_prec_kind kind = bitint_precision_kind (type);
2319 gcc_assert (kind >= bitint_prec_large);
2320 gimple *g;
2321 tree lhs = gimple_get_lhs (stmt);
2322 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2323 if (lhs
2324 && TREE_CODE (lhs) == SSA_NAME
2325 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2326 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2328 int p = var_to_partition (m_map, lhs);
2329 gcc_assert (m_vars[p] != NULL_TREE);
2330 m_lhs = lhs = m_vars[p];
2332 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2333 bool sext = false;
2334 tree ext = NULL_TREE, store_operand = NULL_TREE;
2335 bool eh = false;
2336 basic_block eh_pad = NULL;
2337 tree nlhs = NULL_TREE;
2338 unsigned HOST_WIDE_INT bo_idx = 0;
2339 unsigned HOST_WIDE_INT bo_bit = 0;
2340 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2341 if (gimple_store_p (stmt))
2343 store_operand = gimple_assign_rhs1 (stmt);
2344 eh = stmt_ends_bb_p (stmt);
2345 if (eh)
2347 edge e;
2348 edge_iterator ei;
2349 basic_block bb = gimple_bb (stmt);
2351 FOR_EACH_EDGE (e, ei, bb->succs)
2352 if (e->flags & EDGE_EH)
2354 eh_pad = e->dest;
2355 break;
2358 if (TREE_CODE (lhs) == COMPONENT_REF
2359 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2361 tree fld = TREE_OPERAND (lhs, 1);
2362 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2363 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2364 poly_int64 bitoffset;
2365 poly_uint64 field_offset, repr_offset;
2366 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2367 nlhs = lhs;
2368 else
2370 bool var_field_off = false;
2371 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2372 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2373 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2374 else
2376 bitoffset = 0;
2377 var_field_off = true;
2379 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2380 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2381 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2382 TREE_OPERAND (lhs, 0), repr,
2383 var_field_off
2384 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2385 HOST_WIDE_INT bo = bitoffset.to_constant ();
2386 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2387 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2391 if ((store_operand
2392 && TREE_CODE (store_operand) == SSA_NAME
2393 && (m_names == NULL
2394 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2395 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2396 || gimple_assign_cast_p (stmt))
2398 rhs1 = gimple_assign_rhs1 (store_operand
2399 ? SSA_NAME_DEF_STMT (store_operand)
2400 : stmt);
2401 /* Optimize mergeable ops ending with widening cast to _BitInt
2402 (or followed by store). We can lower just the limbs of the
2403 cast operand and widen afterwards. */
2404 if (TREE_CODE (rhs1) == SSA_NAME
2405 && (m_names == NULL
2406 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2407 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2408 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2409 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2410 limb_prec) < CEIL (prec, limb_prec)
2411 || (kind == bitint_prec_huge
2412 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2414 store_operand = rhs1;
2415 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2416 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2417 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2418 sext = true;
2421 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2422 if (kind == bitint_prec_large)
2423 cnt = CEIL (prec, limb_prec);
2424 else
2426 rem = (prec % (2 * limb_prec));
2427 end = (prec - rem) / limb_prec;
2428 cnt = 2 + CEIL (rem, limb_prec);
2429 idx = idx_first = create_loop (size_zero_node, &idx_next);
2432 basic_block edge_bb = NULL;
2433 if (eq_p)
2435 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2436 gsi_prev (&gsi);
2437 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2438 edge_bb = e->src;
2439 if (kind == bitint_prec_large)
2440 m_gsi = gsi_end_bb (edge_bb);
2442 else
2443 m_after_stmt = stmt;
2444 if (kind != bitint_prec_large)
2445 m_upwards_2limb = end;
2446 m_upwards = true;
2448 bool separate_ext
2449 = (prec != (unsigned) TYPE_PRECISION (type)
2450 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2451 > CEIL (prec, limb_prec)));
2453 for (unsigned i = 0; i < cnt; i++)
2455 m_data_cnt = 0;
2456 if (kind == bitint_prec_large)
2457 idx = size_int (i);
2458 else if (i >= 2)
2459 idx = size_int (end + (i > 2));
2460 if (eq_p)
2462 rhs1 = handle_operand (cmp_op1, idx);
2463 tree rhs2 = handle_operand (cmp_op2, idx);
2464 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2465 insert_before (g);
2466 edge e1 = split_block (gsi_bb (m_gsi), g);
2467 e1->flags = EDGE_FALSE_VALUE;
2468 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2469 e1->probability = profile_probability::unlikely ();
2470 e2->probability = e1->probability.invert ();
2471 if (i == 0)
2472 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2473 m_gsi = gsi_after_labels (e1->dest);
2475 else
2477 if (store_operand)
2478 rhs1 = handle_operand (store_operand, idx);
2479 else
2480 rhs1 = handle_stmt (stmt, idx);
2481 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2482 rhs1 = add_cast (m_limb_type, rhs1);
2483 if (sext && i == cnt - 1)
2484 ext = rhs1;
2485 tree nidx = idx;
2486 if (bo_idx)
2488 if (tree_fits_uhwi_p (idx))
2489 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2490 else
2492 nidx = make_ssa_name (sizetype);
2493 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2494 size_int (bo_idx));
2495 insert_before (g);
2498 bool done = false;
2499 basic_block new_bb = NULL;
2500 /* Handle stores into bit-fields. */
2501 if (bo_bit)
2503 if (i == 0)
2505 edge e2 = NULL;
2506 if (kind != bitint_prec_large)
2508 prepare_data_in_out (build_zero_cst (m_limb_type),
2509 idx, &bf_next);
2510 bf_next = m_data.pop ();
2511 bf_cur = m_data.pop ();
2512 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2513 NULL_TREE, NULL_TREE);
2514 edge edge_true;
2515 if_then_else (g, profile_probability::unlikely (),
2516 edge_true, e2);
2517 new_bb = e2->dest;
2519 tree ftype
2520 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2521 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2522 bitsize_int (limb_prec - bo_bit),
2523 bitsize_int (bo_idx * limb_prec + bo_bit));
2524 tree t = add_cast (ftype, rhs1);
2525 g = gimple_build_assign (bfr, t);
2526 insert_before (g);
2527 if (eh)
2529 maybe_duplicate_eh_stmt (g, stmt);
2530 if (eh_pad)
2532 edge e = split_block (gsi_bb (m_gsi), g);
2533 m_gsi = gsi_after_labels (e->dest);
2534 make_edge (e->src, eh_pad, EDGE_EH)->probability
2535 = profile_probability::very_unlikely ();
2538 if (kind == bitint_prec_large)
2540 bf_cur = rhs1;
2541 done = true;
2543 else if (e2)
2544 m_gsi = gsi_after_labels (e2->src);
2546 if (!done)
2548 tree t1 = make_ssa_name (m_limb_type);
2549 tree t2 = make_ssa_name (m_limb_type);
2550 tree t3 = make_ssa_name (m_limb_type);
2551 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2552 build_int_cst (unsigned_type_node,
2553 limb_prec - bo_bit));
2554 insert_before (g);
2555 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2556 build_int_cst (unsigned_type_node,
2557 bo_bit));
2558 insert_before (g);
2559 bf_cur = rhs1;
2560 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2561 insert_before (g);
2562 rhs1 = t3;
2563 if (bf_next && i == 1)
2565 g = gimple_build_assign (bf_next, bf_cur);
2566 insert_before (g);
2570 if (!done)
2572 /* Handle bit-field access to partial last limb if needed. */
2573 if (nlhs
2574 && i == cnt - 1
2575 && !separate_ext
2576 && tree_fits_uhwi_p (idx))
2578 unsigned int tprec = TYPE_PRECISION (type);
2579 unsigned int rprec = tprec % limb_prec;
2580 if (rprec + bo_bit < (unsigned) limb_prec)
2582 tree ftype
2583 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2584 tree bfr = build3 (BIT_FIELD_REF, ftype,
2585 unshare_expr (nlhs),
2586 bitsize_int (rprec + bo_bit),
2587 bitsize_int ((bo_idx
2588 + tprec / limb_prec)
2589 * limb_prec));
2590 tree t = add_cast (ftype, rhs1);
2591 g = gimple_build_assign (bfr, t);
2592 done = true;
2593 bf_cur = NULL_TREE;
2595 else if (rprec + bo_bit == (unsigned) limb_prec)
2596 bf_cur = NULL_TREE;
2598 /* Otherwise, stores to any other lhs. */
2599 if (!done)
2601 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2602 nidx, true);
2603 g = gimple_build_assign (l, rhs1);
2605 insert_before (g);
2606 if (eh)
2608 maybe_duplicate_eh_stmt (g, stmt);
2609 if (eh_pad)
2611 edge e = split_block (gsi_bb (m_gsi), g);
2612 m_gsi = gsi_after_labels (e->dest);
2613 make_edge (e->src, eh_pad, EDGE_EH)->probability
2614 = profile_probability::very_unlikely ();
2617 if (new_bb)
2618 m_gsi = gsi_after_labels (new_bb);
2621 m_first = false;
2622 if (kind == bitint_prec_huge && i <= 1)
2624 if (i == 0)
2626 idx = make_ssa_name (sizetype);
2627 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2628 size_one_node);
2629 insert_before (g);
2631 else
2633 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2634 size_int (2));
2635 insert_before (g);
2636 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2637 NULL_TREE, NULL_TREE);
2638 insert_before (g);
2639 if (eq_p)
2640 m_gsi = gsi_after_labels (edge_bb);
2641 else
2642 m_gsi = gsi_for_stmt (stmt);
2643 m_bb = NULL;
2648 if (separate_ext)
2650 if (sext)
2652 ext = add_cast (signed_type_for (m_limb_type), ext);
2653 tree lpm1 = build_int_cst (unsigned_type_node,
2654 limb_prec - 1);
2655 tree n = make_ssa_name (TREE_TYPE (ext));
2656 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2657 insert_before (g);
2658 ext = add_cast (m_limb_type, n);
2660 else
2661 ext = build_zero_cst (m_limb_type);
2662 kind = bitint_precision_kind (type);
2663 unsigned start = CEIL (prec, limb_prec);
2664 prec = TYPE_PRECISION (type);
2665 idx = idx_first = idx_next = NULL_TREE;
2666 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2667 kind = bitint_prec_large;
2668 if (kind == bitint_prec_large)
2669 cnt = CEIL (prec, limb_prec) - start;
2670 else
2672 rem = prec % limb_prec;
2673 end = (prec - rem) / limb_prec;
2674 cnt = (bo_bit != 0) + 1 + (rem != 0);
2676 for (unsigned i = 0; i < cnt; i++)
2678 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2679 idx = size_int (start + i);
2680 else if (i == cnt - 1 && (rem != 0))
2681 idx = size_int (end);
2682 else if (i == (bo_bit != 0))
2683 idx = create_loop (size_int (start + i), &idx_next);
2684 rhs1 = ext;
2685 if (bf_cur != NULL_TREE && bf_cur != ext)
2687 tree t1 = make_ssa_name (m_limb_type);
2688 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2689 build_int_cst (unsigned_type_node,
2690 limb_prec - bo_bit));
2691 insert_before (g);
2692 if (integer_zerop (ext))
2693 rhs1 = t1;
2694 else
2696 tree t2 = make_ssa_name (m_limb_type);
2697 rhs1 = make_ssa_name (m_limb_type);
2698 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2699 build_int_cst (unsigned_type_node,
2700 bo_bit));
2701 insert_before (g);
2702 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2703 insert_before (g);
2705 bf_cur = ext;
2707 tree nidx = idx;
2708 if (bo_idx)
2710 if (tree_fits_uhwi_p (idx))
2711 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2712 else
2714 nidx = make_ssa_name (sizetype);
2715 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2716 size_int (bo_idx));
2717 insert_before (g);
2720 bool done = false;
2721 /* Handle bit-field access to partial last limb if needed. */
2722 if (nlhs && i == cnt - 1)
2724 unsigned int tprec = TYPE_PRECISION (type);
2725 unsigned int rprec = tprec % limb_prec;
2726 if (rprec + bo_bit < (unsigned) limb_prec)
2728 tree ftype
2729 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2730 tree bfr = build3 (BIT_FIELD_REF, ftype,
2731 unshare_expr (nlhs),
2732 bitsize_int (rprec + bo_bit),
2733 bitsize_int ((bo_idx + tprec / limb_prec)
2734 * limb_prec));
2735 tree t = add_cast (ftype, rhs1);
2736 g = gimple_build_assign (bfr, t);
2737 done = true;
2738 bf_cur = NULL_TREE;
2740 else if (rprec + bo_bit == (unsigned) limb_prec)
2741 bf_cur = NULL_TREE;
2743 /* Otherwise, stores to any other lhs. */
2744 if (!done)
2746 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2747 g = gimple_build_assign (l, rhs1);
2749 insert_before (g);
2750 if (eh)
2752 maybe_duplicate_eh_stmt (g, stmt);
2753 if (eh_pad)
2755 edge e = split_block (gsi_bb (m_gsi), g);
2756 m_gsi = gsi_after_labels (e->dest);
2757 make_edge (e->src, eh_pad, EDGE_EH)->probability
2758 = profile_probability::very_unlikely ();
2761 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2763 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2764 size_one_node);
2765 insert_before (g);
2766 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2767 NULL_TREE, NULL_TREE);
2768 insert_before (g);
2769 m_gsi = gsi_for_stmt (stmt);
2770 m_bb = NULL;
2774 if (bf_cur != NULL_TREE)
2776 unsigned int tprec = TYPE_PRECISION (type);
2777 unsigned int rprec = tprec % limb_prec;
2778 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2779 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2780 bitsize_int (rprec + bo_bit),
2781 bitsize_int ((bo_idx + tprec / limb_prec)
2782 * limb_prec));
2783 rhs1 = bf_cur;
2784 if (bf_cur != ext)
2786 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2787 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2788 build_int_cst (unsigned_type_node,
2789 limb_prec - bo_bit));
2790 insert_before (g);
2792 rhs1 = add_cast (ftype, rhs1);
2793 g = gimple_build_assign (bfr, rhs1);
2794 insert_before (g);
2795 if (eh)
2797 maybe_duplicate_eh_stmt (g, stmt);
2798 if (eh_pad)
2800 edge e = split_block (gsi_bb (m_gsi), g);
2801 m_gsi = gsi_after_labels (e->dest);
2802 make_edge (e->src, eh_pad, EDGE_EH)->probability
2803 = profile_probability::very_unlikely ();
2808 if (gimple_store_p (stmt))
2810 unlink_stmt_vdef (stmt);
2811 release_ssa_name (gimple_vdef (stmt));
2812 gsi_remove (&m_gsi, true);
2814 if (eq_p)
2816 lhs = make_ssa_name (boolean_type_node);
2817 basic_block bb = gimple_bb (stmt);
2818 gphi *phi = create_phi_node (lhs, bb);
2819 edge e = find_edge (gsi_bb (m_gsi), bb);
2820 unsigned int n = EDGE_COUNT (bb->preds);
2821 for (unsigned int i = 0; i < n; i++)
2823 edge e2 = EDGE_PRED (bb, i);
2824 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2825 e2, UNKNOWN_LOCATION);
2827 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2828 return lhs;
2830 else
2831 return NULL_TREE;
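/* A rough sketch of the huge case for a store of d = a ^ b with 64-bit
   limbs and prec = 575 (end = 8, rem = 63):

     for (idx = 0; idx < 8; idx += 2)
       {
         d[idx] = a[idx] ^ b[idx];
         d[idx + 1] = a[idx + 1] ^ b[idx + 1];
       }
     d[8] = a[8] ^ b[8];  // partial most significant limb

   (illustrative only; bit-field destinations, EH edges and the separate
   extension add the extra handling above).  */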
2834 /* Handle a large/huge _BitInt comparison statement STMT other than
2835 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2836 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2837 lowered by iteration from the most significant limb downwards to
2838 the least significant one, for large _BitInt in straight line code,
2839 otherwise with most significant limb handled in
2840 straight line code followed by a loop handling one limb at a time.
2841 Comparisons with unsigned huge _BitInt with precisions which are
2842 multiples of limb precision can use just the loop and don't need to
2843 handle most significant limb before the loop. The loop or straight
2844 line code jumps to final basic block if a particular pair of limbs
2845 is not equal. */
2847 tree
2848 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2849 tree cmp_op1, tree cmp_op2)
2851 tree type = TREE_TYPE (cmp_op1);
2852 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2853 bitint_prec_kind kind = bitint_precision_kind (type);
2854 gcc_assert (kind >= bitint_prec_large);
2855 gimple *g;
2856 if (!TYPE_UNSIGNED (type)
2857 && integer_zerop (cmp_op2)
2858 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2860 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2861 tree idx = size_int (end);
2862 m_data_cnt = 0;
2863 tree rhs1 = handle_operand (cmp_op1, idx);
2864 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2866 tree stype = signed_type_for (TREE_TYPE (rhs1));
2867 rhs1 = add_cast (stype, rhs1);
2869 tree lhs = make_ssa_name (boolean_type_node);
2870 g = gimple_build_assign (lhs, cmp_code, rhs1,
2871 build_zero_cst (TREE_TYPE (rhs1)));
2872 insert_before (g);
2873 cmp_code = NE_EXPR;
2874 return lhs;
2877 unsigned cnt, rem = 0, end = 0;
2878 tree idx = NULL_TREE, idx_next = NULL_TREE;
2879 if (kind == bitint_prec_large)
2880 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2881 else
2883 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2884 if (rem == 0 && !TYPE_UNSIGNED (type))
2885 rem = limb_prec;
2886 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2887 cnt = 1 + (rem != 0);
2890 basic_block edge_bb = NULL;
2891 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2892 gsi_prev (&gsi);
2893 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2894 edge_bb = e->src;
2895 m_gsi = gsi_end_bb (edge_bb);
2897 edge *edges = XALLOCAVEC (edge, cnt * 2);
2898 for (unsigned i = 0; i < cnt; i++)
2900 m_data_cnt = 0;
2901 if (kind == bitint_prec_large)
2902 idx = size_int (cnt - i - 1);
2903 else if (i == cnt - 1)
2904 idx = create_loop (size_int (end - 1), &idx_next);
2905 else
2906 idx = size_int (end);
2907 tree rhs1 = handle_operand (cmp_op1, idx);
2908 tree rhs2 = handle_operand (cmp_op2, idx);
2909 if (i == 0
2910 && !TYPE_UNSIGNED (type)
2911 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2913 tree stype = signed_type_for (TREE_TYPE (rhs1));
2914 rhs1 = add_cast (stype, rhs1);
2915 rhs2 = add_cast (stype, rhs2);
2917 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2918 insert_before (g);
2919 edge e1 = split_block (gsi_bb (m_gsi), g);
2920 e1->flags = EDGE_FALSE_VALUE;
2921 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2922 e1->probability = profile_probability::likely ();
2923 e2->probability = e1->probability.invert ();
2924 if (i == 0)
2925 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2926 m_gsi = gsi_after_labels (e1->dest);
2927 edges[2 * i] = e2;
2928 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2929 insert_before (g);
2930 e1 = split_block (gsi_bb (m_gsi), g);
2931 e1->flags = EDGE_FALSE_VALUE;
2932 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2933 e1->probability = profile_probability::unlikely ();
2934 e2->probability = e1->probability.invert ();
2935 m_gsi = gsi_after_labels (e1->dest);
2936 edges[2 * i + 1] = e2;
2937 m_first = false;
2938 if (kind == bitint_prec_huge && i == cnt - 1)
2940 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2941 insert_before (g);
2942 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2943 NULL_TREE, NULL_TREE);
2944 insert_before (g);
2945 edge true_edge, false_edge;
2946 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2947 &true_edge, &false_edge);
2948 m_gsi = gsi_after_labels (false_edge->dest);
2949 m_bb = NULL;
2953 tree lhs = make_ssa_name (boolean_type_node);
2954 basic_block bb = gimple_bb (stmt);
2955 gphi *phi = create_phi_node (lhs, bb);
2956 for (unsigned int i = 0; i < cnt * 2; i++)
2958 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2959 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2960 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2962 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2963 ? boolean_true_node : boolean_false_node,
2964 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2965 cmp_code = NE_EXPR;
2966 return lhs;
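/* A sketch of the huge case for a < b on unsigned _BitInt whose
   precision is a multiple of limb_prec (only the loop is needed):

     for (idx = end - 1; ; --idx)
       {
         if (a[idx] > b[idx]) { lhs = false; break; }
         if (a[idx] < b[idx]) { lhs = true; break; }
         if (idx == 0) { lhs = false; break; }  // all limbs equal
       }

   with the most significant limb compared in a signed type first when
   TYPE is signed.  */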
2969 /* Lower large/huge _BitInt left and right shift except for left
2970 shift by < limb_prec constant. */
2972 void
2973 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2975 tree rhs1 = gimple_assign_rhs1 (stmt);
2976 tree lhs = gimple_assign_lhs (stmt);
2977 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2978 tree type = TREE_TYPE (rhs1);
2979 gimple *final_stmt = gsi_stmt (m_gsi);
2980 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2981 && bitint_precision_kind (type) >= bitint_prec_large);
2982 int prec = TYPE_PRECISION (type);
2983 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2984 gimple *g;
2985 if (obj == NULL_TREE)
2987 int part = var_to_partition (m_map, lhs);
2988 gcc_assert (m_vars[part] != NULL_TREE);
2989 obj = m_vars[part];
2991 /* Preparation code common for both left and right shifts.
2992 unsigned n1 = n % limb_prec;
2993 size_t n2 = n / limb_prec;
2994 size_t n3 = n1 != 0;
2995 unsigned n4 = (limb_prec - n1) % limb_prec;
2996 (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)). */
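/* E.g. (illustrative) with limb_prec 64 and n = 100:
   n1 = 36, n2 = 1, n3 = 1, n4 = 28.  */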
2997 if (TREE_CODE (n) == INTEGER_CST)
2999 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3000 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3001 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3002 n3 = size_int (!integer_zerop (n1));
3003 n4 = int_const_binop (TRUNC_MOD_EXPR,
3004 int_const_binop (MINUS_EXPR, lp, n1), lp);
3006 else
3008 n1 = make_ssa_name (TREE_TYPE (n));
3009 n2 = make_ssa_name (sizetype);
3010 n3 = make_ssa_name (sizetype);
3011 n4 = make_ssa_name (TREE_TYPE (n));
3012 if (pow2p_hwi (limb_prec))
3014 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3015 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3016 insert_before (g);
3017 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3018 TREE_TYPE (n))
3019 ? n2 : make_ssa_name (TREE_TYPE (n)),
3020 RSHIFT_EXPR, n,
3021 build_int_cst (TREE_TYPE (n),
3022 exact_log2 (limb_prec)));
3023 insert_before (g);
3024 if (gimple_assign_lhs (g) != n2)
3026 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3027 insert_before (g);
3029 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3030 NEGATE_EXPR, n1);
3031 insert_before (g);
3032 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3033 lpm1);
3034 insert_before (g);
3036 else
3038 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3039 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3040 insert_before (g);
3041 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3042 TREE_TYPE (n))
3043 ? n2 : make_ssa_name (TREE_TYPE (n)),
3044 TRUNC_DIV_EXPR, n, lp);
3045 insert_before (g);
3046 if (gimple_assign_lhs (g) != n2)
3048 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3049 insert_before (g);
3051 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3052 MINUS_EXPR, lp, n1);
3053 insert_before (g);
3054 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3055 lp);
3056 insert_before (g);
3058 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3059 build_zero_cst (TREE_TYPE (n)));
3060 insert_before (g);
3061 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3062 insert_before (g);
3064 tree p = build_int_cst (sizetype,
3065 prec / limb_prec - (prec % limb_prec == 0));
3066 if (rhs_code == RSHIFT_EXPR)
3068 /* Lower
3069 dst = src >> n;
3071 unsigned n1 = n % limb_prec;
3072 size_t n2 = n / limb_prec;
3073 size_t n3 = n1 != 0;
3074 unsigned n4 = (limb_prec - n1) % limb_prec;
3075 size_t idx;
3076 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3077 int signed_p = (typeof (src) -1) < 0;
3078 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3079 ? p : p - n3); ++idx)
3080 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3081 limb_type ext;
3082 if (prec % limb_prec == 0)
3083 ext = src[p];
3084 else if (signed_p)
3085 ext = ((signed limb_type) (src[p] << (limb_prec
3086 - (prec % limb_prec))))
3087 >> (limb_prec - (prec % limb_prec));
3088 else
3089 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3090 if (!signed_p && (prec % limb_prec == 0))
3092 else if (idx < prec / limb_prec)
3094 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3095 ++idx;
3097 idx -= n2;
3098 if (signed_p)
3100 dst[idx] = ((signed limb_type) ext) >> n1;
3101 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3103 else
3105 dst[idx] = ext >> n1;
3106 ext = 0;
3108 for (++idx; idx <= p; ++idx)
3109 dst[idx] = ext; */
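/* An illustrative instance of the above with 64-bit limbs, signed
   prec = 256 and n = 100 (n1 = 36, n2 = 1, n3 = 1, n4 = 28, p = 3,
   ext = src[3]):

     dst[0] = (src[1] >> 36) | (src[2] << 28);
     dst[1] = (src[2] >> 36) | (ext << 28);
     dst[2] = (signed limb_type) ext >> 36;
     dst[3] = (signed limb_type) ext >> 63;  // sign fill  */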
3110 tree pmn3;
3111 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3112 pmn3 = p;
3113 else if (TREE_CODE (n3) == INTEGER_CST)
3114 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3115 else
3117 pmn3 = make_ssa_name (sizetype);
3118 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3119 insert_before (g);
3121 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3122 edge edge_true, edge_false;
3123 if_then (g, profile_probability::likely (), edge_true, edge_false);
3124 tree idx_next;
3125 tree idx = create_loop (n2, &idx_next);
3126 tree idxmn2 = make_ssa_name (sizetype);
3127 tree idxpn3 = make_ssa_name (sizetype);
3128 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3129 insert_before (g);
3130 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3131 insert_before (g);
3132 m_data_cnt = 0;
3133 tree t1 = handle_operand (rhs1, idx);
3134 m_first = false;
3135 g = gimple_build_assign (make_ssa_name (m_limb_type),
3136 RSHIFT_EXPR, t1, n1);
3137 insert_before (g);
3138 t1 = gimple_assign_lhs (g);
3139 if (!integer_zerop (n3))
3141 m_data_cnt = 0;
3142 tree t2 = handle_operand (rhs1, idxpn3);
3143 g = gimple_build_assign (make_ssa_name (m_limb_type),
3144 LSHIFT_EXPR, t2, n4);
3145 insert_before (g);
3146 t2 = gimple_assign_lhs (g);
3147 g = gimple_build_assign (make_ssa_name (m_limb_type),
3148 BIT_IOR_EXPR, t1, t2);
3149 insert_before (g);
3150 t1 = gimple_assign_lhs (g);
3152 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3153 g = gimple_build_assign (l, t1);
3154 insert_before (g);
3155 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3156 insert_before (g);
3157 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3158 insert_before (g);
3159 idx = make_ssa_name (sizetype);
3160 m_gsi = gsi_for_stmt (final_stmt);
3161 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3162 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3163 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3164 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3165 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3166 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3167 m_data_cnt = 0;
3168 tree ms = handle_operand (rhs1, p);
3169 tree ext = ms;
3170 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3171 ext = add_cast (m_limb_type, ms);
3172 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3173 && !integer_zerop (n3))
3175 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3176 if_then (g, profile_probability::likely (), edge_true, edge_false);
3177 m_data_cnt = 0;
3178 t1 = handle_operand (rhs1, idx);
3179 g = gimple_build_assign (make_ssa_name (m_limb_type),
3180 RSHIFT_EXPR, t1, n1);
3181 insert_before (g);
3182 t1 = gimple_assign_lhs (g);
3183 g = gimple_build_assign (make_ssa_name (m_limb_type),
3184 LSHIFT_EXPR, ext, n4);
3185 insert_before (g);
3186 tree t2 = gimple_assign_lhs (g);
3187 g = gimple_build_assign (make_ssa_name (m_limb_type),
3188 BIT_IOR_EXPR, t1, t2);
3189 insert_before (g);
3190 t1 = gimple_assign_lhs (g);
3191 idxmn2 = make_ssa_name (sizetype);
3192 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3193 insert_before (g);
3194 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3195 g = gimple_build_assign (l, t1);
3196 insert_before (g);
3197 idx_next = make_ssa_name (sizetype);
3198 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3199 insert_before (g);
3200 m_gsi = gsi_for_stmt (final_stmt);
3201 tree nidx = make_ssa_name (sizetype);
3202 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3203 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3204 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3205 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3206 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3207 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3208 idx = nidx;
3210 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3211 insert_before (g);
3212 idx = gimple_assign_lhs (g);
3213 tree sext = ext;
3214 if (!TYPE_UNSIGNED (type))
3215 sext = add_cast (signed_type_for (m_limb_type), ext);
3216 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3217 RSHIFT_EXPR, sext, n1);
3218 insert_before (g);
3219 t1 = gimple_assign_lhs (g);
3220 if (!TYPE_UNSIGNED (type))
3222 t1 = add_cast (m_limb_type, t1);
3223 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3224 RSHIFT_EXPR, sext,
3225 build_int_cst (TREE_TYPE (n),
3226 limb_prec - 1));
3227 insert_before (g);
3228 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3230 else
3231 ext = build_zero_cst (m_limb_type);
3232 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3233 g = gimple_build_assign (l, t1);
3234 insert_before (g);
3235 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3236 size_one_node);
3237 insert_before (g);
3238 idx = gimple_assign_lhs (g);
3239 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3240 if_then (g, profile_probability::likely (), edge_true, edge_false);
3241 idx = create_loop (idx, &idx_next);
3242 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3243 g = gimple_build_assign (l, ext);
3244 insert_before (g);
3245 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3246 insert_before (g);
3247 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3248 insert_before (g);
3250 else
3252 /* Lower
3253 dst = src << n;
3255 unsigned n1 = n % limb_prec;
3256 size_t n2 = n / limb_prec;
3257 size_t n3 = n1 != 0;
3258 unsigned n4 = (limb_prec - n1) % limb_prec;
3259 size_t idx;
3260 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3261 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3262 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3263 if (n1)
3265 dst[idx] = src[idx - n2] << n1;
3266 --idx;
3268 for (; (ssize_t) idx >= 0; --idx)
3269 dst[idx] = 0; */
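/* An illustrative instance of the above with 64-bit limbs, prec = 256
   and n = 100 (n1 = 36, n2 = 1, n3 = 1, n4 = 28, p = 3):

     dst[3] = (src[2] << 36) | (src[1] >> 28);
     dst[2] = (src[1] << 36) | (src[0] >> 28);
     dst[1] = src[0] << 36;
     dst[0] = 0;  */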
3270 tree n2pn3;
3271 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3272 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3273 else
3275 n2pn3 = make_ssa_name (sizetype);
3276 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3277 insert_before (g);
3279 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3280 idx even to access the most significant partial limb. */
3281 m_var_msb = true;
3282 if (integer_zerop (n3))
3283 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3284 counts. Emit an if (true) condition that can be optimized away later. */
3285 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3286 NULL_TREE, NULL_TREE);
3287 else
3288 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3289 edge edge_true, edge_false;
3290 if_then (g, profile_probability::likely (), edge_true, edge_false);
3291 tree idx_next;
3292 tree idx = create_loop (p, &idx_next);
3293 tree idxmn2 = make_ssa_name (sizetype);
3294 tree idxmn2mn3 = make_ssa_name (sizetype);
3295 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3296 insert_before (g);
3297 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3298 insert_before (g);
3299 m_data_cnt = 0;
3300 tree t1 = handle_operand (rhs1, idxmn2);
3301 m_first = false;
3302 g = gimple_build_assign (make_ssa_name (m_limb_type),
3303 LSHIFT_EXPR, t1, n1);
3304 insert_before (g);
3305 t1 = gimple_assign_lhs (g);
3306 if (!integer_zerop (n3))
3308 m_data_cnt = 0;
3309 tree t2 = handle_operand (rhs1, idxmn2mn3);
3310 g = gimple_build_assign (make_ssa_name (m_limb_type),
3311 RSHIFT_EXPR, t2, n4);
3312 insert_before (g);
3313 t2 = gimple_assign_lhs (g);
3314 g = gimple_build_assign (make_ssa_name (m_limb_type),
3315 BIT_IOR_EXPR, t1, t2);
3316 insert_before (g);
3317 t1 = gimple_assign_lhs (g);
3319 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3320 g = gimple_build_assign (l, t1);
3321 insert_before (g);
3322 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3323 insert_before (g);
3324 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3325 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3326 NULL_TREE, NULL_TREE);
3327 insert_before (g);
3328 idx = make_ssa_name (sizetype);
3329 m_gsi = gsi_for_stmt (final_stmt);
3330 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3331 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3332 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3333 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3334 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3335 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3336 m_data_cnt = 0;
3337 if (!integer_zerop (n3))
3339 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3340 NULL_TREE, NULL_TREE);
3341 if_then (g, profile_probability::likely (), edge_true, edge_false);
3342 idxmn2 = make_ssa_name (sizetype);
3343 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3344 insert_before (g);
3345 m_data_cnt = 0;
3346 t1 = handle_operand (rhs1, idxmn2);
3347 g = gimple_build_assign (make_ssa_name (m_limb_type),
3348 LSHIFT_EXPR, t1, n1);
3349 insert_before (g);
3350 t1 = gimple_assign_lhs (g);
3351 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3352 g = gimple_build_assign (l, t1);
3353 insert_before (g);
3354 idx_next = make_ssa_name (sizetype);
3355 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3356 insert_before (g);
3357 m_gsi = gsi_for_stmt (final_stmt);
3358 tree nidx = make_ssa_name (sizetype);
3359 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3360 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3361 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3362 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3363 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3364 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3365 idx = nidx;
3367 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3368 ssize_int (0), NULL_TREE, NULL_TREE);
3369 if_then (g, profile_probability::likely (), edge_true, edge_false);
3370 idx = create_loop (idx, &idx_next);
3371 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3372 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3373 insert_before (g);
3374 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3375 insert_before (g);
3376 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3377 ssize_int (0), NULL_TREE, NULL_TREE);
3378 insert_before (g);
3382 /* Lower large/huge _BitInt multiplication or division. */
3384 void
3385 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3387 tree rhs1 = gimple_assign_rhs1 (stmt);
3388 tree rhs2 = gimple_assign_rhs2 (stmt);
3389 tree lhs = gimple_assign_lhs (stmt);
3390 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3391 tree type = TREE_TYPE (rhs1);
3392 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3393 && bitint_precision_kind (type) >= bitint_prec_large);
3394 int prec = TYPE_PRECISION (type), prec1, prec2;
3395 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3396 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3397 if (obj == NULL_TREE)
3399 int part = var_to_partition (m_map, lhs);
3400 gcc_assert (m_vars[part] != NULL_TREE);
3401 obj = m_vars[part];
3402 lhs = build_fold_addr_expr (obj);
3404 else
3406 lhs = build_fold_addr_expr (obj);
3407 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3408 NULL_TREE, true, GSI_SAME_STMT);
3410 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3411 gimple *g;
3412 switch (rhs_code)
3414 case MULT_EXPR:
3415 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3416 lhs, build_int_cst (sitype, prec),
3417 rhs1, build_int_cst (sitype, prec1),
3418 rhs2, build_int_cst (sitype, prec2));
3419 insert_before (g);
3420 break;
3421 case TRUNC_DIV_EXPR:
3422 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3423 lhs, build_int_cst (sitype, prec),
3424 null_pointer_node,
3425 build_int_cst (sitype, 0),
3426 rhs1, build_int_cst (sitype, prec1),
3427 rhs2, build_int_cst (sitype, prec2));
3428 if (!stmt_ends_bb_p (stmt))
3429 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3430 insert_before (g);
3431 break;
3432 case TRUNC_MOD_EXPR:
3433 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3434 build_int_cst (sitype, 0),
3435 lhs, build_int_cst (sitype, prec),
3436 rhs1, build_int_cst (sitype, prec1),
3437 rhs2, build_int_cst (sitype, prec2));
3438 if (!stmt_ends_bb_p (stmt))
3439 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3440 insert_before (g);
3441 break;
3442 default:
3443 gcc_unreachable ();
3445 if (stmt_ends_bb_p (stmt))
3447 maybe_duplicate_eh_stmt (g, stmt);
3448 edge e1;
3449 edge_iterator ei;
3450 basic_block bb = gimple_bb (stmt);
3452 FOR_EACH_EDGE (e1, ei, bb->succs)
3453 if (e1->flags & EDGE_EH)
3454 break;
3455 if (e1)
3457 edge e2 = split_block (gsi_bb (m_gsi), g);
3458 m_gsi = gsi_after_labels (e2->dest);
3459 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3460 = profile_probability::very_unlikely ();
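/* The lowering is a library call; e.g. a _BitInt(575) multiplication is
   emitted in the IL as roughly (illustrative dump form)

     .MULBITINT (&obj, 575, &a, prec1, &b, prec2);

   while TRUNC_DIV_EXPR and TRUNC_MOD_EXPR use .DIVMODBITINT with the
   unused quotient or remainder pointer passed as null and precision 0.  */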
3465 /* Lower large/huge _BitInt conversion to/from floating point. */
3467 void
3468 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3470 tree rhs1 = gimple_assign_rhs1 (stmt);
3471 tree lhs = gimple_assign_lhs (stmt);
3472 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3473 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3474 gimple *g;
3475 if (rhs_code == FIX_TRUNC_EXPR)
3477 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3478 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3479 prec = -prec;
3480 if (obj == NULL_TREE)
3482 int part = var_to_partition (m_map, lhs);
3483 gcc_assert (m_vars[part] != NULL_TREE);
3484 obj = m_vars[part];
3485 lhs = build_fold_addr_expr (obj);
3487 else
3489 lhs = build_fold_addr_expr (obj);
3490 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3491 NULL_TREE, true, GSI_SAME_STMT);
3493 scalar_mode from_mode
3494 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3495 #ifdef HAVE_SFmode
3496 /* IEEE single is a full superset of both IEEE half and
3497 bfloat formats, so convert to float first and then to _BitInt
3498 to avoid the need for another 2 library routines. */
3499 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3500 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3501 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3503 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3504 if (type)
3505 rhs1 = add_cast (type, rhs1);
3507 #endif
3508 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3509 lhs, build_int_cst (sitype, prec),
3510 rhs1);
3511 insert_before (g);
3513 else
3515 int prec;
3516 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3517 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3518 rhs1, build_int_cst (sitype, prec));
3519 gimple_call_set_lhs (g, lhs);
3520 if (!stmt_ends_bb_p (stmt))
3521 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3522 gsi_replace (&m_gsi, g, true);
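/* E.g. (illustrative): double -> _BitInt(575) becomes

     .FLOATTOBITINT (&obj, 575, rhs1);

   (with negative precision for signed results), while
   _BitInt(575) -> double becomes

     lhs = .BITINTTOFLOAT (&op, 575);  */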
3526 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3527 If check_zero is true, caller wants to check if all bits in [start, end)
3528 are zero, otherwise if bits in [start, end) are either all zero or
3529 all ones. L is the limb with index LIMB, START and END are measured
3530 in bits. */
3532 tree
3533 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3534 unsigned int end, tree l,
3535 unsigned int limb,
3536 bool check_zero)
3538 unsigned startlimb = start / limb_prec;
3539 unsigned endlimb = (end - 1) / limb_prec;
3540 gimple *g;
3542 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3543 return l;
3544 if (startlimb == endlimb && limb == startlimb)
3546 if (check_zero)
3548 wide_int w = wi::shifted_mask (start % limb_prec,
3549 end - start, false, limb_prec);
3550 g = gimple_build_assign (make_ssa_name (m_limb_type),
3551 BIT_AND_EXPR, l,
3552 wide_int_to_tree (m_limb_type, w));
3553 insert_before (g);
3554 return gimple_assign_lhs (g);
3556 unsigned int shift = start % limb_prec;
3557 if ((end % limb_prec) != 0)
3559 unsigned int lshift = (-end) % limb_prec;
3560 shift += lshift;
3561 g = gimple_build_assign (make_ssa_name (m_limb_type),
3562 LSHIFT_EXPR, l,
3563 build_int_cst (unsigned_type_node,
3564 lshift));
3565 insert_before (g);
3566 l = gimple_assign_lhs (g);
3568 l = add_cast (signed_type_for (m_limb_type), l);
3569 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3570 RSHIFT_EXPR, l,
3571 build_int_cst (unsigned_type_node, shift));
3572 insert_before (g);
3573 return add_cast (m_limb_type, gimple_assign_lhs (g));
3575 else if (limb == startlimb)
3577 if ((start % limb_prec) == 0)
3578 return l;
3579 if (!check_zero)
3580 l = add_cast (signed_type_for (m_limb_type), l);
3581 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3582 RSHIFT_EXPR, l,
3583 build_int_cst (unsigned_type_node,
3584 start % limb_prec));
3585 insert_before (g);
3586 l = gimple_assign_lhs (g);
3587 if (!check_zero)
3588 l = add_cast (m_limb_type, l);
3589 return l;
3591 else if (limb == endlimb)
3593 if ((end % limb_prec) == 0)
3594 return l;
3595 if (check_zero)
3597 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3598 g = gimple_build_assign (make_ssa_name (m_limb_type),
3599 BIT_AND_EXPR, l,
3600 wide_int_to_tree (m_limb_type, w));
3601 insert_before (g);
3602 return gimple_assign_lhs (g);
3604 unsigned int shift = (-end) % limb_prec;
3605 g = gimple_build_assign (make_ssa_name (m_limb_type),
3606 LSHIFT_EXPR, l,
3607 build_int_cst (unsigned_type_node, shift));
3608 insert_before (g);
3609 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3610 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3611 RSHIFT_EXPR, l,
3612 build_int_cst (unsigned_type_node, shift));
3613 insert_before (g);
3614 return add_cast (m_limb_type, gimple_assign_lhs (g));
3616 return l;
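/* Illustrative walk-through (added example, not in the original
   sources), assuming limb_prec == 64, start == 70, end == 130,
   hence startlimb == 1 and endlimb == 2:

     limb 1 (startlimb): bits [6, 64) are extracted; with
       check_zero a logical shift right by 6 suffices, otherwise
       the limb is cast to signed first so the arithmetic shift
       keeps an all-ones field all-ones.
     limb 2 (endlimb): bits [0, 2) are extracted; with check_zero
       the limb is masked with 0x3, otherwise shifted left by 62
       and arithmetically right by 62 to sign-extend from bit 1.
     limbs strictly in between would be returned unmodified.  */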
3619 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3620 result including overflow flag into the right locations. */
3622 void
3623 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3624 tree ovf, tree lhs, tree orig_obj,
3625 gimple *stmt, tree_code code)
3627 gimple *g;
3629 if (obj == NULL_TREE
3630 && (TREE_CODE (type) != BITINT_TYPE
3631 || bitint_precision_kind (type) < bitint_prec_large))
3633 /* Add support for 3 or more limbs filled in from a normal integral
3634 type if this assert fails. If no target chooses a limb mode smaller
3635 than half of the largest supported normal integral type, this will
3636 not be needed. */
3637 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3638 tree lhs_type = type;
3639 if (TREE_CODE (type) == BITINT_TYPE
3640 && bitint_precision_kind (type) == bitint_prec_middle)
3641 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3642 TYPE_UNSIGNED (type));
3643 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3644 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3645 insert_before (g);
3646 r1 = gimple_assign_lhs (g);
3647 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3648 r1 = add_cast (lhs_type, r1);
3649 if (TYPE_PRECISION (lhs_type) > limb_prec)
3651 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3652 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3653 insert_before (g);
3654 r2 = gimple_assign_lhs (g);
3655 r2 = add_cast (lhs_type, r2);
3656 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3657 build_int_cst (unsigned_type_node,
3658 limb_prec));
3659 insert_before (g);
3660 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3661 gimple_assign_lhs (g));
3662 insert_before (g);
3663 r1 = gimple_assign_lhs (g);
3665 if (lhs_type != type)
3666 r1 = add_cast (type, r1);
3667 ovf = add_cast (lhs_type, ovf);
3668 if (lhs_type != type)
3669 ovf = add_cast (type, ovf);
3670 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3671 m_gsi = gsi_for_stmt (stmt);
3672 gsi_replace (&m_gsi, g, true);
3674 else
3676 unsigned HOST_WIDE_INT nelts = 0;
3677 tree atype = NULL_TREE;
3678 if (obj)
3680 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3681 if (orig_obj == NULL_TREE)
3682 nelts >>= 1;
3683 atype = build_array_type_nelts (m_limb_type, nelts);
3685 if (var && obj)
3687 tree v1, v2;
3688 tree zero;
3689 if (orig_obj == NULL_TREE)
3691 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3692 v1 = build2 (MEM_REF, atype,
3693 build_fold_addr_expr (unshare_expr (obj)), zero);
3695 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3696 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3697 else
3698 v1 = unshare_expr (obj);
3699 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3700 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3701 g = gimple_build_assign (v1, v2);
3702 insert_before (g);
3704 if (orig_obj == NULL_TREE && obj)
3706 ovf = add_cast (m_limb_type, ovf);
3707 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3708 g = gimple_build_assign (l, ovf);
3709 insert_before (g);
3710 if (nelts > 1)
3712 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3713 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3714 (nelts + 1) * m_limb_size);
3715 tree v1 = build2 (MEM_REF, atype,
3716 build_fold_addr_expr (unshare_expr (obj)),
3717 off);
3718 g = gimple_build_assign (v1, build_zero_cst (atype));
3719 insert_before (g);
3722 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3724 imm_use_iterator ui;
3725 use_operand_p use_p;
3726 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3728 g = USE_STMT (use_p);
3729 if (!is_gimple_assign (g)
3730 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3731 continue;
3732 tree lhs2 = gimple_assign_lhs (g);
3733 gimple *use_stmt;
3734 single_imm_use (lhs2, &use_p, &use_stmt);
3735 lhs2 = gimple_assign_lhs (use_stmt);
3736 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3737 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3738 g = gimple_build_assign (lhs2, ovf);
3739 else
3740 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3741 gsi_replace (&gsi, g, true);
3742 if (gsi_stmt (m_gsi) == use_stmt)
3743 m_gsi = gsi_for_stmt (g);
3744 break;
3747 else if (ovf != boolean_false_node)
3749 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3750 NULL_TREE, NULL_TREE);
3751 edge edge_true, edge_false;
3752 if_then (g, profile_probability::very_unlikely (),
3753 edge_true, edge_false);
3754 tree zero = build_zero_cst (TREE_TYPE (lhs));
3755 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3756 TREE_TYPE (lhs),
3757 zero, zero, NULL);
3758 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3759 true, GSI_SAME_STMT);
3760 m_gsi = gsi_after_labels (edge_true->dest);
3763 if (var)
3765 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3766 g = gimple_build_assign (var, clobber);
3767 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3771 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3772 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3773 argument 1 precision PREC1 and minimum precision for the result
3774 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3776 static tree
3777 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3778 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3780 *start = 0;
3781 *end = 0;
3782 *check_zero = true;
3783 /* Ignore this special rule for subtraction: even if both
3784 prec0 >= 0 and prec1 >= 0, their difference can be negative
3785 in infinite precision. */
3786 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3788 /* Result in bits [0, prec2) is unsigned; if prec > prec2,
3789 all bits above it will be zero. */
3790 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3791 return boolean_false_node;
3792 else
3794 /* ovf if any of bits in [start, end) is non-zero. */
3795 *start = prec - !TYPE_UNSIGNED (type);
3796 *end = prec2;
3799 else if (TYPE_UNSIGNED (type))
3801 /* If the result in bits [0, prec2) is signed and prec > prec2,
3802 all bits above it will be sign bit copies. */
3803 if (prec >= prec2)
3805 /* ovf if bit prec - 1 is non-zero. */
3806 *start = prec - 1;
3807 *end = prec;
3809 else
3811 /* ovf if any of bits in [start, end) is non-zero. */
3812 *start = prec;
3813 *end = prec2;
3816 else if (prec >= prec2)
3817 return boolean_false_node;
3818 else
3820 /* ovf if [start, end) bits aren't all zeros or all ones. */
3821 *start = prec - 1;
3822 *end = prec2;
3823 *check_zero = false;
3825 return NULL_TREE;
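/* Two worked instances of the above (illustration only, not part
   of the original sources), for PLUS_EXPR with a 64-bit unsigned
   result type:

     u50 + u60: prec == 64, prec0 == 50, prec1 == 60, prec2 == 61;
       prec - 0 >= prec2, so boolean_false_node is returned and
       overflow can never happen.
     s50 + s60: prec0 == -50, prec1 == -60, prec2 == 61 (signed);
       prec >= prec2, so all bits from 61 up are sign copies and
       overflow happens iff the sign copy in bit 63 is set, i.e.
       *start == 63, *end == 64.  */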
3828 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3829 argument or a _Complex large/huge _BitInt return type. */
3831 void
3832 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3834 tree arg0 = gimple_call_arg (stmt, 0);
3835 tree arg1 = gimple_call_arg (stmt, 1);
3836 tree lhs = gimple_call_lhs (stmt);
3837 gimple *g;
3839 if (!lhs)
3841 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3842 gsi_remove (&gsi, true);
3843 return;
3845 gimple *final_stmt = gsi_stmt (m_gsi);
3846 tree type = TREE_TYPE (lhs);
3847 if (TREE_CODE (type) == COMPLEX_TYPE)
3848 type = TREE_TYPE (type);
3849 int prec = TYPE_PRECISION (type);
3850 int prec0 = range_to_prec (arg0, stmt);
3851 int prec1 = range_to_prec (arg1, stmt);
3852 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3853 the minimum unsigned precision of any possible operation's
3854 result, otherwise it is the minimum signed precision.
3855 Some examples:
3856 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3857 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3858 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3859 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3860 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3861 8 + 8 [0, 0x1fe] 9 UNSIGNED
3862 8 + 10 [0, 0x4fe] 11 UNSIGNED
3863 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3864 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3865 8 + -8 [-0x80, 0x17e] 10 SIGNED
3866 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3867 10 + -8 [-0x80, 0x47e] 12 SIGNED
3868 8 - 8 [-0xff, 0xff] 9 SIGNED
3869 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3870 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3871 -8 - -8 [-0xff, 0xff] 9 SIGNED
3872 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3873 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3874 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3875 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3876 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3877 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3878 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3879 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3880 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3881 prec1 < 0 ? -prec1 : prec1);
3882 /* If operands are either both signed or both unsigned,
3883 we need just one additional bit. */
3884 prec2 = (((prec0 < 0) == (prec1 < 0)
3885 /* If one operand is signed and one unsigned and
3886 the signed one has larger precision, we need
3887 just one extra bit, otherwise two. */
3888 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3889 : (prec2 == -prec1 && prec2 != prec0)))
3890 ? prec2 + 1 : prec2 + 2);
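/* Added cross-check against the table above: for 8 + -10, prec2
   starts as MAX (8, 10) == 10; the signed operand has the larger
   precision (prec2 == -prec1 && prec2 != prec0), so just one extra
   bit is needed and prec2 becomes 11, matching the
   "8 + -10 ... 11 SIGNED" row.  */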
3891 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3892 prec1 < 0 ? -prec1 : prec1);
3893 prec3 = MAX (prec3, prec);
3894 tree var = NULL_TREE;
3895 tree orig_obj = obj;
3896 if (obj == NULL_TREE
3897 && TREE_CODE (type) == BITINT_TYPE
3898 && bitint_precision_kind (type) >= bitint_prec_large
3899 && m_names
3900 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3902 int part = var_to_partition (m_map, lhs);
3903 gcc_assert (m_vars[part] != NULL_TREE);
3904 obj = m_vars[part];
3905 if (TREE_TYPE (lhs) == type)
3906 orig_obj = obj;
3908 if (TREE_CODE (type) != BITINT_TYPE
3909 || bitint_precision_kind (type) < bitint_prec_large)
3911 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3912 tree atype = build_array_type_nelts (m_limb_type, nelts);
3913 var = create_tmp_var (atype);
3916 enum tree_code code;
3917 switch (gimple_call_internal_fn (stmt))
3919 case IFN_ADD_OVERFLOW:
3920 case IFN_UBSAN_CHECK_ADD:
3921 code = PLUS_EXPR;
3922 break;
3923 case IFN_SUB_OVERFLOW:
3924 case IFN_UBSAN_CHECK_SUB:
3925 code = MINUS_EXPR;
3926 break;
3927 default:
3928 gcc_unreachable ();
3930 unsigned start, end;
3931 bool check_zero;
3932 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3933 &start, &end, &check_zero);
3935 unsigned startlimb, endlimb;
3936 if (ovf)
3938 startlimb = ~0U;
3939 endlimb = ~0U;
3941 else
3943 startlimb = start / limb_prec;
3944 endlimb = (end - 1) / limb_prec;
3947 int prec4 = ovf != NULL_TREE ? prec : prec3;
3948 bitint_prec_kind kind = bitint_precision_kind (prec4);
3949 unsigned cnt, rem = 0, fin = 0;
3950 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3951 bool last_ovf = (ovf == NULL_TREE
3952 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3953 if (kind != bitint_prec_huge)
3954 cnt = CEIL (prec4, limb_prec) + last_ovf;
3955 else
3957 rem = (prec4 % (2 * limb_prec));
3958 fin = (prec4 - rem) / limb_prec;
3959 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3960 idx = idx_first = create_loop (size_zero_node, &idx_next);
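/* As an illustration (added, not in the original sources), with
   64-bit limbs and prec4 == 512: rem == 512 % 128 == 0, fin == 8
   and cnt == 2 + last_ovf.  The first two iterations of the loop
   below emit the body of a runtime loop that walks the limbs two
   at a time up to index fin; any further iterations handle the
   remaining limbs (and a possible extra overflow limb)
   straight-line.  */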
3963 if (kind == bitint_prec_huge)
3964 m_upwards_2limb = fin;
3965 m_upwards = true;
3967 tree type0 = TREE_TYPE (arg0);
3968 tree type1 = TREE_TYPE (arg1);
3969 int prec5 = prec3;
3970 if (bitint_precision_kind (prec5) < bitint_prec_large)
3971 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3972 if (TYPE_PRECISION (type0) < prec5)
3974 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3975 if (TREE_CODE (arg0) == INTEGER_CST)
3976 arg0 = fold_convert (type0, arg0);
3978 if (TYPE_PRECISION (type1) < prec5)
3980 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3981 if (TREE_CODE (arg1) == INTEGER_CST)
3982 arg1 = fold_convert (type1, arg1);
3984 unsigned int data_cnt = 0;
3985 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3986 tree cmp = build_zero_cst (m_limb_type);
3987 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3988 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3989 for (unsigned i = 0; i < cnt; i++)
3991 m_data_cnt = 0;
3992 tree rhs1, rhs2;
3993 if (kind != bitint_prec_huge)
3994 idx = size_int (i);
3995 else if (i >= 2)
3996 idx = size_int (fin + (i > 2));
3997 if (!last_ovf || i < cnt - 1)
3999 if (type0 != TREE_TYPE (arg0))
4000 rhs1 = handle_cast (type0, arg0, idx);
4001 else
4002 rhs1 = handle_operand (arg0, idx);
4003 if (type1 != TREE_TYPE (arg1))
4004 rhs2 = handle_cast (type1, arg1, idx);
4005 else
4006 rhs2 = handle_operand (arg1, idx);
4007 if (i == 0)
4008 data_cnt = m_data_cnt;
4009 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4010 rhs1 = add_cast (m_limb_type, rhs1);
4011 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4012 rhs2 = add_cast (m_limb_type, rhs2);
4013 last_rhs1 = rhs1;
4014 last_rhs2 = rhs2;
4016 else
4018 m_data_cnt = data_cnt;
4019 if (TYPE_UNSIGNED (type0))
4020 rhs1 = build_zero_cst (m_limb_type);
4021 else
4023 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4024 if (TREE_CODE (rhs1) == INTEGER_CST)
4025 rhs1 = build_int_cst (m_limb_type,
4026 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4027 else
4029 tree lpm1 = build_int_cst (unsigned_type_node,
4030 limb_prec - 1);
4031 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4032 RSHIFT_EXPR, rhs1, lpm1);
4033 insert_before (g);
4034 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4037 if (TYPE_UNSIGNED (type1))
4038 rhs2 = build_zero_cst (m_limb_type);
4039 else
4041 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4042 if (TREE_CODE (rhs2) == INTEGER_CST)
4043 rhs2 = build_int_cst (m_limb_type,
4044 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4045 else
4047 tree lpm1 = build_int_cst (unsigned_type_node,
4048 limb_prec - 1);
4049 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4050 RSHIFT_EXPR, rhs2, lpm1);
4051 insert_before (g);
4052 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4056 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4057 if (ovf != boolean_false_node)
4059 if (tree_fits_uhwi_p (idx))
4061 unsigned limb = tree_to_uhwi (idx);
4062 if (limb >= startlimb && limb <= endlimb)
4064 tree l = arith_overflow_extract_bits (start, end, rhs,
4065 limb, check_zero);
4066 tree this_ovf = make_ssa_name (boolean_type_node);
4067 if (ovf == NULL_TREE && !check_zero)
4069 cmp = l;
4070 g = gimple_build_assign (make_ssa_name (m_limb_type),
4071 PLUS_EXPR, l,
4072 build_int_cst (m_limb_type, 1));
4073 insert_before (g);
4074 g = gimple_build_assign (this_ovf, GT_EXPR,
4075 gimple_assign_lhs (g),
4076 build_int_cst (m_limb_type, 1));
4078 else
4079 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4080 insert_before (g);
4081 if (ovf == NULL_TREE)
4082 ovf = this_ovf;
4083 else
4085 tree b = make_ssa_name (boolean_type_node);
4086 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4087 insert_before (g);
4088 ovf = b;
4092 else if (startlimb < fin)
4094 if (m_first && startlimb + 2 < fin)
4096 tree data_out;
4097 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4098 ovf_out = m_data.pop ();
4099 m_data.pop ();
4100 if (!check_zero)
4102 cmp = prepare_data_in_out (cmp, idx, &data_out);
4103 cmp_out = m_data.pop ();
4104 m_data.pop ();
4107 if (i != 0 || startlimb != fin - 1)
4109 tree_code cmp_code;
4110 bool single_comparison
4111 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4112 if (!single_comparison)
4114 cmp_code = GE_EXPR;
4115 if (!check_zero && (start % limb_prec) == 0)
4116 single_comparison = true;
4118 else if ((startlimb & 1) == (i & 1))
4119 cmp_code = EQ_EXPR;
4120 else
4121 cmp_code = GT_EXPR;
4122 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4123 NULL_TREE, NULL_TREE);
4124 edge edge_true_true, edge_true_false, edge_false;
4125 gimple *g2 = NULL;
4126 if (!single_comparison)
4127 g2 = gimple_build_cond (NE_EXPR, idx,
4128 size_int (startlimb), NULL_TREE,
4129 NULL_TREE);
4130 if_then_if_then_else (g, g2, profile_probability::likely (),
4131 profile_probability::likely (),
4132 edge_true_true, edge_true_false,
4133 edge_false);
4134 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4135 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4136 check_zero);
4137 tree this_ovf = make_ssa_name (boolean_type_node);
4138 if (cmp_code != GT_EXPR && !check_zero)
4140 g = gimple_build_assign (make_ssa_name (m_limb_type),
4141 PLUS_EXPR, l,
4142 build_int_cst (m_limb_type, 1));
4143 insert_before (g);
4144 g = gimple_build_assign (this_ovf, GT_EXPR,
4145 gimple_assign_lhs (g),
4146 build_int_cst (m_limb_type, 1));
4148 else
4149 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4150 insert_before (g);
4151 if (cmp_code == GT_EXPR)
4153 tree t = make_ssa_name (boolean_type_node);
4154 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4155 insert_before (g);
4156 this_ovf = t;
4158 tree this_ovf2 = NULL_TREE;
4159 if (!single_comparison)
4161 m_gsi = gsi_after_labels (edge_true_true->src);
4162 tree t = make_ssa_name (boolean_type_node);
4163 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4164 insert_before (g);
4165 this_ovf2 = make_ssa_name (boolean_type_node);
4166 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4167 ovf, t);
4168 insert_before (g);
4170 m_gsi = gsi_after_labels (edge_true_false->dest);
4171 tree t;
4172 if (i == 1 && ovf_out)
4173 t = ovf_out;
4174 else
4175 t = make_ssa_name (boolean_type_node);
4176 gphi *phi = create_phi_node (t, edge_true_false->dest);
4177 add_phi_arg (phi, this_ovf, edge_true_false,
4178 UNKNOWN_LOCATION);
4179 add_phi_arg (phi, ovf ? ovf
4180 : boolean_false_node, edge_false,
4181 UNKNOWN_LOCATION);
4182 if (edge_true_true)
4183 add_phi_arg (phi, this_ovf2, edge_true_true,
4184 UNKNOWN_LOCATION);
4185 ovf = t;
4186 if (!check_zero && cmp_code != GT_EXPR)
4188 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4189 phi = create_phi_node (t, edge_true_false->dest);
4190 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4191 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4192 if (edge_true_true)
4193 add_phi_arg (phi, cmp, edge_true_true,
4194 UNKNOWN_LOCATION);
4195 cmp = t;
4201 if (var || obj)
4203 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4205 else if (!tree_fits_uhwi_p (idx)
4206 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4208 bool single_comparison
4209 = (((unsigned) prec % limb_prec) == 0
4210 || prec_limbs + 1 >= fin
4211 || (prec_limbs & 1) == (i & 1));
4212 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4213 NULL_TREE, NULL_TREE);
4214 gimple *g2 = NULL;
4215 if (!single_comparison)
4216 g2 = gimple_build_cond (LT_EXPR, idx,
4217 size_int (prec_limbs - 1),
4218 NULL_TREE, NULL_TREE);
4219 edge edge_true_true, edge_true_false, edge_false;
4220 if_then_if_then_else (g, g2, profile_probability::likely (),
4221 profile_probability::likely (),
4222 edge_true_true, edge_true_false,
4223 edge_false);
4224 tree l = limb_access (type, var ? var : obj, idx, true);
4225 g = gimple_build_assign (l, rhs);
4226 insert_before (g);
4227 if (!single_comparison)
4229 m_gsi = gsi_after_labels (edge_true_true->src);
4230 l = limb_access (type, var ? var : obj,
4231 size_int (prec_limbs - 1), true);
4232 if (!useless_type_conversion_p (TREE_TYPE (l),
4233 TREE_TYPE (rhs)))
4234 rhs = add_cast (TREE_TYPE (l), rhs);
4235 g = gimple_build_assign (l, rhs);
4236 insert_before (g);
4238 m_gsi = gsi_after_labels (edge_true_false->dest);
4240 else
4242 tree l = limb_access (type, var ? var : obj, idx, true);
4243 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4244 rhs = add_cast (TREE_TYPE (l), rhs);
4245 g = gimple_build_assign (l, rhs);
4246 insert_before (g);
4249 m_first = false;
4250 if (kind == bitint_prec_huge && i <= 1)
4252 if (i == 0)
4254 idx = make_ssa_name (sizetype);
4255 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4256 size_one_node);
4257 insert_before (g);
4259 else
4261 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4262 size_int (2));
4263 insert_before (g);
4264 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4265 NULL_TREE, NULL_TREE);
4266 insert_before (g);
4267 m_gsi = gsi_for_stmt (final_stmt);
4268 m_bb = NULL;
4273 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
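/* Source-level sketch (illustration only, not from the original
   sources) of what the code above lowers:

     _BitInt(256) a, b, r;
     ...
     if (__builtin_add_overflow (a, b, &r))

   becomes a loop over the limbs performing the addition with carry
   propagation, with the [start, end) bit extraction above deriving
   the overflow flag, after which finish_arith_overflow stores both
   the result and the flag.  */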
4276 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4277 argument or a _Complex large/huge _BitInt return type. */
4279 void
4280 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4282 tree arg0 = gimple_call_arg (stmt, 0);
4283 tree arg1 = gimple_call_arg (stmt, 1);
4284 tree lhs = gimple_call_lhs (stmt);
4285 if (!lhs)
4287 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4288 gsi_remove (&gsi, true);
4289 return;
4291 gimple *final_stmt = gsi_stmt (m_gsi);
4292 tree type = TREE_TYPE (lhs);
4293 if (TREE_CODE (type) == COMPLEX_TYPE)
4294 type = TREE_TYPE (type);
4295 int prec = TYPE_PRECISION (type), prec0, prec1;
4296 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4297 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4298 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4299 + (prec1 < 0 ? -prec1 : prec1));
4300 if (prec0 == 1 || prec1 == 1)
4301 --prec2;
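/* The needed precision of a product is (roughly) the sum of the
   operands' precisions; e.g. (illustration only) an unsigned
   50-bit value times a signed 60-bit value needs at most 110 bits.
   A precision-1 operand, i.e. one known to be in [0, 1], cannot
   widen the product, hence the decrement above.  */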
4302 tree var = NULL_TREE;
4303 tree orig_obj = obj;
4304 bool force_var = false;
4305 if (obj == NULL_TREE
4306 && TREE_CODE (type) == BITINT_TYPE
4307 && bitint_precision_kind (type) >= bitint_prec_large
4308 && m_names
4309 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4311 int part = var_to_partition (m_map, lhs);
4312 gcc_assert (m_vars[part] != NULL_TREE);
4313 obj = m_vars[part];
4314 if (TREE_TYPE (lhs) == type)
4315 orig_obj = obj;
4317 else if (obj != NULL_TREE && DECL_P (obj))
4319 for (int i = 0; i < 2; ++i)
4321 tree arg = i ? arg1 : arg0;
4322 if (TREE_CODE (arg) == ADDR_EXPR)
4323 arg = TREE_OPERAND (arg, 0);
4324 if (get_base_address (arg) == obj)
4326 force_var = true;
4327 break;
4331 if (obj == NULL_TREE
4332 || force_var
4333 || TREE_CODE (type) != BITINT_TYPE
4334 || bitint_precision_kind (type) < bitint_prec_large
4335 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4337 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4338 tree atype = build_array_type_nelts (m_limb_type, nelts);
4339 var = create_tmp_var (atype);
4341 tree addr = build_fold_addr_expr (var ? var : obj);
4342 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4343 NULL_TREE, true, GSI_SAME_STMT);
4344 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4345 gimple *g
4346 = gimple_build_call_internal (IFN_MULBITINT, 6,
4347 addr, build_int_cst (sitype,
4348 MAX (prec2, prec)),
4349 arg0, build_int_cst (sitype, prec0),
4350 arg1, build_int_cst (sitype, prec1));
4351 insert_before (g);
4353 unsigned start, end;
4354 bool check_zero;
4355 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4356 &start, &end, &check_zero);
4357 if (ovf == NULL_TREE)
4359 unsigned startlimb = start / limb_prec;
4360 unsigned endlimb = (end - 1) / limb_prec;
4361 unsigned cnt;
4362 bool use_loop = false;
4363 if (startlimb == endlimb)
4364 cnt = 1;
4365 else if (startlimb + 1 == endlimb)
4366 cnt = 2;
4367 else if ((end % limb_prec) == 0)
4369 cnt = 2;
4370 use_loop = true;
4372 else
4374 cnt = 3;
4375 use_loop = startlimb + 2 < endlimb;
4377 if (cnt == 1)
4379 tree l = limb_access (NULL_TREE, var ? var : obj,
4380 size_int (startlimb), true);
4381 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4382 insert_before (g);
4383 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4384 startlimb, check_zero);
4385 ovf = make_ssa_name (boolean_type_node);
4386 if (check_zero)
4387 g = gimple_build_assign (ovf, NE_EXPR, l,
4388 build_zero_cst (m_limb_type));
4389 else
4391 g = gimple_build_assign (make_ssa_name (m_limb_type),
4392 PLUS_EXPR, l,
4393 build_int_cst (m_limb_type, 1));
4394 insert_before (g);
4395 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4396 build_int_cst (m_limb_type, 1));
4398 insert_before (g);
4400 else
4402 basic_block edge_bb = NULL;
4403 gimple_stmt_iterator gsi = m_gsi;
4404 gsi_prev (&gsi);
4405 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4406 edge_bb = e->src;
4407 m_gsi = gsi_end_bb (edge_bb);
4409 tree cmp = build_zero_cst (m_limb_type);
4410 for (unsigned i = 0; i < cnt; i++)
4412 tree idx, idx_next = NULL_TREE;
4413 if (i == 0)
4414 idx = size_int (startlimb);
4415 else if (i == 2)
4416 idx = size_int (endlimb);
4417 else if (use_loop)
4418 idx = create_loop (size_int (startlimb + 1), &idx_next);
4419 else
4420 idx = size_int (startlimb + 1);
4421 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4422 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4423 insert_before (g);
4424 l = gimple_assign_lhs (g);
4425 if (i == 0 || i == 2)
4426 l = arith_overflow_extract_bits (start, end, l,
4427 tree_to_uhwi (idx),
4428 check_zero);
4429 if (i == 0 && !check_zero)
4431 cmp = l;
4432 g = gimple_build_assign (make_ssa_name (m_limb_type),
4433 PLUS_EXPR, l,
4434 build_int_cst (m_limb_type, 1));
4435 insert_before (g);
4436 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4437 build_int_cst (m_limb_type, 1),
4438 NULL_TREE, NULL_TREE);
4440 else
4441 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4442 insert_before (g);
4443 edge e1 = split_block (gsi_bb (m_gsi), g);
4444 e1->flags = EDGE_FALSE_VALUE;
4445 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4446 EDGE_TRUE_VALUE);
4447 e1->probability = profile_probability::likely ();
4448 e2->probability = e1->probability.invert ();
4449 if (i == 0)
4450 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4451 m_gsi = gsi_after_labels (e1->dest);
4452 if (i == 1 && use_loop)
4454 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4455 size_one_node);
4456 insert_before (g);
4457 g = gimple_build_cond (NE_EXPR, idx_next,
4458 size_int (endlimb + (cnt == 1)),
4459 NULL_TREE, NULL_TREE);
4460 insert_before (g);
4461 edge true_edge, false_edge;
4462 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4463 &true_edge,
4464 &false_edge);
4465 m_gsi = gsi_after_labels (false_edge->dest);
4466 m_bb = NULL;
4470 ovf = make_ssa_name (boolean_type_node);
4471 basic_block bb = gimple_bb (final_stmt);
4472 gphi *phi = create_phi_node (ovf, bb);
4473 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4474 edge_iterator ei;
4475 FOR_EACH_EDGE (e, ei, bb->preds)
4477 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4478 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4480 m_gsi = gsi_for_stmt (final_stmt);
4484 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4487 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4488 .{ADD,SUB,MUL}_OVERFLOW call. */
4490 void
4491 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4493 tree rhs1 = gimple_assign_rhs1 (stmt);
4494 rhs1 = TREE_OPERAND (rhs1, 0);
4495 if (obj == NULL_TREE)
4497 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4498 gcc_assert (m_vars[part] != NULL_TREE);
4499 obj = m_vars[part];
4501 if (TREE_CODE (rhs1) == SSA_NAME
4502 && (m_names == NULL
4503 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4505 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4506 return;
4508 int part = var_to_partition (m_map, rhs1);
4509 gcc_assert (m_vars[part] != NULL_TREE);
4510 tree var = m_vars[part];
4511 unsigned HOST_WIDE_INT nelts
4512 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4513 tree atype = build_array_type_nelts (m_limb_type, nelts);
4514 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4515 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4516 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4517 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4518 ? 0 : nelts * m_limb_size);
4519 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4520 gimple *g = gimple_build_assign (obj, v2);
4521 insert_before (g);
4524 /* Lower COMPLEX_EXPR stmt. */
4526 void
4527 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4529 tree lhs = gimple_assign_lhs (stmt);
4530 tree rhs1 = gimple_assign_rhs1 (stmt);
4531 tree rhs2 = gimple_assign_rhs2 (stmt);
4532 int part = var_to_partition (m_map, lhs);
4533 gcc_assert (m_vars[part] != NULL_TREE);
4534 lhs = m_vars[part];
4535 unsigned HOST_WIDE_INT nelts
4536 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4537 tree atype = build_array_type_nelts (m_limb_type, nelts);
4538 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4539 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4540 tree v2;
4541 if (TREE_CODE (rhs1) == SSA_NAME)
4543 part = var_to_partition (m_map, rhs1);
4544 gcc_assert (m_vars[part] != NULL_TREE);
4545 v2 = m_vars[part];
4547 else if (integer_zerop (rhs1))
4548 v2 = build_zero_cst (atype);
4549 else
4550 v2 = tree_output_constant_def (rhs1);
4551 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4552 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4553 gimple *g = gimple_build_assign (v1, v2);
4554 insert_before (g);
4555 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4556 TYPE_SIZE_UNIT (atype));
4557 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4558 if (TREE_CODE (rhs2) == SSA_NAME)
4560 part = var_to_partition (m_map, rhs2);
4561 gcc_assert (m_vars[part] != NULL_TREE);
4562 v2 = m_vars[part];
4564 else if (integer_zerop (rhs2))
4565 v2 = build_zero_cst (atype);
4566 else
4567 v2 = tree_output_constant_def (rhs2);
4568 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4569 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4570 g = gimple_build_assign (v1, v2);
4571 insert_before (g);
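/* Layout note (added clarification): a _Complex large/huge _BitInt
   is represented here as two consecutive limb arrays, the real
   part at offset 0 and the imaginary part at TYPE_SIZE_UNIT
   (atype); lower_cplxpart_stmt above relies on the same layout
   when extracting REALPART_EXPR/IMAGPART_EXPR.  */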
4574 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4575 argument. */
4577 void
4578 bitint_large_huge::lower_bit_query (gimple *stmt)
4580 tree arg0 = gimple_call_arg (stmt, 0);
4581 tree arg1 = (gimple_call_num_args (stmt) == 2
4582 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4583 tree lhs = gimple_call_lhs (stmt);
4584 gimple *g;
4586 if (!lhs)
4588 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4589 gsi_remove (&gsi, true);
4590 return;
4592 tree type = TREE_TYPE (arg0);
4593 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4594 bitint_prec_kind kind = bitint_precision_kind (type);
4595 gcc_assert (kind >= bitint_prec_large);
4596 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4597 enum built_in_function fcode = END_BUILTINS;
4598 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4599 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4600 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4601 switch (ifn)
4603 case IFN_CLZ:
4604 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4605 fcode = BUILT_IN_CLZ;
4606 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4607 fcode = BUILT_IN_CLZL;
4608 else
4609 fcode = BUILT_IN_CLZLL;
4610 break;
4611 case IFN_FFS:
4612 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4613 we don't add the addend at the end. */
4614 arg1 = integer_zero_node;
4615 /* FALLTHRU */
4616 case IFN_CTZ:
4617 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4618 fcode = BUILT_IN_CTZ;
4619 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4620 fcode = BUILT_IN_CTZL;
4621 else
4622 fcode = BUILT_IN_CTZLL;
4623 m_upwards = true;
4624 break;
4625 case IFN_CLRSB:
4626 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4627 fcode = BUILT_IN_CLRSB;
4628 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4629 fcode = BUILT_IN_CLRSBL;
4630 else
4631 fcode = BUILT_IN_CLRSBLL;
4632 break;
4633 case IFN_PARITY:
4634 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4635 fcode = BUILT_IN_PARITY;
4636 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4637 fcode = BUILT_IN_PARITYL;
4638 else
4639 fcode = BUILT_IN_PARITYLL;
4640 m_upwards = true;
4641 break;
4642 case IFN_POPCOUNT:
4643 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4644 fcode = BUILT_IN_POPCOUNT;
4645 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4646 fcode = BUILT_IN_POPCOUNTL;
4647 else
4648 fcode = BUILT_IN_POPCOUNTLL;
4649 m_upwards = true;
4650 break;
4651 default:
4652 gcc_unreachable ();
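/* E.g. (added note) on x86_64, where limb_prec == 64, the
   dispatch above selects the unsigned long variants such as
   BUILT_IN_CLZL, because their precision matches limb_prec.  */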
4654 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4655 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4656 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4657 basic_block edge_bb = NULL;
4658 if (m_upwards)
4660 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4661 if (kind == bitint_prec_large)
4662 cnt = CEIL (prec, limb_prec);
4663 else
4665 rem = (prec % (2 * limb_prec));
4666 end = (prec - rem) / limb_prec;
4667 cnt = 2 + CEIL (rem, limb_prec);
4668 idx = idx_first = create_loop (size_zero_node, &idx_next);
4671 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4673 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4674 gsi_prev (&gsi);
4675 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4676 edge_bb = e->src;
4677 if (kind == bitint_prec_large)
4678 m_gsi = gsi_end_bb (edge_bb);
4679 bqp = XALLOCAVEC (struct bq_details, cnt);
4681 else
4682 m_after_stmt = stmt;
4683 if (kind != bitint_prec_large)
4684 m_upwards_2limb = end;
4686 for (unsigned i = 0; i < cnt; i++)
4688 m_data_cnt = 0;
4689 if (kind == bitint_prec_large)
4690 idx = size_int (i);
4691 else if (i >= 2)
4692 idx = size_int (end + (i > 2));
4694 tree rhs1 = handle_operand (arg0, idx);
4695 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4697 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4698 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4699 rhs1 = add_cast (m_limb_type, rhs1);
4702 tree in, out, tem;
4703 if (ifn == IFN_PARITY)
4704 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4705 else if (ifn == IFN_FFS)
4706 in = prepare_data_in_out (integer_one_node, idx, &out);
4707 else
4708 in = prepare_data_in_out (integer_zero_node, idx, &out);
4710 switch (ifn)
4712 case IFN_CTZ:
4713 case IFN_FFS:
4714 g = gimple_build_cond (NE_EXPR, rhs1,
4715 build_zero_cst (m_limb_type),
4716 NULL_TREE, NULL_TREE);
4717 insert_before (g);
4718 edge e1, e2;
4719 e1 = split_block (gsi_bb (m_gsi), g);
4720 e1->flags = EDGE_FALSE_VALUE;
4721 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4722 e1->probability = profile_probability::unlikely ();
4723 e2->probability = e1->probability.invert ();
4724 if (i == 0)
4725 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4726 m_gsi = gsi_after_labels (e1->dest);
4727 bqp[i].e = e2;
4728 bqp[i].val = rhs1;
4729 if (tree_fits_uhwi_p (idx))
4730 bqp[i].addend
4731 = build_int_cst (integer_type_node,
4732 tree_to_uhwi (idx) * limb_prec
4733 + (ifn == IFN_FFS));
4734 else
4736 bqp[i].addend = in;
4737 if (i == 1)
4738 res = out;
4739 else
4740 res = make_ssa_name (integer_type_node);
4741 g = gimple_build_assign (res, PLUS_EXPR, in,
4742 build_int_cst (integer_type_node,
4743 limb_prec));
4744 insert_before (g);
4745 m_data[m_data_cnt] = res;
4747 break;
4748 case IFN_PARITY:
4749 if (!integer_zerop (in))
4751 if (kind == bitint_prec_huge && i == 1)
4752 res = out;
4753 else
4754 res = make_ssa_name (m_limb_type);
4755 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4756 insert_before (g);
4758 else
4759 res = rhs1;
4760 m_data[m_data_cnt] = res;
4761 break;
4762 case IFN_POPCOUNT:
4763 g = gimple_build_call (fndecl, 1, rhs1);
4764 tem = make_ssa_name (integer_type_node);
4765 gimple_call_set_lhs (g, tem);
4766 insert_before (g);
4767 if (!integer_zerop (in))
4769 if (kind == bitint_prec_huge && i == 1)
4770 res = out;
4771 else
4772 res = make_ssa_name (integer_type_node);
4773 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4774 insert_before (g);
4776 else
4777 res = tem;
4778 m_data[m_data_cnt] = res;
4779 break;
4780 default:
4781 gcc_unreachable ();
4784 m_first = false;
4785 if (kind == bitint_prec_huge && i <= 1)
4787 if (i == 0)
4789 idx = make_ssa_name (sizetype);
4790 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4791 size_one_node);
4792 insert_before (g);
4794 else
4796 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4797 size_int (2));
4798 insert_before (g);
4799 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4800 NULL_TREE, NULL_TREE);
4801 insert_before (g);
4802 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4803 m_gsi = gsi_after_labels (edge_bb);
4804 else
4805 m_gsi = gsi_for_stmt (stmt);
4806 m_bb = NULL;
4811 else
4813 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4814 int sub_one = 0;
4815 if (kind == bitint_prec_large)
4816 cnt = CEIL (prec, limb_prec);
4817 else
4819 rem = prec % limb_prec;
4820 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4821 rem = limb_prec;
4822 end = (prec - rem) / limb_prec;
4823 cnt = 1 + (rem != 0);
4824 if (ifn == IFN_CLRSB)
4825 sub_one = 1;
4828 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4829 gsi_prev (&gsi);
4830 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4831 edge_bb = e->src;
4832 m_gsi = gsi_end_bb (edge_bb);
4834 if (ifn == IFN_CLZ)
4835 bqp = XALLOCAVEC (struct bq_details, cnt);
4836 else
4838 gsi = gsi_for_stmt (stmt);
4839 gsi_prev (&gsi);
4840 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4841 edge_bb = e->src;
4842 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4845 for (unsigned i = 0; i < cnt; i++)
4847 m_data_cnt = 0;
4848 if (kind == bitint_prec_large)
4849 idx = size_int (cnt - i - 1);
4850 else if (i == cnt - 1)
4851 idx = create_loop (size_int (end - 1), &idx_next);
4852 else
4853 idx = size_int (end);
4855 tree rhs1 = handle_operand (arg0, idx);
4856 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4858 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4859 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4860 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4861 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4862 rhs1 = add_cast (m_limb_type, rhs1);
4865 if (ifn == IFN_CLZ)
4867 g = gimple_build_cond (NE_EXPR, rhs1,
4868 build_zero_cst (m_limb_type),
4869 NULL_TREE, NULL_TREE);
4870 insert_before (g);
4871 edge e1 = split_block (gsi_bb (m_gsi), g);
4872 e1->flags = EDGE_FALSE_VALUE;
4873 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4874 e1->probability = profile_probability::unlikely ();
4875 e2->probability = e1->probability.invert ();
4876 if (i == 0)
4877 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4878 m_gsi = gsi_after_labels (e1->dest);
4879 bqp[i].e = e2;
4880 bqp[i].val = rhs1;
4882 else
4884 if (i == 0)
4886 first = rhs1;
4887 g = gimple_build_assign (make_ssa_name (m_limb_type),
4888 PLUS_EXPR, rhs1,
4889 build_int_cst (m_limb_type, 1));
4890 insert_before (g);
4891 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4892 build_int_cst (m_limb_type, 1),
4893 NULL_TREE, NULL_TREE);
4894 insert_before (g);
4896 else
4898 g = gimple_build_assign (make_ssa_name (m_limb_type),
4899 BIT_XOR_EXPR, rhs1, first);
4900 insert_before (g);
4901 tree stype = signed_type_for (m_limb_type);
4902 g = gimple_build_cond (LT_EXPR,
4903 add_cast (stype,
4904 gimple_assign_lhs (g)),
4905 build_zero_cst (stype),
4906 NULL_TREE, NULL_TREE);
4907 insert_before (g);
4908 edge e1 = split_block (gsi_bb (m_gsi), g);
4909 e1->flags = EDGE_FALSE_VALUE;
4910 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4911 EDGE_TRUE_VALUE);
4912 e1->probability = profile_probability::unlikely ();
4913 e2->probability = e1->probability.invert ();
4914 if (i == 1)
4915 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4916 e2->src);
4917 m_gsi = gsi_after_labels (e1->dest);
4918 bqp[2 * i].e = e2;
4919 g = gimple_build_cond (NE_EXPR, rhs1, first,
4920 NULL_TREE, NULL_TREE);
4921 insert_before (g);
4923 edge e1 = split_block (gsi_bb (m_gsi), g);
4924 e1->flags = EDGE_FALSE_VALUE;
4925 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4926 e1->probability = profile_probability::unlikely ();
4927 e2->probability = e1->probability.invert ();
4928 if (i == 0)
4929 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4930 m_gsi = gsi_after_labels (e1->dest);
4931 bqp[2 * i + 1].e = e2;
4932 bqp[i].val = rhs1;
4934 if (tree_fits_uhwi_p (idx))
4935 bqp[i].addend
4936 = build_int_cst (integer_type_node,
4937 (int) prec
4938 - (((int) tree_to_uhwi (idx) + 1)
4939 * limb_prec) - sub_one);
4940 else
4942 tree in, out;
4943 in = build_int_cst (integer_type_node, rem - sub_one);
4944 m_first = true;
4945 in = prepare_data_in_out (in, idx, &out);
4946 out = m_data[m_data_cnt + 1];
4947 bqp[i].addend = in;
4948 g = gimple_build_assign (out, PLUS_EXPR, in,
4949 build_int_cst (integer_type_node,
4950 limb_prec));
4951 insert_before (g);
4952 m_data[m_data_cnt] = out;
4955 m_first = false;
4956 if (kind == bitint_prec_huge && i == cnt - 1)
4958 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4959 size_int (-1));
4960 insert_before (g);
4961 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4962 NULL_TREE, NULL_TREE);
4963 insert_before (g);
4964 edge true_edge, false_edge;
4965 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4966 &true_edge, &false_edge);
4967 m_gsi = gsi_after_labels (false_edge->dest);
4968 m_bb = NULL;
4972 switch (ifn)
4974 case IFN_CLZ:
4975 case IFN_CTZ:
4976 case IFN_FFS:
4977 gphi *phi1, *phi2, *phi3;
4978 basic_block bb;
4979 bb = gsi_bb (m_gsi);
4980 remove_edge (find_edge (bb, gimple_bb (stmt)));
4981 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4982 gimple_bb (stmt));
4983 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4984 gimple_bb (stmt));
4985 for (unsigned i = 0; i < cnt; i++)
4987 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4988 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4990 if (arg1 == NULL_TREE)
4992 g = gimple_build_builtin_unreachable (m_loc);
4993 insert_before (g);
4995 m_gsi = gsi_for_stmt (stmt);
4996 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4997 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4998 insert_before (g);
4999 if (arg1 == NULL_TREE)
5000 g = gimple_build_assign (lhs, PLUS_EXPR,
5001 gimple_phi_result (phi2),
5002 gimple_call_lhs (g));
5003 else
5005 g = gimple_build_assign (make_ssa_name (integer_type_node),
5006 PLUS_EXPR, gimple_phi_result (phi2),
5007 gimple_call_lhs (g));
5008 insert_before (g);
5009 edge e1 = split_block (gimple_bb (stmt), g);
5010 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5011 e2->probability = profile_probability::always ();
5012 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5013 get_immediate_dominator (CDI_DOMINATORS,
5014 e1->src));
5015 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5016 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5017 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5018 m_gsi = gsi_for_stmt (stmt);
5019 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5021 gsi_replace (&m_gsi, g, true);
5022 break;
5023 case IFN_CLRSB:
5024 bb = gsi_bb (m_gsi);
5025 remove_edge (find_edge (bb, edge_bb));
5026 edge e;
5027 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5028 e->probability = profile_probability::always ();
5029 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5030 get_immediate_dominator (CDI_DOMINATORS,
5031 edge_bb));
5032 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5033 edge_bb);
5034 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5035 edge_bb);
5036 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5037 gimple_bb (stmt));
5038 for (unsigned i = 0; i < cnt; i++)
5040 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5041 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5042 UNKNOWN_LOCATION);
5043 tree a = bqp[i].addend;
5044 if (i && kind == bitint_prec_large)
5045 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5046 if (i)
5047 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5049 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5050 UNKNOWN_LOCATION);
5051 m_gsi = gsi_after_labels (edge_bb);
5052 g = gimple_build_call (fndecl, 1,
5053 add_cast (signed_type_for (m_limb_type),
5054 gimple_phi_result (phi1)));
5055 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5056 insert_before (g);
5057 g = gimple_build_assign (make_ssa_name (integer_type_node),
5058 PLUS_EXPR, gimple_call_lhs (g),
5059 gimple_phi_result (phi2));
5060 insert_before (g);
5061 if (kind != bitint_prec_large)
5063 g = gimple_build_assign (make_ssa_name (integer_type_node),
5064 PLUS_EXPR, gimple_assign_lhs (g),
5065 integer_one_node);
5066 insert_before (g);
5068 add_phi_arg (phi3, gimple_assign_lhs (g),
5069 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5070 m_gsi = gsi_for_stmt (stmt);
5071 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5072 gsi_replace (&m_gsi, g, true);
5073 break;
5074 case IFN_PARITY:
5075 g = gimple_build_call (fndecl, 1, res);
5076 gimple_call_set_lhs (g, lhs);
5077 gsi_replace (&m_gsi, g, true);
5078 break;
5079 case IFN_POPCOUNT:
5080 g = gimple_build_assign (lhs, res);
5081 gsi_replace (&m_gsi, g, true);
5082 break;
5083 default:
5084 gcc_unreachable ();
5088 /* Lower a call statement with one or more large/huge _BitInt
5089 arguments or a large/huge _BitInt return value. */
5091 void
5092 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5094 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5095 unsigned int nargs = gimple_call_num_args (stmt);
5096 if (gimple_call_internal_p (stmt))
5097 switch (gimple_call_internal_fn (stmt))
5099 case IFN_ADD_OVERFLOW:
5100 case IFN_SUB_OVERFLOW:
5101 case IFN_UBSAN_CHECK_ADD:
5102 case IFN_UBSAN_CHECK_SUB:
5103 lower_addsub_overflow (obj, stmt);
5104 return;
5105 case IFN_MUL_OVERFLOW:
5106 case IFN_UBSAN_CHECK_MUL:
5107 lower_mul_overflow (obj, stmt);
5108 return;
5109 case IFN_CLZ:
5110 case IFN_CTZ:
5111 case IFN_CLRSB:
5112 case IFN_FFS:
5113 case IFN_PARITY:
5114 case IFN_POPCOUNT:
5115 lower_bit_query (stmt);
5116 return;
5117 default:
5118 break;
5120 for (unsigned int i = 0; i < nargs; ++i)
5122 tree arg = gimple_call_arg (stmt, i);
5123 if (TREE_CODE (arg) != SSA_NAME
5124 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5125 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5126 continue;
5127 if (SSA_NAME_IS_DEFAULT_DEF (arg)
5128 && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
5130 tree var = create_tmp_reg (TREE_TYPE (arg));
5131 arg = get_or_create_ssa_default_def (cfun, var);
5133 else
5135 int p = var_to_partition (m_map, arg);
5136 tree v = m_vars[p];
5137 gcc_assert (v != NULL_TREE);
5138 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5139 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5140 arg = make_ssa_name (TREE_TYPE (arg));
5141 gimple *g = gimple_build_assign (arg, v);
5142 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5144 gimple_call_set_arg (stmt, i, arg);
5145 if (m_preserved == NULL)
5146 m_preserved = BITMAP_ALLOC (NULL);
5147 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5149 tree lhs = gimple_call_lhs (stmt);
5150 if (lhs
5151 && TREE_CODE (lhs) == SSA_NAME
5152 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5153 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5155 int p = var_to_partition (m_map, lhs);
5156 tree v = m_vars[p];
5157 gcc_assert (v != NULL_TREE);
5158 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5159 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5160 gimple_call_set_lhs (stmt, v);
5161 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5163 update_stmt (stmt);
5166 /* Lower __asm STMT which involves large/huge _BitInt values. */
5168 void
5169 bitint_large_huge::lower_asm (gimple *stmt)
5171 gasm *g = as_a <gasm *> (stmt);
5172 unsigned noutputs = gimple_asm_noutputs (g);
5173 unsigned ninputs = gimple_asm_ninputs (g);
5175 for (unsigned i = 0; i < noutputs; ++i)
5177 tree t = gimple_asm_output_op (g, i);
5178 tree s = TREE_VALUE (t);
5179 if (TREE_CODE (s) == SSA_NAME
5180 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5181 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5183 int part = var_to_partition (m_map, s);
5184 gcc_assert (m_vars[part] != NULL_TREE);
5185 TREE_VALUE (t) = m_vars[part];
5188 for (unsigned i = 0; i < ninputs; ++i)
5190 tree t = gimple_asm_input_op (g, i);
5191 tree s = TREE_VALUE (t);
5192 if (TREE_CODE (s) == SSA_NAME
5193 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5194 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5196 int part = var_to_partition (m_map, s);
5197 gcc_assert (m_vars[part] != NULL_TREE);
5198 TREE_VALUE (t) = m_vars[part];
5201 update_stmt (stmt);
5204 /* Lower statement STMT which involves large/huge _BitInt values
5205 into code accessing individual limbs. */
5207 void
5208 bitint_large_huge::lower_stmt (gimple *stmt)
5210 m_first = true;
5211 m_lhs = NULL_TREE;
5212 m_data.truncate (0);
5213 m_data_cnt = 0;
5214 m_gsi = gsi_for_stmt (stmt);
5215 m_after_stmt = NULL;
5216 m_bb = NULL;
5217 m_init_gsi = m_gsi;
5218 gsi_prev (&m_init_gsi);
5219 m_preheader_bb = NULL;
5220 m_upwards_2limb = 0;
5221 m_upwards = false;
5222 m_var_msb = false;
5223 m_cast_conditional = false;
5224 m_bitfld_load = 0;
5225 m_loc = gimple_location (stmt);
5226 if (is_gimple_call (stmt))
5228 lower_call (NULL_TREE, stmt);
5229 return;
5231 if (gimple_code (stmt) == GIMPLE_ASM)
5233 lower_asm (stmt);
5234 return;
5236 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5237 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5238 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5239 bool mergeable_cast_p = false;
5240 bool final_cast_p = false;
5241 if (gimple_assign_cast_p (stmt))
5243 lhs = gimple_assign_lhs (stmt);
5244 tree rhs1 = gimple_assign_rhs1 (stmt);
5245 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5246 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5247 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5248 mergeable_cast_p = true;
5249 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5250 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5251 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5253 final_cast_p = true;
5254 if (TREE_CODE (rhs1) == SSA_NAME
5255 && (m_names == NULL
5256 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5258 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5259 if (is_gimple_assign (g)
5260 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5262 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5263 if (TREE_CODE (rhs2) == SSA_NAME
5264 && (m_names == NULL
5265 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5267 g = SSA_NAME_DEF_STMT (rhs2);
5268 int ovf = optimizable_arith_overflow (g);
5269 if (ovf == 2)
5270 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5271 and IMAGPART_EXPR uses, where the latter is cast to
5272 non-_BitInt, it will be optimized when handling
5273 the REALPART_EXPR. */
5274 return;
5275 if (ovf == 1)
5277 lower_call (NULL_TREE, g);
5278 return;
5285 if (gimple_store_p (stmt))
5287 tree rhs1 = gimple_assign_rhs1 (stmt);
5288 if (TREE_CODE (rhs1) == SSA_NAME
5289 && (m_names == NULL
5290 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5292 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5293 m_loc = gimple_location (g);
5294 lhs = gimple_assign_lhs (stmt);
5295 if (is_gimple_assign (g) && !mergeable_op (g))
5296 switch (gimple_assign_rhs_code (g))
5298 case LSHIFT_EXPR:
5299 case RSHIFT_EXPR:
5300 lower_shift_stmt (lhs, g);
5301 handled:
5302 m_gsi = gsi_for_stmt (stmt);
5303 unlink_stmt_vdef (stmt);
5304 release_ssa_name (gimple_vdef (stmt));
5305 gsi_remove (&m_gsi, true);
5306 return;
5307 case MULT_EXPR:
5308 case TRUNC_DIV_EXPR:
5309 case TRUNC_MOD_EXPR:
5310 lower_muldiv_stmt (lhs, g);
5311 goto handled;
5312 case FIX_TRUNC_EXPR:
5313 lower_float_conv_stmt (lhs, g);
5314 goto handled;
5315 case REALPART_EXPR:
5316 case IMAGPART_EXPR:
5317 lower_cplxpart_stmt (lhs, g);
5318 goto handled;
5319 default:
5320 break;
5322 else if (optimizable_arith_overflow (g) == 3)
5324 lower_call (lhs, g);
5325 goto handled;
5327 m_loc = gimple_location (stmt);
5330 if (mergeable_op (stmt)
5331 || gimple_store_p (stmt)
5332 || gimple_assign_load_p (stmt)
5333 || eq_p
5334 || mergeable_cast_p)
5336 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5337 if (!eq_p)
5338 return;
5340 else if (cmp_code != ERROR_MARK)
5341 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5342 if (cmp_code != ERROR_MARK)
5344 if (gimple_code (stmt) == GIMPLE_COND)
5346 gcond *cstmt = as_a <gcond *> (stmt);
5347 gimple_cond_set_lhs (cstmt, lhs);
5348 gimple_cond_set_rhs (cstmt, boolean_false_node);
5349 gimple_cond_set_code (cstmt, cmp_code);
5350 update_stmt (stmt);
5351 return;
5353 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5355 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5356 boolean_false_node);
5357 gimple_assign_set_rhs1 (stmt, cond);
5358 lhs = gimple_assign_lhs (stmt);
5359 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5360 || (bitint_precision_kind (TREE_TYPE (lhs))
5361 <= bitint_prec_middle));
5362 update_stmt (stmt);
5363 return;
5365 gimple_assign_set_rhs1 (stmt, lhs);
5366 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5367 gimple_assign_set_rhs_code (stmt, cmp_code);
5368 update_stmt (stmt);
5369 return;
5371 if (final_cast_p)
5373 tree lhs_type = TREE_TYPE (lhs);
5374 /* Add support for 3 or more limbs filled in from a normal integral
5375 type if this assert fails. If no target chooses a limb mode smaller
5376 than half of the largest supported normal integral type, this will
5377 not be needed. */
5378 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5379 gimple *g;
5380 if (TREE_CODE (lhs_type) == BITINT_TYPE
5381 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5382 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5383 TYPE_UNSIGNED (lhs_type));
5384 m_data_cnt = 0;
5385 tree rhs1 = gimple_assign_rhs1 (stmt);
5386 tree r1 = handle_operand (rhs1, size_int (0));
5387 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5388 r1 = add_cast (lhs_type, r1);
5389 if (TYPE_PRECISION (lhs_type) > limb_prec)
5391 m_data_cnt = 0;
5392 m_first = false;
5393 tree r2 = handle_operand (rhs1, size_int (1));
5394 r2 = add_cast (lhs_type, r2);
5395 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5396 build_int_cst (unsigned_type_node,
5397 limb_prec));
5398 insert_before (g);
5399 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5400 gimple_assign_lhs (g));
5401 insert_before (g);
5402 r1 = gimple_assign_lhs (g);
5404 if (lhs_type != TREE_TYPE (lhs))
5405 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5406 else
5407 g = gimple_build_assign (lhs, r1);
5408 gsi_replace (&m_gsi, g, true);
5409 return;
5411 if (is_gimple_assign (stmt))
5412 switch (gimple_assign_rhs_code (stmt))
5414 case LSHIFT_EXPR:
5415 case RSHIFT_EXPR:
5416 lower_shift_stmt (NULL_TREE, stmt);
5417 return;
5418 case MULT_EXPR:
5419 case TRUNC_DIV_EXPR:
5420 case TRUNC_MOD_EXPR:
5421 lower_muldiv_stmt (NULL_TREE, stmt);
5422 return;
5423 case FIX_TRUNC_EXPR:
5424 case FLOAT_EXPR:
5425 lower_float_conv_stmt (NULL_TREE, stmt);
5426 return;
5427 case REALPART_EXPR:
5428 case IMAGPART_EXPR:
5429 lower_cplxpart_stmt (NULL_TREE, stmt);
5430 return;
5431 case COMPLEX_EXPR:
5432 lower_complexexpr_stmt (stmt);
5433 return;
5434 default:
5435 break;
5437 gcc_unreachable ();
5440 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5441 the desired memory state. */
5443 void *
5444 vuse_eq (ao_ref *, tree vuse1, void *data)
5446 tree vuse2 = (tree) data;
5447 if (vuse1 == vuse2)
5448 return data;
5450 return NULL;
5453 /* Return true if STMT uses a library function and needs to take
5454 the address of its inputs. We need to avoid bit-fields in those
5455 cases, as their address cannot be taken. */
5457 bool
5458 stmt_needs_operand_addr (gimple *stmt)
5460 if (is_gimple_assign (stmt))
5461 switch (gimple_assign_rhs_code (stmt))
5463 case MULT_EXPR:
5464 case TRUNC_DIV_EXPR:
5465 case TRUNC_MOD_EXPR:
5466 case FLOAT_EXPR:
5467 return true;
5468 default:
5469 break;
5471 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5472 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5473 return true;
5474 return false;
5477 /* Dominator walker used to discover which large/huge _BitInt
5478 loads could be sunk into all their uses. */
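/* E.g. for
     _BitInt(512) x = *p;
     ...
     y = x + z;
   the load can be performed limb by limb directly inside the loop
   lowering the addition, but only if nothing between the load and the
   use may store into *p; the walk below verifies that through the
   virtual operand chains. */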
5480 class bitint_dom_walker : public dom_walker
5482 public:
5483 bitint_dom_walker (bitmap names, bitmap loads)
5484 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5486 edge before_dom_children (basic_block) final override;
5488 private:
5489 bitmap m_names, m_loads;
5492 edge
5493 bitint_dom_walker::before_dom_children (basic_block bb)
5495 gphi *phi = get_virtual_phi (bb);
5496 tree vop;
5497 if (phi)
5498 vop = gimple_phi_result (phi);
5499 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5500 vop = NULL_TREE;
5501 else
5502 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5504 auto_vec<tree, 16> worklist;
5505 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5506 !gsi_end_p (gsi); gsi_next (&gsi))
5508 gimple *stmt = gsi_stmt (gsi);
5509 if (is_gimple_debug (stmt))
5510 continue;
5512 if (!vop && gimple_vuse (stmt))
5513 vop = gimple_vuse (stmt);
5515 tree cvop = vop;
5516 if (gimple_vdef (stmt))
5517 vop = gimple_vdef (stmt);
5519 tree lhs = gimple_get_lhs (stmt);
5520 if (lhs
5521 && TREE_CODE (lhs) == SSA_NAME
5522 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5523 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5524 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5525 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5526 it means it will be handled in a loop or straight line code
5527 at the location of its (ultimate) immediate use, so for
5528 vop checking purposes check these only at the ultimate
5529 immediate use. */
5530 continue;
5532 ssa_op_iter oi;
5533 use_operand_p use_p;
5534 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5536 tree s = USE_FROM_PTR (use_p);
5537 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5538 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5539 worklist.safe_push (s);
5542 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5543 while (worklist.length () > 0)
5545 tree s = worklist.pop ();
5547 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5549 gimple *g = SSA_NAME_DEF_STMT (s);
5550 needs_operand_addr |= stmt_needs_operand_addr (g);
5551 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5553 tree s2 = USE_FROM_PTR (use_p);
5554 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5555 && (bitint_precision_kind (TREE_TYPE (s2))
5556 >= bitint_prec_large))
5557 worklist.safe_push (s2);
5559 continue;
5561 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5562 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5564 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5565 if (TREE_CODE (rhs) == SSA_NAME
5566 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5567 s = rhs;
5568 else
5569 continue;
5571 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5572 continue;
5574 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5575 if (needs_operand_addr
5576 && TREE_CODE (rhs1) == COMPONENT_REF
5577 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5579 tree fld = TREE_OPERAND (rhs1, 1);
5580 /* For little-endian, we can allow as inputs bit-fields
5581 which start at a limb boundary. */
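/* E.g. with 64-bit limbs, in
     struct S { unsigned _BitInt(192) a : 64; unsigned _BitInt(192) b : 100; } s;
   s.a starts at bit 0 and s.b at bit 64, so both start on a limb
   boundary and can be used by address, whereas a bit-field starting
   at, say, bit 36 would have to be copied out first. (Illustrative
   layout only; the actual offsets come from DECL_FIELD_BIT_OFFSET.) */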
5582 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5583 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5584 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5585 % limb_prec) == 0)
5586 ;
5587 else
5589 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5590 continue;
5594 ao_ref ref;
5595 ao_ref_init (&ref, rhs1);
5596 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5597 unsigned limit = 64;
5598 tree vuse = cvop;
5599 if (vop != cvop
5600 && is_gimple_assign (stmt)
5601 && gimple_store_p (stmt)
5602 && !operand_equal_p (lhs,
5603 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5604 0))
5605 vuse = vop;
5606 if (vuse != lvop
5607 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5608 NULL, NULL, limit, lvop) == NULL)
5609 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5613 bb->aux = (void *) vop;
5614 return NULL;
5619 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5620 build_ssa_conflict_graph.
5621 The differences are:
5622 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5623 2) for large/huge _BitInt multiplication/division/modulo process def
5624 only after processing uses rather than before to make uses conflict
5625 with the definition
5626 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5627 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5628 the final statement. */
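/* Reason for 2): the multiplication/division libcalls write the
   result limbs while still reading the operand limbs, so e.g. in
     r = a * b; // large/huge _BitInt
   r must not be coalesced into the same backing array as a or b. */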
5630 void
5631 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5632 ssa_conflicts *graph, bitmap names,
5633 void (*def) (live_track *, tree,
5634 ssa_conflicts *),
5635 void (*use) (live_track *, tree))
5637 bool muldiv_p = false;
5638 tree lhs = NULL_TREE;
5639 if (is_gimple_assign (stmt))
5641 lhs = gimple_assign_lhs (stmt);
5642 if (TREE_CODE (lhs) == SSA_NAME
5643 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5644 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5646 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5647 return;
5648 switch (gimple_assign_rhs_code (stmt))
5650 case MULT_EXPR:
5651 case TRUNC_DIV_EXPR:
5652 case TRUNC_MOD_EXPR:
5653 muldiv_p = true;
5654 default:
5655 break;
5660 ssa_op_iter iter;
5661 tree var;
5662 if (!muldiv_p)
5664 /* For stmts with more than one SSA_NAME definition pretend all the
5665 SSA_NAME outputs but the first one are live at this point, so
5666 that conflicts are added in between all those even when they are
5667 actually not really live after the asm, because expansion might
5668 copy those into pseudos after the asm and if multiple outputs
5669 share the same partition, it might overwrite those that should
5670 be live. E.g.
5671 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (b));
5672 return a;
5673 See PR70593. */
5674 bool first = true;
5675 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5676 if (first)
5677 first = false;
5678 else
5679 use (live, var);
5681 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5682 def (live, var, graph);
5685 auto_vec<tree, 16> worklist;
5686 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5687 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5688 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5690 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5691 use (live, var);
5692 else
5693 worklist.safe_push (var);
5696 while (worklist.length () > 0)
5698 tree s = worklist.pop ();
5699 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5700 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5701 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5703 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5704 use (live, var);
5705 else
5706 worklist.safe_push (var);
5710 if (muldiv_p)
5711 def (live, lhs, graph);
5714 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5715 return the largest bitint_prec_kind of them, otherwise return
5716 bitint_prec_small. */
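/* E.g. if both arguments of an .ADD_OVERFLOW call are _BitInt(256)
   INTEGER_CSTs, this returns the large or huge kind of that type even
   though the call itself mentions no large/huge SSA_NAME. */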
5718 static bitint_prec_kind
5719 arith_overflow_arg_kind (gimple *stmt)
5721 bitint_prec_kind ret = bitint_prec_small;
5722 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5723 switch (gimple_call_internal_fn (stmt))
5725 case IFN_ADD_OVERFLOW:
5726 case IFN_SUB_OVERFLOW:
5727 case IFN_MUL_OVERFLOW:
5728 for (int i = 0; i < 2; ++i)
5730 tree a = gimple_call_arg (stmt, i);
5731 if (TREE_CODE (a) == INTEGER_CST
5732 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5734 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5735 ret = MAX (ret, kind);
5738 break;
5739 default:
5740 break;
5742 return ret;
5745 /* Entry point for _BitInt(N) operation lowering during optimization. */
5747 static unsigned int
5748 gimple_lower_bitint (void)
5750 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5751 limb_prec = 0;
5753 unsigned int i;
5754 for (i = 0; i < num_ssa_names; ++i)
5756 tree s = ssa_name (i);
5757 if (s == NULL)
5758 continue;
5759 tree type = TREE_TYPE (s);
5760 if (TREE_CODE (type) == COMPLEX_TYPE)
5762 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5763 != bitint_prec_small)
5764 break;
5765 type = TREE_TYPE (type);
5767 if (TREE_CODE (type) == BITINT_TYPE
5768 && bitint_precision_kind (type) != bitint_prec_small)
5769 break;
5770 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5771 into memory. Such functions could have no large/huge SSA_NAMEs. */
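/* E.g.
     void f (_BitInt(512) *p) { *p = 123wb; }
   has only a virtual SSA_NAME, yet the store of the 512-bit constant
   still needs lowering. */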
5772 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5774 gimple *g = SSA_NAME_DEF_STMT (s);
5775 if (is_gimple_assign (g) && gimple_store_p (g))
5777 tree t = gimple_assign_rhs1 (g);
5778 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5779 && (bitint_precision_kind (TREE_TYPE (t))
5780 >= bitint_prec_large))
5781 break;
5784 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5785 to floating point types need to be rewritten. */
5786 else if (SCALAR_FLOAT_TYPE_P (type))
5788 gimple *g = SSA_NAME_DEF_STMT (s);
5789 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5791 tree t = gimple_assign_rhs1 (g);
5792 if (TREE_CODE (t) == INTEGER_CST
5793 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5794 && (bitint_precision_kind (TREE_TYPE (t))
5795 != bitint_prec_small))
5796 break;
5800 if (i == num_ssa_names)
5801 return 0;
5803 basic_block bb;
5804 auto_vec<gimple *, 4> switch_statements;
5805 FOR_EACH_BB_FN (bb, cfun)
5807 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5809 tree idx = gimple_switch_index (swtch);
5810 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5811 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5812 continue;
5814 if (optimize)
5815 group_case_labels_stmt (swtch);
5816 switch_statements.safe_push (swtch);
5820 if (!switch_statements.is_empty ())
5822 bool expanded = false;
5823 gimple *stmt;
5824 unsigned int j;
5825 i = 0;
5826 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5828 gswitch *swtch = as_a<gswitch *> (stmt);
5829 tree_switch_conversion::switch_decision_tree dt (swtch);
5830 expanded |= dt.analyze_switch_statement ();
5833 if (expanded)
5835 free_dominance_info (CDI_DOMINATORS);
5836 free_dominance_info (CDI_POST_DOMINATORS);
5837 mark_virtual_operands_for_renaming (cfun);
5838 cleanup_tree_cfg (TODO_update_ssa);
5842 struct bitint_large_huge large_huge;
5843 bool has_large_huge_parm_result = false;
5844 bool has_large_huge = false;
5845 unsigned int ret = 0, first_large_huge = ~0U;
5846 bool edge_insertions = false;
5847 for (; i < num_ssa_names; ++i)
5849 tree s = ssa_name (i);
5850 if (s == NULL)
5851 continue;
5852 tree type = TREE_TYPE (s);
5853 if (TREE_CODE (type) == COMPLEX_TYPE)
5855 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5856 >= bitint_prec_large)
5857 has_large_huge = true;
5858 type = TREE_TYPE (type);
5860 if (TREE_CODE (type) == BITINT_TYPE
5861 && bitint_precision_kind (type) >= bitint_prec_large)
5863 if (first_large_huge == ~0U)
5864 first_large_huge = i;
5865 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5866 gimple_stmt_iterator gsi;
5867 tree_code rhs_code;
5868 /* Unoptimize certain constructs to simpler alternatives to
5869 avoid having to lower all of them. */
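/* E.g. a rotate on _BitInt(N)
     r = x r<< n;
   is replaced here by the equivalent
     r = ((unsigned _BitInt(N)) x << n) | ((unsigned _BitInt(N)) x >> (N - n));
   and ABS/ABSU/MIN/MAX/COND_EXPR by explicit control flow with a PHI,
   so the main lowering only ever sees shifts, bitwise ops and the
   like. (A sketch of the transformations done in this switch.) */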
5870 if (is_gimple_assign (stmt) && gimple_bb (stmt))
5871 switch (rhs_code = gimple_assign_rhs_code (stmt))
5873 default:
5874 break;
5875 case LROTATE_EXPR:
5876 case RROTATE_EXPR:
5878 first_large_huge = 0;
5879 location_t loc = gimple_location (stmt);
5880 gsi = gsi_for_stmt (stmt);
5881 tree rhs1 = gimple_assign_rhs1 (stmt);
5882 tree type = TREE_TYPE (rhs1);
5883 tree n = gimple_assign_rhs2 (stmt), m;
5884 tree p = build_int_cst (TREE_TYPE (n),
5885 TYPE_PRECISION (type));
5886 if (TREE_CODE (n) == INTEGER_CST)
5887 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5888 else
5890 m = make_ssa_name (TREE_TYPE (n));
5891 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5892 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5893 gimple_set_location (g, loc);
5895 if (!TYPE_UNSIGNED (type))
5897 tree utype = build_bitint_type (TYPE_PRECISION (type),
5898 1);
5899 if (TREE_CODE (rhs1) == INTEGER_CST)
5900 rhs1 = fold_convert (utype, rhs1);
5901 else
5903 tree t = make_ssa_name (utype);
5904 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5905 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5906 gimple_set_location (g, loc);
rhs1 = t;
5909 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5910 rhs_code == LROTATE_EXPR
5911 ? LSHIFT_EXPR : RSHIFT_EXPR,
5912 rhs1, n);
5913 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5914 gimple_set_location (g, loc);
5915 tree op1 = gimple_assign_lhs (g);
5916 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5917 rhs_code == LROTATE_EXPR
5918 ? RSHIFT_EXPR : LSHIFT_EXPR,
5919 rhs1, m);
5920 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5921 gimple_set_location (g, loc);
5922 tree op2 = gimple_assign_lhs (g);
5923 tree lhs = gimple_assign_lhs (stmt);
5924 if (!TYPE_UNSIGNED (type))
5926 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5927 BIT_IOR_EXPR, op1, op2);
5928 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5929 gimple_set_location (g, loc);
5930 g = gimple_build_assign (lhs, NOP_EXPR,
5931 gimple_assign_lhs (g));
5933 else
5934 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5935 gsi_replace (&gsi, g, true);
5936 gimple_set_location (g, loc);
5938 break;
5939 case ABS_EXPR:
5940 case ABSU_EXPR:
5941 case MIN_EXPR:
5942 case MAX_EXPR:
5943 case COND_EXPR:
5944 first_large_huge = 0;
5945 gsi = gsi_for_stmt (stmt);
5946 tree lhs = gimple_assign_lhs (stmt);
5947 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5948 location_t loc = gimple_location (stmt);
5949 if (rhs_code == ABS_EXPR)
5950 g = gimple_build_cond (LT_EXPR, rhs1,
5951 build_zero_cst (TREE_TYPE (rhs1)),
5952 NULL_TREE, NULL_TREE);
5953 else if (rhs_code == ABSU_EXPR)
5955 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5956 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5957 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5958 gimple_set_location (g, loc);
5959 g = gimple_build_cond (LT_EXPR, rhs1,
5960 build_zero_cst (TREE_TYPE (rhs1)),
5961 NULL_TREE, NULL_TREE);
5962 rhs1 = rhs2;
5964 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5966 rhs2 = gimple_assign_rhs2 (stmt);
5967 if (TREE_CODE (rhs1) == INTEGER_CST)
5968 std::swap (rhs1, rhs2);
5969 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5970 NULL_TREE, NULL_TREE);
5971 if (rhs_code == MAX_EXPR)
5972 std::swap (rhs1, rhs2);
5974 else
5976 g = gimple_build_cond (NE_EXPR, rhs1,
5977 build_zero_cst (TREE_TYPE (rhs1)),
5978 NULL_TREE, NULL_TREE);
5979 rhs1 = gimple_assign_rhs2 (stmt);
5980 rhs2 = gimple_assign_rhs3 (stmt);
5982 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5983 gimple_set_location (g, loc);
5984 edge e1 = split_block (gsi_bb (gsi), g);
5985 edge e2 = split_block (e1->dest, (gimple *) NULL);
5986 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5987 e3->probability = profile_probability::even ();
5988 e1->flags = EDGE_TRUE_VALUE;
5989 e1->probability = e3->probability.invert ();
5990 if (dom_info_available_p (CDI_DOMINATORS))
5991 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5992 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5994 gsi = gsi_after_labels (e1->dest);
5995 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5996 NEGATE_EXPR, rhs1);
5997 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5998 gimple_set_location (g, loc);
5999 rhs2 = gimple_assign_lhs (g);
6000 std::swap (rhs1, rhs2);
6002 gsi = gsi_for_stmt (stmt);
6003 gsi_remove (&gsi, true);
6004 gphi *phi = create_phi_node (lhs, e2->dest);
6005 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
6006 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
6007 break;
6010 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6011 into memory. Such functions could have no large/huge SSA_NAMEs. */
6012 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6014 gimple *g = SSA_NAME_DEF_STMT (s);
6015 if (is_gimple_assign (g) && gimple_store_p (g))
6017 tree t = gimple_assign_rhs1 (g);
6018 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6019 && (bitint_precision_kind (TREE_TYPE (t))
6020 >= bitint_prec_large))
6021 has_large_huge = true;
6024 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6025 to floating point types need to be rewritten. */
6026 else if (SCALAR_FLOAT_TYPE_P (type))
6028 gimple *g = SSA_NAME_DEF_STMT (s);
6029 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6031 tree t = gimple_assign_rhs1 (g);
6032 if (TREE_CODE (t) == INTEGER_CST
6033 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6034 && (bitint_precision_kind (TREE_TYPE (t))
6035 >= bitint_prec_large))
6036 has_large_huge = true;
6040 for (i = first_large_huge; i < num_ssa_names; ++i)
6042 tree s = ssa_name (i);
6043 if (s == NULL)
6044 continue;
6045 tree type = TREE_TYPE (s);
6046 if (TREE_CODE (type) == COMPLEX_TYPE)
6047 type = TREE_TYPE (type);
6048 if (TREE_CODE (type) == BITINT_TYPE
6049 && bitint_precision_kind (type) >= bitint_prec_large)
6051 use_operand_p use_p;
6052 gimple *use_stmt;
6053 has_large_huge = true;
6054 if (optimize
6055 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
6056 continue;
6057 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6058 the same bb and could be handled in the same loop with the
6059 immediate use. */
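/* E.g. in
     _BitInt(512) tmp = a + b;
     c = tmp + d;
   tmp is defined and used once in the same bb by mergeable statements,
   so a single limb loop can compute c and tmp never needs its own
   backing array. */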
6060 if (optimize
6061 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6062 && single_imm_use (s, &use_p, &use_stmt)
6063 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
6065 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
6067 if (mergeable_op (use_stmt))
6068 continue;
6069 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6070 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6071 continue;
6072 if (gimple_assign_cast_p (use_stmt))
6074 tree lhs = gimple_assign_lhs (use_stmt);
6075 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6076 continue;
6078 else if (gimple_store_p (use_stmt)
6079 && is_gimple_assign (use_stmt)
6080 && !gimple_has_volatile_ops (use_stmt)
6081 && !stmt_ends_bb_p (use_stmt))
6082 continue;
6084 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6086 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6087 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6088 && ((is_gimple_assign (use_stmt)
6089 && (gimple_assign_rhs_code (use_stmt)
6090 != COMPLEX_EXPR))
6091 || gimple_code (use_stmt) == GIMPLE_COND)
6092 && (!gimple_store_p (use_stmt)
6093 || (is_gimple_assign (use_stmt)
6094 && !gimple_has_volatile_ops (use_stmt)
6095 && !stmt_ends_bb_p (use_stmt)))
6096 && (TREE_CODE (rhs1) != SSA_NAME
6097 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6099 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6100 || (bitint_precision_kind (TREE_TYPE (rhs1))
6101 < bitint_prec_large))
6102 continue;
6103 if (is_gimple_assign (use_stmt))
6104 switch (gimple_assign_rhs_code (use_stmt))
6106 case MULT_EXPR:
6107 case TRUNC_DIV_EXPR:
6108 case TRUNC_MOD_EXPR:
6109 case FLOAT_EXPR:
6110 /* Uses which use handle_operand_addr can't
6111 deal with nested casts. */
6112 if (TREE_CODE (rhs1) == SSA_NAME
6113 && gimple_assign_cast_p
6114 (SSA_NAME_DEF_STMT (rhs1))
6115 && has_single_use (rhs1)
6116 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6117 == gimple_bb (SSA_NAME_DEF_STMT (s))))
6118 goto force_name;
6119 break;
6120 default:
6121 break;
6123 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6124 >= TYPE_PRECISION (TREE_TYPE (s)))
6125 && mergeable_op (use_stmt))
6126 continue;
6127 /* Prevent merging a widening non-mergeable cast
6128 on result of some narrower mergeable op
6129 together with later mergeable operations. E.g.
6130 result of _BitInt(223) addition shouldn't be
6131 sign-extended to _BitInt(513) and have another
6132 _BitInt(513) added to it, as handle_plus_minus
6133 with its PHI node handling inside of handle_cast
6134 will not work correctly. An exception is if
6135 use_stmt is a store, this is handled directly
6136 in lower_mergeable_stmt. */
6137 if (TREE_CODE (rhs1) != SSA_NAME
6138 || !has_single_use (rhs1)
6139 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6140 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6141 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6142 || gimple_store_p (use_stmt))
6143 continue;
6144 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6145 < TYPE_PRECISION (TREE_TYPE (s)))
6146 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6148 /* Another exception is if the widening cast is
6149 from mergeable same precision cast from something
6150 not mergeable. */
6151 tree rhs2
6152 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6153 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6154 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6155 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6157 if (TREE_CODE (rhs2) != SSA_NAME
6158 || !has_single_use (rhs2)
6159 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6160 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6161 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6162 continue;
6167 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6168 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6170 case IMAGPART_EXPR:
6172 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6173 rhs1 = TREE_OPERAND (rhs1, 0);
6174 if (TREE_CODE (rhs1) == SSA_NAME)
6176 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6177 if (optimizable_arith_overflow (g))
6178 continue;
6181 /* FALLTHRU */
6182 case LSHIFT_EXPR:
6183 case RSHIFT_EXPR:
6184 case MULT_EXPR:
6185 case TRUNC_DIV_EXPR:
6186 case TRUNC_MOD_EXPR:
6187 case FIX_TRUNC_EXPR:
6188 case REALPART_EXPR:
6189 if (gimple_store_p (use_stmt)
6190 && is_gimple_assign (use_stmt)
6191 && !gimple_has_volatile_ops (use_stmt)
6192 && !stmt_ends_bb_p (use_stmt))
6194 tree lhs = gimple_assign_lhs (use_stmt);
6195 /* As multiplication/division passes the address of the lhs
6196 to a library function which assumes it can extend it to
6197 a whole number of limbs, avoid merging those with
6198 bit-field stores. Don't allow it for shifts etc. either,
6199 so that the bit-field store handling doesn't have to be
6200 done everywhere. */
6201 if (TREE_CODE (lhs) == COMPONENT_REF
6202 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6203 break;
6204 continue;
6206 break;
6207 default:
6208 break;
6212 /* Also ignore uninitialized uses. */
6213 if (SSA_NAME_IS_DEFAULT_DEF (s)
6214 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6215 continue;
6217 force_name:
6218 if (!large_huge.m_names)
6219 large_huge.m_names = BITMAP_ALLOC (NULL);
6220 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6221 if (has_single_use (s))
6223 if (!large_huge.m_single_use_names)
6224 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6225 bitmap_set_bit (large_huge.m_single_use_names,
6226 SSA_NAME_VERSION (s));
6228 if (SSA_NAME_VAR (s)
6229 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6230 && SSA_NAME_IS_DEFAULT_DEF (s))
6231 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6232 has_large_huge_parm_result = true;
6233 if (optimize
6234 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6235 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6236 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6237 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6239 use_operand_p use_p;
6240 imm_use_iterator iter;
6241 bool optimizable_load = true;
6242 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6244 gimple *use_stmt = USE_STMT (use_p);
6245 if (is_gimple_debug (use_stmt))
6246 continue;
6247 if (gimple_code (use_stmt) == GIMPLE_PHI
6248 || is_gimple_call (use_stmt))
6250 optimizable_load = false;
6251 break;
6255 ssa_op_iter oi;
6256 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6257 oi, SSA_OP_USE)
6259 tree s2 = USE_FROM_PTR (use_p);
6260 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6262 optimizable_load = false;
6263 break;
6267 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6269 if (!large_huge.m_loads)
6270 large_huge.m_loads = BITMAP_ALLOC (NULL);
6271 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6275 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6276 into memory. Such functions could have no large/huge SSA_NAMEs. */
6277 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6279 gimple *g = SSA_NAME_DEF_STMT (s);
6280 if (is_gimple_assign (g) && gimple_store_p (g))
6282 tree t = gimple_assign_rhs1 (g);
6283 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6284 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6285 has_large_huge = true;
6290 if (large_huge.m_names || has_large_huge)
6292 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6293 calculate_dominance_info (CDI_DOMINATORS);
6294 if (optimize)
6295 enable_ranger (cfun);
6296 if (large_huge.m_loads)
6298 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6299 entry->aux = NULL;
6300 bitint_dom_walker (large_huge.m_names,
6301 large_huge.m_loads).walk (entry);
6302 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6303 clear_aux_for_blocks ();
6304 BITMAP_FREE (large_huge.m_loads);
6306 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6307 large_huge.m_limb_size
6308 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6310 if (large_huge.m_names)
6312 large_huge.m_map
6313 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6314 coalesce_ssa_name (large_huge.m_map);
6315 partition_view_normal (large_huge.m_map);
6316 if (dump_file && (dump_flags & TDF_DETAILS))
6318 fprintf (dump_file, "After Coalescing:\n");
6319 dump_var_map (dump_file, large_huge.m_map);
6321 large_huge.m_vars
6322 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6323 bitmap_iterator bi;
6324 if (has_large_huge_parm_result)
6325 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6327 tree s = ssa_name (i);
6328 if (SSA_NAME_VAR (s)
6329 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6330 && SSA_NAME_IS_DEFAULT_DEF (s))
6331 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6333 int p = var_to_partition (large_huge.m_map, s);
6334 if (large_huge.m_vars[p] == NULL_TREE)
6336 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6337 mark_addressable (SSA_NAME_VAR (s));
6341 tree atype = NULL_TREE;
6342 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6344 tree s = ssa_name (i);
6345 int p = var_to_partition (large_huge.m_map, s);
6346 if (large_huge.m_vars[p] != NULL_TREE)
6347 continue;
6348 if (atype == NULL_TREE
6349 || !tree_int_cst_equal (TYPE_SIZE (atype),
6350 TYPE_SIZE (TREE_TYPE (s))))
6352 unsigned HOST_WIDE_INT nelts
6353 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6354 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6356 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6357 mark_addressable (large_huge.m_vars[p]);
6361 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6363 gimple_stmt_iterator prev;
6364 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6365 gsi = prev)
6367 prev = gsi;
6368 gsi_prev (&prev);
6369 ssa_op_iter iter;
6370 gimple *stmt = gsi_stmt (gsi);
6371 if (is_gimple_debug (stmt))
6372 continue;
6373 bitint_prec_kind kind = bitint_prec_small;
6374 tree t;
6375 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6376 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6378 bitint_prec_kind this_kind
6379 = bitint_precision_kind (TREE_TYPE (t));
6380 kind = MAX (kind, this_kind);
6382 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6384 t = gimple_assign_rhs1 (stmt);
6385 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6387 bitint_prec_kind this_kind
6388 = bitint_precision_kind (TREE_TYPE (t));
6389 kind = MAX (kind, this_kind);
6392 if (is_gimple_assign (stmt)
6393 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6395 t = gimple_assign_rhs1 (stmt);
6396 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6397 && TREE_CODE (t) == INTEGER_CST)
6399 bitint_prec_kind this_kind
6400 = bitint_precision_kind (TREE_TYPE (t));
6401 kind = MAX (kind, this_kind);
6404 if (is_gimple_call (stmt))
6406 t = gimple_call_lhs (stmt);
6407 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6409 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6410 kind = MAX (kind, this_kind);
6411 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6413 this_kind
6414 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6415 kind = MAX (kind, this_kind);
6419 if (kind == bitint_prec_small)
6420 continue;
6421 switch (gimple_code (stmt))
6423 case GIMPLE_CALL:
6424 /* For now. We'll need to handle some internal functions and
6425 perhaps some builtins. */
6426 if (kind == bitint_prec_middle)
6427 continue;
6428 break;
6429 case GIMPLE_ASM:
6430 if (kind == bitint_prec_middle)
6431 continue;
6432 break;
6433 case GIMPLE_RETURN:
6434 continue;
6435 case GIMPLE_ASSIGN:
6436 if (gimple_clobber_p (stmt))
6437 continue;
6438 if (kind >= bitint_prec_large)
6439 break;
6440 if (gimple_assign_single_p (stmt))
6441 /* No need to lower copies, loads or stores. */
6442 continue;
6443 if (gimple_assign_cast_p (stmt))
6445 tree lhs = gimple_assign_lhs (stmt);
6446 tree rhs = gimple_assign_rhs1 (stmt);
6447 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6448 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6449 && (TYPE_PRECISION (TREE_TYPE (lhs))
6450 == TYPE_PRECISION (TREE_TYPE (rhs))))
6451 /* No need to lower casts to same precision. */
6452 continue;
6454 break;
6455 default:
6456 break;
6459 if (kind == bitint_prec_middle)
6461 tree type = NULL_TREE;
6462 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6463 with the same precision and back. */
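/* E.g. on a target where _BitInt(65) to _BitInt(128) are middle,
     c = a + b; // _BitInt(100)
   becomes roughly
     a2 = (T) a; b2 = (T) b; c2 = a2 + b2; c = (_BitInt(100)) c2;
   where T is the 100-bit INTEGER_TYPE built below; a sketch of the
   shape, not the exact IL. */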
6464 unsigned int nops = gimple_num_ops (stmt);
6465 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6466 i < nops; ++i)
6467 if (tree op = gimple_op (stmt, i))
6469 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6470 if (nop != op)
6471 gimple_set_op (stmt, i, nop);
6472 else if (COMPARISON_CLASS_P (op))
6474 TREE_OPERAND (op, 0)
6475 = maybe_cast_middle_bitint (&gsi,
6476 TREE_OPERAND (op, 0),
6477 type);
6478 TREE_OPERAND (op, 1)
6479 = maybe_cast_middle_bitint (&gsi,
6480 TREE_OPERAND (op, 1),
6481 type);
6483 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6485 CASE_LOW (op)
6486 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6487 type);
6488 CASE_HIGH (op)
6489 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6490 type);
6493 if (tree lhs = gimple_get_lhs (stmt))
6494 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6495 && (bitint_precision_kind (TREE_TYPE (lhs))
6496 == bitint_prec_middle))
6498 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6499 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6500 type = build_nonstandard_integer_type (prec, uns);
6501 tree lhs2 = make_ssa_name (type);
6502 gimple_set_lhs (stmt, lhs2);
6503 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6504 if (stmt_ends_bb_p (stmt))
6506 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6507 gsi_insert_on_edge_immediate (e, g);
6509 else
6510 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6512 update_stmt (stmt);
6513 continue;
6516 if (tree lhs = gimple_get_lhs (stmt))
6517 if (TREE_CODE (lhs) == SSA_NAME)
6519 tree type = TREE_TYPE (lhs);
6520 if (TREE_CODE (type) == COMPLEX_TYPE)
6521 type = TREE_TYPE (type);
6522 if (TREE_CODE (type) == BITINT_TYPE
6523 && bitint_precision_kind (type) >= bitint_prec_large
6524 && (large_huge.m_names == NULL
6525 || !bitmap_bit_p (large_huge.m_names,
6526 SSA_NAME_VERSION (lhs))))
6527 continue;
6530 large_huge.lower_stmt (stmt);
6533 tree atype = NULL_TREE;
6534 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6535 gsi_next (&gsi))
6537 gphi *phi = gsi.phi ();
6538 tree lhs = gimple_phi_result (phi);
6539 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6540 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6541 continue;
6542 int p1 = var_to_partition (large_huge.m_map, lhs);
6543 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6544 tree v1 = large_huge.m_vars[p1];
6545 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6547 tree arg = gimple_phi_arg_def (phi, i);
6548 edge e = gimple_phi_arg_edge (phi, i);
6549 gimple *g;
6550 switch (TREE_CODE (arg))
6552 case INTEGER_CST:
6553 if (integer_zerop (arg) && VAR_P (v1))
6555 tree zero = build_zero_cst (TREE_TYPE (v1));
6556 g = gimple_build_assign (v1, zero);
6557 gsi_insert_on_edge (e, g);
6558 edge_insertions = true;
6559 break;
6561 int ext;
6562 unsigned int min_prec, prec, rem;
6563 tree c;
6564 prec = TYPE_PRECISION (TREE_TYPE (arg));
6565 rem = prec % (2 * limb_prec);
6566 min_prec = bitint_min_cst_precision (arg, ext);
6567 if (min_prec > prec - rem - 2 * limb_prec
6568 && min_prec > (unsigned) limb_prec)
6569 /* A constant which has enough significant bits that it
6570 isn't worth trying to save .rodata space by extending
6571 it from a smaller number. */
6572 min_prec = prec;
6573 else
6574 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
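/* E.g. a huge negative constant with few significant bits is emitted
   into .rodata only up to min_prec; the code below then zeroes the
   remaining limbs through a MEM_REF store when ext == 0, or fills
   them with all-ones via the memset when the value is sign-extended,
   while a constant dense in significant bits is emitted whole through
   tree_output_constant_def. */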
6575 if (min_prec == 0)
6576 c = NULL_TREE;
6577 else if (min_prec == prec)
6578 c = tree_output_constant_def (arg);
6579 else if (min_prec == (unsigned) limb_prec)
6580 c = fold_convert (large_huge.m_limb_type, arg);
6581 else
6583 tree ctype = build_bitint_type (min_prec, 1);
6584 c = tree_output_constant_def (fold_convert (ctype, arg));
6586 if (c)
6588 if (VAR_P (v1) && min_prec == prec)
6590 tree v2 = build1 (VIEW_CONVERT_EXPR,
6591 TREE_TYPE (v1), c);
6592 g = gimple_build_assign (v1, v2);
6593 gsi_insert_on_edge (e, g);
6594 edge_insertions = true;
6595 break;
6597 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6598 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6599 TREE_TYPE (c), v1),
6600 c);
6601 else
6603 unsigned HOST_WIDE_INT nelts
6604 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6605 / limb_prec;
6606 tree vtype
6607 = build_array_type_nelts (large_huge.m_limb_type,
6608 nelts);
6609 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6610 vtype, v1),
6611 build1 (VIEW_CONVERT_EXPR,
6612 vtype, c));
6614 gsi_insert_on_edge (e, g);
6616 if (ext == 0)
6618 unsigned HOST_WIDE_INT nelts
6619 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6620 - min_prec) / limb_prec;
6621 tree vtype
6622 = build_array_type_nelts (large_huge.m_limb_type,
6623 nelts);
6624 tree ptype = build_pointer_type (TREE_TYPE (v1));
6625 tree off;
6626 if (c)
6627 off = fold_convert (ptype,
6628 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6629 else
6630 off = build_zero_cst (ptype);
6631 tree vd = build2 (MEM_REF, vtype,
6632 build_fold_addr_expr (v1), off);
6633 g = gimple_build_assign (vd, build_zero_cst (vtype));
6635 else
6637 tree vd = v1;
6638 if (c)
6640 tree ptype = build_pointer_type (TREE_TYPE (v1));
6641 tree off
6642 = fold_convert (ptype,
6643 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6644 vd = build2 (MEM_REF, large_huge.m_limb_type,
6645 build_fold_addr_expr (v1), off);
6647 vd = build_fold_addr_expr (vd);
6648 unsigned HOST_WIDE_INT nbytes
6649 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6650 if (c)
6651 nbytes
6652 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6653 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6654 g = gimple_build_call (fn, 3, vd,
6655 integer_minus_one_node,
6656 build_int_cst (sizetype,
6657 nbytes));
6659 gsi_insert_on_edge (e, g);
6660 edge_insertions = true;
6661 break;
6662 default:
6663 gcc_unreachable ();
6664 case SSA_NAME:
6665 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6667 if (large_huge.m_names == NULL
6668 || !bitmap_bit_p (large_huge.m_names,
6669 SSA_NAME_VERSION (arg)))
6670 continue;
6672 int p2 = var_to_partition (large_huge.m_map, arg);
6673 if (p1 == p2)
6674 continue;
6675 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6676 tree v2 = large_huge.m_vars[p2];
6677 if (VAR_P (v1) && VAR_P (v2))
6678 g = gimple_build_assign (v1, v2);
6679 else if (VAR_P (v1))
6680 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6681 TREE_TYPE (v1), v2));
6682 else if (VAR_P (v2))
6683 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6684 TREE_TYPE (v2), v1), v2);
6685 else
6687 if (atype == NULL_TREE
6688 || !tree_int_cst_equal (TYPE_SIZE (atype),
6689 TYPE_SIZE (TREE_TYPE (lhs))))
6691 unsigned HOST_WIDE_INT nelts
6692 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6693 / limb_prec;
6694 atype
6695 = build_array_type_nelts (large_huge.m_limb_type,
6696 nelts);
6698 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6699 atype, v1),
6700 build1 (VIEW_CONVERT_EXPR,
6701 atype, v2));
6703 gsi_insert_on_edge (e, g);
6704 edge_insertions = true;
6705 break;
6711 if (large_huge.m_names || has_large_huge)
6713 gimple *nop = NULL;
6714 for (i = 0; i < num_ssa_names; ++i)
6716 tree s = ssa_name (i);
6717 if (s == NULL_TREE)
6718 continue;
6719 tree type = TREE_TYPE (s);
6720 if (TREE_CODE (type) == COMPLEX_TYPE)
6721 type = TREE_TYPE (type);
6722 if (TREE_CODE (type) == BITINT_TYPE
6723 && bitint_precision_kind (type) >= bitint_prec_large)
6725 if (large_huge.m_preserved
6726 && bitmap_bit_p (large_huge.m_preserved,
6727 SSA_NAME_VERSION (s)))
6728 continue;
6729 gimple *g = SSA_NAME_DEF_STMT (s);
6730 if (gimple_code (g) == GIMPLE_NOP)
6732 if (SSA_NAME_VAR (s))
6733 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6734 release_ssa_name (s);
6735 continue;
6737 if (gimple_bb (g) == NULL)
6739 release_ssa_name (s);
6740 continue;
6742 if (gimple_code (g) != GIMPLE_ASM)
6744 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6745 bool save_vta = flag_var_tracking_assignments;
6746 flag_var_tracking_assignments = false;
6747 gsi_remove (&gsi, true);
6748 flag_var_tracking_assignments = save_vta;
6750 if (nop == NULL)
6751 nop = gimple_build_nop ();
6752 SSA_NAME_DEF_STMT (s) = nop;
6753 release_ssa_name (s);
6756 if (optimize)
6757 disable_ranger (cfun);
6760 if (edge_insertions)
6761 gsi_commit_edge_inserts ();
6763 return ret;
6766 namespace {
6768 const pass_data pass_data_lower_bitint =
6770 GIMPLE_PASS, /* type */
6771 "bitintlower", /* name */
6772 OPTGROUP_NONE, /* optinfo_flags */
6773 TV_NONE, /* tv_id */
6774 PROP_ssa, /* properties_required */
6775 PROP_gimple_lbitint, /* properties_provided */
6776 0, /* properties_destroyed */
6777 0, /* todo_flags_start */
6778 0, /* todo_flags_finish */
6781 class pass_lower_bitint : public gimple_opt_pass
6783 public:
6784 pass_lower_bitint (gcc::context *ctxt)
6785 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6788 /* opt_pass methods: */
6789 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6790 unsigned int execute (function *) final override
6792 return gimple_lower_bitint ();
6795 }; // class pass_lower_bitint
6797 } // anon namespace
6799 gimple_opt_pass *
6800 make_pass_lower_bitint (gcc::context *ctxt)
6802 return new pass_lower_bitint (ctxt);
6806 namespace {
6808 const pass_data pass_data_lower_bitint_O0 =
6810 GIMPLE_PASS, /* type */
6811 "bitintlower0", /* name */
6812 OPTGROUP_NONE, /* optinfo_flags */
6813 TV_NONE, /* tv_id */
6814 PROP_cfg, /* properties_required */
6815 PROP_gimple_lbitint, /* properties_provided */
6816 0, /* properties_destroyed */
6817 0, /* todo_flags_start */
6818 0, /* todo_flags_finish */
6821 class pass_lower_bitint_O0 : public gimple_opt_pass
6823 public:
6824 pass_lower_bitint_O0 (gcc::context *ctxt)
6825 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6828 /* opt_pass methods: */
6829 bool gate (function *fun) final override
6831 /* With errors, normal optimization passes are not run. If we don't
6832 lower bitint operations at all, rtl expansion will abort. */
6833 return !(fun->curr_properties & PROP_gimple_lbitint);
6836 unsigned int execute (function *) final override
6838 return gimple_lower_bitint ();
6841 }; // class pass_lower_bitint_O0
6843 } // anon namespace
6845 gimple_opt_pass *
6846 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6848 return new pass_lower_bitint_O0 (ctxt);