/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"

/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
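
/* For illustration only (the exact boundaries depend on the target's
   bitint ABI): with a 64-bit limb and MAX_FIXED_MODE_SIZE of 128 as on
   x86_64, _BitInt(63) is small, _BitInt(100) middle, _BitInt(200) large
   and _BitInt(512) huge.  */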

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
        huge_min_prec = 4 * limb_prec;
      else
        huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
        mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}

/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
         in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
        {
          ext = -1;
          return min_prec2;
        }
    }
  return min_prec;
}
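
/* Illustrative values (not lifted from a testcase): for a signed -5 in
   a large _BitInt type, wi::min_precision (w, SIGNED) is 4 ("1011"), so
   min_prec is 3 after dropping the sign bit and EXT is -1, i.e. all
   higher limbs are all-ones.  For unsigned 7, min_prec is 3 and EXT
   is 0.  */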

namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
        return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}

/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
        tree cnt = gimple_assign_rhs2 (stmt);
        if (tree_fits_uhwi_p (cnt)
            && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
          return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
        tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
        tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
            && TREE_CODE (lhs_type) == BITINT_TYPE
            && TREE_CODE (rhs_type) == BITINT_TYPE
            && bitint_precision_kind (lhs_type) >= bitint_prec_large
            && bitint_precision_kind (rhs_type) >= bitint_prec_large
            && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
                == CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
          {
            if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
              return true;
            if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
              return true;
            if (bitint_precision_kind (lhs_type) == bitint_prec_large)
              return true;
          }
        break;
      }
    default:
      break;
    }
  return false;
}
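
/* E.g. with a 64-bit limb, in
     x = a + b;  y = x ^ c;
   both statements are mergeable, so each limb of y can be computed in a
   single pass over the limbs of a, b and c, while x = a << 70 is not
   mergeable because the shift count is >= limb_prec.  */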

/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
          || gimple_bb (use_stmt) != gimple_bb (stmt)
          || !gimple_store_p (use_stmt)
          || !is_gimple_assign (use_stmt)
          || gimple_has_volatile_ops (use_stmt)
          || stmt_ends_bb_p (use_stmt))
        return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
        continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
        return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
        {
          if ((seen & 1) != 0)
            return 0;
          seen |= 1;
          realpart = g;
        }
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
        {
          if ((seen & 2) != 0)
            return 0;
          seen |= 2;

          use_operand_p use2_p;
          gimple *use_stmt;
          tree lhs2 = gimple_assign_lhs (g);
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
            return 0;
          if (!single_imm_use (lhs2, &use2_p, &use_stmt)
              || gimple_bb (use_stmt) != gimple_bb (stmt)
              || !gimple_assign_cast_p (use_stmt))
            return 0;

          lhs2 = gimple_assign_lhs (use_stmt);
          if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
              || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
            return 0;
          cast = use_stmt;
        }
      else
        return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
         if both appear, the lowering wants to emit all the code
         at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
            return 0;
          if (gsi_stmt (gsi) == stmt)
            return 2;
          /* If realpart is too far from stmt, punt as well.
             Usually it will appear right after it.  */
          if (++cnt == 32)
            return 0;
        }
      while (1);
    }
  return 1;
}
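
/* The expected shape, sketched in GIMPLE-like pseudocode (the names are
   illustrative):
     _1 = .ADD_OVERFLOW (a, b);	// _Complex _BitInt(N) lhs
     res = REALPART_EXPR <_1>;	// optional, at most one, same bb
     _2 = IMAGPART_EXPR <_1>;	// exactly one, same bb
     ovf = (int) _2;		// single use, cast to non-_BitInt
   This returns 2 because of the REALPART_EXPR use, 1 without it.  */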

/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
          || TREE_CODE_CLASS (code) == tcc_binary)
        op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}

/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
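
/* Illustrative m_data protocol trace (not from an actual dump): when
   lowering x + 123 for the first limb, handle_operand of the
   INTEGER_CST runs with m_first set, pushes two trees and bumps
   m_data_cnt by 2; for each following limb the same call runs with
   m_first clear and reads m_data[m_data_cnt] / m_data[m_data_cnt + 1]
   instead, bumping m_data_cnt by the same 2 so all users stay in
   sync.  */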

bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}

/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}

/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ltype = m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  if (as != TYPE_ADDR_SPACE (ltype))
    ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				  | ENCODE_QUAL_ADDR_SPACE (as));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, ltype, TREE_OPERAND (var, 0),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}

/* Emit a half diamond,
   if (COND)
   |\
   | \
   |  \
   | new_bb1
   |  /
   | /
   |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}

/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
  new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}

/* Emit a half diamond with full diamond in it
   if (COND1)
   |\
   | \
   |  \
   | if (COND2)
   |    /  \
   |   /    \
   |new_bb1 new_bb2
   |   |    /
   \   |   /
    \  |  /
     \ | /
      \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}

/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  g = gimple_build_cond (LT_EXPR, idx,
				 size_int (min_prec / limb_prec),
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      c = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}

/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}

/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
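
/* With the uaddc5/usubc5 optabs available, one limb step of an addition
   expands to roughly the following GIMPLE (illustrative):
     _1 = .UADDC (a_limb, b_limb, carry_in);
     sum_limb = REALPART_EXPR <_1>;
     carry_out = IMAGPART_EXPR <_1>;
   otherwise two chained .ADD_OVERFLOW calls propagate the carry through
   m_data.  */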

/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
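
/* I.e. for 0 < CNT < limb_prec each produced limb is effectively
     lhs = (rhs1 << CNT) | (prev_limb >> (limb_prec - CNT));
   where prev_limb's high bits arrive through DATA_IN and this limb's
   high bits are saved into DATA_OUT for the next, more significant
   iteration.  */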

/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision or if not m_upwards_2limb and lhs_type
	     has precision which is multiple of limb_prec that is
	     not the case.  */
	  || (!m_var_msb
	      && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
	      && ((!m_upwards_2limb
		   && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
		  || (m_upwards_2limb
		      && (m_upwards_2limb * limb_prec
			  < TYPE_PRECISION (lhs_type))))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && low >= m_upwards_2limb - m_first)
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  tree idxc = idx;
	  if (!single_comparison
	      && m_upwards_2limb
	      && !m_first
	      && low + 1 == m_upwards_2limb)
	    /* In this case we know that idx <= low always,
	       so effectively we just need a single comparison,
	       idx < low or idx == low, but we'd need to emit different
	       code for the 2 branches than single_comparison normally
	       emits.  So, instead of special-casing that, emit a
	       low <= low comparison which cfg cleanup will clean up
	       at the end of the pass.  */
	    idxc = size_int (low);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idxc, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      if (!save_cast_conditional)
		{
		  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
		  insert_before (g);
		}
	      else
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = (e2 ? m_data[save_data_cnt + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else if (m_bb && m_data[save_data_cnt])
		t = m_data[save_data_cnt];
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1, rext);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2, rext);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
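
/* E.g. (illustrative, with a 64-bit limb): casting _BitInt(128) to
   _BitInt(256) copies limbs 0 and 1 and fills limbs 2 and 3 with zero
   for an unsigned source, or with the broadcast sign bit of limb 1
   otherwise; "low" above is the last limb needing no special
   processing and "high" the first limb guaranteed to be all
   extension.  */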

/* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
   is an older EH edge, and except for virtual PHIs duplicate the
   PHI argument from the EH_EDGE to the new EH edge.  */

static void
add_eh_edge (basic_block src, edge eh_edge)
{
  edge e = make_edge (src, eh_edge->dest, EDGE_EH);
  e->probability = profile_probability::very_unlikely ();
  for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree lhs = gimple_phi_result (phi);
      if (virtual_operand_p (lhs))
	continue;
      const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
      add_phi_arg (phi, arg->def, e, arg->locus);
    }
}

/* Helper function for handle_stmt method, handle a load from memory.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
	  break;
    }
  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
	goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
	 handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	goto normal_load;
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
	{
	  bitoffset = 0;
	  var_field_off = true;
	}
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      if (m_first)
	{
	  if (m_upwards)
	    {
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
	      tree iv = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv, t);
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_edge)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      add_eh_edge (e->src, eh_edge);
		      m_gsi = gsi_after_labels (e->dest);
		      if (gsi_bb (save_gsi) == e->src)
			{
			  if (gsi_end_p (save_gsi))
			    save_gsi = gsi_end_bb (e->dest);
			  else
			    save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
			}
		      if (m_preheader_bb == e->src)
			m_preheader_bb = e->dest;
		    }
		}
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	      tree out;
	      prepare_data_in_out (iv, idx, &out);
	      out = m_data[m_data_cnt];
	      m_data.safe_push (out);
	    }
	  else
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
	{
	  gcc_assert (!m_bitfld_load);
	  m_bitfld_load = m_data_cnt;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  gcc_assert (i * limb_prec < prec);
	  nidx1 = size_int (i + bo_idx + 1);
	  if ((i + 1) * limb_prec > prec)
	    {
	      prec %= limb_prec;
	      if (prec + bo_bit <= (unsigned) limb_prec)
		nidx1 = NULL_TREE;
	    }
	  if (!iv)
	    nidx0 = size_int (i + bo_idx);
	}
      else
	{
	  if (!iv)
	    {
	      if (bo_idx == 0)
		nidx0 = idx;
	      else
		{
		  nidx0 = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  nidx1 = make_ssa_name (sizetype);
	  g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
				   size_int (bo_idx + 1));
	  insert_before (g);
	}

      tree iv2 = NULL_TREE;
      if (nidx0)
	{
	  tree t = limb_access (rhs_type, nrhs1, nidx0, true);
	  iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	  insert_before (g);
	  gcc_assert (!eh);
	}
      if (nidx1)
	{
	  bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  if (conditional)
	    {
	      if ((prec % limb_prec) == 0
		  || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
		conditional = false;
	    }
	  edge edge_true = NULL, edge_false = NULL;
	  if (conditional)
	    {
	      g = gimple_build_cond (NE_EXPR, idx,
				     size_int (prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      if_then (g, profile_probability::likely (),
		       edge_true, edge_false);
	    }
	  tree t = limb_access (rhs_type, nrhs1, nidx1, true);
	  if (m_upwards_2limb
	      && !m_first
	      && !m_bitfld_load
	      && !tree_fits_uhwi_p (idx))
	    iv2 = m_data[m_data_cnt + 1];
	  else
	    iv2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv2, t);
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_edge)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, eh_edge);
		}
	    }
	  if (conditional)
	    {
	      tree iv3 = make_ssa_name (m_limb_type);
	      if (eh)
		edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
	      gphi *phi = create_phi_node (iv3, edge_true->dest);
	      add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, build_zero_cst (m_limb_type),
			   edge_false, UNKNOWN_LOCATION);
1915 m_gsi = gsi_after_labels (edge_true->dest);
1918 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1919 iv, build_int_cst (unsigned_type_node, bo_bit));
1920 insert_before (g);
1921 iv = gimple_assign_lhs (g);
1922 if (iv2)
1924 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1925 iv2, build_int_cst (unsigned_type_node,
1926 limb_prec - bo_bit));
1927 insert_before (g);
1928 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1929 gimple_assign_lhs (g), iv);
1930 insert_before (g);
1931 iv = gimple_assign_lhs (g);
1932 if (m_data[m_data_cnt])
1933 m_data[m_data_cnt] = iv2;
1935 if (tree_fits_uhwi_p (idx))
1937 tree atype = limb_access_type (rhs_type, idx);
1938 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1939 iv = add_cast (atype, iv);
1941 m_data_cnt += 3;
1942 return iv;
1945 normal_load:
1946 /* Use write_p = true for loads with EH edges to make
1947    sure limb_access doesn't add a cast as a separate
1948    statement after it.  */
1949 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1950 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1951 g = gimple_build_assign (ret, rhs1);
1952 insert_before (g);
1953 if (eh)
1955 maybe_duplicate_eh_stmt (g, stmt);
1956 if (eh_edge)
1958 edge e = split_block (gsi_bb (m_gsi), g);
1959 m_gsi = gsi_after_labels (e->dest);
1960 add_eh_edge (e->src, eh_edge);
1962 if (tree_fits_uhwi_p (idx))
1964 tree atype = limb_access_type (rhs_type, idx);
1965 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1966 ret = add_cast (atype, ret);
1969 return ret;
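/* Editorial sketch, not part of the original sources: for a bit-field
   load whose representative starts BO_BIT bits into limb BO_IDX, the
   code above assembles limb IDX of the result roughly as

     iv  = repr_limbs[bo_idx + idx];
     iv2 = repr_limbs[bo_idx + idx + 1];   // only when more bits are needed
     res = (iv >> bo_bit) | (iv2 << (limb_prec - bo_bit));

   with the already-read limb cached in m_data across limbs so each
   representative limb is loaded only once where possible.  */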
1972 /* Return limb IDX of the result of mergeable statement STMT.  */
1974 tree
1975 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1977 tree lhs, rhs1, rhs2 = NULL_TREE;
1978 gimple *g;
1979 switch (gimple_code (stmt))
1981 case GIMPLE_ASSIGN:
1982 if (gimple_assign_load_p (stmt))
1983 return handle_load (stmt, idx);
1984 switch (gimple_assign_rhs_code (stmt))
1986 case BIT_AND_EXPR:
1987 case BIT_IOR_EXPR:
1988 case BIT_XOR_EXPR:
1989 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1990 /* FALLTHRU */
1991 case BIT_NOT_EXPR:
1992 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1993 lhs = make_ssa_name (TREE_TYPE (rhs1));
1994 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1995 rhs1, rhs2);
1996 insert_before (g);
1997 return lhs;
1998 case PLUS_EXPR:
1999 case MINUS_EXPR:
2000 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2001 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2002 return handle_plus_minus (gimple_assign_rhs_code (stmt),
2003 rhs1, rhs2, idx);
2004 case NEGATE_EXPR:
2005 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2006 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
2007 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
2008 case LSHIFT_EXPR:
2009 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
2010 idx),
2011 gimple_assign_rhs2 (stmt), idx);
2012 case SSA_NAME:
2013 case INTEGER_CST:
2014 return handle_operand (gimple_assign_rhs1 (stmt), idx);
2015 CASE_CONVERT:
2016 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2017 gimple_assign_rhs1 (stmt), idx);
2018 case VIEW_CONVERT_EXPR:
2019 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2020 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
2021 idx);
2022 default:
2023 break;
2025 break;
2026 default:
2027 break;
2029 gcc_unreachable ();
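/* Editorial example, not part of the original sources: for a mergeable
   statement such as

     _BitInt(256) c = a + b;

   handle_stmt is invoked once per limb index IDX by the mergeable
   lowering below, so with 64-bit limbs the whole statement becomes,
   conceptually,

     for (idx = 0; idx < 4; ++idx)
       c[idx] = a[idx] + b[idx] + carry;   // carry chained via m_data

   where the carry chaining is done by handle_plus_minus.  */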
2032 /* Return the minimum precision of OP at STMT.
2033    A positive value is the minimum precision above which all bits
2034    are zero; a negative value means all bits above the negation of
2035    the value are copies of the sign bit.  */
2037 static int
2038 range_to_prec (tree op, gimple *stmt)
2040 int_range_max r;
2041 wide_int w;
2042 tree type = TREE_TYPE (op);
2043 unsigned int prec = TYPE_PRECISION (type);
2045 if (!optimize
2046 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2047 || r.undefined_p ())
2049 if (TYPE_UNSIGNED (type))
2050 return prec;
2051 else
2052 return MIN ((int) -prec, -2);
2055 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2057 w = r.lower_bound ();
2058 if (wi::neg_p (w))
2060 int min_prec1 = wi::min_precision (w, SIGNED);
2061 w = r.upper_bound ();
2062 int min_prec2 = wi::min_precision (w, SIGNED);
2063 int min_prec = MAX (min_prec1, min_prec2);
2064 return MIN (-min_prec, -2);
2068 w = r.upper_bound ();
2069 int min_prec = wi::min_precision (w, UNSIGNED);
2070 return MAX (min_prec, 1);
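/* Editorial model, not part of the original sources: a standalone
   illustration of the convention above for ordinary 64-bit bounds,
   assuming two's complement arithmetic and bounds well inside the
   64-bit range.  E.g. range_to_prec_model (0, 255) == 8 and
   range_to_prec_model (-4, 7) == -4.  */

static inline int
range_to_prec_model (long long lb, long long ub)
{
  if (lb < 0)
    {
      /* Signed: smallest P >= 2 such that both bounds fit in a P-bit
	 signed value; bits above P are copies of the sign bit.  */
      int p = 2;
      while (lb < -(1LL << (p - 1)) || ub >= (1LL << (p - 1)))
	p++;
      return -p;
    }
  /* Unsigned: smallest P >= 1 with UB < 2^P; bits above P are zero.  */
  int p = 1;
  while (ub >= (1LL << p))
    p++;
  return p;
}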
2073 /* Return the address of the first limb of OP and write its precision
2074    into *PREC.  If *PREC is positive, the operand is zero-extended
2075    from that precision; if it is negative, the operand is sign-extended
2076    from -*PREC.  If PREC_STORED is NULL, this is the toplevel call;
2077    otherwise *PREC_STORED is the precision from the innermost call
2078    without range optimizations.  */
2080 tree
2081 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2082 int *prec_stored, int *prec)
2084 wide_int w;
2085 location_t loc_save = m_loc;
2086 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2087 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2088 && TREE_CODE (op) != INTEGER_CST)
2090 do_int:
2091 *prec = range_to_prec (op, stmt);
2092 bitint_prec_kind kind = bitint_prec_small;
2093 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2094 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2095 kind = bitint_precision_kind (TREE_TYPE (op));
2096 if (kind == bitint_prec_middle)
2098 tree type = NULL_TREE;
2099 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2101 tree op_type = TREE_TYPE (op);
2102 unsigned HOST_WIDE_INT nelts
2103 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2104 /* Add support for 3 or more limbs filled in from a normal
2105    integral type if this assert fails.  As long as no target
2106    chooses a limb mode smaller than half of the largest supported
2107    normal integral type, this will not be needed.  */
2108 gcc_assert (nelts <= 2);
2109 if (prec_stored)
2110 *prec_stored = (TYPE_UNSIGNED (op_type)
2111 ? TYPE_PRECISION (op_type)
2112 : -TYPE_PRECISION (op_type));
2113 if (*prec <= limb_prec && *prec >= -limb_prec)
2115 nelts = 1;
2116 if (prec_stored)
2118 if (TYPE_UNSIGNED (op_type))
2120 if (*prec_stored > limb_prec)
2121 *prec_stored = limb_prec;
2123 else if (*prec_stored < -limb_prec)
2124 *prec_stored = -limb_prec;
2127 tree atype = build_array_type_nelts (m_limb_type, nelts);
2128 tree var = create_tmp_var (atype);
2129 tree t1 = op;
2130 if (!useless_type_conversion_p (m_limb_type, op_type))
2131 t1 = add_cast (m_limb_type, t1);
2132 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2133 NULL_TREE, NULL_TREE);
2134 gimple *g = gimple_build_assign (v, t1);
2135 insert_before (g);
2136 if (nelts > 1)
2138 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2139 g = gimple_build_assign (make_ssa_name (op_type),
2140 RSHIFT_EXPR, op, lp);
2141 insert_before (g);
2142 tree t2 = gimple_assign_lhs (g);
2143 t2 = add_cast (m_limb_type, t2);
2144 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2145 NULL_TREE, NULL_TREE);
2146 g = gimple_build_assign (v, t2);
2147 insert_before (g);
2149 tree ret = build_fold_addr_expr (var);
2150 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2152 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2153 g = gimple_build_assign (var, clobber);
2154 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2156 m_loc = loc_save;
2157 return ret;
2159 switch (TREE_CODE (op))
2161 case SSA_NAME:
2162 if (m_names == NULL
2163 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2165 gimple *g = SSA_NAME_DEF_STMT (op);
2166 tree ret;
2167 m_loc = gimple_location (g);
2168 if (gimple_assign_load_p (g))
2170 *prec = range_to_prec (op, NULL);
2171 if (prec_stored)
2172 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2173 ? TYPE_PRECISION (TREE_TYPE (op))
2174 : -TYPE_PRECISION (TREE_TYPE (op)));
2175 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2176 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2177 NULL_TREE, true, GSI_SAME_STMT);
2179 else if (gimple_code (g) == GIMPLE_NOP)
2181 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2182 if (prec_stored)
2183 *prec_stored = *prec;
2184 tree var = create_tmp_var (m_limb_type);
2185 TREE_ADDRESSABLE (var) = 1;
2186 ret = build_fold_addr_expr (var);
2187 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2189 tree clobber = build_clobber (m_limb_type,
2190 CLOBBER_STORAGE_END);
2191 g = gimple_build_assign (var, clobber);
2192 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2195 else
2197 gcc_assert (gimple_assign_cast_p (g));
2198 tree rhs1 = gimple_assign_rhs1 (g);
2199 bitint_prec_kind kind = bitint_prec_small;
2200 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2201 rhs1 = TREE_OPERAND (rhs1, 0);
2202 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2203 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2204 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2205 if (kind >= bitint_prec_large)
2207 tree lhs_type = TREE_TYPE (op);
2208 tree rhs_type = TREE_TYPE (rhs1);
2209 int prec_stored_val = 0;
2210 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2211 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2213 if (TYPE_UNSIGNED (lhs_type)
2214 && !TYPE_UNSIGNED (rhs_type))
2215 gcc_assert (*prec >= 0 || prec_stored == NULL);
2217 else
2219 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2221 else if (TYPE_UNSIGNED (lhs_type))
2223 gcc_assert (*prec > 0
2224 || prec_stored_val > 0
2225 || (-prec_stored_val
2226 >= TYPE_PRECISION (lhs_type)));
2227 *prec = TYPE_PRECISION (lhs_type);
2229 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2231 else
2232 *prec = -TYPE_PRECISION (lhs_type);
2235 else
2237 op = rhs1;
2238 stmt = g;
2239 goto do_int;
2242 m_loc = loc_save;
2243 return ret;
2245 else
2247 int p = var_to_partition (m_map, op);
2248 gcc_assert (m_vars[p] != NULL_TREE);
2249 *prec = range_to_prec (op, stmt);
2250 if (prec_stored)
2251 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2252 ? TYPE_PRECISION (TREE_TYPE (op))
2253 : -TYPE_PRECISION (TREE_TYPE (op)));
2254 return build_fold_addr_expr (m_vars[p]);
2256 case INTEGER_CST:
2257 unsigned int min_prec, mp;
2258 tree type;
2259 w = wi::to_wide (op);
2260 if (tree_int_cst_sgn (op) >= 0)
2262 min_prec = wi::min_precision (w, UNSIGNED);
2263 *prec = MAX (min_prec, 1);
2265 else
2267 min_prec = wi::min_precision (w, SIGNED);
2268 *prec = MIN ((int) -min_prec, -2);
2270 mp = CEIL (min_prec, limb_prec) * limb_prec;
2271 if (mp == 0)
2272 mp = 1;
2273 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2274 && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2275 || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2276 type = TREE_TYPE (op);
2277 else
2278 type = build_bitint_type (mp, 1);
2279 if (TREE_CODE (type) != BITINT_TYPE
2280 || bitint_precision_kind (type) == bitint_prec_small)
2282 if (TYPE_PRECISION (type) <= limb_prec)
2283 type = m_limb_type;
2284 else
2286 while (bitint_precision_kind (mp) == bitint_prec_small)
2287 mp += limb_prec;
2288 /* This case is for targets which e.g. have a 64-bit
2289    limb but categorize _BitInts of up to 128 bits as
2290    small.  We could use the type of m_limb_type[2] or
2291    similar instead to save space.  */
2292 type = build_bitint_type (mp, 1);
2295 if (prec_stored)
2297 if (tree_int_cst_sgn (op) >= 0)
2298 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2299 else
2300 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2302 op = tree_output_constant_def (fold_convert (type, op));
2303 return build_fold_addr_expr (op);
2304 default:
2305 gcc_unreachable ();
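/* Editorial example, not part of the original sources: with 64-bit
   limbs, calling handle_operand_addr on the INTEGER_CST 2^64 sets
   *PREC to 65 (the value is zero-extended above bit 65), materializes
   the constant as a read-only two-limb array { 0, 1 } via
   tree_output_constant_def, and returns the address of that array.  */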
2309 /* Helper function: create a loop before the current location,
2310    starting with the sizetype value INIT on the preheader edge.  Return
2311    the PHI result and set *IDX_NEXT to the SSA_NAME it creates and uses
2312    on the latch edge.  */
2314 tree
2315 bitint_large_huge::create_loop (tree init, tree *idx_next)
2317 if (!gsi_end_p (m_gsi))
2318 gsi_prev (&m_gsi);
2319 else
2320 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2321 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2322 edge e2 = split_block (e1->dest, (gimple *) NULL);
2323 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2324 e3->probability = profile_probability::very_unlikely ();
2325 e2->flags = EDGE_FALSE_VALUE;
2326 e2->probability = e3->probability.invert ();
2327 tree idx = make_ssa_name (sizetype);
2328 gphi *phi = create_phi_node (idx, e1->dest);
2329 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2330 *idx_next = make_ssa_name (sizetype);
2331 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2332 m_gsi = gsi_after_labels (e1->dest);
2333 m_bb = e1->dest;
2334 m_preheader_bb = e1->src;
2335 class loop *loop = alloc_loop ();
2336 loop->header = e1->dest;
2337 add_loop (loop, e1->src->loop_father);
2338 return idx;
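/* Editorial sketch, not part of the original sources: the control flow
   built above corresponds to

     size_t idx = INIT;             // PHI on the new loop header
     do
       {
	 ... statements subsequently inserted at m_gsi ...
	 idx = idx_next;            // latch value of the PHI
       }
     while (<exit condition emitted later by the caller>);

   i.e. the loop body and its exit test are filled in afterwards.  */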
2341 /* Lower a mergeable or similar large/huge _BitInt statement STMT which can
2342    be lowered using iteration from the least significant limb up to the most
2343    significant limb.  For large _BitInt it is emitted as straight line code
2344    before the current location, for huge _BitInt as a loop handling two limbs
2345    at once, followed by handling the remaining limbs in straight line code
2346    (at most one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
2347    comparisons, in which case CMP_CODE should be the comparison code and
2348    CMP_OP1/CMP_OP2 the comparison operands.  */
2350 tree
2351 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2352 tree cmp_op1, tree cmp_op2)
2354 bool eq_p = cmp_code != ERROR_MARK;
2355 tree type;
2356 if (eq_p)
2357 type = TREE_TYPE (cmp_op1);
2358 else
2359 type = TREE_TYPE (gimple_assign_lhs (stmt));
2360 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2361 bitint_prec_kind kind = bitint_precision_kind (type);
2362 gcc_assert (kind >= bitint_prec_large);
2363 gimple *g;
2364 tree lhs = gimple_get_lhs (stmt);
2365 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2366 if (lhs
2367 && TREE_CODE (lhs) == SSA_NAME
2368 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2369 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2371 int p = var_to_partition (m_map, lhs);
2372 gcc_assert (m_vars[p] != NULL_TREE);
2373 m_lhs = lhs = m_vars[p];
2375 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2376 bool sext = false;
2377 tree ext = NULL_TREE, store_operand = NULL_TREE;
2378 bool eh = false;
2379 basic_block eh_pad = NULL;
2380 tree nlhs = NULL_TREE;
2381 unsigned HOST_WIDE_INT bo_idx = 0;
2382 unsigned HOST_WIDE_INT bo_bit = 0;
2383 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2384 if (gimple_store_p (stmt))
2386 store_operand = gimple_assign_rhs1 (stmt);
2387 eh = stmt_ends_bb_p (stmt);
2388 if (eh)
2390 edge e;
2391 edge_iterator ei;
2392 basic_block bb = gimple_bb (stmt);
2394 FOR_EACH_EDGE (e, ei, bb->succs)
2395 if (e->flags & EDGE_EH)
2397 eh_pad = e->dest;
2398 break;
2401 if (TREE_CODE (lhs) == COMPONENT_REF
2402 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2404 tree fld = TREE_OPERAND (lhs, 1);
2405 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2406 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2407 poly_int64 bitoffset;
2408 poly_uint64 field_offset, repr_offset;
2409 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2410 nlhs = lhs;
2411 else
2413 bool var_field_off = false;
2414 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2415 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2416 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2417 else
2419 bitoffset = 0;
2420 var_field_off = true;
2422 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2423 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2424 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2425 TREE_OPERAND (lhs, 0), repr,
2426 var_field_off
2427 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2428 HOST_WIDE_INT bo = bitoffset.to_constant ();
2429 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2430 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2434 if ((store_operand
2435 && TREE_CODE (store_operand) == SSA_NAME
2436 && (m_names == NULL
2437 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2438 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2439 || gimple_assign_cast_p (stmt))
2441 rhs1 = gimple_assign_rhs1 (store_operand
2442 ? SSA_NAME_DEF_STMT (store_operand)
2443 : stmt);
2444 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2445 rhs1 = TREE_OPERAND (rhs1, 0);
2446 /* Optimize mergeable ops ending with a widening cast to _BitInt
2447    (or followed by a store).  We can lower just the limbs of the
2448    cast operand and widen afterwards.  */
2449 if (TREE_CODE (rhs1) == SSA_NAME
2450 && (m_names == NULL
2451 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2452 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2453 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2454 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2455 limb_prec) < CEIL (prec, limb_prec)
2456 || (kind == bitint_prec_huge
2457 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2459 store_operand = rhs1;
2460 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2461 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2462 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2463 sext = true;
2466 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2467 if (kind == bitint_prec_large)
2468 cnt = CEIL (prec, limb_prec);
2469 else
2471 rem = (prec % (2 * limb_prec));
2472 end = (prec - rem) / limb_prec;
2473 cnt = 2 + CEIL (rem, limb_prec);
2474 idx = idx_first = create_loop (size_zero_node, &idx_next);
2477 basic_block edge_bb = NULL;
2478 if (eq_p)
2480 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2481 gsi_prev (&gsi);
2482 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2483 edge_bb = e->src;
2484 if (kind == bitint_prec_large)
2485 m_gsi = gsi_end_bb (edge_bb);
2487 else
2488 m_after_stmt = stmt;
2489 if (kind != bitint_prec_large)
2490 m_upwards_2limb = end;
2491 m_upwards = true;
2493 bool separate_ext
2494 = (prec != (unsigned) TYPE_PRECISION (type)
2495 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2496 > CEIL (prec, limb_prec)));
2498 for (unsigned i = 0; i < cnt; i++)
2500 m_data_cnt = 0;
2501 if (kind == bitint_prec_large)
2502 idx = size_int (i);
2503 else if (i >= 2)
2504 idx = size_int (end + (i > 2));
2505 if (eq_p)
2507 rhs1 = handle_operand (cmp_op1, idx);
2508 tree rhs2 = handle_operand (cmp_op2, idx);
2509 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2510 insert_before (g);
2511 edge e1 = split_block (gsi_bb (m_gsi), g);
2512 e1->flags = EDGE_FALSE_VALUE;
2513 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2514 e1->probability = profile_probability::unlikely ();
2515 e2->probability = e1->probability.invert ();
2516 if (i == 0)
2517 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2518 m_gsi = gsi_after_labels (e1->dest);
2520 else
2522 if (store_operand)
2523 rhs1 = handle_operand (store_operand, idx);
2524 else
2525 rhs1 = handle_stmt (stmt, idx);
2526 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2527 rhs1 = add_cast (m_limb_type, rhs1);
2528 if (sext && i == cnt - 1)
2529 ext = rhs1;
2530 tree nidx = idx;
2531 if (bo_idx)
2533 if (tree_fits_uhwi_p (idx))
2534 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2535 else
2537 nidx = make_ssa_name (sizetype);
2538 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2539 size_int (bo_idx));
2540 insert_before (g);
2543 bool done = false;
2544 basic_block new_bb = NULL;
2545 /* Handle stores into bit-fields. */
2546 if (bo_bit)
2548 if (i == 0)
2550 edge e2 = NULL;
2551 if (kind != bitint_prec_large)
2553 prepare_data_in_out (build_zero_cst (m_limb_type),
2554 idx, &bf_next);
2555 bf_next = m_data.pop ();
2556 bf_cur = m_data.pop ();
2557 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2558 NULL_TREE, NULL_TREE);
2559 edge edge_true;
2560 if_then_else (g, profile_probability::unlikely (),
2561 edge_true, e2);
2562 new_bb = e2->dest;
2564 tree ftype
2565 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2566 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2567 bitsize_int (limb_prec - bo_bit),
2568 bitsize_int (bo_idx * limb_prec + bo_bit));
2569 tree t = add_cast (ftype, rhs1);
2570 g = gimple_build_assign (bfr, t);
2571 insert_before (g);
2572 if (eh)
2574 maybe_duplicate_eh_stmt (g, stmt);
2575 if (eh_pad)
2577 edge e = split_block (gsi_bb (m_gsi), g);
2578 m_gsi = gsi_after_labels (e->dest);
2579 add_eh_edge (e->src,
2580 find_edge (gimple_bb (stmt), eh_pad));
2583 if (kind == bitint_prec_large)
2585 bf_cur = rhs1;
2586 done = true;
2588 else if (e2)
2589 m_gsi = gsi_after_labels (e2->src);
2591 if (!done)
2593 tree t1 = make_ssa_name (m_limb_type);
2594 tree t2 = make_ssa_name (m_limb_type);
2595 tree t3 = make_ssa_name (m_limb_type);
2596 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2597 build_int_cst (unsigned_type_node,
2598 limb_prec - bo_bit));
2599 insert_before (g);
2600 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2601 build_int_cst (unsigned_type_node,
2602 bo_bit));
2603 insert_before (g);
2604 bf_cur = rhs1;
2605 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2606 insert_before (g);
2607 rhs1 = t3;
2608 if (bf_next && i == 1)
2610 g = gimple_build_assign (bf_next, bf_cur);
2611 insert_before (g);
2615 if (!done)
2617 /* Handle bit-field access to partial last limb if needed. */
2618 if (nlhs
2619 && i == cnt - 1
2620 && !separate_ext
2621 && tree_fits_uhwi_p (idx))
2623 unsigned int tprec = TYPE_PRECISION (type);
2624 unsigned int rprec = tprec % limb_prec;
2625 if (rprec + bo_bit < (unsigned) limb_prec)
2627 tree ftype
2628 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2629 tree bfr = build3 (BIT_FIELD_REF, ftype,
2630 unshare_expr (nlhs),
2631 bitsize_int (rprec + bo_bit),
2632 bitsize_int ((bo_idx
2633 + tprec / limb_prec)
2634 * limb_prec));
2635 tree t = add_cast (ftype, rhs1);
2636 g = gimple_build_assign (bfr, t);
2637 done = true;
2638 bf_cur = NULL_TREE;
2640 else if (rprec + bo_bit == (unsigned) limb_prec)
2641 bf_cur = NULL_TREE;
2643 /* Otherwise, a store to any other lhs.  */
2644 if (!done)
2646 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2647 nidx, true);
2648 g = gimple_build_assign (l, rhs1);
2650 insert_before (g);
2651 if (eh)
2653 maybe_duplicate_eh_stmt (g, stmt);
2654 if (eh_pad)
2656 edge e = split_block (gsi_bb (m_gsi), g);
2657 m_gsi = gsi_after_labels (e->dest);
2658 add_eh_edge (e->src,
2659 find_edge (gimple_bb (stmt), eh_pad));
2662 if (new_bb)
2663 m_gsi = gsi_after_labels (new_bb);
2666 m_first = false;
2667 if (kind == bitint_prec_huge && i <= 1)
2669 if (i == 0)
2671 idx = make_ssa_name (sizetype);
2672 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2673 size_one_node);
2674 insert_before (g);
2676 else
2678 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2679 size_int (2));
2680 insert_before (g);
2681 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2682 NULL_TREE, NULL_TREE);
2683 insert_before (g);
2684 if (eq_p)
2685 m_gsi = gsi_after_labels (edge_bb);
2686 else
2687 m_gsi = gsi_for_stmt (stmt);
2688 m_bb = NULL;
2693 if (separate_ext)
2695 if (sext)
2697 ext = add_cast (signed_type_for (m_limb_type), ext);
2698 tree lpm1 = build_int_cst (unsigned_type_node,
2699 limb_prec - 1);
2700 tree n = make_ssa_name (TREE_TYPE (ext));
2701 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2702 insert_before (g);
2703 ext = add_cast (m_limb_type, n);
2705 else
2706 ext = build_zero_cst (m_limb_type);
2707 kind = bitint_precision_kind (type);
2708 unsigned start = CEIL (prec, limb_prec);
2709 prec = TYPE_PRECISION (type);
2710 idx = idx_first = idx_next = NULL_TREE;
2711 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2712 kind = bitint_prec_large;
2713 if (kind == bitint_prec_large)
2714 cnt = CEIL (prec, limb_prec) - start;
2715 else
2717 rem = prec % limb_prec;
2718 end = (prec - rem) / limb_prec;
2719 cnt = (bo_bit != 0) + 1 + (rem != 0);
2721 for (unsigned i = 0; i < cnt; i++)
2723 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2724 idx = size_int (start + i);
2725 else if (i == cnt - 1 && (rem != 0))
2726 idx = size_int (end);
2727 else if (i == (bo_bit != 0))
2728 idx = create_loop (size_int (start + i), &idx_next);
2729 rhs1 = ext;
2730 if (bf_cur != NULL_TREE && bf_cur != ext)
2732 tree t1 = make_ssa_name (m_limb_type);
2733 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2734 build_int_cst (unsigned_type_node,
2735 limb_prec - bo_bit));
2736 insert_before (g);
2737 if (integer_zerop (ext))
2738 rhs1 = t1;
2739 else
2741 tree t2 = make_ssa_name (m_limb_type);
2742 rhs1 = make_ssa_name (m_limb_type);
2743 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2744 build_int_cst (unsigned_type_node,
2745 bo_bit));
2746 insert_before (g);
2747 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2748 insert_before (g);
2750 bf_cur = ext;
2752 tree nidx = idx;
2753 if (bo_idx)
2755 if (tree_fits_uhwi_p (idx))
2756 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2757 else
2759 nidx = make_ssa_name (sizetype);
2760 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2761 size_int (bo_idx));
2762 insert_before (g);
2765 bool done = false;
2766 /* Handle bit-field access to partial last limb if needed. */
2767 if (nlhs && i == cnt - 1)
2769 unsigned int tprec = TYPE_PRECISION (type);
2770 unsigned int rprec = tprec % limb_prec;
2771 if (rprec + bo_bit < (unsigned) limb_prec)
2773 tree ftype
2774 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2775 tree bfr = build3 (BIT_FIELD_REF, ftype,
2776 unshare_expr (nlhs),
2777 bitsize_int (rprec + bo_bit),
2778 bitsize_int ((bo_idx + tprec / limb_prec)
2779 * limb_prec));
2780 tree t = add_cast (ftype, rhs1);
2781 g = gimple_build_assign (bfr, t);
2782 done = true;
2783 bf_cur = NULL_TREE;
2785 else if (rprec + bo_bit == (unsigned) limb_prec)
2786 bf_cur = NULL_TREE;
2788 /* Otherwise, a store to any other lhs.  */
2789 if (!done)
2791 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2792 g = gimple_build_assign (l, rhs1);
2794 insert_before (g);
2795 if (eh)
2797 maybe_duplicate_eh_stmt (g, stmt);
2798 if (eh_pad)
2800 edge e = split_block (gsi_bb (m_gsi), g);
2801 m_gsi = gsi_after_labels (e->dest);
2802 add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
2805 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2807 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2808 size_one_node);
2809 insert_before (g);
2810 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2811 NULL_TREE, NULL_TREE);
2812 insert_before (g);
2813 m_gsi = gsi_for_stmt (stmt);
2814 m_bb = NULL;
2818 if (bf_cur != NULL_TREE)
2820 unsigned int tprec = TYPE_PRECISION (type);
2821 unsigned int rprec = tprec % limb_prec;
2822 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2823 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2824 bitsize_int (rprec + bo_bit),
2825 bitsize_int ((bo_idx + tprec / limb_prec)
2826 * limb_prec));
2827 rhs1 = bf_cur;
2828 if (bf_cur != ext)
2830 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2831 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2832 build_int_cst (unsigned_type_node,
2833 limb_prec - bo_bit));
2834 insert_before (g);
2836 rhs1 = add_cast (ftype, rhs1);
2837 g = gimple_build_assign (bfr, rhs1);
2838 insert_before (g);
2839 if (eh)
2841 maybe_duplicate_eh_stmt (g, stmt);
2842 if (eh_pad)
2844 edge e = split_block (gsi_bb (m_gsi), g);
2845 m_gsi = gsi_after_labels (e->dest);
2846 add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
2851 if (gimple_store_p (stmt))
2853 unlink_stmt_vdef (stmt);
2854 release_ssa_name (gimple_vdef (stmt));
2855 gsi_remove (&m_gsi, true);
2857 if (eq_p)
2859 lhs = make_ssa_name (boolean_type_node);
2860 basic_block bb = gimple_bb (stmt);
2861 gphi *phi = create_phi_node (lhs, bb);
2862 edge e = find_edge (gsi_bb (m_gsi), bb);
2863 unsigned int n = EDGE_COUNT (bb->preds);
2864 for (unsigned int i = 0; i < n; i++)
2866 edge e2 = EDGE_PRED (bb, i);
2867 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2868 e2, UNKNOWN_LOCATION);
2870 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2871 return lhs;
2873 else
2874 return NULL_TREE;
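/* Editorial sketch, not part of the original sources: ignoring
   bit-fields and EH, for huge _BitInt the function above emits
   conceptually

     size_t idx;
     for (idx = 0; idx < end; idx += 2)     // two limbs per iteration
       {
	 lhs[idx] = <limb idx of rhs>;
	 lhs[idx + 1] = <limb idx + 1 of rhs>;
       }
     lhs[end] = <limb end of rhs>;          // if rem != 0
     lhs[end + 1] = <limb end + 1 of rhs>;  // at most one partial limb

   while for large _BitInt all cnt limbs are stored in straight line
   code.  */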
2877 /* Handle a large/huge _BitInt comparison statement STMT other than
2878    EQ_EXPR/NE_EXPR.  CMP_CODE, CMP_OP1 and CMP_OP2 have the same meaning
2879    as in lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR comparisons are
2880    lowered by iteration from the most significant limb downwards to
2881    the least significant one, for large _BitInt in straight line code,
2882    otherwise with the most significant limb handled in
2883    straight line code followed by a loop handling one limb at a time.
2884    Comparisons with unsigned huge _BitInt with precisions which are
2885    multiples of the limb precision can use just the loop and don't need
2886    to handle the most significant limb before the loop.  The loop or
2887    straight line code jumps to the final basic block if a particular
2888    pair of limbs is not equal.  */
2890 tree
2891 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2892 tree cmp_op1, tree cmp_op2)
2894 tree type = TREE_TYPE (cmp_op1);
2895 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2896 bitint_prec_kind kind = bitint_precision_kind (type);
2897 gcc_assert (kind >= bitint_prec_large);
2898 gimple *g;
2899 if (!TYPE_UNSIGNED (type)
2900 && integer_zerop (cmp_op2)
2901 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2903 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2904 tree idx = size_int (end);
2905 m_data_cnt = 0;
2906 tree rhs1 = handle_operand (cmp_op1, idx);
2907 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2909 tree stype = signed_type_for (TREE_TYPE (rhs1));
2910 rhs1 = add_cast (stype, rhs1);
2912 tree lhs = make_ssa_name (boolean_type_node);
2913 g = gimple_build_assign (lhs, cmp_code, rhs1,
2914 build_zero_cst (TREE_TYPE (rhs1)));
2915 insert_before (g);
2916 cmp_code = NE_EXPR;
2917 return lhs;
2920 unsigned cnt, rem = 0, end = 0;
2921 tree idx = NULL_TREE, idx_next = NULL_TREE;
2922 if (kind == bitint_prec_large)
2923 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2924 else
2926 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2927 if (rem == 0 && !TYPE_UNSIGNED (type))
2928 rem = limb_prec;
2929 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2930 cnt = 1 + (rem != 0);
2933 basic_block edge_bb = NULL;
2934 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2935 gsi_prev (&gsi);
2936 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2937 edge_bb = e->src;
2938 m_gsi = gsi_end_bb (edge_bb);
2940 edge *edges = XALLOCAVEC (edge, cnt * 2);
2941 for (unsigned i = 0; i < cnt; i++)
2943 m_data_cnt = 0;
2944 if (kind == bitint_prec_large)
2945 idx = size_int (cnt - i - 1);
2946 else if (i == cnt - 1)
2947 idx = create_loop (size_int (end - 1), &idx_next);
2948 else
2949 idx = size_int (end);
2950 tree rhs1 = handle_operand (cmp_op1, idx);
2951 tree rhs2 = handle_operand (cmp_op2, idx);
2952 if (i == 0
2953 && !TYPE_UNSIGNED (type)
2954 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2956 tree stype = signed_type_for (TREE_TYPE (rhs1));
2957 rhs1 = add_cast (stype, rhs1);
2958 rhs2 = add_cast (stype, rhs2);
2960 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2961 insert_before (g);
2962 edge e1 = split_block (gsi_bb (m_gsi), g);
2963 e1->flags = EDGE_FALSE_VALUE;
2964 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2965 e1->probability = profile_probability::likely ();
2966 e2->probability = e1->probability.invert ();
2967 if (i == 0)
2968 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2969 m_gsi = gsi_after_labels (e1->dest);
2970 edges[2 * i] = e2;
2971 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2972 insert_before (g);
2973 e1 = split_block (gsi_bb (m_gsi), g);
2974 e1->flags = EDGE_FALSE_VALUE;
2975 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2976 e1->probability = profile_probability::unlikely ();
2977 e2->probability = e1->probability.invert ();
2978 m_gsi = gsi_after_labels (e1->dest);
2979 edges[2 * i + 1] = e2;
2980 m_first = false;
2981 if (kind == bitint_prec_huge && i == cnt - 1)
2983 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2984 insert_before (g);
2985 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2986 NULL_TREE, NULL_TREE);
2987 insert_before (g);
2988 edge true_edge, false_edge;
2989 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2990 &true_edge, &false_edge);
2991 m_gsi = gsi_after_labels (false_edge->dest);
2992 m_bb = NULL;
2996 tree lhs = make_ssa_name (boolean_type_node);
2997 basic_block bb = gimple_bb (stmt);
2998 gphi *phi = create_phi_node (lhs, bb);
2999 for (unsigned int i = 0; i < cnt * 2; i++)
3001 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
3002 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
3003 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
3005 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
3006 ? boolean_true_node : boolean_false_node,
3007 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
3008 cmp_code = NE_EXPR;
3009 return lhs;
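/* Editorial sketch, not part of the original sources: the lowering
   above corresponds roughly to

     for (idx = high; ; --idx)      // most significant limb first
       {
	 limb_type a = op1[idx], b = op2[idx];
	 // for signed types the first compared limb uses a signed type
	 if (a > b) goto done_gt;
	 if (a < b) goto done_lt;
	 if (idx == 0) break;       // all limbs equal
       }

   with each taken branch feeding the boolean PHI built above.  */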
3012 /* Lower large/huge _BitInt left and right shift except for left
3013 shift by < limb_prec constant. */
3015 void
3016 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
3018 tree rhs1 = gimple_assign_rhs1 (stmt);
3019 tree lhs = gimple_assign_lhs (stmt);
3020 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3021 tree type = TREE_TYPE (rhs1);
3022 gimple *final_stmt = gsi_stmt (m_gsi);
3023 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3024 && bitint_precision_kind (type) >= bitint_prec_large);
3025 int prec = TYPE_PRECISION (type);
3026 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
3027 gimple *g;
3028 if (obj == NULL_TREE)
3030 int part = var_to_partition (m_map, lhs);
3031 gcc_assert (m_vars[part] != NULL_TREE);
3032 obj = m_vars[part];
3034 /* Preparation code common for both left and right shifts.
3035 unsigned n1 = n % limb_prec;
3036 size_t n2 = n / limb_prec;
3037 size_t n3 = n1 != 0;
3038 unsigned n4 = (limb_prec - n1) % limb_prec;
3039    (for power of 2 limb_prec, n4 can be computed as -n1 & (limb_prec - 1)).  */
3040 if (TREE_CODE (n) == INTEGER_CST)
3042 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3043 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3044 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3045 n3 = size_int (!integer_zerop (n1));
3046 n4 = int_const_binop (TRUNC_MOD_EXPR,
3047 int_const_binop (MINUS_EXPR, lp, n1), lp);
3049 else
3051 n1 = make_ssa_name (TREE_TYPE (n));
3052 n2 = make_ssa_name (sizetype);
3053 n3 = make_ssa_name (sizetype);
3054 n4 = make_ssa_name (TREE_TYPE (n));
3055 if (pow2p_hwi (limb_prec))
3057 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3058 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3059 insert_before (g);
3060 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3061 TREE_TYPE (n))
3062 ? n2 : make_ssa_name (TREE_TYPE (n)),
3063 RSHIFT_EXPR, n,
3064 build_int_cst (TREE_TYPE (n),
3065 exact_log2 (limb_prec)));
3066 insert_before (g);
3067 if (gimple_assign_lhs (g) != n2)
3069 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3070 insert_before (g);
3072 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3073 NEGATE_EXPR, n1);
3074 insert_before (g);
3075 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3076 lpm1);
3077 insert_before (g);
3079 else
3081 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3082 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3083 insert_before (g);
3084 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3085 TREE_TYPE (n))
3086 ? n2 : make_ssa_name (TREE_TYPE (n)),
3087 TRUNC_DIV_EXPR, n, lp);
3088 insert_before (g);
3089 if (gimple_assign_lhs (g) != n2)
3091 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3092 insert_before (g);
3094 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3095 MINUS_EXPR, lp, n1);
3096 insert_before (g);
3097 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3098 lp);
3099 insert_before (g);
3101 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3102 build_zero_cst (TREE_TYPE (n)));
3103 insert_before (g);
3104 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3105 insert_before (g);
3107 tree p = build_int_cst (sizetype,
3108 prec / limb_prec - (prec % limb_prec == 0));
3109 if (rhs_code == RSHIFT_EXPR)
3111 /* Lower
3112 dst = src >> n;
3114 unsigned n1 = n % limb_prec;
3115 size_t n2 = n / limb_prec;
3116 size_t n3 = n1 != 0;
3117 unsigned n4 = (limb_prec - n1) % limb_prec;
3118 size_t idx;
3119 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3120 int signed_p = (typeof (src) -1) < 0;
3121 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3122 ? p : p - n3); ++idx)
3123 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3124 limb_type ext;
3125 if (prec % limb_prec == 0)
3126 ext = src[p];
3127 else if (signed_p)
3128 ext = ((signed limb_type) (src[p] << (limb_prec
3129 - (prec % limb_prec))))
3130 >> (limb_prec - (prec % limb_prec));
3131 else
3132 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3133 if (!signed_p && (prec % limb_prec == 0))
3134 ;
3135 else if (idx < prec / limb_prec)
3137 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3138 ++idx;
3140 idx -= n2;
3141 if (signed_p)
3143 dst[idx] = ((signed limb_type) ext) >> n1;
3144 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3146 else
3148 dst[idx] = ext >> n1;
3149 ext = 0;
3151 for (++idx; idx <= p; ++idx)
3152 dst[idx] = ext; */
3153 tree pmn3;
3154 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3155 pmn3 = p;
3156 else if (TREE_CODE (n3) == INTEGER_CST)
3157 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3158 else
3160 pmn3 = make_ssa_name (sizetype);
3161 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3162 insert_before (g);
3164 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3165 edge edge_true, edge_false;
3166 if_then (g, profile_probability::likely (), edge_true, edge_false);
3167 tree idx_next;
3168 tree idx = create_loop (n2, &idx_next);
3169 tree idxmn2 = make_ssa_name (sizetype);
3170 tree idxpn3 = make_ssa_name (sizetype);
3171 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3172 insert_before (g);
3173 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3174 insert_before (g);
3175 m_data_cnt = 0;
3176 tree t1 = handle_operand (rhs1, idx);
3177 m_first = false;
3178 g = gimple_build_assign (make_ssa_name (m_limb_type),
3179 RSHIFT_EXPR, t1, n1);
3180 insert_before (g);
3181 t1 = gimple_assign_lhs (g);
3182 if (!integer_zerop (n3))
3184 m_data_cnt = 0;
3185 tree t2 = handle_operand (rhs1, idxpn3);
3186 g = gimple_build_assign (make_ssa_name (m_limb_type),
3187 LSHIFT_EXPR, t2, n4);
3188 insert_before (g);
3189 t2 = gimple_assign_lhs (g);
3190 g = gimple_build_assign (make_ssa_name (m_limb_type),
3191 BIT_IOR_EXPR, t1, t2);
3192 insert_before (g);
3193 t1 = gimple_assign_lhs (g);
3195 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3196 g = gimple_build_assign (l, t1);
3197 insert_before (g);
3198 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3199 insert_before (g);
3200 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3201 insert_before (g);
3202 idx = make_ssa_name (sizetype);
3203 m_gsi = gsi_for_stmt (final_stmt);
3204 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3205 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3206 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3207 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3208 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3209 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3210 m_data_cnt = 0;
3211 tree ms = handle_operand (rhs1, p);
3212 tree ext = ms;
3213 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3214 ext = add_cast (m_limb_type, ms);
3215 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3216 && !integer_zerop (n3))
3218 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3219 if_then (g, profile_probability::likely (), edge_true, edge_false);
3220 m_data_cnt = 0;
3221 t1 = handle_operand (rhs1, idx);
3222 g = gimple_build_assign (make_ssa_name (m_limb_type),
3223 RSHIFT_EXPR, t1, n1);
3224 insert_before (g);
3225 t1 = gimple_assign_lhs (g);
3226 g = gimple_build_assign (make_ssa_name (m_limb_type),
3227 LSHIFT_EXPR, ext, n4);
3228 insert_before (g);
3229 tree t2 = gimple_assign_lhs (g);
3230 g = gimple_build_assign (make_ssa_name (m_limb_type),
3231 BIT_IOR_EXPR, t1, t2);
3232 insert_before (g);
3233 t1 = gimple_assign_lhs (g);
3234 idxmn2 = make_ssa_name (sizetype);
3235 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3236 insert_before (g);
3237 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3238 g = gimple_build_assign (l, t1);
3239 insert_before (g);
3240 idx_next = make_ssa_name (sizetype);
3241 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3242 insert_before (g);
3243 m_gsi = gsi_for_stmt (final_stmt);
3244 tree nidx = make_ssa_name (sizetype);
3245 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3246 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3247 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3248 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3249 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3250 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3251 idx = nidx;
3253 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3254 insert_before (g);
3255 idx = gimple_assign_lhs (g);
3256 tree sext = ext;
3257 if (!TYPE_UNSIGNED (type))
3258 sext = add_cast (signed_type_for (m_limb_type), ext);
3259 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3260 RSHIFT_EXPR, sext, n1);
3261 insert_before (g);
3262 t1 = gimple_assign_lhs (g);
3263 if (!TYPE_UNSIGNED (type))
3265 t1 = add_cast (m_limb_type, t1);
3266 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3267 RSHIFT_EXPR, sext,
3268 build_int_cst (TREE_TYPE (n),
3269 limb_prec - 1));
3270 insert_before (g);
3271 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3273 else
3274 ext = build_zero_cst (m_limb_type);
3275 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3276 g = gimple_build_assign (l, t1);
3277 insert_before (g);
3278 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3279 size_one_node);
3280 insert_before (g);
3281 idx = gimple_assign_lhs (g);
3282 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3283 if_then (g, profile_probability::likely (), edge_true, edge_false);
3284 idx = create_loop (idx, &idx_next);
3285 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3286 g = gimple_build_assign (l, ext);
3287 insert_before (g);
3288 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3289 insert_before (g);
3290 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3291 insert_before (g);
3293 else
3295 /* Lower
3296 dst = src << n;
3298 unsigned n1 = n % limb_prec;
3299 size_t n2 = n / limb_prec;
3300 size_t n3 = n1 != 0;
3301 unsigned n4 = (limb_prec - n1) % limb_prec;
3302 size_t idx;
3303 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3304 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3305 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3306 if (n1)
3308 dst[idx] = src[idx - n2] << n1;
3309 --idx;
3311 for (; (ssize_t) idx >= 0; --idx)
3312 dst[idx] = 0; */
3313 tree n2pn3;
3314 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3315 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3316 else
3318 n2pn3 = make_ssa_name (sizetype);
3319 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3320 insert_before (g);
3322 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3323 idx even to access the most significant partial limb. */
3324 m_var_msb = true;
3325 if (integer_zerop (n3))
3326 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3327    counts.  Emit an if (true) condition that can be optimized later.  */
3328 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3329 NULL_TREE, NULL_TREE);
3330 else
3331 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3332 edge edge_true, edge_false;
3333 if_then (g, profile_probability::likely (), edge_true, edge_false);
3334 tree idx_next;
3335 tree idx = create_loop (p, &idx_next);
3336 tree idxmn2 = make_ssa_name (sizetype);
3337 tree idxmn2mn3 = make_ssa_name (sizetype);
3338 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3339 insert_before (g);
3340 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3341 insert_before (g);
3342 m_data_cnt = 0;
3343 tree t1 = handle_operand (rhs1, idxmn2);
3344 m_first = false;
3345 g = gimple_build_assign (make_ssa_name (m_limb_type),
3346 LSHIFT_EXPR, t1, n1);
3347 insert_before (g);
3348 t1 = gimple_assign_lhs (g);
3349 if (!integer_zerop (n3))
3351 m_data_cnt = 0;
3352 tree t2 = handle_operand (rhs1, idxmn2mn3);
3353 g = gimple_build_assign (make_ssa_name (m_limb_type),
3354 RSHIFT_EXPR, t2, n4);
3355 insert_before (g);
3356 t2 = gimple_assign_lhs (g);
3357 g = gimple_build_assign (make_ssa_name (m_limb_type),
3358 BIT_IOR_EXPR, t1, t2);
3359 insert_before (g);
3360 t1 = gimple_assign_lhs (g);
3362 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3363 g = gimple_build_assign (l, t1);
3364 insert_before (g);
3365 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3366 insert_before (g);
3367 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3368 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3369 NULL_TREE, NULL_TREE);
3370 insert_before (g);
3371 idx = make_ssa_name (sizetype);
3372 m_gsi = gsi_for_stmt (final_stmt);
3373 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3374 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3375 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3376 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3377 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3378 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3379 m_data_cnt = 0;
3380 if (!integer_zerop (n3))
3382 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3383 NULL_TREE, NULL_TREE);
3384 if_then (g, profile_probability::likely (), edge_true, edge_false);
3385 idxmn2 = make_ssa_name (sizetype);
3386 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3387 insert_before (g);
3388 m_data_cnt = 0;
3389 t1 = handle_operand (rhs1, idxmn2);
3390 g = gimple_build_assign (make_ssa_name (m_limb_type),
3391 LSHIFT_EXPR, t1, n1);
3392 insert_before (g);
3393 t1 = gimple_assign_lhs (g);
3394 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3395 g = gimple_build_assign (l, t1);
3396 insert_before (g);
3397 idx_next = make_ssa_name (sizetype);
3398 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3399 insert_before (g);
3400 m_gsi = gsi_for_stmt (final_stmt);
3401 tree nidx = make_ssa_name (sizetype);
3402 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3403 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3404 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3405 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3406 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3407 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3408 idx = nidx;
3410 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3411 ssize_int (0), NULL_TREE, NULL_TREE);
3412 if_then (g, profile_probability::likely (), edge_true, edge_false);
3413 idx = create_loop (idx, &idx_next);
3414 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3415 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3416 insert_before (g);
3417 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3418 insert_before (g);
3419 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3420 ssize_int (0), NULL_TREE, NULL_TREE);
3421 insert_before (g);
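/* Editorial model, not part of the original sources: a standalone C
   version of the unsigned right-shift lowering above, assuming 64-bit
   limbs, a precision that is a multiple of 64 and 0 < n < precision.  */

static void
bitint_shr_model (unsigned long long *dst, const unsigned long long *src,
		  unsigned nlimbs, unsigned n)
{
  unsigned n1 = n % 64;			/* shift within a limb */
  unsigned n2 = n / 64;			/* whole limbs shifted out */
  unsigned n3 = n1 != 0;
  unsigned n4 = (64 - n1) % 64;		/* complementary shift */
  unsigned p = nlimbs - 1;
  unsigned idx;
  for (idx = n2; idx < p; ++idx)
    dst[idx - n2] = (src[idx] >> n1) | (n3 ? src[idx + n3] << n4 : 0);
  dst[p - n2] = src[p] >> n1;		/* most significant source limb */
  for (idx = p - n2 + 1; idx <= p; ++idx)
    dst[idx] = 0;			/* zero-extend the result */
}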
3425 /* Lower large/huge _BitInt multiplication or division. */
3427 void
3428 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3430 tree rhs1 = gimple_assign_rhs1 (stmt);
3431 tree rhs2 = gimple_assign_rhs2 (stmt);
3432 tree lhs = gimple_assign_lhs (stmt);
3433 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3434 tree type = TREE_TYPE (rhs1);
3435 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3436 && bitint_precision_kind (type) >= bitint_prec_large);
3437 int prec = TYPE_PRECISION (type), prec1, prec2;
3438 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3439 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3440 if (obj == NULL_TREE)
3442 int part = var_to_partition (m_map, lhs);
3443 gcc_assert (m_vars[part] != NULL_TREE);
3444 obj = m_vars[part];
3445 lhs = build_fold_addr_expr (obj);
3447 else
3449 lhs = build_fold_addr_expr (obj);
3450 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3451 NULL_TREE, true, GSI_SAME_STMT);
3453 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3454 gimple *g;
3455 switch (rhs_code)
3457 case MULT_EXPR:
3458 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3459 lhs, build_int_cst (sitype, prec),
3460 rhs1, build_int_cst (sitype, prec1),
3461 rhs2, build_int_cst (sitype, prec2));
3462 insert_before (g);
3463 break;
3464 case TRUNC_DIV_EXPR:
3465 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3466 lhs, build_int_cst (sitype, prec),
3467 null_pointer_node,
3468 build_int_cst (sitype, 0),
3469 rhs1, build_int_cst (sitype, prec1),
3470 rhs2, build_int_cst (sitype, prec2));
3471 if (!stmt_ends_bb_p (stmt))
3472 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3473 insert_before (g);
3474 break;
3475 case TRUNC_MOD_EXPR:
3476 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3477 build_int_cst (sitype, 0),
3478 lhs, build_int_cst (sitype, prec),
3479 rhs1, build_int_cst (sitype, prec1),
3480 rhs2, build_int_cst (sitype, prec2));
3481 if (!stmt_ends_bb_p (stmt))
3482 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3483 insert_before (g);
3484 break;
3485 default:
3486 gcc_unreachable ();
3488 if (stmt_ends_bb_p (stmt))
3490 maybe_duplicate_eh_stmt (g, stmt);
3491 edge e1;
3492 edge_iterator ei;
3493 basic_block bb = gimple_bb (stmt);
3495 FOR_EACH_EDGE (e1, ei, bb->succs)
3496 if (e1->flags & EDGE_EH)
3497 break;
3498 if (e1)
3500 edge e2 = split_block (gsi_bb (m_gsi), g);
3501 m_gsi = gsi_after_labels (e2->dest);
3502 add_eh_edge (e2->src, e1);
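/* Editorial note, not part of the original sources: IFN_MULBITINT and
   IFN_DIVMODBITINT expand to calls to the libgcc routines __mulbitint3
   and __divmodbitint4.  Conceptually the multiply entry point is

     void __mulbitint3 (limb_type *ret, int32_t retprec,
			const limb_type *u, int32_t uprec,
			const limb_type *v, int32_t vprec);

   where, matching handle_operand_addr above, a negative precision
   denotes a sign-extended operand; a NULL quotient or remainder
   pointer (with 0 precision) asks __divmodbitint4 to skip computing
   that result, as in the TRUNC_DIV_EXPR/TRUNC_MOD_EXPR cases above.  */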
3507 /* Lower large/huge _BitInt conversion to/from floating point. */
3509 void
3510 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3512 tree rhs1 = gimple_assign_rhs1 (stmt);
3513 tree lhs = gimple_assign_lhs (stmt);
3514 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3515 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3516 gimple *g;
3517 if (rhs_code == FIX_TRUNC_EXPR)
3519 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3520 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3521 prec = -prec;
3522 if (obj == NULL_TREE)
3524 int part = var_to_partition (m_map, lhs);
3525 gcc_assert (m_vars[part] != NULL_TREE);
3526 obj = m_vars[part];
3527 lhs = build_fold_addr_expr (obj);
3529 else
3531 lhs = build_fold_addr_expr (obj);
3532 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3533 NULL_TREE, true, GSI_SAME_STMT);
3535 scalar_mode from_mode
3536 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3537 #ifdef HAVE_SFmode
3538 /* IEEE single is a full superset of both the IEEE half and
3539    bfloat formats, so convert to float first and then to _BitInt
3540    to avoid the need for another 2 library routines.  */
3541 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3542 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3543 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3545 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3546 if (type)
3547 rhs1 = add_cast (type, rhs1);
3549 #endif
3550 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3551 lhs, build_int_cst (sitype, prec),
3552 rhs1);
3553 insert_before (g);
3555 else
3557 int prec;
3558 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3559 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3560 rhs1, build_int_cst (sitype, prec));
3561 gimple_call_set_lhs (g, lhs);
3562 if (!stmt_ends_bb_p (stmt))
3563 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3564 gsi_replace (&m_gsi, g, true);
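/* Editorial note, not part of the original sources: IFN_FLOATTOBITINT
   and IFN_BITINTTOFLOAT similarly expand to mode-specific libgcc
   conversion routines, with the sign of the precision argument again
   encoding the signedness of the _BitInt, e.g. conceptually

     void __fixsfbitint (limb_type *r, int32_t rprec, float a);
     float __floatbitintsf (const limb_type *i, int32_t iprec);

   (names assumed here from the libgcc soft-fp naming convention).  */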
3568 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3569    If CHECK_ZERO is true, the caller wants to check whether all bits
3570    in [START, END) are zero, otherwise whether they are either all
3571    zero or all ones.  L is the limb with index LIMB; START and END
3572    are measured in bits.  */
3574 tree
3575 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3576 unsigned int end, tree l,
3577 unsigned int limb,
3578 bool check_zero)
3580 unsigned startlimb = start / limb_prec;
3581 unsigned endlimb = (end - 1) / limb_prec;
3582 gimple *g;
3584 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3585 return l;
3586 if (startlimb == endlimb && limb == startlimb)
3588 if (check_zero)
3590 wide_int w = wi::shifted_mask (start % limb_prec,
3591 end - start, false, limb_prec);
3592 g = gimple_build_assign (make_ssa_name (m_limb_type),
3593 BIT_AND_EXPR, l,
3594 wide_int_to_tree (m_limb_type, w));
3595 insert_before (g);
3596 return gimple_assign_lhs (g);
3598 unsigned int shift = start % limb_prec;
3599 if ((end % limb_prec) != 0)
3601 unsigned int lshift = (-end) % limb_prec;
3602 shift += lshift;
3603 g = gimple_build_assign (make_ssa_name (m_limb_type),
3604 LSHIFT_EXPR, l,
3605 build_int_cst (unsigned_type_node,
3606 lshift));
3607 insert_before (g);
3608 l = gimple_assign_lhs (g);
3610 l = add_cast (signed_type_for (m_limb_type), l);
3611 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3612 RSHIFT_EXPR, l,
3613 build_int_cst (unsigned_type_node, shift));
3614 insert_before (g);
3615 return add_cast (m_limb_type, gimple_assign_lhs (g));
3617 else if (limb == startlimb)
3619 if ((start % limb_prec) == 0)
3620 return l;
3621 if (!check_zero)
3622 l = add_cast (signed_type_for (m_limb_type), l);
3623 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3624 RSHIFT_EXPR, l,
3625 build_int_cst (unsigned_type_node,
3626 start % limb_prec));
3627 insert_before (g);
3628 l = gimple_assign_lhs (g);
3629 if (!check_zero)
3630 l = add_cast (m_limb_type, l);
3631 return l;
3633 else if (limb == endlimb)
3635 if ((end % limb_prec) == 0)
3636 return l;
3637 if (check_zero)
3639 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3640 g = gimple_build_assign (make_ssa_name (m_limb_type),
3641 BIT_AND_EXPR, l,
3642 wide_int_to_tree (m_limb_type, w));
3643 insert_before (g);
3644 return gimple_assign_lhs (g);
3646 unsigned int shift = (-end) % limb_prec;
3647 g = gimple_build_assign (make_ssa_name (m_limb_type),
3648 LSHIFT_EXPR, l,
3649 build_int_cst (unsigned_type_node, shift));
3650 insert_before (g);
3651 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3652 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3653 RSHIFT_EXPR, l,
3654 build_int_cst (unsigned_type_node, shift));
3655 insert_before (g);
3656 return add_cast (m_limb_type, gimple_assign_lhs (g));
3658 return l;
3661 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3662 the result, including the overflow flag, into the right locations. */
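/* E.g. when the lhs is a _Complex int (i.e. not itself a large/huge
   _BitInt), the numeric result RES is reassembled from at most two limbs
   and the original call is simply replaced with
     lhs = COMPLEX_EXPR <RES, (int) OVF>;
   while for large/huge _BitInt results the limbs are copied into the
   backing variable, IMAGPART_EXPR uses are rewired to read OVF, and for
   the .UBSAN_CHECK_* forms a call to the ubsan overflow handler is
   emitted on the (very unlikely) overflow path.  */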
3664 void
3665 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3666 tree ovf, tree lhs, tree orig_obj,
3667 gimple *stmt, tree_code code)
3669 gimple *g;
3671 if (obj == NULL_TREE
3672 && (TREE_CODE (type) != BITINT_TYPE
3673 || bitint_precision_kind (type) < bitint_prec_large))
3675 /* Add support for 3 or more limbs filled in from a normal integral
3676 type if this assert fails. If no target chooses a limb mode smaller
3677 than half of the largest supported normal integral type, this will
3678 not be needed. */
3679 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3680 tree lhs_type = type;
3681 if (TREE_CODE (type) == BITINT_TYPE
3682 && bitint_precision_kind (type) == bitint_prec_middle)
3683 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3684 TYPE_UNSIGNED (type));
3685 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3686 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3687 insert_before (g);
3688 r1 = gimple_assign_lhs (g);
3689 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3690 r1 = add_cast (lhs_type, r1);
3691 if (TYPE_PRECISION (lhs_type) > limb_prec)
3693 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3694 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3695 insert_before (g);
3696 r2 = gimple_assign_lhs (g);
3697 r2 = add_cast (lhs_type, r2);
3698 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3699 build_int_cst (unsigned_type_node,
3700 limb_prec));
3701 insert_before (g);
3702 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3703 gimple_assign_lhs (g));
3704 insert_before (g);
3705 r1 = gimple_assign_lhs (g);
3707 if (lhs_type != type)
3708 r1 = add_cast (type, r1);
3709 ovf = add_cast (lhs_type, ovf);
3710 if (lhs_type != type)
3711 ovf = add_cast (type, ovf);
3712 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3713 m_gsi = gsi_for_stmt (stmt);
3714 gsi_replace (&m_gsi, g, true);
3716 else
3718 unsigned HOST_WIDE_INT nelts = 0;
3719 tree atype = NULL_TREE;
3720 if (obj)
3722 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3723 if (orig_obj == NULL_TREE)
3724 nelts >>= 1;
3725 atype = build_array_type_nelts (m_limb_type, nelts);
3727 if (var && obj)
3729 tree v1, v2;
3730 tree zero;
3731 if (orig_obj == NULL_TREE)
3733 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3734 v1 = build2 (MEM_REF, atype,
3735 build_fold_addr_expr (unshare_expr (obj)), zero);
3737 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3738 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3739 else
3740 v1 = unshare_expr (obj);
3741 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3742 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3743 g = gimple_build_assign (v1, v2);
3744 insert_before (g);
3746 if (orig_obj == NULL_TREE && obj)
3748 ovf = add_cast (m_limb_type, ovf);
3749 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3750 g = gimple_build_assign (l, ovf);
3751 insert_before (g);
3752 if (nelts > 1)
3754 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3755 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3756 (nelts + 1) * m_limb_size);
3757 tree v1 = build2 (MEM_REF, atype,
3758 build_fold_addr_expr (unshare_expr (obj)),
3759 off);
3760 g = gimple_build_assign (v1, build_zero_cst (atype));
3761 insert_before (g);
3764 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3766 imm_use_iterator ui;
3767 use_operand_p use_p;
3768 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3770 g = USE_STMT (use_p);
3771 if (!is_gimple_assign (g)
3772 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3773 continue;
3774 tree lhs2 = gimple_assign_lhs (g);
3775 gimple *use_stmt;
3776 single_imm_use (lhs2, &use_p, &use_stmt);
3777 lhs2 = gimple_assign_lhs (use_stmt);
3778 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3779 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3780 g = gimple_build_assign (lhs2, ovf);
3781 else
3782 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3783 gsi_replace (&gsi, g, true);
3784 if (gsi_stmt (m_gsi) == use_stmt)
3785 m_gsi = gsi_for_stmt (g);
3786 break;
3789 else if (ovf != boolean_false_node)
3791 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3792 NULL_TREE, NULL_TREE);
3793 edge edge_true, edge_false;
3794 if_then (g, profile_probability::very_unlikely (),
3795 edge_true, edge_false);
3796 tree zero = build_zero_cst (TREE_TYPE (lhs));
3797 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3798 TREE_TYPE (lhs),
3799 zero, zero, NULL);
3800 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3801 true, GSI_SAME_STMT);
3802 m_gsi = gsi_after_labels (edge_true->dest);
3805 if (var)
3807 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3808 g = gimple_build_assign (var, clobber);
3809 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3813 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3814 Given the precision of the result TYPE (PREC), argument 0 precision
3815 PREC0, argument 1 precision PREC1 and the minimum precision for the
3816 result PREC2, compute *START, *END and *CHECK_ZERO and return OVF. */
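/* Worked example: for an unsigned result type with PREC 16 and
   PREC0 == PREC1 == 8 (both operands in [0, 0xff]), an addition needs
   PREC2 == 9 bits and 16 >= 9, so boolean_false_node is returned and
   overflow is impossible.  For a signed 8-bit result with the same
   operands, *START == 7 and *END == 9 with *CHECK_ZERO left true:
   the addition overflows iff any bit in [7, 9) of the 9-bit infinitely
   precise result is non-zero.  */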
3818 static tree
3819 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3820 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3822 *start = 0;
3823 *end = 0;
3824 *check_zero = true;
3825 /* Ignore this special rule for subtraction: even if both
3826 prec0 >= 0 and prec1 >= 0, their difference can be negative
3827 in infinite precision. */
3828 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3830 /* Result in [0, prec2) is unsigned; if prec > prec2,
3831 all bits above it will be zero. */
3832 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3833 return boolean_false_node;
3834 else
3836 /* ovf if any of bits in [start, end) is non-zero. */
3837 *start = prec - !TYPE_UNSIGNED (type);
3838 *end = prec2;
3841 else if (TYPE_UNSIGNED (type))
3843 /* If the result in [0, prec2) is signed and prec > prec2,
3844 all bits above it will be sign bit copies. */
3845 if (prec >= prec2)
3847 /* ovf if bit prec - 1 is non-zero. */
3848 *start = prec - 1;
3849 *end = prec;
3851 else
3853 /* ovf if any of bits in [start, end) is non-zero. */
3854 *start = prec;
3855 *end = prec2;
3858 else if (prec >= prec2)
3859 return boolean_false_node;
3860 else
3862 /* ovf if [start, end) bits aren't all zeros or all ones. */
3863 *start = prec - 1;
3864 *end = prec2;
3865 *check_zero = false;
3867 return NULL_TREE;
3870 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3871 argument or a _Complex large/huge _BitInt return type. */
3873 void
3874 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3876 tree arg0 = gimple_call_arg (stmt, 0);
3877 tree arg1 = gimple_call_arg (stmt, 1);
3878 tree lhs = gimple_call_lhs (stmt);
3879 gimple *g;
3881 if (!lhs)
3883 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3884 gsi_remove (&gsi, true);
3885 return;
3887 gimple *final_stmt = gsi_stmt (m_gsi);
3888 tree type = TREE_TYPE (lhs);
3889 if (TREE_CODE (type) == COMPLEX_TYPE)
3890 type = TREE_TYPE (type);
3891 int prec = TYPE_PRECISION (type);
3892 int prec0 = range_to_prec (arg0, stmt);
3893 int prec1 = range_to_prec (arg1, stmt);
3894 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3895 the minimum unsigned precision of any possible operation's
3896 result, otherwise it is the minimum signed precision.
3897 Some examples:
3898 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3899 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3900 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3901 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3902 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3903 8 + 8 [0, 0x1fe] 9 UNSIGNED
3904 8 + 10 [0, 0x4fe] 11 UNSIGNED
3905 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3906 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3907 8 + -8 [-0x80, 0x17e] 10 SIGNED
3908 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3909 10 + -8 [-0x80, 0x47e] 12 SIGNED
3910 8 - 8 [-0xff, 0xff] 9 SIGNED
3911 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3912 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3913 -8 - -8 [-0xff, 0xff] 9 SIGNED
3914 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3915 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3916 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3917 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3918 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3919 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3920 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3921 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
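  /* E.g. for the 8 + -10 row above: prec2 below starts as
     MAX (8, 10) == 10; the operands differ in signedness and the signed
     one is the wider (prec2 == -prec1 && prec2 != prec0), so only one
     extra bit is needed and prec2 becomes 11, matching the table.  */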
3922 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3923 prec1 < 0 ? -prec1 : prec1);
3924 /* If operands are either both signed or both unsigned,
3925 we need just one additional bit. */
3926 prec2 = (((prec0 < 0) == (prec1 < 0)
3927 /* If one operand is signed and one unsigned and
3928 the signed one has larger precision, we need
3929 just one extra bit, otherwise two. */
3930 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3931 : (prec2 == -prec1 && prec2 != prec0)))
3932 ? prec2 + 1 : prec2 + 2);
3933 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3934 prec1 < 0 ? -prec1 : prec1);
3935 prec3 = MAX (prec3, prec);
3936 tree var = NULL_TREE;
3937 tree orig_obj = obj;
3938 if (obj == NULL_TREE
3939 && TREE_CODE (type) == BITINT_TYPE
3940 && bitint_precision_kind (type) >= bitint_prec_large
3941 && m_names
3942 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3944 int part = var_to_partition (m_map, lhs);
3945 gcc_assert (m_vars[part] != NULL_TREE);
3946 obj = m_vars[part];
3947 if (TREE_TYPE (lhs) == type)
3948 orig_obj = obj;
3950 if (TREE_CODE (type) != BITINT_TYPE
3951 || bitint_precision_kind (type) < bitint_prec_large)
3953 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3954 tree atype = build_array_type_nelts (m_limb_type, nelts);
3955 var = create_tmp_var (atype);
3958 enum tree_code code;
3959 switch (gimple_call_internal_fn (stmt))
3961 case IFN_ADD_OVERFLOW:
3962 case IFN_UBSAN_CHECK_ADD:
3963 code = PLUS_EXPR;
3964 break;
3965 case IFN_SUB_OVERFLOW:
3966 case IFN_UBSAN_CHECK_SUB:
3967 code = MINUS_EXPR;
3968 break;
3969 default:
3970 gcc_unreachable ();
3972 unsigned start, end;
3973 bool check_zero;
3974 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3975 &start, &end, &check_zero);
3977 unsigned startlimb, endlimb;
3978 if (ovf)
3980 startlimb = ~0U;
3981 endlimb = ~0U;
3983 else
3985 startlimb = start / limb_prec;
3986 endlimb = (end - 1) / limb_prec;
3989 int prec4 = ovf != NULL_TREE ? prec : prec3;
3990 bitint_prec_kind kind = bitint_precision_kind (prec4);
3991 unsigned cnt, rem = 0, fin = 0;
3992 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3993 bool last_ovf = (ovf == NULL_TREE
3994 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3995 if (kind != bitint_prec_huge)
3996 cnt = CEIL (prec4, limb_prec) + last_ovf;
3997 else
3999 rem = (prec4 % (2 * limb_prec));
4000 fin = (prec4 - rem) / limb_prec;
4001 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
4002 idx = idx_first = create_loop (size_zero_node, &idx_next);
4005 if (kind == bitint_prec_huge)
4006 m_upwards_2limb = fin;
4007 m_upwards = true;
4009 tree type0 = TREE_TYPE (arg0);
4010 tree type1 = TREE_TYPE (arg1);
4011 int prec5 = prec3;
4012 if (bitint_precision_kind (prec5) < bitint_prec_large)
4013 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
4014 if (TYPE_PRECISION (type0) < prec5)
4016 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
4017 if (TREE_CODE (arg0) == INTEGER_CST)
4018 arg0 = fold_convert (type0, arg0);
4020 if (TYPE_PRECISION (type1) < prec5)
4022 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
4023 if (TREE_CODE (arg1) == INTEGER_CST)
4024 arg1 = fold_convert (type1, arg1);
4026 unsigned int data_cnt = 0;
4027 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
4028 tree cmp = build_zero_cst (m_limb_type);
4029 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
4030 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
4031 for (unsigned i = 0; i < cnt; i++)
4033 m_data_cnt = 0;
4034 tree rhs1, rhs2;
4035 if (kind != bitint_prec_huge)
4036 idx = size_int (i);
4037 else if (i >= 2)
4038 idx = size_int (fin + i - 2);
4039 if (!last_ovf || i < cnt - 1)
4041 if (type0 != TREE_TYPE (arg0))
4042 rhs1 = handle_cast (type0, arg0, idx);
4043 else
4044 rhs1 = handle_operand (arg0, idx);
4045 if (type1 != TREE_TYPE (arg1))
4046 rhs2 = handle_cast (type1, arg1, idx);
4047 else
4048 rhs2 = handle_operand (arg1, idx);
4049 if (i == 0)
4050 data_cnt = m_data_cnt;
4051 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4052 rhs1 = add_cast (m_limb_type, rhs1);
4053 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4054 rhs2 = add_cast (m_limb_type, rhs2);
4055 last_rhs1 = rhs1;
4056 last_rhs2 = rhs2;
4058 else
4060 m_data_cnt = data_cnt;
4061 if (TYPE_UNSIGNED (type0))
4062 rhs1 = build_zero_cst (m_limb_type);
4063 else
4065 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4066 if (TREE_CODE (rhs1) == INTEGER_CST)
4067 rhs1 = build_int_cst (m_limb_type,
4068 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4069 else
4071 tree lpm1 = build_int_cst (unsigned_type_node,
4072 limb_prec - 1);
4073 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4074 RSHIFT_EXPR, rhs1, lpm1);
4075 insert_before (g);
4076 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4079 if (TYPE_UNSIGNED (type1))
4080 rhs2 = build_zero_cst (m_limb_type);
4081 else
4083 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4084 if (TREE_CODE (rhs2) == INTEGER_CST)
4085 rhs2 = build_int_cst (m_limb_type,
4086 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4087 else
4089 tree lpm1 = build_int_cst (unsigned_type_node,
4090 limb_prec - 1);
4091 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4092 RSHIFT_EXPR, rhs2, lpm1);
4093 insert_before (g);
4094 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4098 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4099 if (ovf != boolean_false_node)
4101 if (tree_fits_uhwi_p (idx))
4103 unsigned limb = tree_to_uhwi (idx);
4104 if (limb >= startlimb && limb <= endlimb)
4106 tree l = arith_overflow_extract_bits (start, end, rhs,
4107 limb, check_zero);
4108 tree this_ovf = make_ssa_name (boolean_type_node);
4109 if (ovf == NULL_TREE && !check_zero)
4111 cmp = l;
4112 g = gimple_build_assign (make_ssa_name (m_limb_type),
4113 PLUS_EXPR, l,
4114 build_int_cst (m_limb_type, 1));
4115 insert_before (g);
4116 g = gimple_build_assign (this_ovf, GT_EXPR,
4117 gimple_assign_lhs (g),
4118 build_int_cst (m_limb_type, 1));
4120 else
4121 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4122 insert_before (g);
4123 if (ovf == NULL_TREE)
4124 ovf = this_ovf;
4125 else
4127 tree b = make_ssa_name (boolean_type_node);
4128 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4129 insert_before (g);
4130 ovf = b;
4134 else if (startlimb < fin)
4136 if (m_first && startlimb + 2 < fin)
4138 tree data_out;
4139 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4140 ovf_out = m_data.pop ();
4141 m_data.pop ();
4142 if (!check_zero)
4144 cmp = prepare_data_in_out (cmp, idx, &data_out);
4145 cmp_out = m_data.pop ();
4146 m_data.pop ();
4149 if (i != 0 || startlimb != fin - 1)
4151 tree_code cmp_code;
4152 bool single_comparison
4153 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4154 if (!single_comparison)
4156 cmp_code = GE_EXPR;
4157 if (!check_zero && (start % limb_prec) == 0)
4158 single_comparison = true;
4160 else if ((startlimb & 1) == (i & 1))
4161 cmp_code = EQ_EXPR;
4162 else
4163 cmp_code = GT_EXPR;
4164 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4165 NULL_TREE, NULL_TREE);
4166 edge edge_true_true, edge_true_false, edge_false;
4167 gimple *g2 = NULL;
4168 if (!single_comparison)
4169 g2 = gimple_build_cond (NE_EXPR, idx,
4170 size_int (startlimb), NULL_TREE,
4171 NULL_TREE);
4172 if_then_if_then_else (g, g2, profile_probability::likely (),
4173 profile_probability::likely (),
4174 edge_true_true, edge_true_false,
4175 edge_false);
4176 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4177 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4178 check_zero);
4179 tree this_ovf = make_ssa_name (boolean_type_node);
4180 if (cmp_code != GT_EXPR && !check_zero)
4182 g = gimple_build_assign (make_ssa_name (m_limb_type),
4183 PLUS_EXPR, l,
4184 build_int_cst (m_limb_type, 1));
4185 insert_before (g);
4186 g = gimple_build_assign (this_ovf, GT_EXPR,
4187 gimple_assign_lhs (g),
4188 build_int_cst (m_limb_type, 1));
4190 else
4191 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4192 insert_before (g);
4193 if (cmp_code == GT_EXPR)
4195 tree t = make_ssa_name (boolean_type_node);
4196 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4197 insert_before (g);
4198 this_ovf = t;
4200 tree this_ovf2 = NULL_TREE;
4201 if (!single_comparison)
4203 m_gsi = gsi_after_labels (edge_true_true->src);
4204 tree t = make_ssa_name (boolean_type_node);
4205 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4206 insert_before (g);
4207 this_ovf2 = make_ssa_name (boolean_type_node);
4208 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4209 ovf, t);
4210 insert_before (g);
4212 m_gsi = gsi_after_labels (edge_true_false->dest);
4213 tree t;
4214 if (i == 1 && ovf_out)
4215 t = ovf_out;
4216 else
4217 t = make_ssa_name (boolean_type_node);
4218 gphi *phi = create_phi_node (t, edge_true_false->dest);
4219 add_phi_arg (phi, this_ovf, edge_true_false,
4220 UNKNOWN_LOCATION);
4221 add_phi_arg (phi, ovf ? ovf
4222 : boolean_false_node, edge_false,
4223 UNKNOWN_LOCATION);
4224 if (edge_true_true)
4225 add_phi_arg (phi, this_ovf2, edge_true_true,
4226 UNKNOWN_LOCATION);
4227 ovf = t;
4228 if (!check_zero && cmp_code != GT_EXPR)
4230 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4231 phi = create_phi_node (t, edge_true_false->dest);
4232 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4233 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4234 if (edge_true_true)
4235 add_phi_arg (phi, cmp, edge_true_true,
4236 UNKNOWN_LOCATION);
4237 cmp = t;
4243 if (var || obj)
4245 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4247 else if (!tree_fits_uhwi_p (idx)
4248 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4250 bool single_comparison
4251 = (((unsigned) prec % limb_prec) == 0
4252 || prec_limbs + 1 >= fin
4253 || (prec_limbs & 1) == (i & 1));
4254 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4255 NULL_TREE, NULL_TREE);
4256 gimple *g2 = NULL;
4257 if (!single_comparison)
4258 g2 = gimple_build_cond (EQ_EXPR, idx,
4259 size_int (prec_limbs - 1),
4260 NULL_TREE, NULL_TREE);
4261 edge edge_true_true, edge_true_false, edge_false;
4262 if_then_if_then_else (g, g2, profile_probability::likely (),
4263 profile_probability::unlikely (),
4264 edge_true_true, edge_true_false,
4265 edge_false);
4266 tree l = limb_access (type, var ? var : obj, idx, true);
4267 g = gimple_build_assign (l, rhs);
4268 insert_before (g);
4269 if (!single_comparison)
4271 m_gsi = gsi_after_labels (edge_true_true->src);
4272 tree plm1idx = size_int (prec_limbs - 1);
4273 tree plm1type = limb_access_type (type, plm1idx);
4274 l = limb_access (type, var ? var : obj, plm1idx, true);
4275 if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
4276 rhs = add_cast (plm1type, rhs);
4277 if (!useless_type_conversion_p (TREE_TYPE (l),
4278 TREE_TYPE (rhs)))
4279 rhs = add_cast (TREE_TYPE (l), rhs);
4280 g = gimple_build_assign (l, rhs);
4281 insert_before (g);
4283 m_gsi = gsi_after_labels (edge_true_false->dest);
4285 else
4287 tree l = limb_access (type, var ? var : obj, idx, true);
4288 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4289 rhs = add_cast (TREE_TYPE (l), rhs);
4290 g = gimple_build_assign (l, rhs);
4291 insert_before (g);
4294 m_first = false;
4295 if (kind == bitint_prec_huge && i <= 1)
4297 if (i == 0)
4299 idx = make_ssa_name (sizetype);
4300 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4301 size_one_node);
4302 insert_before (g);
4304 else
4306 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4307 size_int (2));
4308 insert_before (g);
4309 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4310 NULL_TREE, NULL_TREE);
4311 insert_before (g);
4312 m_gsi = gsi_for_stmt (final_stmt);
4313 m_bb = NULL;
4318 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4321 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4322 argument or a _Complex large/huge _BitInt return type. */
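/* The product itself is always computed by a .MULBITINT call (backed by
   a libgcc routine) into a buffer of MAX (prec2, prec) bits, e.g.
     .MULBITINT (&buf, 16, &a, 8, &b, 8);
   for two operands known to fit into 8 bits (BUF being an illustrative
   temporary); overflow is then detected afterwards by inspecting the
   limbs holding bits [start, end) of that infinitely precise result.  */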
4324 void
4325 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4327 tree arg0 = gimple_call_arg (stmt, 0);
4328 tree arg1 = gimple_call_arg (stmt, 1);
4329 tree lhs = gimple_call_lhs (stmt);
4330 if (!lhs)
4332 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4333 gsi_remove (&gsi, true);
4334 return;
4336 gimple *final_stmt = gsi_stmt (m_gsi);
4337 tree type = TREE_TYPE (lhs);
4338 if (TREE_CODE (type) == COMPLEX_TYPE)
4339 type = TREE_TYPE (type);
4340 int prec = TYPE_PRECISION (type), prec0, prec1;
4341 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4342 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4343 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4344 + (prec1 < 0 ? -prec1 : prec1));
4345 if (prec0 == 1 || prec1 == 1)
4346 --prec2;
4347 tree var = NULL_TREE;
4348 tree orig_obj = obj;
4349 bool force_var = false;
4350 if (obj == NULL_TREE
4351 && TREE_CODE (type) == BITINT_TYPE
4352 && bitint_precision_kind (type) >= bitint_prec_large
4353 && m_names
4354 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4356 int part = var_to_partition (m_map, lhs);
4357 gcc_assert (m_vars[part] != NULL_TREE);
4358 obj = m_vars[part];
4359 if (TREE_TYPE (lhs) == type)
4360 orig_obj = obj;
4362 else if (obj != NULL_TREE && DECL_P (obj))
4364 for (int i = 0; i < 2; ++i)
4366 tree arg = i ? arg1 : arg0;
4367 if (TREE_CODE (arg) == ADDR_EXPR)
4368 arg = TREE_OPERAND (arg, 0);
4369 if (get_base_address (arg) == obj)
4371 force_var = true;
4372 break;
4376 if (obj == NULL_TREE
4377 || force_var
4378 || TREE_CODE (type) != BITINT_TYPE
4379 || bitint_precision_kind (type) < bitint_prec_large
4380 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4382 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4383 tree atype = build_array_type_nelts (m_limb_type, nelts);
4384 var = create_tmp_var (atype);
4386 tree addr = build_fold_addr_expr (var ? var : obj);
4387 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4388 NULL_TREE, true, GSI_SAME_STMT);
4389 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4390 gimple *g
4391 = gimple_build_call_internal (IFN_MULBITINT, 6,
4392 addr, build_int_cst (sitype,
4393 MAX (prec2, prec)),
4394 arg0, build_int_cst (sitype, prec0),
4395 arg1, build_int_cst (sitype, prec1));
4396 insert_before (g);
4398 unsigned start, end;
4399 bool check_zero;
4400 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4401 &start, &end, &check_zero);
4402 if (ovf == NULL_TREE)
4404 unsigned startlimb = start / limb_prec;
4405 unsigned endlimb = (end - 1) / limb_prec;
4406 unsigned cnt;
4407 bool use_loop = false;
4408 if (startlimb == endlimb)
4409 cnt = 1;
4410 else if (startlimb + 1 == endlimb)
4411 cnt = 2;
4412 else if ((end % limb_prec) == 0)
4414 cnt = 2;
4415 use_loop = true;
4417 else
4419 cnt = 3;
4420 use_loop = startlimb + 2 < endlimb;
4422 if (cnt == 1)
4424 tree l = limb_access (NULL_TREE, var ? var : obj,
4425 size_int (startlimb), true);
4426 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4427 insert_before (g);
4428 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4429 startlimb, check_zero);
4430 ovf = make_ssa_name (boolean_type_node);
4431 if (check_zero)
4432 g = gimple_build_assign (ovf, NE_EXPR, l,
4433 build_zero_cst (m_limb_type));
4434 else
4436 g = gimple_build_assign (make_ssa_name (m_limb_type),
4437 PLUS_EXPR, l,
4438 build_int_cst (m_limb_type, 1));
4439 insert_before (g);
4440 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4441 build_int_cst (m_limb_type, 1));
4443 insert_before (g);
4445 else
4447 basic_block edge_bb = NULL;
4448 gimple_stmt_iterator gsi = m_gsi;
4449 gsi_prev (&gsi);
4450 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4451 edge_bb = e->src;
4452 m_gsi = gsi_end_bb (edge_bb);
4454 tree cmp = build_zero_cst (m_limb_type);
4455 for (unsigned i = 0; i < cnt; i++)
4457 tree idx, idx_next = NULL_TREE;
4458 if (i == 0)
4459 idx = size_int (startlimb);
4460 else if (i == 2)
4461 idx = size_int (endlimb);
4462 else if (use_loop)
4463 idx = create_loop (size_int (startlimb + 1), &idx_next);
4464 else
4465 idx = size_int (startlimb + 1);
4466 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4467 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4468 insert_before (g);
4469 l = gimple_assign_lhs (g);
4470 if (i == 0 || i == 2)
4471 l = arith_overflow_extract_bits (start, end, l,
4472 tree_to_uhwi (idx),
4473 check_zero);
4474 if (i == 0 && !check_zero)
4476 cmp = l;
4477 g = gimple_build_assign (make_ssa_name (m_limb_type),
4478 PLUS_EXPR, l,
4479 build_int_cst (m_limb_type, 1));
4480 insert_before (g);
4481 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4482 build_int_cst (m_limb_type, 1),
4483 NULL_TREE, NULL_TREE);
4485 else
4486 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4487 insert_before (g);
4488 edge e1 = split_block (gsi_bb (m_gsi), g);
4489 e1->flags = EDGE_FALSE_VALUE;
4490 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4491 EDGE_TRUE_VALUE);
4492 e1->probability = profile_probability::likely ();
4493 e2->probability = e1->probability.invert ();
4494 if (i == 0)
4495 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4496 m_gsi = gsi_after_labels (e1->dest);
4497 if (i == 1 && use_loop)
4499 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4500 size_one_node);
4501 insert_before (g);
4502 g = gimple_build_cond (NE_EXPR, idx_next,
4503 size_int (endlimb + (cnt == 2)),
4504 NULL_TREE, NULL_TREE);
4505 insert_before (g);
4506 edge true_edge, false_edge;
4507 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4508 &true_edge,
4509 &false_edge);
4510 m_gsi = gsi_after_labels (false_edge->dest);
4511 m_bb = NULL;
4515 ovf = make_ssa_name (boolean_type_node);
4516 basic_block bb = gimple_bb (final_stmt);
4517 gphi *phi = create_phi_node (ovf, bb);
4518 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4519 edge_iterator ei;
4520 FOR_EACH_EDGE (e, ei, bb->preds)
4522 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4523 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4525 m_gsi = gsi_for_stmt (final_stmt);
4529 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4532 /* Lower a REALPART_EXPR or IMAGPART_EXPR stmt extracting part of the
4533 result of a .{ADD,SUB,MUL}_OVERFLOW call. */
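/* The _Complex large/huge _BitInt result of such a call is laid out as
   two consecutive limb arrays, so REALPART_EXPR is lowered to a copy of
   the NELTS limbs at offset 0 and IMAGPART_EXPR to a copy of the limbs
   at byte offset NELTS * m_limb_size of the backing variable.  */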
4535 void
4536 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4538 tree rhs1 = gimple_assign_rhs1 (stmt);
4539 rhs1 = TREE_OPERAND (rhs1, 0);
4540 if (obj == NULL_TREE)
4542 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4543 gcc_assert (m_vars[part] != NULL_TREE);
4544 obj = m_vars[part];
4546 if (TREE_CODE (rhs1) == SSA_NAME
4547 && (m_names == NULL
4548 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4550 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4551 return;
4553 int part = var_to_partition (m_map, rhs1);
4554 gcc_assert (m_vars[part] != NULL_TREE);
4555 tree var = m_vars[part];
4556 unsigned HOST_WIDE_INT nelts
4557 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4558 tree atype = build_array_type_nelts (m_limb_type, nelts);
4559 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4560 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4561 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4562 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4563 ? 0 : nelts * m_limb_size);
4564 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4565 gimple *g = gimple_build_assign (obj, v2);
4566 insert_before (g);
4569 /* Lower COMPLEX_EXPR stmt. */
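/* I.e. for lhs = COMPLEX_EXPR <A, B> both halves become plain limb
   array copies: the array for A is stored at offset 0 of the variable
   backing the lhs and the array for B immediately after it, at
   TYPE_SIZE_UNIT (atype) bytes.  */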
4571 void
4572 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4574 tree lhs = gimple_assign_lhs (stmt);
4575 tree rhs1 = gimple_assign_rhs1 (stmt);
4576 tree rhs2 = gimple_assign_rhs2 (stmt);
4577 int part = var_to_partition (m_map, lhs);
4578 gcc_assert (m_vars[part] != NULL_TREE);
4579 lhs = m_vars[part];
4580 unsigned HOST_WIDE_INT nelts
4581 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4582 tree atype = build_array_type_nelts (m_limb_type, nelts);
4583 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4584 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4585 tree v2;
4586 if (TREE_CODE (rhs1) == SSA_NAME)
4588 part = var_to_partition (m_map, rhs1);
4589 gcc_assert (m_vars[part] != NULL_TREE);
4590 v2 = m_vars[part];
4592 else if (integer_zerop (rhs1))
4593 v2 = build_zero_cst (atype);
4594 else
4595 v2 = tree_output_constant_def (rhs1);
4596 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4597 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4598 gimple *g = gimple_build_assign (v1, v2);
4599 insert_before (g);
4600 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4601 TYPE_SIZE_UNIT (atype));
4602 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4603 if (TREE_CODE (rhs2) == SSA_NAME)
4605 part = var_to_partition (m_map, rhs2);
4606 gcc_assert (m_vars[part] != NULL_TREE);
4607 v2 = m_vars[part];
4609 else if (integer_zerop (rhs2))
4610 v2 = build_zero_cst (atype);
4611 else
4612 v2 = tree_output_constant_def (rhs2);
4613 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4614 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4615 g = gimple_build_assign (v1, v2);
4616 insert_before (g);
4619 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4620 argument. */
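/* E.g. with 64-bit limbs, .POPCOUNT of a _BitInt(512) becomes a loop
   calling __builtin_popcountll on each of the 8 limbs and summing the
   results; .PARITY XORs the limbs together and calls the builtin once
   at the end; .CLZ/.CTZ/.FFS/.CLRSB search for the first interesting
   limb and invoke the corresponding single-limb builtin on it.  */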
4622 void
4623 bitint_large_huge::lower_bit_query (gimple *stmt)
4625 tree arg0 = gimple_call_arg (stmt, 0);
4626 tree arg1 = (gimple_call_num_args (stmt) == 2
4627 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4628 tree lhs = gimple_call_lhs (stmt);
4629 gimple *g;
4631 if (!lhs)
4633 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4634 gsi_remove (&gsi, true);
4635 return;
4637 tree type = TREE_TYPE (arg0);
4638 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4639 bitint_prec_kind kind = bitint_precision_kind (type);
4640 gcc_assert (kind >= bitint_prec_large);
4641 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4642 enum built_in_function fcode = END_BUILTINS;
4643 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4644 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4645 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4646 switch (ifn)
4648 case IFN_CLZ:
4649 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4650 fcode = BUILT_IN_CLZ;
4651 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4652 fcode = BUILT_IN_CLZL;
4653 else
4654 fcode = BUILT_IN_CLZLL;
4655 break;
4656 case IFN_FFS:
4657 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood the + 1 is
4658 folded into the per-limb addend rather than added at the end. */
4659 arg1 = integer_zero_node;
4660 /* FALLTHRU */
4661 case IFN_CTZ:
4662 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4663 fcode = BUILT_IN_CTZ;
4664 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4665 fcode = BUILT_IN_CTZL;
4666 else
4667 fcode = BUILT_IN_CTZLL;
4668 m_upwards = true;
4669 break;
4670 case IFN_CLRSB:
4671 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4672 fcode = BUILT_IN_CLRSB;
4673 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4674 fcode = BUILT_IN_CLRSBL;
4675 else
4676 fcode = BUILT_IN_CLRSBLL;
4677 break;
4678 case IFN_PARITY:
4679 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4680 fcode = BUILT_IN_PARITY;
4681 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4682 fcode = BUILT_IN_PARITYL;
4683 else
4684 fcode = BUILT_IN_PARITYLL;
4685 m_upwards = true;
4686 break;
4687 case IFN_POPCOUNT:
4688 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4689 fcode = BUILT_IN_POPCOUNT;
4690 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4691 fcode = BUILT_IN_POPCOUNTL;
4692 else
4693 fcode = BUILT_IN_POPCOUNTLL;
4694 m_upwards = true;
4695 break;
4696 default:
4697 gcc_unreachable ();
4699 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4700 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4701 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4702 basic_block edge_bb = NULL;
4703 if (m_upwards)
4705 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4706 if (kind == bitint_prec_large)
4707 cnt = CEIL (prec, limb_prec);
4708 else
4710 rem = (prec % (2 * limb_prec));
4711 end = (prec - rem) / limb_prec;
4712 cnt = 2 + CEIL (rem, limb_prec);
4713 idx = idx_first = create_loop (size_zero_node, &idx_next);
4716 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4718 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4719 gsi_prev (&gsi);
4720 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4721 edge_bb = e->src;
4722 if (kind == bitint_prec_large)
4723 m_gsi = gsi_end_bb (edge_bb);
4724 bqp = XALLOCAVEC (struct bq_details, cnt);
4726 else
4727 m_after_stmt = stmt;
4728 if (kind != bitint_prec_large)
4729 m_upwards_2limb = end;
4731 for (unsigned i = 0; i < cnt; i++)
4733 m_data_cnt = 0;
4734 if (kind == bitint_prec_large)
4735 idx = size_int (i);
4736 else if (i >= 2)
4737 idx = size_int (end + (i > 2));
4739 tree rhs1 = handle_operand (arg0, idx);
4740 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4742 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4743 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4744 rhs1 = add_cast (m_limb_type, rhs1);
4747 tree in, out, tem;
4748 if (ifn == IFN_PARITY)
4749 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4750 else if (ifn == IFN_FFS)
4751 in = prepare_data_in_out (integer_one_node, idx, &out);
4752 else
4753 in = prepare_data_in_out (integer_zero_node, idx, &out);
4755 switch (ifn)
4757 case IFN_CTZ:
4758 case IFN_FFS:
4759 g = gimple_build_cond (NE_EXPR, rhs1,
4760 build_zero_cst (m_limb_type),
4761 NULL_TREE, NULL_TREE);
4762 insert_before (g);
4763 edge e1, e2;
4764 e1 = split_block (gsi_bb (m_gsi), g);
4765 e1->flags = EDGE_FALSE_VALUE;
4766 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4767 e1->probability = profile_probability::unlikely ();
4768 e2->probability = e1->probability.invert ();
4769 if (i == 0)
4770 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4771 m_gsi = gsi_after_labels (e1->dest);
4772 bqp[i].e = e2;
4773 bqp[i].val = rhs1;
4774 if (tree_fits_uhwi_p (idx))
4775 bqp[i].addend
4776 = build_int_cst (integer_type_node,
4777 tree_to_uhwi (idx) * limb_prec
4778 + (ifn == IFN_FFS));
4779 else
4781 bqp[i].addend = in;
4782 if (i == 1)
4783 res = out;
4784 else
4785 res = make_ssa_name (integer_type_node);
4786 g = gimple_build_assign (res, PLUS_EXPR, in,
4787 build_int_cst (integer_type_node,
4788 limb_prec));
4789 insert_before (g);
4790 m_data[m_data_cnt] = res;
4792 break;
4793 case IFN_PARITY:
4794 if (!integer_zerop (in))
4796 if (kind == bitint_prec_huge && i == 1)
4797 res = out;
4798 else
4799 res = make_ssa_name (m_limb_type);
4800 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4801 insert_before (g);
4803 else
4804 res = rhs1;
4805 m_data[m_data_cnt] = res;
4806 break;
4807 case IFN_POPCOUNT:
4808 g = gimple_build_call (fndecl, 1, rhs1);
4809 tem = make_ssa_name (integer_type_node);
4810 gimple_call_set_lhs (g, tem);
4811 insert_before (g);
4812 if (!integer_zerop (in))
4814 if (kind == bitint_prec_huge && i == 1)
4815 res = out;
4816 else
4817 res = make_ssa_name (integer_type_node);
4818 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4819 insert_before (g);
4821 else
4822 res = tem;
4823 m_data[m_data_cnt] = res;
4824 break;
4825 default:
4826 gcc_unreachable ();
4829 m_first = false;
4830 if (kind == bitint_prec_huge && i <= 1)
4832 if (i == 0)
4834 idx = make_ssa_name (sizetype);
4835 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4836 size_one_node);
4837 insert_before (g);
4839 else
4841 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4842 size_int (2));
4843 insert_before (g);
4844 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4845 NULL_TREE, NULL_TREE);
4846 insert_before (g);
4847 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4848 m_gsi = gsi_after_labels (edge_bb);
4849 else
4850 m_gsi = gsi_for_stmt (stmt);
4851 m_bb = NULL;
4856 else
4858 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4859 int sub_one = 0;
4860 if (kind == bitint_prec_large)
4861 cnt = CEIL (prec, limb_prec);
4862 else
4864 rem = prec % limb_prec;
4865 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4866 rem = limb_prec;
4867 end = (prec - rem) / limb_prec;
4868 cnt = 1 + (rem != 0);
4869 if (ifn == IFN_CLRSB)
4870 sub_one = 1;
4873 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4874 gsi_prev (&gsi);
4875 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4876 edge_bb = e->src;
4877 m_gsi = gsi_end_bb (edge_bb);
4879 if (ifn == IFN_CLZ)
4880 bqp = XALLOCAVEC (struct bq_details, cnt);
4881 else
4883 gsi = gsi_for_stmt (stmt);
4884 gsi_prev (&gsi);
4885 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4886 edge_bb = e->src;
4887 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4890 for (unsigned i = 0; i < cnt; i++)
4892 m_data_cnt = 0;
4893 if (kind == bitint_prec_large)
4894 idx = size_int (cnt - i - 1);
4895 else if (i == cnt - 1)
4896 idx = create_loop (size_int (end - 1), &idx_next);
4897 else
4898 idx = size_int (end);
4900 tree rhs1 = handle_operand (arg0, idx);
4901 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4903 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4904 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4905 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4906 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4907 rhs1 = add_cast (m_limb_type, rhs1);
4910 if (ifn == IFN_CLZ)
4912 g = gimple_build_cond (NE_EXPR, rhs1,
4913 build_zero_cst (m_limb_type),
4914 NULL_TREE, NULL_TREE);
4915 insert_before (g);
4916 edge e1 = split_block (gsi_bb (m_gsi), g);
4917 e1->flags = EDGE_FALSE_VALUE;
4918 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4919 e1->probability = profile_probability::unlikely ();
4920 e2->probability = e1->probability.invert ();
4921 if (i == 0)
4922 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4923 m_gsi = gsi_after_labels (e1->dest);
4924 bqp[i].e = e2;
4925 bqp[i].val = rhs1;
4927 else
4929 if (i == 0)
4931 first = rhs1;
4932 g = gimple_build_assign (make_ssa_name (m_limb_type),
4933 PLUS_EXPR, rhs1,
4934 build_int_cst (m_limb_type, 1));
4935 insert_before (g);
4936 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4937 build_int_cst (m_limb_type, 1),
4938 NULL_TREE, NULL_TREE);
4939 insert_before (g);
4941 else
4943 g = gimple_build_assign (make_ssa_name (m_limb_type),
4944 BIT_XOR_EXPR, rhs1, first);
4945 insert_before (g);
4946 tree stype = signed_type_for (m_limb_type);
4947 g = gimple_build_cond (LT_EXPR,
4948 add_cast (stype,
4949 gimple_assign_lhs (g)),
4950 build_zero_cst (stype),
4951 NULL_TREE, NULL_TREE);
4952 insert_before (g);
4953 edge e1 = split_block (gsi_bb (m_gsi), g);
4954 e1->flags = EDGE_FALSE_VALUE;
4955 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4956 EDGE_TRUE_VALUE);
4957 e1->probability = profile_probability::unlikely ();
4958 e2->probability = e1->probability.invert ();
4959 if (i == 1)
4960 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4961 e2->src);
4962 m_gsi = gsi_after_labels (e1->dest);
4963 bqp[2 * i].e = e2;
4964 g = gimple_build_cond (NE_EXPR, rhs1, first,
4965 NULL_TREE, NULL_TREE);
4966 insert_before (g);
4968 edge e1 = split_block (gsi_bb (m_gsi), g);
4969 e1->flags = EDGE_FALSE_VALUE;
4970 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4971 e1->probability = profile_probability::unlikely ();
4972 e2->probability = e1->probability.invert ();
4973 if (i == 0)
4974 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4975 m_gsi = gsi_after_labels (e1->dest);
4976 bqp[2 * i + 1].e = e2;
4977 bqp[i].val = rhs1;
4979 if (tree_fits_uhwi_p (idx))
4980 bqp[i].addend
4981 = build_int_cst (integer_type_node,
4982 (int) prec
4983 - (((int) tree_to_uhwi (idx) + 1)
4984 * limb_prec) - sub_one);
4985 else
4987 tree in, out;
4988 in = build_int_cst (integer_type_node, rem - sub_one);
4989 m_first = true;
4990 in = prepare_data_in_out (in, idx, &out);
4991 out = m_data[m_data_cnt + 1];
4992 bqp[i].addend = in;
4993 g = gimple_build_assign (out, PLUS_EXPR, in,
4994 build_int_cst (integer_type_node,
4995 limb_prec));
4996 insert_before (g);
4997 m_data[m_data_cnt] = out;
5000 m_first = false;
5001 if (kind == bitint_prec_huge && i == cnt - 1)
5003 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
5004 size_int (-1));
5005 insert_before (g);
5006 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
5007 NULL_TREE, NULL_TREE);
5008 insert_before (g);
5009 edge true_edge, false_edge;
5010 extract_true_false_edges_from_block (gsi_bb (m_gsi),
5011 &true_edge, &false_edge);
5012 m_gsi = gsi_after_labels (false_edge->dest);
5013 m_bb = NULL;
5017 switch (ifn)
5019 case IFN_CLZ:
5020 case IFN_CTZ:
5021 case IFN_FFS:
5022 gphi *phi1, *phi2, *phi3;
5023 basic_block bb;
5024 bb = gsi_bb (m_gsi);
5025 remove_edge (find_edge (bb, gimple_bb (stmt)));
5026 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5027 gimple_bb (stmt));
5028 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5029 gimple_bb (stmt));
5030 for (unsigned i = 0; i < cnt; i++)
5032 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
5033 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
5035 if (arg1 == NULL_TREE)
5037 g = gimple_build_builtin_unreachable (m_loc);
5038 insert_before (g);
5040 m_gsi = gsi_for_stmt (stmt);
5041 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
5042 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5043 insert_before (g);
5044 if (arg1 == NULL_TREE)
5045 g = gimple_build_assign (lhs, PLUS_EXPR,
5046 gimple_phi_result (phi2),
5047 gimple_call_lhs (g));
5048 else
5050 g = gimple_build_assign (make_ssa_name (integer_type_node),
5051 PLUS_EXPR, gimple_phi_result (phi2),
5052 gimple_call_lhs (g));
5053 insert_before (g);
5054 edge e1 = split_block (gimple_bb (stmt), g);
5055 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5056 e2->probability = profile_probability::always ();
5057 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5058 get_immediate_dominator (CDI_DOMINATORS,
5059 e1->src));
5060 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5061 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5062 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5063 m_gsi = gsi_for_stmt (stmt);
5064 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5066 gsi_replace (&m_gsi, g, true);
5067 break;
5068 case IFN_CLRSB:
5069 bb = gsi_bb (m_gsi);
5070 remove_edge (find_edge (bb, edge_bb));
5071 edge e;
5072 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5073 e->probability = profile_probability::always ();
5074 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5075 get_immediate_dominator (CDI_DOMINATORS,
5076 edge_bb));
5077 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5078 edge_bb);
5079 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5080 edge_bb);
5081 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5082 gimple_bb (stmt));
5083 for (unsigned i = 0; i < cnt; i++)
5085 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5086 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5087 UNKNOWN_LOCATION);
5088 tree a = bqp[i].addend;
5089 if (i && kind == bitint_prec_large)
5090 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5091 if (i)
5092 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5094 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5095 UNKNOWN_LOCATION);
5096 m_gsi = gsi_after_labels (edge_bb);
5097 g = gimple_build_call (fndecl, 1,
5098 add_cast (signed_type_for (m_limb_type),
5099 gimple_phi_result (phi1)));
5100 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5101 insert_before (g);
5102 g = gimple_build_assign (make_ssa_name (integer_type_node),
5103 PLUS_EXPR, gimple_call_lhs (g),
5104 gimple_phi_result (phi2));
5105 insert_before (g);
5106 if (kind != bitint_prec_large)
5108 g = gimple_build_assign (make_ssa_name (integer_type_node),
5109 PLUS_EXPR, gimple_assign_lhs (g),
5110 integer_one_node);
5111 insert_before (g);
5113 add_phi_arg (phi3, gimple_assign_lhs (g),
5114 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5115 m_gsi = gsi_for_stmt (stmt);
5116 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5117 gsi_replace (&m_gsi, g, true);
5118 break;
5119 case IFN_PARITY:
5120 g = gimple_build_call (fndecl, 1, res);
5121 gimple_call_set_lhs (g, lhs);
5122 gsi_replace (&m_gsi, g, true);
5123 break;
5124 case IFN_POPCOUNT:
5125 g = gimple_build_assign (lhs, res);
5126 gsi_replace (&m_gsi, g, true);
5127 break;
5128 default:
5129 gcc_unreachable ();
5133 /* Lower a call statement with one or more large/huge _BitInt
5134 arguments or a large/huge _BitInt return value. */
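/* Large/huge _BitInt SSA_NAMEs cannot flow directly into or out of a
   call, so (as an illustrative sketch; bitint.N is a made-up name for
   a partition's backing variable) an argument is rewritten as
     arg = VIEW_CONVERT_EXPR <_BitInt(256)> (bitint.N);
     foo (arg);
   while a large/huge _BitInt lhs is redirected so that the call stores
   straight into its partition's variable.  */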
5136 void
5137 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5139 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5140 unsigned int nargs = gimple_call_num_args (stmt);
5141 if (gimple_call_internal_p (stmt))
5142 switch (gimple_call_internal_fn (stmt))
5144 case IFN_ADD_OVERFLOW:
5145 case IFN_SUB_OVERFLOW:
5146 case IFN_UBSAN_CHECK_ADD:
5147 case IFN_UBSAN_CHECK_SUB:
5148 lower_addsub_overflow (obj, stmt);
5149 return;
5150 case IFN_MUL_OVERFLOW:
5151 case IFN_UBSAN_CHECK_MUL:
5152 lower_mul_overflow (obj, stmt);
5153 return;
5154 case IFN_CLZ:
5155 case IFN_CTZ:
5156 case IFN_CLRSB:
5157 case IFN_FFS:
5158 case IFN_PARITY:
5159 case IFN_POPCOUNT:
5160 lower_bit_query (stmt);
5161 return;
5162 default:
5163 break;
5165 for (unsigned int i = 0; i < nargs; ++i)
5167 tree arg = gimple_call_arg (stmt, i);
5168 if (TREE_CODE (arg) != SSA_NAME
5169 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5170 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5171 continue;
5172 if (SSA_NAME_IS_DEFAULT_DEF (arg)
5173 && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
5175 tree var = create_tmp_reg (TREE_TYPE (arg));
5176 arg = get_or_create_ssa_default_def (cfun, var);
5178 else
5180 int p = var_to_partition (m_map, arg);
5181 tree v = m_vars[p];
5182 gcc_assert (v != NULL_TREE);
5183 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5184 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5185 arg = make_ssa_name (TREE_TYPE (arg));
5186 gimple *g = gimple_build_assign (arg, v);
5187 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5189 gimple_call_set_arg (stmt, i, arg);
5190 if (m_preserved == NULL)
5191 m_preserved = BITMAP_ALLOC (NULL);
5192 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5194 tree lhs = gimple_call_lhs (stmt);
5195 if (lhs
5196 && TREE_CODE (lhs) == SSA_NAME
5197 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5198 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5200 int p = var_to_partition (m_map, lhs);
5201 tree v = m_vars[p];
5202 gcc_assert (v != NULL_TREE);
5203 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5204 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5205 gimple_call_set_lhs (stmt, v);
5206 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5208 update_stmt (stmt);
5211 /* Lower __asm STMT which involves large/huge _BitInt values. */
5213 void
5214 bitint_large_huge::lower_asm (gimple *stmt)
5216 gasm *g = as_a <gasm *> (stmt);
5217 unsigned noutputs = gimple_asm_noutputs (g);
5218 unsigned ninputs = gimple_asm_ninputs (g);
5220 for (unsigned i = 0; i < noutputs; ++i)
5222 tree t = gimple_asm_output_op (g, i);
5223 tree s = TREE_VALUE (t);
5224 if (TREE_CODE (s) == SSA_NAME
5225 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5226 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5228 int part = var_to_partition (m_map, s);
5229 gcc_assert (m_vars[part] != NULL_TREE);
5230 TREE_VALUE (t) = m_vars[part];
5233 for (unsigned i = 0; i < ninputs; ++i)
5235 tree t = gimple_asm_input_op (g, i);
5236 tree s = TREE_VALUE (t);
5237 if (TREE_CODE (s) == SSA_NAME
5238 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5239 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5241 if (SSA_NAME_IS_DEFAULT_DEF (s)
5242 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
5244 TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
5245 mark_addressable (TREE_VALUE (t));
5247 else
5249 int part = var_to_partition (m_map, s);
5250 gcc_assert (m_vars[part] != NULL_TREE);
5251 TREE_VALUE (t) = m_vars[part];
5255 update_stmt (stmt);
5258 /* Lower statement STMT which involves large/huge _BitInt values
5259 into code accessing individual limbs. */
5261 void
5262 bitint_large_huge::lower_stmt (gimple *stmt)
5264 m_first = true;
5265 m_lhs = NULL_TREE;
5266 m_data.truncate (0);
5267 m_data_cnt = 0;
5268 m_gsi = gsi_for_stmt (stmt);
5269 m_after_stmt = NULL;
5270 m_bb = NULL;
5271 m_init_gsi = m_gsi;
5272 gsi_prev (&m_init_gsi);
5273 m_preheader_bb = NULL;
5274 m_upwards_2limb = 0;
5275 m_upwards = false;
5276 m_var_msb = false;
5277 m_cast_conditional = false;
5278 m_bitfld_load = 0;
5279 m_loc = gimple_location (stmt);
5280 if (is_gimple_call (stmt))
5282 lower_call (NULL_TREE, stmt);
5283 return;
5285 if (gimple_code (stmt) == GIMPLE_ASM)
5287 lower_asm (stmt);
5288 return;
5290 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5291 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5292 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5293 bool mergeable_cast_p = false;
5294 bool final_cast_p = false;
5295 if (gimple_assign_cast_p (stmt))
5297 lhs = gimple_assign_lhs (stmt);
5298 tree rhs1 = gimple_assign_rhs1 (stmt);
5299 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
5300 rhs1 = TREE_OPERAND (rhs1, 0);
5301 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5302 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5303 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5304 mergeable_cast_p = true;
5305 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5306 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5307 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5308 || POINTER_TYPE_P (TREE_TYPE (lhs))
5309 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
5311 final_cast_p = true;
5312 if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
5313 && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
5314 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5315 && !POINTER_TYPE_P (TREE_TYPE (lhs))))
5316 && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
5318 /* Handle VIEW_CONVERT_EXPRs to huge INTEGER_TYPEs which are not
5319 generally supported, like uint256_t or uint512_t. These are
5320 usually emitted from memcpy folding and backends support moves
5321 with them, but usually little else. Similarly handle VCEs to
5322 vector/complex types etc. */
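	  /* E.g. given _BitInt(256) B,
	       U = VIEW_CONVERT_EXPR <uint256_t> (B);
	     is rewritten to view-convert the limb array variable
	     backing B directly; no per-limb code is needed for such
	     moves.  */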
5323 gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
5324 if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
5325 && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
5327 tree var = create_tmp_reg (TREE_TYPE (lhs));
5328 rhs1 = get_or_create_ssa_default_def (cfun, var);
5329 gimple_assign_set_rhs1 (stmt, rhs1);
5330 gimple_assign_set_rhs_code (stmt, SSA_NAME);
5332 else
5334 int part = var_to_partition (m_map, rhs1);
5335 gcc_assert (m_vars[part] != NULL_TREE);
5336 rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
5337 m_vars[part]);
5338 gimple_assign_set_rhs1 (stmt, rhs1);
5340 update_stmt (stmt);
5341 return;
5343 if (TREE_CODE (rhs1) == SSA_NAME
5344 && (m_names == NULL
5345 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5347 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5348 if (is_gimple_assign (g)
5349 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5351 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5352 if (TREE_CODE (rhs2) == SSA_NAME
5353 && (m_names == NULL
5354 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5356 g = SSA_NAME_DEF_STMT (rhs2);
5357 int ovf = optimizable_arith_overflow (g);
5358 if (ovf == 2)
5359 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5360 and IMAGPART_EXPR uses, where the latter is cast to
5361 non-_BitInt, it will be optimized when handling
5362 the REALPART_EXPR. */
5363 return;
5364 if (ovf == 1)
5366 lower_call (NULL_TREE, g);
5367 return;
5373 else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5374 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5375 && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5376 && !POINTER_TYPE_P (TREE_TYPE (rhs1))
5377 && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
5379 int part = var_to_partition (m_map, lhs);
5380 gcc_assert (m_vars[part] != NULL_TREE);
5381 lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
5382 insert_before (gimple_build_assign (lhs, rhs1));
5383 return;
5386 if (gimple_store_p (stmt))
5388 tree rhs1 = gimple_assign_rhs1 (stmt);
5389 if (TREE_CODE (rhs1) == SSA_NAME
5390 && (m_names == NULL
5391 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5393 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5394 m_loc = gimple_location (g);
5395 lhs = gimple_assign_lhs (stmt);
5396 if (is_gimple_assign (g) && !mergeable_op (g))
5397 switch (gimple_assign_rhs_code (g))
5399 case LSHIFT_EXPR:
5400 case RSHIFT_EXPR:
5401 lower_shift_stmt (lhs, g);
5402 handled:
5403 m_gsi = gsi_for_stmt (stmt);
5404 unlink_stmt_vdef (stmt);
5405 release_ssa_name (gimple_vdef (stmt));
5406 gsi_remove (&m_gsi, true);
5407 return;
5408 case MULT_EXPR:
5409 case TRUNC_DIV_EXPR:
5410 case TRUNC_MOD_EXPR:
5411 lower_muldiv_stmt (lhs, g);
5412 goto handled;
5413 case FIX_TRUNC_EXPR:
5414 lower_float_conv_stmt (lhs, g);
5415 goto handled;
5416 case REALPART_EXPR:
5417 case IMAGPART_EXPR:
5418 lower_cplxpart_stmt (lhs, g);
5419 goto handled;
5420 case VIEW_CONVERT_EXPR:
5422 tree rhs1 = gimple_assign_rhs1 (g);
5423 rhs1 = TREE_OPERAND (rhs1, 0);
5424 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5425 && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
5427 tree ltype = TREE_TYPE (rhs1);
5428 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
5429 ltype
5430 = build_qualified_type (ltype,
5431 TYPE_QUALS (TREE_TYPE (lhs))
5432 | ENCODE_QUAL_ADDR_SPACE (as));
5433 lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
5434 gimple_assign_set_lhs (stmt, lhs);
5435 gimple_assign_set_rhs1 (stmt, rhs1);
5436 gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
5437 update_stmt (stmt);
5438 return;
5441 break;
5442 default:
5443 break;
5445 else if (optimizable_arith_overflow (g) == 3)
5447 lower_call (lhs, g);
5448 goto handled;
5450 m_loc = gimple_location (stmt);
5453 if (mergeable_op (stmt)
5454 || gimple_store_p (stmt)
5455 || gimple_assign_load_p (stmt)
5456 || eq_p
5457 || mergeable_cast_p)
5459 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5460 if (!eq_p)
5461 return;
5463 else if (cmp_code != ERROR_MARK)
5464 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5465 if (cmp_code != ERROR_MARK)
5467 if (gimple_code (stmt) == GIMPLE_COND)
5469 gcond *cstmt = as_a <gcond *> (stmt);
5470 gimple_cond_set_lhs (cstmt, lhs);
5471 gimple_cond_set_rhs (cstmt, boolean_false_node);
5472 gimple_cond_set_code (cstmt, cmp_code);
5473 update_stmt (stmt);
5474 return;
5476 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5478 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5479 boolean_false_node);
5480 gimple_assign_set_rhs1 (stmt, cond);
5481 lhs = gimple_assign_lhs (stmt);
5482 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5483 || (bitint_precision_kind (TREE_TYPE (lhs))
5484 <= bitint_prec_middle));
5485 update_stmt (stmt);
5486 return;
5488 gimple_assign_set_rhs1 (stmt, lhs);
5489 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5490 gimple_assign_set_rhs_code (stmt, cmp_code);
5491 update_stmt (stmt);
5492 return;
5494 if (final_cast_p)
5496 tree lhs_type = TREE_TYPE (lhs);
5497 /* Add support for 3 or more limbs filled in from normal integral
5498 type if this assert fails. If no target chooses limb mode smaller
5499 than half of largest supported normal integral type, this will not
5500 be needed. */
5501 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5502 gimple *g;
5503 if ((TREE_CODE (lhs_type) == BITINT_TYPE
5504 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5505 || POINTER_TYPE_P (lhs_type))
5506 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5507 TYPE_UNSIGNED (lhs_type));
5508 m_data_cnt = 0;
5509 tree rhs1 = gimple_assign_rhs1 (stmt);
5510 tree r1 = handle_operand (rhs1, size_int (0));
5511 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5512 r1 = add_cast (lhs_type, r1);
5513 if (TYPE_PRECISION (lhs_type) > limb_prec)
5515 m_data_cnt = 0;
5516 m_first = false;
5517 tree r2 = handle_operand (rhs1, size_int (1));
5518 r2 = add_cast (lhs_type, r2);
5519 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5520 build_int_cst (unsigned_type_node,
5521 limb_prec));
5522 insert_before (g);
5523 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5524 gimple_assign_lhs (g));
5525 insert_before (g);
5526 r1 = gimple_assign_lhs (g);
5528 if (lhs_type != TREE_TYPE (lhs))
5529 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5530 else
5531 g = gimple_build_assign (lhs, r1);
5532 gsi_replace (&m_gsi, g, true);
5533 return;
5535 if (is_gimple_assign (stmt))
5536 switch (gimple_assign_rhs_code (stmt))
5538 case LSHIFT_EXPR:
5539 case RSHIFT_EXPR:
5540 lower_shift_stmt (NULL_TREE, stmt);
5541 return;
5542 case MULT_EXPR:
5543 case TRUNC_DIV_EXPR:
5544 case TRUNC_MOD_EXPR:
5545 lower_muldiv_stmt (NULL_TREE, stmt);
5546 return;
5547 case FIX_TRUNC_EXPR:
5548 case FLOAT_EXPR:
5549 lower_float_conv_stmt (NULL_TREE, stmt);
5550 return;
5551 case REALPART_EXPR:
5552 case IMAGPART_EXPR:
5553 lower_cplxpart_stmt (NULL_TREE, stmt);
5554 return;
5555 case COMPLEX_EXPR:
5556 lower_complexexpr_stmt (stmt);
5557 return;
5558 default:
5559 break;
5561 gcc_unreachable ();
5564 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5565 the desired memory state. */
5567 void *
5568 vuse_eq (ao_ref *, tree vuse1, void *data)
5570 tree vuse2 = (tree) data;
5571 if (vuse1 == vuse2)
5572 return data;
5574 return NULL;
5577 /* Return true if STMT uses a library function and needs to take
5578 address of its inputs. We need to avoid bit-fields in those
5579 cases. Similarly, we need to avoid overlap between destination
5580 and source limb arrays. */
5582 bool
5583 stmt_needs_operand_addr (gimple *stmt)
5585 if (is_gimple_assign (stmt))
5586 switch (gimple_assign_rhs_code (stmt))
5588 case MULT_EXPR:
5589 case TRUNC_DIV_EXPR:
5590 case TRUNC_MOD_EXPR:
5591 case FLOAT_EXPR:
5592 return true;
5593 default:
5594 break;
5596 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5597 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5598 return true;
5599 return false;
5602 /* Dominator walker used to discover which large/huge _BitInt
5603 loads could be sunk into all their uses. */
5605 class bitint_dom_walker : public dom_walker
5607 public:
5608 bitint_dom_walker (bitmap names, bitmap loads)
5609 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5611 edge before_dom_children (basic_block) final override;
5613 private:
5614 bitmap m_names, m_loads;
5617 edge
5618 bitint_dom_walker::before_dom_children (basic_block bb)
5620 gphi *phi = get_virtual_phi (bb);
5621 tree vop;
5622 if (phi)
5623 vop = gimple_phi_result (phi);
5624 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5625 vop = NULL_TREE;
5626 else
5627 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5629 auto_vec<tree, 16> worklist;
5630 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5631 !gsi_end_p (gsi); gsi_next (&gsi))
5633 gimple *stmt = gsi_stmt (gsi);
5634 if (is_gimple_debug (stmt))
5635 continue;
5637 if (!vop && gimple_vuse (stmt))
5638 vop = gimple_vuse (stmt);
5640 tree cvop = vop;
5641 if (gimple_vdef (stmt))
5642 vop = gimple_vdef (stmt);
5644 tree lhs = gimple_get_lhs (stmt);
5645 if (lhs
5646 && TREE_CODE (lhs) == SSA_NAME
5647 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5648 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5649 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5650 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5651 it means it will be handled in a loop or straight line code
5652 at the location of its (ultimate) immediate use, so for
5653 vop checking purposes check these only at the ultimate
5654 immediate use. */
5655 continue;
5657 ssa_op_iter oi;
5658 use_operand_p use_p;
5659 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5661 tree s = USE_FROM_PTR (use_p);
5662 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5663 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5664 worklist.safe_push (s);
5667 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5668 while (worklist.length () > 0)
5670 tree s = worklist.pop ();
5672 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5674 gimple *g = SSA_NAME_DEF_STMT (s);
5675 needs_operand_addr |= stmt_needs_operand_addr (g);
5676 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5678 tree s2 = USE_FROM_PTR (use_p);
5679 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5680 && (bitint_precision_kind (TREE_TYPE (s2))
5681 >= bitint_prec_large))
5682 worklist.safe_push (s2);
5684 continue;
5686 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5687 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5689 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5690 if (TREE_CODE (rhs) == SSA_NAME
5691 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5692 s = rhs;
5693 else
5694 continue;
5696 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5697 continue;
5699 gimple *g = SSA_NAME_DEF_STMT (s);
5700 tree rhs1 = gimple_assign_rhs1 (g);
5701 if (needs_operand_addr
5702 && TREE_CODE (rhs1) == COMPONENT_REF
5703 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5705 tree fld = TREE_OPERAND (rhs1, 1);
5706 /* For little-endian, we can allow as inputs bit-fields
5707 which start at a limb boundary. */
5708 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5709 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5710 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5711 % limb_prec) == 0)
5713 else
5715 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5716 continue;
5720 ao_ref ref;
5721 ao_ref_init (&ref, rhs1);
5722 tree lvop = gimple_vuse (g);
5723 unsigned limit = 64;
5724 tree vuse = cvop;
5725 if (vop != cvop
5726 && is_gimple_assign (stmt)
5727 && gimple_store_p (stmt)
5728 && (needs_operand_addr
5729 || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
5730 vuse = vop;
5731 if (vuse != lvop
5732 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5733 NULL, NULL, limit, lvop) == NULL)
5734 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5738 bb->aux = (void *) vop;
5739 return NULL;
5744 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5745 build_ssa_conflict_graph.
5746 The differences are:
5747 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5748 2) for large/huge _BitInt multiplication/division/modulo process def
5749 only after processing uses rather than before to make uses conflict
5750 with the definition
5751 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5752 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5753 the final statement. */
5755 void
5756 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5757 ssa_conflicts *graph, bitmap names,
5758 void (*def) (live_track *, tree,
5759 ssa_conflicts *),
5760 void (*use) (live_track *, tree))
5762 bool muldiv_p = false;
5763 tree lhs = NULL_TREE;
5764 if (is_gimple_assign (stmt))
5766 lhs = gimple_assign_lhs (stmt);
5767 if (TREE_CODE (lhs) == SSA_NAME
5768 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5769 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5771 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5772 return;
5773 switch (gimple_assign_rhs_code (stmt))
5775 case MULT_EXPR:
5776 case TRUNC_DIV_EXPR:
5777 case TRUNC_MOD_EXPR:
5778 muldiv_p = true;
5779 default:
5780 break;
5785 ssa_op_iter iter;
5786 tree var;
5787 if (!muldiv_p)
5789 /* For stmts with more than one SSA_NAME definition pretend all the
5790 SSA_NAME outputs but the first one are live at this point, so
5791 that conflicts are added in between all those even when they are
5792 actually not really live after the asm, because expansion might
5793 copy those into pseudos after the asm and if multiple outputs
5794 share the same partition, it might overwrite those that should
5795 be live. E.g.
5796 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5797 return a;
5798 See PR70593. */
5799 bool first = true;
5800 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5801 if (first)
5802 first = false;
5803 else
5804 use (live, var);
5806 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5807 def (live, var, graph);
5810 auto_vec<tree, 16> worklist;
5811 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5812 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5813 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5815 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5816 use (live, var);
5817 else
5818 worklist.safe_push (var);
5821 while (worklist.length () > 0)
5823 tree s = worklist.pop ();
5824 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5825 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5826 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5828 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5829 use (live, var);
5830 else
5831 worklist.safe_push (var);
5835 if (muldiv_p)
5836 def (live, lhs, graph);
5839 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5840 return the largest bitint_prec_kind of them, otherwise return
5841 bitint_prec_small. */
5843 static bitint_prec_kind
5844 arith_overflow_arg_kind (gimple *stmt)
5846 bitint_prec_kind ret = bitint_prec_small;
5847 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5848 switch (gimple_call_internal_fn (stmt))
5850 case IFN_ADD_OVERFLOW:
5851 case IFN_SUB_OVERFLOW:
5852 case IFN_MUL_OVERFLOW:
5853 for (int i = 0; i < 2; ++i)
5855 tree a = gimple_call_arg (stmt, i);
5856 if (TREE_CODE (a) == INTEGER_CST
5857 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5859 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5860 ret = MAX (ret, kind);
5863 break;
5864 default:
5865 break;
5867 return ret;
5870 /* Entry point for _BitInt(N) operation lowering during optimization. */
5872 static unsigned int
5873 gimple_lower_bitint (void)
5875 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5876 limb_prec = 0;
5878 unsigned int i;
5879 for (i = 0; i < num_ssa_names; ++i)
5881 tree s = ssa_name (i);
5882 if (s == NULL)
5883 continue;
5884 tree type = TREE_TYPE (s);
5885 if (TREE_CODE (type) == COMPLEX_TYPE)
5887 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5888 != bitint_prec_small)
5889 break;
5890 type = TREE_TYPE (type);
5892 if (TREE_CODE (type) == BITINT_TYPE
5893 && bitint_precision_kind (type) != bitint_prec_small)
5894 break;
5895 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5896 into memory. Such functions could have no large/huge SSA_NAMEs. */
5897 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5899 gimple *g = SSA_NAME_DEF_STMT (s);
5900 if (is_gimple_assign (g) && gimple_store_p (g))
5902 tree t = gimple_assign_rhs1 (g);
5903 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5904 && (bitint_precision_kind (TREE_TYPE (t))
5905 >= bitint_prec_large))
5906 break;
5909 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5910 to floating point types need to be rewritten. */
5911 else if (SCALAR_FLOAT_TYPE_P (type))
5913 gimple *g = SSA_NAME_DEF_STMT (s);
5914 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5916 tree t = gimple_assign_rhs1 (g);
5917 if (TREE_CODE (t) == INTEGER_CST
5918 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5919 && (bitint_precision_kind (TREE_TYPE (t))
5920 != bitint_prec_small))
5921 break;
5925 if (i == num_ssa_names)
5926 return 0;
5928 basic_block bb;
5929 auto_vec<gimple *, 4> switch_statements;
5930 FOR_EACH_BB_FN (bb, cfun)
5932 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5934 tree idx = gimple_switch_index (swtch);
5935 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5936 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5937 continue;
5939 if (optimize)
5940 group_case_labels_stmt (swtch);
5941 if (gimple_switch_num_labels (swtch) == 1)
5943 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5944 gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
5945 gsi_remove (&gsi, true);
5947 else
5948 switch_statements.safe_push (swtch);
5952 if (!switch_statements.is_empty ())
5954 bool expanded = false;
5955 gimple *stmt;
5956 unsigned int j;
5957 i = 0;
5958 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5960 gswitch *swtch = as_a<gswitch *> (stmt);
5961 tree_switch_conversion::switch_decision_tree dt (swtch);
5962 expanded |= dt.analyze_switch_statement ();
5965 if (expanded)
5967 free_dominance_info (CDI_DOMINATORS);
5968 free_dominance_info (CDI_POST_DOMINATORS);
5969 mark_virtual_operands_for_renaming (cfun);
5970 cleanup_tree_cfg (TODO_update_ssa);
5974 struct bitint_large_huge large_huge;
5975 bool has_large_huge_parm_result = false;
5976 bool has_large_huge = false;
5977 unsigned int ret = 0, first_large_huge = ~0U;
5978 bool edge_insertions = false;
5979 for (; i < num_ssa_names; ++i)
5981 tree s = ssa_name (i);
5982 if (s == NULL)
5983 continue;
5984 tree type = TREE_TYPE (s);
5985 if (TREE_CODE (type) == COMPLEX_TYPE)
5987 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5988 >= bitint_prec_large)
5989 has_large_huge = true;
5990 type = TREE_TYPE (type);
5992 if (TREE_CODE (type) == BITINT_TYPE
5993 && bitint_precision_kind (type) >= bitint_prec_large)
5995 if (first_large_huge == ~0U)
5996 first_large_huge = i;
5997 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5998 gimple_stmt_iterator gsi;
5999 tree_code rhs_code;
6000 /* Unoptimize certain constructs to simpler alternatives to
6001 avoid having to lower all of them. */
6002 if (is_gimple_assign (stmt) && gimple_bb (stmt))
6003 switch (rhs_code = gimple_assign_rhs_code (stmt))
6005 default:
6006 break;
6007 case MULT_EXPR:
6008 case TRUNC_DIV_EXPR:
6009 case TRUNC_MOD_EXPR:
6010 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
6012 location_t loc = gimple_location (stmt);
6013 gsi = gsi_for_stmt (stmt);
6014 tree rhs1 = gimple_assign_rhs1 (stmt);
6015 tree rhs2 = gimple_assign_rhs2 (stmt);
6016 /* For multiplication and division with (ab)
6017 lhs and one or both operands force the operands
6018 into new SSA_NAMEs to avoid coalescing failures. */
6019 if (TREE_CODE (rhs1) == SSA_NAME
6020 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
6022 first_large_huge = 0;
6023 tree t = make_ssa_name (TREE_TYPE (rhs1));
6024 g = gimple_build_assign (t, SSA_NAME, rhs1);
6025 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6026 gimple_set_location (g, loc);
6027 gimple_assign_set_rhs1 (stmt, t);
6028 if (rhs1 == rhs2)
6030 gimple_assign_set_rhs2 (stmt, t);
6031 rhs2 = t;
6033 update_stmt (stmt);
6035 if (TREE_CODE (rhs2) == SSA_NAME
6036 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
6038 first_large_huge = 0;
6039 tree t = make_ssa_name (TREE_TYPE (rhs2));
6040 g = gimple_build_assign (t, SSA_NAME, rhs2);
6041 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6042 gimple_set_location (g, loc);
6043 gimple_assign_set_rhs2 (stmt, t);
6044 update_stmt (stmt);
6047 break;
6048 case LROTATE_EXPR:
6049 case RROTATE_EXPR:
6051 first_large_huge = 0;
6052 location_t loc = gimple_location (stmt);
6053 gsi = gsi_for_stmt (stmt);
6054 tree rhs1 = gimple_assign_rhs1 (stmt);
6055 tree type = TREE_TYPE (rhs1);
6056 tree n = gimple_assign_rhs2 (stmt), m;
6057 tree p = build_int_cst (TREE_TYPE (n),
6058 TYPE_PRECISION (type));
6059 if (TREE_CODE (n) == INTEGER_CST)
6060 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
6061 else
6063 m = make_ssa_name (TREE_TYPE (n));
6064 g = gimple_build_assign (m, MINUS_EXPR, p, n);
6065 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6066 gimple_set_location (g, loc);
6068 if (!TYPE_UNSIGNED (type))
6070 tree utype = build_bitint_type (TYPE_PRECISION (type),
6072 if (TREE_CODE (rhs1) == INTEGER_CST)
6073 rhs1 = fold_convert (utype, rhs1);
6074 else
6076 tree t = make_ssa_name (type);
6077 g = gimple_build_assign (t, NOP_EXPR, rhs1);
6078 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6079 gimple_set_location (g, loc);
6082 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6083 rhs_code == LROTATE_EXPR
6084 ? LSHIFT_EXPR : RSHIFT_EXPR,
6085 rhs1, n);
6086 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6087 gimple_set_location (g, loc);
6088 tree op1 = gimple_assign_lhs (g);
6089 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6090 rhs_code == LROTATE_EXPR
6091 ? RSHIFT_EXPR : LSHIFT_EXPR,
6092 rhs1, m);
6093 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6094 gimple_set_location (g, loc);
6095 tree op2 = gimple_assign_lhs (g);
6096 tree lhs = gimple_assign_lhs (stmt);
6097 if (!TYPE_UNSIGNED (type))
6099 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
6100 BIT_IOR_EXPR, op1, op2);
6101 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6102 gimple_set_location (g, loc);
6103 g = gimple_build_assign (lhs, NOP_EXPR,
6104 gimple_assign_lhs (g));
6106 else
6107 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
6108 gsi_replace (&gsi, g, true);
6109 gimple_set_location (g, loc);
6111 break;
6112 case ABS_EXPR:
6113 case ABSU_EXPR:
6114 case MIN_EXPR:
6115 case MAX_EXPR:
6116 case COND_EXPR:
6117 first_large_huge = 0;
6118 gsi = gsi_for_stmt (stmt);
6119 tree lhs = gimple_assign_lhs (stmt);
6120 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
6121 location_t loc = gimple_location (stmt);
6122 if (rhs_code == ABS_EXPR)
6123 g = gimple_build_cond (LT_EXPR, rhs1,
6124 build_zero_cst (TREE_TYPE (rhs1)),
6125 NULL_TREE, NULL_TREE);
6126 else if (rhs_code == ABSU_EXPR)
6128 rhs2 = make_ssa_name (TREE_TYPE (lhs));
6129 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
6130 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6131 gimple_set_location (g, loc);
6132 g = gimple_build_cond (LT_EXPR, rhs1,
6133 build_zero_cst (TREE_TYPE (rhs1)),
6134 NULL_TREE, NULL_TREE);
6135 rhs1 = rhs2;
6137 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
6139 rhs2 = gimple_assign_rhs2 (stmt);
6140 if (TREE_CODE (rhs1) == INTEGER_CST)
6141 std::swap (rhs1, rhs2);
6142 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
6143 NULL_TREE, NULL_TREE);
6144 if (rhs_code == MAX_EXPR)
6145 std::swap (rhs1, rhs2);
6147 else
6149 g = gimple_build_cond (NE_EXPR, rhs1,
6150 build_zero_cst (TREE_TYPE (rhs1)),
6151 NULL_TREE, NULL_TREE);
6152 rhs1 = gimple_assign_rhs2 (stmt);
6153 rhs2 = gimple_assign_rhs3 (stmt);
6155 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6156 gimple_set_location (g, loc);
6157 edge e1 = split_block (gsi_bb (gsi), g);
6158 edge e2 = split_block (e1->dest, (gimple *) NULL);
6159 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
6160 e3->probability = profile_probability::even ();
6161 e1->flags = EDGE_TRUE_VALUE;
6162 e1->probability = e3->probability.invert ();
6163 if (dom_info_available_p (CDI_DOMINATORS))
6164 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
6165 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
6167 gsi = gsi_after_labels (e1->dest);
6168 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6169 NEGATE_EXPR, rhs1);
6170 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6171 gimple_set_location (g, loc);
6172 rhs2 = gimple_assign_lhs (g);
6173 std::swap (rhs1, rhs2);
6175 gsi = gsi_for_stmt (stmt);
6176 gsi_remove (&gsi, true);
6177 gphi *phi = create_phi_node (lhs, e2->dest);
6178 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
6179 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
6180 break;
6183 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6184 into memory. Such functions could have no large/huge SSA_NAMEs. */
6185 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6187 gimple *g = SSA_NAME_DEF_STMT (s);
6188 if (is_gimple_assign (g) && gimple_store_p (g))
6190 tree t = gimple_assign_rhs1 (g);
6191 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6192 && (bitint_precision_kind (TREE_TYPE (t))
6193 >= bitint_prec_large))
6194 has_large_huge = true;
6197 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6198 to floating point types need to be rewritten. */
6199 else if (SCALAR_FLOAT_TYPE_P (type))
6201 gimple *g = SSA_NAME_DEF_STMT (s);
6202 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6204 tree t = gimple_assign_rhs1 (g);
6205 if (TREE_CODE (t) == INTEGER_CST
6206 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6207 && (bitint_precision_kind (TREE_TYPE (t))
6208 >= bitint_prec_large))
6209 has_large_huge = true;
6213 for (i = first_large_huge; i < num_ssa_names; ++i)
6215 tree s = ssa_name (i);
6216 if (s == NULL)
6217 continue;
6218 tree type = TREE_TYPE (s);
6219 if (TREE_CODE (type) == COMPLEX_TYPE)
6220 type = TREE_TYPE (type);
6221 if (TREE_CODE (type) == BITINT_TYPE
6222 && bitint_precision_kind (type) >= bitint_prec_large)
6224 use_operand_p use_p;
6225 gimple *use_stmt;
6226 has_large_huge = true;
6227 if (optimize
6228 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
6229 continue;
6230 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6231 the same bb and could be handled in the same loop with the
6232 immediate use. */
6233 if (optimize
6234 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6235 && single_imm_use (s, &use_p, &use_stmt)
6236 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
6238 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
6240 if (mergeable_op (use_stmt))
6241 continue;
6242 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6243 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6244 continue;
6245 if (gimple_assign_cast_p (use_stmt))
6247 tree lhs = gimple_assign_lhs (use_stmt);
6248 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6249 /* Don't merge with VIEW_CONVERT_EXPRs to
6250 huge INTEGER_TYPEs used sometimes in memcpy
6251 expansion. */
6252 && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
6253 || (TYPE_PRECISION (TREE_TYPE (lhs))
6254 <= MAX_FIXED_MODE_SIZE)))
6255 continue;
6257 else if (gimple_store_p (use_stmt)
6258 && is_gimple_assign (use_stmt)
6259 && !gimple_has_volatile_ops (use_stmt)
6260 && !stmt_ends_bb_p (use_stmt))
6261 continue;
6263 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6265 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6266 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
6268 rhs1 = TREE_OPERAND (rhs1, 0);
6269 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6270 && !POINTER_TYPE_P (TREE_TYPE (rhs1))
6271 && gimple_store_p (use_stmt))
6272 continue;
6274 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6275 && ((is_gimple_assign (use_stmt)
6276 && (gimple_assign_rhs_code (use_stmt)
6277 != COMPLEX_EXPR))
6278 || gimple_code (use_stmt) == GIMPLE_COND)
6279 && (!gimple_store_p (use_stmt)
6280 || (is_gimple_assign (use_stmt)
6281 && !gimple_has_volatile_ops (use_stmt)
6282 && !stmt_ends_bb_p (use_stmt)))
6283 && (TREE_CODE (rhs1) != SSA_NAME
6284 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6286 if (is_gimple_assign (use_stmt))
6287 switch (gimple_assign_rhs_code (use_stmt))
6289 case TRUNC_DIV_EXPR:
6290 case TRUNC_MOD_EXPR:
6291 case FLOAT_EXPR:
6292 /* For division, modulo and casts to floating
6293 point, avoid representing unsigned operands
6294 using negative prec if they were sign-extended
6295 from narrower precision. */
6296 if (TYPE_UNSIGNED (TREE_TYPE (s))
6297 && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
6298 && (TYPE_PRECISION (TREE_TYPE (s))
6299 > TYPE_PRECISION (TREE_TYPE (rhs1))))
6300 goto force_name;
6301 /* FALLTHRU */
6302 case MULT_EXPR:
6303 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6304 || (bitint_precision_kind (TREE_TYPE (rhs1))
6305 < bitint_prec_large))
6306 continue;
6307 /* Uses which use handle_operand_addr can't
6308 deal with nested casts. */
6309 if (TREE_CODE (rhs1) == SSA_NAME
6310 && gimple_assign_cast_p
6311 (SSA_NAME_DEF_STMT (rhs1))
6312 && has_single_use (rhs1)
6313 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6314 == gimple_bb (SSA_NAME_DEF_STMT (s))))
6315 goto force_name;
6316 break;
6317 case VIEW_CONVERT_EXPR:
6319 tree lhs = gimple_assign_lhs (use_stmt);
6320 /* Don't merge with VIEW_CONVERT_EXPRs to
6321 non-integral types. */
6322 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6323 goto force_name;
6324 /* Don't merge with VIEW_CONVERT_EXPRs to
6325 huge INTEGER_TYPEs used sometimes in memcpy
6326 expansion. */
6327 if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
6328 && (TYPE_PRECISION (TREE_TYPE (lhs))
6329 > MAX_FIXED_MODE_SIZE))
6330 goto force_name;
6332 break;
6333 default:
6334 break;
6336 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6337 || (bitint_precision_kind (TREE_TYPE (rhs1))
6338 < bitint_prec_large))
6339 continue;
6340 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6341 >= TYPE_PRECISION (TREE_TYPE (s)))
6342 && mergeable_op (use_stmt))
6343 continue;
6344 /* Prevent merging a widening non-mergeable cast
6345 on result of some narrower mergeable op
6346 together with later mergeable operations. E.g.
6347 result of _BitInt(223) addition shouldn't be
6348 sign-extended to _BitInt(513) and have another
6349 _BitInt(513) added to it, as handle_plus_minus
6350 with its PHI node handling inside of handle_cast
6351 will not work correctly. An exception is if
6352 use_stmt is a store, this is handled directly
6353 in lower_mergeable_stmt. */
6354 if (TREE_CODE (rhs1) != SSA_NAME
6355 || !has_single_use (rhs1)
6356 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6357 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6358 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6359 || gimple_store_p (use_stmt))
6360 continue;
6361 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6362 < TYPE_PRECISION (TREE_TYPE (s)))
6363 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6365 /* Another exception is if the widening cast is
6366 from mergeable same precision cast from something
6367 not mergeable. */
6368 tree rhs2
6369 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6370 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6371 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6372 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6374 if (TREE_CODE (rhs2) != SSA_NAME
6375 || !has_single_use (rhs2)
6376 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6377 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6378 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6379 continue;
6384 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6385 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6387 case IMAGPART_EXPR:
6389 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6390 rhs1 = TREE_OPERAND (rhs1, 0);
6391 if (TREE_CODE (rhs1) == SSA_NAME)
6393 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6394 if (optimizable_arith_overflow (g))
6395 continue;
6398 /* FALLTHRU */
6399 case LSHIFT_EXPR:
6400 case RSHIFT_EXPR:
6401 case MULT_EXPR:
6402 case TRUNC_DIV_EXPR:
6403 case TRUNC_MOD_EXPR:
6404 case FIX_TRUNC_EXPR:
6405 case REALPART_EXPR:
6406 if (gimple_store_p (use_stmt)
6407 && is_gimple_assign (use_stmt)
6408 && !gimple_has_volatile_ops (use_stmt)
6409 && !stmt_ends_bb_p (use_stmt))
6411 tree lhs = gimple_assign_lhs (use_stmt);
6412 /* As multiply/division passes address of the lhs
6413 to library function and that assumes it can extend
6414 it to whole number of limbs, avoid merging those
6415 with bit-field stores. Don't allow it for
6416 shifts etc. either, so that the bit-field store
6417 handling doesn't have to be done everywhere. */
6418 if (TREE_CODE (lhs) == COMPONENT_REF
6419 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6420 break;
6421 continue;
6423 break;
6424 default:
6425 break;
6429 /* Also ignore uninitialized uses. */
6430 if (SSA_NAME_IS_DEFAULT_DEF (s)
6431 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6432 continue;
6434 force_name:
6435 if (!large_huge.m_names)
6436 large_huge.m_names = BITMAP_ALLOC (NULL);
6437 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6438 if (has_single_use (s))
6440 if (!large_huge.m_single_use_names)
6441 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6442 bitmap_set_bit (large_huge.m_single_use_names,
6443 SSA_NAME_VERSION (s));
6445 if (SSA_NAME_VAR (s)
6446 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6447 && SSA_NAME_IS_DEFAULT_DEF (s))
6448 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6449 has_large_huge_parm_result = true;
6450 if (optimize
6451 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6452 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6453 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6454 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6456 use_operand_p use_p;
6457 imm_use_iterator iter;
6458 bool optimizable_load = true;
6459 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6461 gimple *use_stmt = USE_STMT (use_p);
6462 if (is_gimple_debug (use_stmt))
6463 continue;
6464 if (gimple_code (use_stmt) == GIMPLE_PHI
6465 || is_gimple_call (use_stmt)
6466 || gimple_code (use_stmt) == GIMPLE_ASM)
6468 optimizable_load = false;
6469 break;
6473 ssa_op_iter oi;
6474 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6475 oi, SSA_OP_USE)
6477 tree s2 = USE_FROM_PTR (use_p);
6478 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6480 optimizable_load = false;
6481 break;
6485 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6487 if (!large_huge.m_loads)
6488 large_huge.m_loads = BITMAP_ALLOC (NULL);
6489 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6493 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6494 into memory. Such functions could have no large/huge SSA_NAMEs. */
6495 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6497 gimple *g = SSA_NAME_DEF_STMT (s);
6498 if (is_gimple_assign (g) && gimple_store_p (g))
6500 tree t = gimple_assign_rhs1 (g);
6501 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6502 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6503 has_large_huge = true;
6508 if (large_huge.m_names || has_large_huge)
6510 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6511 calculate_dominance_info (CDI_DOMINATORS);
6512 if (optimize)
6513 enable_ranger (cfun);
6514 if (large_huge.m_loads)
6516 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6517 entry->aux = NULL;
6518 bitint_dom_walker (large_huge.m_names,
6519 large_huge.m_loads).walk (entry);
6520 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6521 clear_aux_for_blocks ();
6522 BITMAP_FREE (large_huge.m_loads);
6524 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6525 large_huge.m_limb_size
6526 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6528 if (large_huge.m_names)
6530 large_huge.m_map
6531 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6532 coalesce_ssa_name (large_huge.m_map);
6533 partition_view_normal (large_huge.m_map);
6534 if (dump_file && (dump_flags & TDF_DETAILS))
6536 fprintf (dump_file, "After Coalescing:\n");
6537 dump_var_map (dump_file, large_huge.m_map);
6539 large_huge.m_vars
6540 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6541 bitmap_iterator bi;
6542 if (has_large_huge_parm_result)
6543 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6545 tree s = ssa_name (i);
6546 if (SSA_NAME_VAR (s)
6547 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6548 && SSA_NAME_IS_DEFAULT_DEF (s))
6549 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6551 int p = var_to_partition (large_huge.m_map, s);
6552 if (large_huge.m_vars[p] == NULL_TREE)
6554 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6555 mark_addressable (SSA_NAME_VAR (s));
6559 tree atype = NULL_TREE;
6560 if (dump_file && (dump_flags & TDF_DETAILS))
6561 fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
6562 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6564 tree s = ssa_name (i);
6565 int p = var_to_partition (large_huge.m_map, s);
6566 if (large_huge.m_vars[p] == NULL_TREE)
6568 if (atype == NULL_TREE
6569 || !tree_int_cst_equal (TYPE_SIZE (atype),
6570 TYPE_SIZE (TREE_TYPE (s))))
6572 unsigned HOST_WIDE_INT nelts
6573 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6574 atype = build_array_type_nelts (large_huge.m_limb_type,
6575 nelts);
6577 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6578 mark_addressable (large_huge.m_vars[p]);
6580 if (dump_file && (dump_flags & TDF_DETAILS))
6582 print_generic_expr (dump_file, s, TDF_SLIM);
6583 fprintf (dump_file, " -> ");
6584 print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
6585 fprintf (dump_file, "\n");
6590 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6592 gimple_stmt_iterator prev;
6593 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6594 gsi = prev)
6596 prev = gsi;
6597 gsi_prev (&prev);
6598 ssa_op_iter iter;
6599 gimple *stmt = gsi_stmt (gsi);
6600 if (is_gimple_debug (stmt))
6601 continue;
6602 bitint_prec_kind kind = bitint_prec_small;
6603 tree t;
6604 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6605 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6607 bitint_prec_kind this_kind
6608 = bitint_precision_kind (TREE_TYPE (t));
6609 kind = MAX (kind, this_kind);
6611 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6613 t = gimple_assign_rhs1 (stmt);
6614 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6616 bitint_prec_kind this_kind
6617 = bitint_precision_kind (TREE_TYPE (t));
6618 kind = MAX (kind, this_kind);
6621 if (is_gimple_assign (stmt)
6622 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6624 t = gimple_assign_rhs1 (stmt);
6625 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6626 && TREE_CODE (t) == INTEGER_CST)
6628 bitint_prec_kind this_kind
6629 = bitint_precision_kind (TREE_TYPE (t));
6630 kind = MAX (kind, this_kind);
6633 if (is_gimple_call (stmt))
6635 t = gimple_call_lhs (stmt);
6636 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6638 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6639 kind = MAX (kind, this_kind);
6640 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6642 this_kind
6643 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6644 kind = MAX (kind, this_kind);
6648 if (kind == bitint_prec_small)
6649 continue;
6650 switch (gimple_code (stmt))
6652 case GIMPLE_CALL:
6653 /* For now. We'll need to handle some internal functions and
6654 perhaps some builtins. */
6655 if (kind == bitint_prec_middle)
6656 continue;
6657 break;
6658 case GIMPLE_ASM:
6659 if (kind == bitint_prec_middle)
6660 continue;
6661 break;
6662 case GIMPLE_RETURN:
6663 continue;
6664 case GIMPLE_ASSIGN:
6665 if (gimple_clobber_p (stmt))
6666 continue;
6667 if (kind >= bitint_prec_large)
6668 break;
6669 if (gimple_assign_single_p (stmt))
6670 /* No need to lower copies, loads or stores. */
6671 continue;
6672 if (gimple_assign_cast_p (stmt))
6674 tree lhs = gimple_assign_lhs (stmt);
6675 tree rhs = gimple_assign_rhs1 (stmt);
6676 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6677 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6678 && (TYPE_PRECISION (TREE_TYPE (lhs))
6679 == TYPE_PRECISION (TREE_TYPE (rhs))))
6680 /* No need to lower casts to same precision. */
6681 continue;
6683 break;
6684 default:
6685 break;
6688 if (kind == bitint_prec_middle)
6690 tree type = NULL_TREE;
6691 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6692 with the same precision and back. */
6693 unsigned int nops = gimple_num_ops (stmt);
6694 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6695 i < nops; ++i)
6696 if (tree op = gimple_op (stmt, i))
6698 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6699 if (nop != op)
6700 gimple_set_op (stmt, i, nop);
6701 else if (COMPARISON_CLASS_P (op))
6703 TREE_OPERAND (op, 0)
6704 = maybe_cast_middle_bitint (&gsi,
6705 TREE_OPERAND (op, 0),
6706 type);
6707 TREE_OPERAND (op, 1)
6708 = maybe_cast_middle_bitint (&gsi,
6709 TREE_OPERAND (op, 1),
6710 type);
6712 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6714 CASE_LOW (op)
6715 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6716 type);
6717 CASE_HIGH (op)
6718 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6719 type);
6722 if (tree lhs = gimple_get_lhs (stmt))
6723 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6724 && (bitint_precision_kind (TREE_TYPE (lhs))
6725 == bitint_prec_middle))
6727 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6728 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6729 type = build_nonstandard_integer_type (prec, uns);
6730 tree lhs2 = make_ssa_name (type);
6731 gimple_set_lhs (stmt, lhs2);
6732 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6733 if (stmt_ends_bb_p (stmt))
6735 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6736 gsi_insert_on_edge_immediate (e, g);
6738 else
6739 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6741 update_stmt (stmt);
6742 continue;
6745 if (tree lhs = gimple_get_lhs (stmt))
6746 if (TREE_CODE (lhs) == SSA_NAME)
6748 tree type = TREE_TYPE (lhs);
6749 if (TREE_CODE (type) == COMPLEX_TYPE)
6750 type = TREE_TYPE (type);
6751 if (TREE_CODE (type) == BITINT_TYPE
6752 && bitint_precision_kind (type) >= bitint_prec_large
6753 && (large_huge.m_names == NULL
6754 || !bitmap_bit_p (large_huge.m_names,
6755 SSA_NAME_VERSION (lhs))))
6756 continue;
6759 large_huge.lower_stmt (stmt);
6762 tree atype = NULL_TREE;
6763 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6764 gsi_next (&gsi))
6766 gphi *phi = gsi.phi ();
6767 tree lhs = gimple_phi_result (phi);
6768 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6769 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6770 continue;
6771 int p1 = var_to_partition (large_huge.m_map, lhs);
6772 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6773 tree v1 = large_huge.m_vars[p1];
6774 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6776 tree arg = gimple_phi_arg_def (phi, i);
6777 edge e = gimple_phi_arg_edge (phi, i);
6778 gimple *g;
6779 switch (TREE_CODE (arg))
6781 case INTEGER_CST:
6782 if (integer_zerop (arg) && VAR_P (v1))
6784 tree zero = build_zero_cst (TREE_TYPE (v1));
6785 g = gimple_build_assign (v1, zero);
6786 gsi_insert_on_edge (e, g);
6787 edge_insertions = true;
6788 break;
6790 int ext;
6791 unsigned int min_prec, prec, rem;
6792 tree c;
6793 prec = TYPE_PRECISION (TREE_TYPE (arg));
6794 rem = prec % (2 * limb_prec);
6795 min_prec = bitint_min_cst_precision (arg, ext);
6796 if (min_prec > prec - rem - 2 * limb_prec
6797 && min_prec > (unsigned) limb_prec)
6798 /* Constant which has enough significant bits that it
6799 isn't worth trying to save .rodata space by extending
6800 from smaller number. */
6801 min_prec = prec;
6802 else
6803 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6804 if (min_prec == 0)
6805 c = NULL_TREE;
6806 else if (min_prec == prec)
6807 c = tree_output_constant_def (arg);
6808 else if (min_prec == (unsigned) limb_prec)
6809 c = fold_convert (large_huge.m_limb_type, arg);
6810 else
6812 tree ctype = build_bitint_type (min_prec, 1);
6813 c = tree_output_constant_def (fold_convert (ctype, arg));
6815 if (c)
6817 if (VAR_P (v1) && min_prec == prec)
6819 tree v2 = build1 (VIEW_CONVERT_EXPR,
6820 TREE_TYPE (v1), c);
6821 g = gimple_build_assign (v1, v2);
6822 gsi_insert_on_edge (e, g);
6823 edge_insertions = true;
6824 break;
6826 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6827 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6828 TREE_TYPE (c), v1),
6830 else
6832 unsigned HOST_WIDE_INT nelts
6833 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6834 / limb_prec;
6835 tree vtype
6836 = build_array_type_nelts (large_huge.m_limb_type,
6837 nelts);
6838 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6839 vtype, v1),
6840 build1 (VIEW_CONVERT_EXPR,
6841 vtype, c));
6843 gsi_insert_on_edge (e, g);
6845 if (ext == 0)
6847 unsigned HOST_WIDE_INT nelts
6848 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6849 - min_prec) / limb_prec;
6850 tree vtype
6851 = build_array_type_nelts (large_huge.m_limb_type,
6852 nelts);
6853 tree ptype = build_pointer_type (TREE_TYPE (v1));
6854 tree off;
6855 if (c)
6856 off = fold_convert (ptype,
6857 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6858 else
6859 off = build_zero_cst (ptype);
6860 tree vd = build2 (MEM_REF, vtype,
6861 build_fold_addr_expr (v1), off);
6862 g = gimple_build_assign (vd, build_zero_cst (vtype));
6864 else
6866 tree vd = v1;
6867 if (c)
6869 tree ptype = build_pointer_type (TREE_TYPE (v1));
6870 tree off
6871 = fold_convert (ptype,
6872 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6873 vd = build2 (MEM_REF, large_huge.m_limb_type,
6874 build_fold_addr_expr (v1), off);
6876 vd = build_fold_addr_expr (vd);
6877 unsigned HOST_WIDE_INT nbytes
6878 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6879 if (c)
6880 nbytes
6881 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6882 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6883 g = gimple_build_call (fn, 3, vd,
6884 integer_minus_one_node,
6885 build_int_cst (sizetype,
6886 nbytes));
6888 gsi_insert_on_edge (e, g);
6889 edge_insertions = true;
6890 break;
6891 default:
6892 gcc_unreachable ();
6893 case SSA_NAME:
6894 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6896 if (large_huge.m_names == NULL
6897 || !bitmap_bit_p (large_huge.m_names,
6898 SSA_NAME_VERSION (arg)))
6899 continue;
6901 int p2 = var_to_partition (large_huge.m_map, arg);
6902 if (p1 == p2)
6903 continue;
6904 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6905 tree v2 = large_huge.m_vars[p2];
6906 if (VAR_P (v1) && VAR_P (v2))
6907 g = gimple_build_assign (v1, v2);
6908 else if (VAR_P (v1))
6909 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6910 TREE_TYPE (v1), v2));
6911 else if (VAR_P (v2))
6912 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6913 TREE_TYPE (v2), v1), v2);
6914 else
6916 if (atype == NULL_TREE
6917 || !tree_int_cst_equal (TYPE_SIZE (atype),
6918 TYPE_SIZE (TREE_TYPE (lhs))))
6920 unsigned HOST_WIDE_INT nelts
6921 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6922 / limb_prec;
6923 atype
6924 = build_array_type_nelts (large_huge.m_limb_type,
6925 nelts);
6927 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6928 atype, v1),
6929 build1 (VIEW_CONVERT_EXPR,
6930 atype, v2));
6932 gsi_insert_on_edge (e, g);
6933 edge_insertions = true;
6934 break;
6940 if (large_huge.m_names || has_large_huge)
6942 gimple *nop = NULL;
6943 for (i = 0; i < num_ssa_names; ++i)
6945 tree s = ssa_name (i);
6946 if (s == NULL_TREE)
6947 continue;
6948 tree type = TREE_TYPE (s);
6949 if (TREE_CODE (type) == COMPLEX_TYPE)
6950 type = TREE_TYPE (type);
6951 if (TREE_CODE (type) == BITINT_TYPE
6952 && bitint_precision_kind (type) >= bitint_prec_large)
6954 if (large_huge.m_preserved
6955 && bitmap_bit_p (large_huge.m_preserved,
6956 SSA_NAME_VERSION (s)))
6957 continue;
6958 gimple *g = SSA_NAME_DEF_STMT (s);
6959 if (gimple_code (g) == GIMPLE_NOP)
6961 if (SSA_NAME_VAR (s))
6962 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6963 release_ssa_name (s);
6964 continue;
6966 if (gimple_bb (g) == NULL)
6968 release_ssa_name (s);
6969 continue;
6971 if (gimple_code (g) != GIMPLE_ASM)
6973 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6974 bool save_vta = flag_var_tracking_assignments;
6975 flag_var_tracking_assignments = false;
6976 gsi_remove (&gsi, true);
6977 flag_var_tracking_assignments = save_vta;
6979 if (nop == NULL)
6980 nop = gimple_build_nop ();
6981 SSA_NAME_DEF_STMT (s) = nop;
6982 release_ssa_name (s);
6985 if (optimize)
6986 disable_ranger (cfun);
6989 if (edge_insertions)
6990 gsi_commit_edge_inserts ();
6992 return ret;
6995 namespace {
6997 const pass_data pass_data_lower_bitint =
6999 GIMPLE_PASS, /* type */
7000 "bitintlower", /* name */
7001 OPTGROUP_NONE, /* optinfo_flags */
7002 TV_NONE, /* tv_id */
7003 PROP_ssa, /* properties_required */
7004 PROP_gimple_lbitint, /* properties_provided */
7005 0, /* properties_destroyed */
7006 0, /* todo_flags_start */
7007 0, /* todo_flags_finish */
7010 class pass_lower_bitint : public gimple_opt_pass
7012 public:
7013 pass_lower_bitint (gcc::context *ctxt)
7014 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
7017 /* opt_pass methods: */
7018 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
7019 unsigned int execute (function *) final override
7021 return gimple_lower_bitint ();
7024 }; // class pass_lower_bitint
7026 } // anon namespace
7028 gimple_opt_pass *
7029 make_pass_lower_bitint (gcc::context *ctxt)
7031 return new pass_lower_bitint (ctxt);
7035 namespace {
7037 const pass_data pass_data_lower_bitint_O0 =
7039 GIMPLE_PASS, /* type */
7040 "bitintlower0", /* name */
7041 OPTGROUP_NONE, /* optinfo_flags */
7042 TV_NONE, /* tv_id */
7043 PROP_cfg, /* properties_required */
7044 PROP_gimple_lbitint, /* properties_provided */
7045 0, /* properties_destroyed */
7046 0, /* todo_flags_start */
7047 0, /* todo_flags_finish */
7050 class pass_lower_bitint_O0 : public gimple_opt_pass
7052 public:
7053 pass_lower_bitint_O0 (gcc::context *ctxt)
7054 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
7057 /* opt_pass methods: */
7058 bool gate (function *fun) final override
7060 /* With errors, normal optimization passes are not run. If we don't
7061 lower bitint operations at all, rtl expansion will abort. */
7062 return !(fun->curr_properties & PROP_gimple_lbitint);
7065 unsigned int execute (function *) final override
7067 return gimple_lower_bitint ();
7070 }; // class pass_lower_bitint_O0
7072 } // anon namespace
7074 gimple_opt_pass *
7075 make_pass_lower_bitint_O0 (gcc::context *ctxt)
7077 return new pass_lower_bitint_O0 (ctxt);