lower-bitint: Remove single label _BitInt switches [PR113737]
[official-gcc.git] / gcc / gimple-lower-bitint.cc
/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetic can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
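
/* Illustration only, kept out of the build: a stand-alone sketch of how
   the categories come out on an x86_64-like target (64-bit limb, 128-bit
   widest fixed mode), matching the comment above.  The names and
   constants below are hypothetical and not part of this pass.  */
#if 0
#include <stdio.h>

static const char *
classify_sketch (int prec)
{
  const int limb = 64, max_fixed = 128;	/* DImode limb, TImode widest.  */
  if (prec <= limb)
    return "small";	/* Single limb.  */
  if (prec <= max_fixed)
    return "middle";	/* Casts to a TImode-backed INTEGER_TYPE.  */
  if (prec < 4 * limb)
    return "large";	/* Straight line code over the limbs.  */
  return "huge";	/* Loop over the limbs.  */
}

int
main (void)
{
  /* Prints: small middle large huge.  */
  printf ("%s %s %s %s\n", classify_sketch (64), classify_sketch (128),
	  classify_sketch (255), classify_sketch (256));
  return 0;
}
#endif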
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  ext = -1;
	  return min_prec2;
	}
    }
  return min_prec;
}
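
/* Illustration only, kept out of the build: the same idea for a single
   64-bit "limb", with hypothetical names.  The returned precision skips
   the bits that EXT (0 or -1) can reproduce, e.g. for -2 it is 1 with
   EXT == -1.  */
#if 0
static unsigned
min_prec_sketch (long long v, int *ext)
{
  *ext = v < 0 ? -1 : 0;
  unsigned long long u = v < 0 ? ~(unsigned long long) v
			       : (unsigned long long) v;
  return u ? 64 - __builtin_clzll (u) : 0;	/* Significant bits only.  */
}
#endif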
namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
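
/* Illustration only, kept out of the build: in the (hypothetical) C23
   function below every operation is mergeable per the predicate above,
   so for a huge _BitInt the whole expression lowers into one loop over
   the limbs instead of one loop per statement.  */
#if 0
_BitInt(512)
merged (_BitInt(512) a, _BitInt(512) b, _BitInt(512) c)
{
  return (a + b) & ~c;	/* PLUS_EXPR, BIT_NOT_EXPR and BIT_AND_EXPR merge.  */
}
#endif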
/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	  realpart = g;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	  cast = use_stmt;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
	 if both appear, the lowering wants to emit all the code
	 at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
	    return 0;
	  if (gsi_stmt (gsi) == stmt)
	    return 2;
	  /* If realpart is too far from stmt, punt as well.
	     Usually it will appear right after it.  */
	  if (++cnt == 32)
	    return 0;
	}
      while (1);
    }
  return 1;
}
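
/* Illustration only, kept out of the build: the common (hypothetical)
   source pattern the predicate above recognizes.  The IMAGPART_EXPR of
   the .ADD_OVERFLOW result is only cast to int and the REALPART_EXPR is
   stored, so neither needs a stack-backed temporary.  */
#if 0
_BitInt(256) res;

int
overflows (_BitInt(256) a, _BitInt(256) b)
{
  return __builtin_add_overflow (a, b, &res);
}
#endif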
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
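
/* Illustration only, kept out of the build: large/huge _BitInt
   comparisons are lowered to a walk from the most significant limb
   downwards, as in this hypothetical sketch.  */
#if 0
static int
cmp_limbs_sketch (const unsigned long long *a, const unsigned long long *b,
		  unsigned nlimbs)
{
  for (unsigned i = nlimbs; i-- > 0; )
    if (a[i] != b[i])
      return a[i] < b[i] ? -1 : 1;	/* First difference decides.  */
  return 0;
}
#endif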
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true, for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
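
/* Illustration only, kept out of the build: a C model of what the
   accesses built above amount to for a memory-backed huge _BitInt,
   assuming a little-endian target where limb IDX is simply the IDX-th
   64-bit word of the representation.  Hypothetical code, not part of
   the pass.  */
#if 0
static unsigned long long
limb_read_sketch (const unsigned _BitInt(256) *p, unsigned idx)
{
  unsigned long long limbs[4];
  __builtin_memcpy (limbs, p, sizeof limbs);
  return limbs[idx];	/* idx in [0, 3].  */
}
#endif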
/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond,
   if (COND)
      /\
     /  \
    /    \
  new_bb1 new_bb2
    \    /
     \  /
      \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     |    /  \
     |   /    \
     |new_bb1 new_bb2
     |   |    /
     \   |   /
      \  |  /
       \ | /
        \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  g = gimple_build_cond (LT_EXPR, idx,
				 size_int (min_prec / limb_prec),
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      c = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
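
/* Illustration only, kept out of the build: the INTEGER_CST case above
   extracts limb IDX of a constant much like this hypothetical sketch
   does for a two-limb value.  */
#if 0
static unsigned long long
const_limb_sketch (unsigned __int128 c, unsigned idx)
{
  return (unsigned long long) (c >> (64 * idx));	/* idx is 0 or 1.  */
}
#endif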
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
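
/* Illustration only, kept out of the build: the per-limb addition the
   IFN_UADDC path above emits, as a portable sketch with an explicit
   carry chain (REALPART is the sum, IMAGPART the carry out).  */
#if 0
static unsigned long long
add_limb_sketch (unsigned long long a, unsigned long long b,
		 unsigned long long carry_in, unsigned long long *carry_out)
{
  unsigned long long s = a + b;
  unsigned long long c1 = s < a;	/* Carry from a + b.  */
  unsigned long long r = s + carry_in;
  *carry_out = c1 + (r < s);		/* Combined carry, 0 or 1.  */
  return r;
}
#endif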
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
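
/* Illustration only, kept out of the build: one limb of a left shift by
   0 < CNT < 64, processed from the least significant limb upwards; DATA
   carries the bits shifted out of the previous limb, mirroring
   data_in/data_out above.  */
#if 0
static unsigned long long
lshift_limb_sketch (unsigned long long limb, unsigned cnt,
		    unsigned long long *data)
{
  unsigned long long res = (limb << cnt) | *data;
  *data = limb >> (64 - cnt);	/* Bits that move into the next limb.  */
  return res;
}
#endif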
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision that is not the case.  */
	  || (!m_var_msb
	      && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
	      && (!m_upwards_2limb
		  || (m_upwards_2limb * limb_prec
		      < TYPE_PRECISION (lhs_type)))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && (m_upwards_2limb * limb_prec
		  <= ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type))))
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idx, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      if (!save_cast_conditional)
		{
		  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
		  insert_before (g);
		}
	      else
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = (e2 ? m_data[save_data_cnt + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else if (m_bb && m_data[save_data_cnt])
		t = m_data[save_data_cnt];
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1, rext);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2, rext);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
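
/* Illustration only, kept out of the build: when widening a signed value
   the code above fills the extra limbs with a sign mask computed once
   from the most significant source limb, as in this hypothetical
   sketch.  */
#if 0
static unsigned long long
sign_mask_sketch (unsigned long long most_significant_limb)
{
  /* Arithmetic shift by limb_prec - 1 yields 0 or ~0.  */
  return (unsigned long long) ((long long) most_significant_limb >> 63);
}
#endif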
1668 /* Helper function for handle_stmt method, handle a load from memory. */
1670 tree
1671 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1673 tree rhs1 = gimple_assign_rhs1 (stmt);
1674 tree rhs_type = TREE_TYPE (rhs1);
1675 bool eh = stmt_ends_bb_p (stmt);
1676 edge eh_edge = NULL;
1677 gimple *g;
1679 if (eh)
1681 edge_iterator ei;
1682 basic_block bb = gimple_bb (stmt);
1684 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1685 if (eh_edge->flags & EDGE_EH)
1686 break;
1689 if (TREE_CODE (rhs1) == COMPONENT_REF
1690 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1692 tree fld = TREE_OPERAND (rhs1, 1);
1693 /* For little-endian, we can allow as inputs bit-fields
1694 which start at a limb boundary. */
1695 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1696 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1697 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1698 goto normal_load;
1699 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1700 handle it normally for now. */
1701 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1702 goto normal_load;
1703 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1704 poly_int64 bitoffset;
1705 poly_uint64 field_offset, repr_offset;
1706 bool var_field_off = false;
1707 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1708 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1709 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1710 else
1712 bitoffset = 0;
1713 var_field_off = true;
1715 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1716 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1717 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1718 TREE_OPERAND (rhs1, 0), repr,
1719 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1720 HOST_WIDE_INT bo = bitoffset.to_constant ();
1721 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1722 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1723 if (m_first)
1725 if (m_upwards)
1727 gimple_stmt_iterator save_gsi = m_gsi;
1728 m_gsi = m_init_gsi;
1729 if (gsi_end_p (m_gsi))
1730 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1731 else
1732 gsi_next (&m_gsi);
1733 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1734 tree iv = make_ssa_name (m_limb_type);
1735 g = gimple_build_assign (iv, t);
1736 insert_before (g);
1737 if (eh)
1739 maybe_duplicate_eh_stmt (g, stmt);
1740 if (eh_edge)
1742 edge e = split_block (gsi_bb (m_gsi), g);
1743 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1744 = profile_probability::very_unlikely ();
1745 m_gsi = gsi_after_labels (e->dest);
1746 if (gsi_bb (save_gsi) == e->src)
1748 if (gsi_end_p (save_gsi))
1749 save_gsi = gsi_end_bb (e->dest);
1750 else
1751 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1753 if (m_preheader_bb == e->src)
1754 m_preheader_bb = e->dest;
1757 m_init_gsi = m_gsi;
1758 if (gsi_end_p (m_init_gsi))
1759 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1760 else
1761 gsi_prev (&m_init_gsi);
1762 m_gsi = save_gsi;
1763 tree out;
1764 prepare_data_in_out (iv, idx, &out);
1765 out = m_data[m_data_cnt];
1766 m_data.safe_push (out);
1768 else
1770 m_data.safe_push (NULL_TREE);
1771 m_data.safe_push (NULL_TREE);
1772 m_data.safe_push (NULL_TREE);
1776 tree nidx0 = NULL_TREE, nidx1;
1777 tree iv = m_data[m_data_cnt];
1778 if (m_cast_conditional && iv)
1780 gcc_assert (!m_bitfld_load);
1781 m_bitfld_load = m_data_cnt;
1783 if (tree_fits_uhwi_p (idx))
1785 unsigned prec = TYPE_PRECISION (rhs_type);
1786 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1787 gcc_assert (i * limb_prec < prec);
1788 nidx1 = size_int (i + bo_idx + 1);
1789 if ((i + 1) * limb_prec > prec)
1791 prec %= limb_prec;
1792 if (prec + bo_bit <= (unsigned) limb_prec)
1793 nidx1 = NULL_TREE;
1795 if (!iv)
1796 nidx0 = size_int (i + bo_idx);
1798 else
1800 if (!iv)
1802 if (bo_idx == 0)
1803 nidx0 = idx;
1804 else
1806 nidx0 = make_ssa_name (sizetype);
1807 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1808 size_int (bo_idx));
1809 insert_before (g);
1812 nidx1 = make_ssa_name (sizetype);
1813 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1814 size_int (bo_idx + 1));
1815 insert_before (g);
1818 tree iv2 = NULL_TREE;
1819 if (nidx0)
1821 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1822 iv = make_ssa_name (m_limb_type);
1823 g = gimple_build_assign (iv, t);
1824 insert_before (g);
1825 gcc_assert (!eh);
1827 if (nidx1)
1829 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1830 unsigned prec = TYPE_PRECISION (rhs_type);
1831 if (conditional)
1833 if ((prec % limb_prec) == 0
1834 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1835 conditional = false;
1837 edge edge_true = NULL, edge_false = NULL;
1838 if (conditional)
1840 g = gimple_build_cond (NE_EXPR, idx,
1841 size_int (prec / limb_prec),
1842 NULL_TREE, NULL_TREE);
1843 if_then (g, profile_probability::likely (),
1844 edge_true, edge_false);
1846 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1847 if (m_upwards_2limb
1848 && !m_first
1849 && !m_bitfld_load
1850 && !tree_fits_uhwi_p (idx))
1851 iv2 = m_data[m_data_cnt + 1];
1852 else
1853 iv2 = make_ssa_name (m_limb_type);
1854 g = gimple_build_assign (iv2, t);
1855 insert_before (g);
1856 if (eh)
1858 maybe_duplicate_eh_stmt (g, stmt);
1859 if (eh_edge)
1861 edge e = split_block (gsi_bb (m_gsi), g);
1862 m_gsi = gsi_after_labels (e->dest);
1863 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1864 = profile_probability::very_unlikely ();
1867 if (conditional)
1869 tree iv3 = make_ssa_name (m_limb_type);
1870 if (eh)
1871 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1872 gphi *phi = create_phi_node (iv3, edge_true->dest);
1873 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1874 add_phi_arg (phi, build_zero_cst (m_limb_type),
1875 edge_false, UNKNOWN_LOCATION);
1876 m_gsi = gsi_after_labels (edge_true->dest);
1879 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1880 iv, build_int_cst (unsigned_type_node, bo_bit));
1881 insert_before (g);
1882 iv = gimple_assign_lhs (g);
1883 if (iv2)
1885 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1886 iv2, build_int_cst (unsigned_type_node,
1887 limb_prec - bo_bit));
1888 insert_before (g);
1889 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1890 gimple_assign_lhs (g), iv);
1891 insert_before (g);
1892 iv = gimple_assign_lhs (g);
1893 if (m_data[m_data_cnt])
1894 m_data[m_data_cnt] = iv2;
1896 if (tree_fits_uhwi_p (idx))
1898 tree atype = limb_access_type (rhs_type, idx);
1899 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1900 iv = add_cast (atype, iv);
1902 m_data_cnt += 3;
1903 return iv;
1906 normal_load:
1907 /* Use write_p = true for loads with EH edges to make
1908 sure limb_access doesn't add a cast as a separate
1909 statement after it. */
1910 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1911 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1912 g = gimple_build_assign (ret, rhs1);
1913 insert_before (g);
1914 if (eh)
1916 maybe_duplicate_eh_stmt (g, stmt);
1917 if (eh_edge)
1919 edge e = split_block (gsi_bb (m_gsi), g);
1920 m_gsi = gsi_after_labels (e->dest);
1921 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1922 = profile_probability::very_unlikely ();
1924 if (tree_fits_uhwi_p (idx))
1926 tree atype = limb_access_type (rhs_type, idx);
1927 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1928 ret = add_cast (atype, ret);
1931 return ret;
1934 /* Return the limb at index IDX of a mergeable statement STMT. */
1936 tree
1937 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1939 tree lhs, rhs1, rhs2 = NULL_TREE;
1940 gimple *g;
1941 switch (gimple_code (stmt))
1943 case GIMPLE_ASSIGN:
1944 if (gimple_assign_load_p (stmt))
1945 return handle_load (stmt, idx);
1946 switch (gimple_assign_rhs_code (stmt))
1948 case BIT_AND_EXPR:
1949 case BIT_IOR_EXPR:
1950 case BIT_XOR_EXPR:
1951 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1952 /* FALLTHRU */
1953 case BIT_NOT_EXPR:
1954 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1955 lhs = make_ssa_name (TREE_TYPE (rhs1));
1956 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1957 rhs1, rhs2);
1958 insert_before (g);
1959 return lhs;
1960 case PLUS_EXPR:
1961 case MINUS_EXPR:
1962 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1963 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1964 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1965 rhs1, rhs2, idx);
1966 case NEGATE_EXPR:
1967 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1968 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1969 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1970 case LSHIFT_EXPR:
1971 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1972 idx),
1973 gimple_assign_rhs2 (stmt), idx);
1974 case SSA_NAME:
1975 case INTEGER_CST:
1976 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1977 CASE_CONVERT:
1978 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1979 gimple_assign_rhs1 (stmt), idx);
1980 case VIEW_CONVERT_EXPR:
1981 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1982 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
1983 idx);
1984 default:
1985 break;
1987 break;
1988 default:
1989 break;
1991 gcc_unreachable ();
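/* Illustrative sketch (editorial, not part of the original source):
   for a mergeable bitwise operation such as

     _BitInt(256) c = a & b;

   handle_stmt is invoked once per limb index IDX and, assuming
   64-bit limbs, the lowering amounts to

     c[0] = a[0] & b[0];
     c[1] = a[1] & b[1];
     c[2] = a[2] & b[2];
     c[3] = a[3] & b[3];

   emitted either as straight line code or inside a loop depending on
   the precision kind, because for bitwise operations each limb of the
   result depends only on the corresponding limbs of the operands.  */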
1994 /* Return the minimum precision of OP at STMT.
1995 A positive value is the minimum precision above which all bits
1996 are zero; a negative value means all bits above the negation of
1997 the value are copies of the sign bit. */
1999 static int
2000 range_to_prec (tree op, gimple *stmt)
2002 int_range_max r;
2003 wide_int w;
2004 tree type = TREE_TYPE (op);
2005 unsigned int prec = TYPE_PRECISION (type);
2007 if (!optimize
2008 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2009 || r.undefined_p ())
2011 if (TYPE_UNSIGNED (type))
2012 return prec;
2013 else
2014 return MIN ((int) -prec, -2);
2017 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2019 w = r.lower_bound ();
2020 if (wi::neg_p (w))
2022 int min_prec1 = wi::min_precision (w, SIGNED);
2023 w = r.upper_bound ();
2024 int min_prec2 = wi::min_precision (w, SIGNED);
2025 int min_prec = MAX (min_prec1, min_prec2);
2026 return MIN (-min_prec, -2);
2030 w = r.upper_bound ();
2031 int min_prec = wi::min_precision (w, UNSIGNED);
2032 return MAX (min_prec, 1);
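/* Illustrative examples (editorial, not part of the original source):
   if ranger proves OP is in [0, 1000], range_to_prec returns 10, since
   1000 fits in 10 bits and all bits above them are zero; for a range of
   [-5, 100] it returns -8, since both bounds fit in a signed 8-bit
   value, so bits above bit 7 are copies of the sign bit.  Without
   usable range information, a 32-bit unsigned operand yields 32 and a
   32-bit signed operand yields -32.  */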
2035 /* Return the address of the first limb of OP and write into *PREC
2036 its precision. If positive, the operand is zero-extended
2037 from that precision; if negative, the operand is sign-extended
2038 from -*PREC. If PREC_STORED is NULL, this is the toplevel call;
2039 otherwise *PREC_STORED is the prec from the innermost call without
2040 range optimizations. */
2042 tree
2043 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2044 int *prec_stored, int *prec)
2046 wide_int w;
2047 location_t loc_save = m_loc;
2048 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2049 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2050 && TREE_CODE (op) != INTEGER_CST)
2052 do_int:
2053 *prec = range_to_prec (op, stmt);
2054 bitint_prec_kind kind = bitint_prec_small;
2055 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2056 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2057 kind = bitint_precision_kind (TREE_TYPE (op));
2058 if (kind == bitint_prec_middle)
2060 tree type = NULL_TREE;
2061 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2063 tree op_type = TREE_TYPE (op);
2064 unsigned HOST_WIDE_INT nelts
2065 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2066 /* Add support for 3 or more limbs filled in from normal
2067 integral type if this assert fails. If no target chooses
2068 limb mode smaller than half of largest supported normal
2069 integral type, this will not be needed. */
2070 gcc_assert (nelts <= 2);
2071 if (prec_stored)
2072 *prec_stored = (TYPE_UNSIGNED (op_type)
2073 ? TYPE_PRECISION (op_type)
2074 : -TYPE_PRECISION (op_type));
2075 if (*prec <= limb_prec && *prec >= -limb_prec)
2077 nelts = 1;
2078 if (prec_stored)
2080 if (TYPE_UNSIGNED (op_type))
2082 if (*prec_stored > limb_prec)
2083 *prec_stored = limb_prec;
2085 else if (*prec_stored < -limb_prec)
2086 *prec_stored = -limb_prec;
2089 tree atype = build_array_type_nelts (m_limb_type, nelts);
2090 tree var = create_tmp_var (atype);
2091 tree t1 = op;
2092 if (!useless_type_conversion_p (m_limb_type, op_type))
2093 t1 = add_cast (m_limb_type, t1);
2094 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2095 NULL_TREE, NULL_TREE);
2096 gimple *g = gimple_build_assign (v, t1);
2097 insert_before (g);
2098 if (nelts > 1)
2100 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2101 g = gimple_build_assign (make_ssa_name (op_type),
2102 RSHIFT_EXPR, op, lp);
2103 insert_before (g);
2104 tree t2 = gimple_assign_lhs (g);
2105 t2 = add_cast (m_limb_type, t2);
2106 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2107 NULL_TREE, NULL_TREE);
2108 g = gimple_build_assign (v, t2);
2109 insert_before (g);
2111 tree ret = build_fold_addr_expr (var);
2112 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2114 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2115 g = gimple_build_assign (var, clobber);
2116 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2118 m_loc = loc_save;
2119 return ret;
2121 switch (TREE_CODE (op))
2123 case SSA_NAME:
2124 if (m_names == NULL
2125 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2127 gimple *g = SSA_NAME_DEF_STMT (op);
2128 tree ret;
2129 m_loc = gimple_location (g);
2130 if (gimple_assign_load_p (g))
2132 *prec = range_to_prec (op, NULL);
2133 if (prec_stored)
2134 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2135 ? TYPE_PRECISION (TREE_TYPE (op))
2136 : -TYPE_PRECISION (TREE_TYPE (op)));
2137 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2138 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2139 NULL_TREE, true, GSI_SAME_STMT);
2141 else if (gimple_code (g) == GIMPLE_NOP)
2143 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2144 if (prec_stored)
2145 *prec_stored = *prec;
2146 tree var = create_tmp_var (m_limb_type);
2147 TREE_ADDRESSABLE (var) = 1;
2148 ret = build_fold_addr_expr (var);
2149 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2151 tree clobber = build_clobber (m_limb_type,
2152 CLOBBER_STORAGE_END);
2153 g = gimple_build_assign (var, clobber);
2154 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2157 else
2159 gcc_assert (gimple_assign_cast_p (g));
2160 tree rhs1 = gimple_assign_rhs1 (g);
2161 bitint_prec_kind kind = bitint_prec_small;
2162 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2163 rhs1 = TREE_OPERAND (rhs1, 0);
2164 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2165 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2166 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2167 if (kind >= bitint_prec_large)
2169 tree lhs_type = TREE_TYPE (op);
2170 tree rhs_type = TREE_TYPE (rhs1);
2171 int prec_stored_val = 0;
2172 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2173 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2175 if (TYPE_UNSIGNED (lhs_type)
2176 && !TYPE_UNSIGNED (rhs_type))
2177 gcc_assert (*prec >= 0 || prec_stored == NULL);
2179 else
2181 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2183 else if (TYPE_UNSIGNED (lhs_type))
2185 gcc_assert (*prec > 0
2186 || prec_stored_val > 0
2187 || (-prec_stored_val
2188 >= TYPE_PRECISION (lhs_type)));
2189 *prec = TYPE_PRECISION (lhs_type);
2191 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2193 else
2194 *prec = -TYPE_PRECISION (lhs_type);
2197 else
2199 op = rhs1;
2200 stmt = g;
2201 goto do_int;
2204 m_loc = loc_save;
2205 return ret;
2207 else
2209 int p = var_to_partition (m_map, op);
2210 gcc_assert (m_vars[p] != NULL_TREE);
2211 *prec = range_to_prec (op, stmt);
2212 if (prec_stored)
2213 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2214 ? TYPE_PRECISION (TREE_TYPE (op))
2215 : -TYPE_PRECISION (TREE_TYPE (op)));
2216 return build_fold_addr_expr (m_vars[p]);
2218 case INTEGER_CST:
2219 unsigned int min_prec, mp;
2220 tree type;
2221 w = wi::to_wide (op);
2222 if (tree_int_cst_sgn (op) >= 0)
2224 min_prec = wi::min_precision (w, UNSIGNED);
2225 *prec = MAX (min_prec, 1);
2227 else
2229 min_prec = wi::min_precision (w, SIGNED);
2230 *prec = MIN ((int) -min_prec, -2);
2232 mp = CEIL (min_prec, limb_prec) * limb_prec;
2233 if (mp == 0)
2234 mp = 1;
2235 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2236 && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2237 || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2238 type = TREE_TYPE (op);
2239 else
2240 type = build_bitint_type (mp, 1);
2241 if (TREE_CODE (type) != BITINT_TYPE
2242 || bitint_precision_kind (type) == bitint_prec_small)
2244 if (TYPE_PRECISION (type) <= limb_prec)
2245 type = m_limb_type;
2246 else
2248 while (bitint_precision_kind (mp) == bitint_prec_small)
2249 mp += limb_prec;
2250 /* This case is for targets which e.g. have a 64-bit
2251 limb but categorize up to 128-bit _BitInts as
2252 small. We could use a type like m_limb_type[2] or
2253 similar instead to save space. */
2254 type = build_bitint_type (mp, 1);
2257 if (prec_stored)
2259 if (tree_int_cst_sgn (op) >= 0)
2260 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2261 else
2262 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2264 op = tree_output_constant_def (fold_convert (type, op));
2265 return build_fold_addr_expr (op);
2266 default:
2267 gcc_unreachable ();
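/* Illustrative sketch (editorial, not part of the original source):
   for the INTEGER_CST case above, handle_operand_addr on the constant 5
   with 64-bit limbs sets *prec to 3 (5 fits in 3 bits, with zeros
   above), materializes the constant via tree_output_constant_def as a
   single-limb read-only object and returns its address, so libcalls
   like .MULBITINT can read the value from memory.  */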
2271 /* Helper function: create a loop before the current location,
2272 starting with sizetype INIT value on the preheader edge. Return
2273 the PHI result and set *IDX_NEXT to the SSA_NAME it creates and
2274 uses on the latch edge. */
2276 tree
2277 bitint_large_huge::create_loop (tree init, tree *idx_next)
2279 if (!gsi_end_p (m_gsi))
2280 gsi_prev (&m_gsi);
2281 else
2282 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2283 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2284 edge e2 = split_block (e1->dest, (gimple *) NULL);
2285 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2286 e3->probability = profile_probability::very_unlikely ();
2287 e2->flags = EDGE_FALSE_VALUE;
2288 e2->probability = e3->probability.invert ();
2289 tree idx = make_ssa_name (sizetype);
2290 gphi *phi = create_phi_node (idx, e1->dest);
2291 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2292 *idx_next = make_ssa_name (sizetype);
2293 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2294 m_gsi = gsi_after_labels (e1->dest);
2295 m_bb = e1->dest;
2296 m_preheader_bb = e1->src;
2297 class loop *loop = alloc_loop ();
2298 loop->header = e1->dest;
2299 add_loop (loop, e1->src->loop_father);
2300 return idx;
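/* Illustrative sketch (editorial, not part of the original source) of
   the CFG shape create_loop produces:

     preheader (e1->src)
            |
            v
     header (e1->dest):  idx = PHI <INIT (preheader), *IDX_NEXT (latch)>
       |          \
       | e2 (false) \ e3 (true, back edge to header)
       v
     exit (e2->dest)

   m_gsi is left after the labels of the header, where the caller is
   expected to emit the loop body, the *IDX_NEXT increment and the
   exit condition.  */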
2303 /* Lower a large/huge _BitInt mergeable or similar statement STMT which can
2304 be lowered by iterating from the least significant limb up to the most
2305 significant limb. For large _BitInt it is emitted as straight line code
2306 before the current location, for huge _BitInt as a loop handling two limbs
2307 at once, followed by handling the remaining limbs in straight line code
2308 (at most one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2309 comparisons, in which case CMP_CODE should be the comparison code and
2310 CMP_OP1/CMP_OP2 the comparison operands. */
2312 tree
2313 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2314 tree cmp_op1, tree cmp_op2)
2316 bool eq_p = cmp_code != ERROR_MARK;
2317 tree type;
2318 if (eq_p)
2319 type = TREE_TYPE (cmp_op1);
2320 else
2321 type = TREE_TYPE (gimple_assign_lhs (stmt));
2322 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2323 bitint_prec_kind kind = bitint_precision_kind (type);
2324 gcc_assert (kind >= bitint_prec_large);
2325 gimple *g;
2326 tree lhs = gimple_get_lhs (stmt);
2327 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2328 if (lhs
2329 && TREE_CODE (lhs) == SSA_NAME
2330 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2331 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2333 int p = var_to_partition (m_map, lhs);
2334 gcc_assert (m_vars[p] != NULL_TREE);
2335 m_lhs = lhs = m_vars[p];
2337 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2338 bool sext = false;
2339 tree ext = NULL_TREE, store_operand = NULL_TREE;
2340 bool eh = false;
2341 basic_block eh_pad = NULL;
2342 tree nlhs = NULL_TREE;
2343 unsigned HOST_WIDE_INT bo_idx = 0;
2344 unsigned HOST_WIDE_INT bo_bit = 0;
2345 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2346 if (gimple_store_p (stmt))
2348 store_operand = gimple_assign_rhs1 (stmt);
2349 eh = stmt_ends_bb_p (stmt);
2350 if (eh)
2352 edge e;
2353 edge_iterator ei;
2354 basic_block bb = gimple_bb (stmt);
2356 FOR_EACH_EDGE (e, ei, bb->succs)
2357 if (e->flags & EDGE_EH)
2359 eh_pad = e->dest;
2360 break;
2363 if (TREE_CODE (lhs) == COMPONENT_REF
2364 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2366 tree fld = TREE_OPERAND (lhs, 1);
2367 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2368 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2369 poly_int64 bitoffset;
2370 poly_uint64 field_offset, repr_offset;
2371 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2372 nlhs = lhs;
2373 else
2375 bool var_field_off = false;
2376 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2377 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2378 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2379 else
2381 bitoffset = 0;
2382 var_field_off = true;
2384 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2385 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2386 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2387 TREE_OPERAND (lhs, 0), repr,
2388 var_field_off
2389 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2390 HOST_WIDE_INT bo = bitoffset.to_constant ();
2391 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2392 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2396 if ((store_operand
2397 && TREE_CODE (store_operand) == SSA_NAME
2398 && (m_names == NULL
2399 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2400 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2401 || gimple_assign_cast_p (stmt))
2403 rhs1 = gimple_assign_rhs1 (store_operand
2404 ? SSA_NAME_DEF_STMT (store_operand)
2405 : stmt);
2406 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2407 rhs1 = TREE_OPERAND (rhs1, 0);
2408 /* Optimize mergeable ops ending with widening cast to _BitInt
2409 (or followed by store). We can lower just the limbs of the
2410 cast operand and widen afterwards. */
2411 if (TREE_CODE (rhs1) == SSA_NAME
2412 && (m_names == NULL
2413 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2414 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2415 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2416 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2417 limb_prec) < CEIL (prec, limb_prec)
2418 || (kind == bitint_prec_huge
2419 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2421 store_operand = rhs1;
2422 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2423 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2424 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2425 sext = true;
2428 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2429 if (kind == bitint_prec_large)
2430 cnt = CEIL (prec, limb_prec);
2431 else
2433 rem = (prec % (2 * limb_prec));
2434 end = (prec - rem) / limb_prec;
2435 cnt = 2 + CEIL (rem, limb_prec);
2436 idx = idx_first = create_loop (size_zero_node, &idx_next);
2439 basic_block edge_bb = NULL;
2440 if (eq_p)
2442 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2443 gsi_prev (&gsi);
2444 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2445 edge_bb = e->src;
2446 if (kind == bitint_prec_large)
2447 m_gsi = gsi_end_bb (edge_bb);
2449 else
2450 m_after_stmt = stmt;
2451 if (kind != bitint_prec_large)
2452 m_upwards_2limb = end;
2453 m_upwards = true;
2455 bool separate_ext
2456 = (prec != (unsigned) TYPE_PRECISION (type)
2457 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2458 > CEIL (prec, limb_prec)));
2460 for (unsigned i = 0; i < cnt; i++)
2462 m_data_cnt = 0;
2463 if (kind == bitint_prec_large)
2464 idx = size_int (i);
2465 else if (i >= 2)
2466 idx = size_int (end + (i > 2));
2467 if (eq_p)
2469 rhs1 = handle_operand (cmp_op1, idx);
2470 tree rhs2 = handle_operand (cmp_op2, idx);
2471 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2472 insert_before (g);
2473 edge e1 = split_block (gsi_bb (m_gsi), g);
2474 e1->flags = EDGE_FALSE_VALUE;
2475 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2476 e1->probability = profile_probability::unlikely ();
2477 e2->probability = e1->probability.invert ();
2478 if (i == 0)
2479 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2480 m_gsi = gsi_after_labels (e1->dest);
2482 else
2484 if (store_operand)
2485 rhs1 = handle_operand (store_operand, idx);
2486 else
2487 rhs1 = handle_stmt (stmt, idx);
2488 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2489 rhs1 = add_cast (m_limb_type, rhs1);
2490 if (sext && i == cnt - 1)
2491 ext = rhs1;
2492 tree nidx = idx;
2493 if (bo_idx)
2495 if (tree_fits_uhwi_p (idx))
2496 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2497 else
2499 nidx = make_ssa_name (sizetype);
2500 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2501 size_int (bo_idx));
2502 insert_before (g);
2505 bool done = false;
2506 basic_block new_bb = NULL;
2507 /* Handle stores into bit-fields. */
2508 if (bo_bit)
2510 if (i == 0)
2512 edge e2 = NULL;
2513 if (kind != bitint_prec_large)
2515 prepare_data_in_out (build_zero_cst (m_limb_type),
2516 idx, &bf_next);
2517 bf_next = m_data.pop ();
2518 bf_cur = m_data.pop ();
2519 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2520 NULL_TREE, NULL_TREE);
2521 edge edge_true;
2522 if_then_else (g, profile_probability::unlikely (),
2523 edge_true, e2);
2524 new_bb = e2->dest;
2526 tree ftype
2527 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2528 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2529 bitsize_int (limb_prec - bo_bit),
2530 bitsize_int (bo_idx * limb_prec + bo_bit));
2531 tree t = add_cast (ftype, rhs1);
2532 g = gimple_build_assign (bfr, t);
2533 insert_before (g);
2534 if (eh)
2536 maybe_duplicate_eh_stmt (g, stmt);
2537 if (eh_pad)
2539 edge e = split_block (gsi_bb (m_gsi), g);
2540 m_gsi = gsi_after_labels (e->dest);
2541 make_edge (e->src, eh_pad, EDGE_EH)->probability
2542 = profile_probability::very_unlikely ();
2545 if (kind == bitint_prec_large)
2547 bf_cur = rhs1;
2548 done = true;
2550 else if (e2)
2551 m_gsi = gsi_after_labels (e2->src);
2553 if (!done)
2555 tree t1 = make_ssa_name (m_limb_type);
2556 tree t2 = make_ssa_name (m_limb_type);
2557 tree t3 = make_ssa_name (m_limb_type);
2558 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2559 build_int_cst (unsigned_type_node,
2560 limb_prec - bo_bit));
2561 insert_before (g);
2562 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2563 build_int_cst (unsigned_type_node,
2564 bo_bit));
2565 insert_before (g);
2566 bf_cur = rhs1;
2567 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2568 insert_before (g);
2569 rhs1 = t3;
2570 if (bf_next && i == 1)
2572 g = gimple_build_assign (bf_next, bf_cur);
2573 insert_before (g);
2577 if (!done)
2579 /* Handle bit-field access to partial last limb if needed. */
2580 if (nlhs
2581 && i == cnt - 1
2582 && !separate_ext
2583 && tree_fits_uhwi_p (idx))
2585 unsigned int tprec = TYPE_PRECISION (type);
2586 unsigned int rprec = tprec % limb_prec;
2587 if (rprec + bo_bit < (unsigned) limb_prec)
2589 tree ftype
2590 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2591 tree bfr = build3 (BIT_FIELD_REF, ftype,
2592 unshare_expr (nlhs),
2593 bitsize_int (rprec + bo_bit),
2594 bitsize_int ((bo_idx
2595 + tprec / limb_prec)
2596 * limb_prec));
2597 tree t = add_cast (ftype, rhs1);
2598 g = gimple_build_assign (bfr, t);
2599 done = true;
2600 bf_cur = NULL_TREE;
2602 else if (rprec + bo_bit == (unsigned) limb_prec)
2603 bf_cur = NULL_TREE;
2605 /* Otherwise, stores to any other lhs. */
2606 if (!done)
2608 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2609 nidx, true);
2610 g = gimple_build_assign (l, rhs1);
2612 insert_before (g);
2613 if (eh)
2615 maybe_duplicate_eh_stmt (g, stmt);
2616 if (eh_pad)
2618 edge e = split_block (gsi_bb (m_gsi), g);
2619 m_gsi = gsi_after_labels (e->dest);
2620 make_edge (e->src, eh_pad, EDGE_EH)->probability
2621 = profile_probability::very_unlikely ();
2624 if (new_bb)
2625 m_gsi = gsi_after_labels (new_bb);
2628 m_first = false;
2629 if (kind == bitint_prec_huge && i <= 1)
2631 if (i == 0)
2633 idx = make_ssa_name (sizetype);
2634 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2635 size_one_node);
2636 insert_before (g);
2638 else
2640 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2641 size_int (2));
2642 insert_before (g);
2643 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2644 NULL_TREE, NULL_TREE);
2645 insert_before (g);
2646 if (eq_p)
2647 m_gsi = gsi_after_labels (edge_bb);
2648 else
2649 m_gsi = gsi_for_stmt (stmt);
2650 m_bb = NULL;
2655 if (separate_ext)
2657 if (sext)
2659 ext = add_cast (signed_type_for (m_limb_type), ext);
2660 tree lpm1 = build_int_cst (unsigned_type_node,
2661 limb_prec - 1);
2662 tree n = make_ssa_name (TREE_TYPE (ext));
2663 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2664 insert_before (g);
2665 ext = add_cast (m_limb_type, n);
2667 else
2668 ext = build_zero_cst (m_limb_type);
2669 kind = bitint_precision_kind (type);
2670 unsigned start = CEIL (prec, limb_prec);
2671 prec = TYPE_PRECISION (type);
2672 idx = idx_first = idx_next = NULL_TREE;
2673 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2674 kind = bitint_prec_large;
2675 if (kind == bitint_prec_large)
2676 cnt = CEIL (prec, limb_prec) - start;
2677 else
2679 rem = prec % limb_prec;
2680 end = (prec - rem) / limb_prec;
2681 cnt = (bo_bit != 0) + 1 + (rem != 0);
2683 for (unsigned i = 0; i < cnt; i++)
2685 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2686 idx = size_int (start + i);
2687 else if (i == cnt - 1 && (rem != 0))
2688 idx = size_int (end);
2689 else if (i == (bo_bit != 0))
2690 idx = create_loop (size_int (start + i), &idx_next);
2691 rhs1 = ext;
2692 if (bf_cur != NULL_TREE && bf_cur != ext)
2694 tree t1 = make_ssa_name (m_limb_type);
2695 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2696 build_int_cst (unsigned_type_node,
2697 limb_prec - bo_bit));
2698 insert_before (g);
2699 if (integer_zerop (ext))
2700 rhs1 = t1;
2701 else
2703 tree t2 = make_ssa_name (m_limb_type);
2704 rhs1 = make_ssa_name (m_limb_type);
2705 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2706 build_int_cst (unsigned_type_node,
2707 bo_bit));
2708 insert_before (g);
2709 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2710 insert_before (g);
2712 bf_cur = ext;
2714 tree nidx = idx;
2715 if (bo_idx)
2717 if (tree_fits_uhwi_p (idx))
2718 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2719 else
2721 nidx = make_ssa_name (sizetype);
2722 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2723 size_int (bo_idx));
2724 insert_before (g);
2727 bool done = false;
2728 /* Handle bit-field access to partial last limb if needed. */
2729 if (nlhs && i == cnt - 1)
2731 unsigned int tprec = TYPE_PRECISION (type);
2732 unsigned int rprec = tprec % limb_prec;
2733 if (rprec + bo_bit < (unsigned) limb_prec)
2735 tree ftype
2736 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2737 tree bfr = build3 (BIT_FIELD_REF, ftype,
2738 unshare_expr (nlhs),
2739 bitsize_int (rprec + bo_bit),
2740 bitsize_int ((bo_idx + tprec / limb_prec)
2741 * limb_prec));
2742 tree t = add_cast (ftype, rhs1);
2743 g = gimple_build_assign (bfr, t);
2744 done = true;
2745 bf_cur = NULL_TREE;
2747 else if (rprec + bo_bit == (unsigned) limb_prec)
2748 bf_cur = NULL_TREE;
2750 /* Otherwise, stores to any other lhs. */
2751 if (!done)
2753 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2754 g = gimple_build_assign (l, rhs1);
2756 insert_before (g);
2757 if (eh)
2759 maybe_duplicate_eh_stmt (g, stmt);
2760 if (eh_pad)
2762 edge e = split_block (gsi_bb (m_gsi), g);
2763 m_gsi = gsi_after_labels (e->dest);
2764 make_edge (e->src, eh_pad, EDGE_EH)->probability
2765 = profile_probability::very_unlikely ();
2768 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2770 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2771 size_one_node);
2772 insert_before (g);
2773 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2774 NULL_TREE, NULL_TREE);
2775 insert_before (g);
2776 m_gsi = gsi_for_stmt (stmt);
2777 m_bb = NULL;
2781 if (bf_cur != NULL_TREE)
2783 unsigned int tprec = TYPE_PRECISION (type);
2784 unsigned int rprec = tprec % limb_prec;
2785 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2786 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2787 bitsize_int (rprec + bo_bit),
2788 bitsize_int ((bo_idx + tprec / limb_prec)
2789 * limb_prec));
2790 rhs1 = bf_cur;
2791 if (bf_cur != ext)
2793 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2794 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2795 build_int_cst (unsigned_type_node,
2796 limb_prec - bo_bit));
2797 insert_before (g);
2799 rhs1 = add_cast (ftype, rhs1);
2800 g = gimple_build_assign (bfr, rhs1);
2801 insert_before (g);
2802 if (eh)
2804 maybe_duplicate_eh_stmt (g, stmt);
2805 if (eh_pad)
2807 edge e = split_block (gsi_bb (m_gsi), g);
2808 m_gsi = gsi_after_labels (e->dest);
2809 make_edge (e->src, eh_pad, EDGE_EH)->probability
2810 = profile_probability::very_unlikely ();
2815 if (gimple_store_p (stmt))
2817 unlink_stmt_vdef (stmt);
2818 release_ssa_name (gimple_vdef (stmt));
2819 gsi_remove (&m_gsi, true);
2821 if (eq_p)
2823 lhs = make_ssa_name (boolean_type_node);
2824 basic_block bb = gimple_bb (stmt);
2825 gphi *phi = create_phi_node (lhs, bb);
2826 edge e = find_edge (gsi_bb (m_gsi), bb);
2827 unsigned int n = EDGE_COUNT (bb->preds);
2828 for (unsigned int i = 0; i < n; i++)
2830 edge e2 = EDGE_PRED (bb, i);
2831 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2832 e2, UNKNOWN_LOCATION);
2834 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2835 return lhs;
2837 else
2838 return NULL_TREE;
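/* Illustrative example (editorial, not part of the original source):
   lowering a mergeable statement on _BitInt(255) with 64-bit limbs and
   bitint_prec_huge computes rem = 255 % 128 = 127,
   end = (255 - 127) / 64 = 2 and cnt = 2 + CEIL (127, 64) = 4, i.e.
   a loop handling two limbs per iteration covers limbs 0 and 1
   (a single iteration here), followed by straight line code for
   limb 2 and the partial most significant limb 3.  */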
2841 /* Handle a large/huge _BitInt comparison statement STMT other than
2842 EQ_EXPR/NE_EXPR. The meaning of CMP_CODE, CMP_OP1 and CMP_OP2 is as in
2843 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2844 lowered by iterating from the most significant limb downwards to
2845 the least significant one, for large _BitInt in straight line code,
2846 otherwise with the most significant limb handled in
2847 straight line code followed by a loop handling one limb at a time.
2848 Comparisons of unsigned huge _BitInt with precisions which are
2849 multiples of the limb precision can use just the loop and don't need to
2850 handle the most significant limb before the loop. The loop or straight
2851 line code jumps to the final basic block if a particular pair of limbs
2852 is not equal. */
2854 tree
2855 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2856 tree cmp_op1, tree cmp_op2)
2858 tree type = TREE_TYPE (cmp_op1);
2859 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2860 bitint_prec_kind kind = bitint_precision_kind (type);
2861 gcc_assert (kind >= bitint_prec_large);
2862 gimple *g;
2863 if (!TYPE_UNSIGNED (type)
2864 && integer_zerop (cmp_op2)
2865 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2867 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2868 tree idx = size_int (end);
2869 m_data_cnt = 0;
2870 tree rhs1 = handle_operand (cmp_op1, idx);
2871 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2873 tree stype = signed_type_for (TREE_TYPE (rhs1));
2874 rhs1 = add_cast (stype, rhs1);
2876 tree lhs = make_ssa_name (boolean_type_node);
2877 g = gimple_build_assign (lhs, cmp_code, rhs1,
2878 build_zero_cst (TREE_TYPE (rhs1)));
2879 insert_before (g);
2880 cmp_code = NE_EXPR;
2881 return lhs;
2884 unsigned cnt, rem = 0, end = 0;
2885 tree idx = NULL_TREE, idx_next = NULL_TREE;
2886 if (kind == bitint_prec_large)
2887 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2888 else
2890 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2891 if (rem == 0 && !TYPE_UNSIGNED (type))
2892 rem = limb_prec;
2893 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2894 cnt = 1 + (rem != 0);
2897 basic_block edge_bb = NULL;
2898 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2899 gsi_prev (&gsi);
2900 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2901 edge_bb = e->src;
2902 m_gsi = gsi_end_bb (edge_bb);
2904 edge *edges = XALLOCAVEC (edge, cnt * 2);
2905 for (unsigned i = 0; i < cnt; i++)
2907 m_data_cnt = 0;
2908 if (kind == bitint_prec_large)
2909 idx = size_int (cnt - i - 1);
2910 else if (i == cnt - 1)
2911 idx = create_loop (size_int (end - 1), &idx_next);
2912 else
2913 idx = size_int (end);
2914 tree rhs1 = handle_operand (cmp_op1, idx);
2915 tree rhs2 = handle_operand (cmp_op2, idx);
2916 if (i == 0
2917 && !TYPE_UNSIGNED (type)
2918 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2920 tree stype = signed_type_for (TREE_TYPE (rhs1));
2921 rhs1 = add_cast (stype, rhs1);
2922 rhs2 = add_cast (stype, rhs2);
2924 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2925 insert_before (g);
2926 edge e1 = split_block (gsi_bb (m_gsi), g);
2927 e1->flags = EDGE_FALSE_VALUE;
2928 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2929 e1->probability = profile_probability::likely ();
2930 e2->probability = e1->probability.invert ();
2931 if (i == 0)
2932 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2933 m_gsi = gsi_after_labels (e1->dest);
2934 edges[2 * i] = e2;
2935 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2936 insert_before (g);
2937 e1 = split_block (gsi_bb (m_gsi), g);
2938 e1->flags = EDGE_FALSE_VALUE;
2939 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2940 e1->probability = profile_probability::unlikely ();
2941 e2->probability = e1->probability.invert ();
2942 m_gsi = gsi_after_labels (e1->dest);
2943 edges[2 * i + 1] = e2;
2944 m_first = false;
2945 if (kind == bitint_prec_huge && i == cnt - 1)
2947 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2948 insert_before (g);
2949 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2950 NULL_TREE, NULL_TREE);
2951 insert_before (g);
2952 edge true_edge, false_edge;
2953 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2954 &true_edge, &false_edge);
2955 m_gsi = gsi_after_labels (false_edge->dest);
2956 m_bb = NULL;
2960 tree lhs = make_ssa_name (boolean_type_node);
2961 basic_block bb = gimple_bb (stmt);
2962 gphi *phi = create_phi_node (lhs, bb);
2963 for (unsigned int i = 0; i < cnt * 2; i++)
2965 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2966 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2967 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2969 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2970 ? boolean_true_node : boolean_false_node,
2971 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2972 cmp_code = NE_EXPR;
2973 return lhs;
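/* Illustrative sketch (editorial, not part of the original source):
   for signed a < b on _BitInt(192) with 64-bit limbs, the most
   significant limbs are compared first as signed values, branching to
   the final block on > or <; the remaining limbs are then compared as
   unsigned in a loop from limb 1 down to limb 0.  The PHI in the final
   block yields true on the < edges, false on the > edges, and false on
   fallthrough when all limbs compared equal.  */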
2976 /* Lower large/huge _BitInt left and right shifts except for left
2977 shifts by a constant smaller than limb_prec. */
2979 void
2980 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2982 tree rhs1 = gimple_assign_rhs1 (stmt);
2983 tree lhs = gimple_assign_lhs (stmt);
2984 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2985 tree type = TREE_TYPE (rhs1);
2986 gimple *final_stmt = gsi_stmt (m_gsi);
2987 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2988 && bitint_precision_kind (type) >= bitint_prec_large);
2989 int prec = TYPE_PRECISION (type);
2990 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2991 gimple *g;
2992 if (obj == NULL_TREE)
2994 int part = var_to_partition (m_map, lhs);
2995 gcc_assert (m_vars[part] != NULL_TREE);
2996 obj = m_vars[part];
2998 /* Preparation code common for both left and right shifts.
2999 unsigned n1 = n % limb_prec;
3000 size_t n2 = n / limb_prec;
3001 size_t n3 = n1 != 0;
3002 unsigned n4 = (limb_prec - n1) % limb_prec;
3003 (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)). */
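/* Illustrative example (editorial, not part of the original source):
   with 64-bit limbs and n = 100, n1 = 100 % 64 = 36, n2 = 100 / 64 = 1,
   n3 = 1 (as n1 != 0) and n4 = (64 - 36) % 64 = 28, so each result limb
   combines one source limb shifted by 36 bits with an adjacent limb
   shifted the opposite way by 28 bits.  */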
3004 if (TREE_CODE (n) == INTEGER_CST)
3006 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3007 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3008 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3009 n3 = size_int (!integer_zerop (n1));
3010 n4 = int_const_binop (TRUNC_MOD_EXPR,
3011 int_const_binop (MINUS_EXPR, lp, n1), lp);
3013 else
3015 n1 = make_ssa_name (TREE_TYPE (n));
3016 n2 = make_ssa_name (sizetype);
3017 n3 = make_ssa_name (sizetype);
3018 n4 = make_ssa_name (TREE_TYPE (n));
3019 if (pow2p_hwi (limb_prec))
3021 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3022 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3023 insert_before (g);
3024 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3025 TREE_TYPE (n))
3026 ? n2 : make_ssa_name (TREE_TYPE (n)),
3027 RSHIFT_EXPR, n,
3028 build_int_cst (TREE_TYPE (n),
3029 exact_log2 (limb_prec)));
3030 insert_before (g);
3031 if (gimple_assign_lhs (g) != n2)
3033 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3034 insert_before (g);
3036 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3037 NEGATE_EXPR, n1);
3038 insert_before (g);
3039 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3040 lpm1);
3041 insert_before (g);
3043 else
3045 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3046 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3047 insert_before (g);
3048 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3049 TREE_TYPE (n))
3050 ? n2 : make_ssa_name (TREE_TYPE (n)),
3051 TRUNC_DIV_EXPR, n, lp);
3052 insert_before (g);
3053 if (gimple_assign_lhs (g) != n2)
3055 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3056 insert_before (g);
3058 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3059 MINUS_EXPR, lp, n1);
3060 insert_before (g);
3061 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3062 lp);
3063 insert_before (g);
3065 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3066 build_zero_cst (TREE_TYPE (n)));
3067 insert_before (g);
3068 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3069 insert_before (g);
3071 tree p = build_int_cst (sizetype,
3072 prec / limb_prec - (prec % limb_prec == 0));
3073 if (rhs_code == RSHIFT_EXPR)
3075 /* Lower
3076 dst = src >> n;
3078 unsigned n1 = n % limb_prec;
3079 size_t n2 = n / limb_prec;
3080 size_t n3 = n1 != 0;
3081 unsigned n4 = (limb_prec - n1) % limb_prec;
3082 size_t idx;
3083 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3084 int signed_p = (typeof (src) -1) < 0;
3085 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3086 ? p : p - n3); ++idx)
3087 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3088 limb_type ext;
3089 if (prec % limb_prec == 0)
3090 ext = src[p];
3091 else if (signed_p)
3092 ext = ((signed limb_type) (src[p] << (limb_prec
3093 - (prec % limb_prec))))
3094 >> (limb_prec - (prec % limb_prec));
3095 else
3096 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3097 if (!signed_p && (prec % limb_prec == 0))
3099 else if (idx < p)
3101 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3102 ++idx;
3104 idx -= n2;
3105 if (signed_p)
3107 dst[idx] = ((signed limb_type) ext) >> n1;
3108 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3110 else
3112 dst[idx] = ext >> n1;
3113 ext = 0;
3115 for (++idx; idx <= p; ++idx)
3116 dst[idx] = ext; */
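/* Illustrative example (editorial, not part of the original source):
   unsigned _BitInt(256) >> 100 with 64-bit limbs has p = 3, n1 = 36,
   n2 = 1, n3 = 1 and n4 = 28, and the pseudocode above yields

     dst[0] = (src[1] >> 36) | (src[2] << 28);
     dst[1] = (src[2] >> 36) | (src[3] << 28);
     dst[2] = src[3] >> 36;
     dst[3] = 0;  */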
3117 tree pmn3;
3118 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3119 pmn3 = p;
3120 else if (TREE_CODE (n3) == INTEGER_CST)
3121 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3122 else
3124 pmn3 = make_ssa_name (sizetype);
3125 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3126 insert_before (g);
3128 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3129 edge edge_true, edge_false;
3130 if_then (g, profile_probability::likely (), edge_true, edge_false);
3131 tree idx_next;
3132 tree idx = create_loop (n2, &idx_next);
3133 tree idxmn2 = make_ssa_name (sizetype);
3134 tree idxpn3 = make_ssa_name (sizetype);
3135 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3136 insert_before (g);
3137 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3138 insert_before (g);
3139 m_data_cnt = 0;
3140 tree t1 = handle_operand (rhs1, idx);
3141 m_first = false;
3142 g = gimple_build_assign (make_ssa_name (m_limb_type),
3143 RSHIFT_EXPR, t1, n1);
3144 insert_before (g);
3145 t1 = gimple_assign_lhs (g);
3146 if (!integer_zerop (n3))
3148 m_data_cnt = 0;
3149 tree t2 = handle_operand (rhs1, idxpn3);
3150 g = gimple_build_assign (make_ssa_name (m_limb_type),
3151 LSHIFT_EXPR, t2, n4);
3152 insert_before (g);
3153 t2 = gimple_assign_lhs (g);
3154 g = gimple_build_assign (make_ssa_name (m_limb_type),
3155 BIT_IOR_EXPR, t1, t2);
3156 insert_before (g);
3157 t1 = gimple_assign_lhs (g);
3159 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3160 g = gimple_build_assign (l, t1);
3161 insert_before (g);
3162 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3163 insert_before (g);
3164 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3165 insert_before (g);
3166 idx = make_ssa_name (sizetype);
3167 m_gsi = gsi_for_stmt (final_stmt);
3168 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3169 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3170 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3171 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3172 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3173 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3174 m_data_cnt = 0;
3175 tree ms = handle_operand (rhs1, p);
3176 tree ext = ms;
3177 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3178 ext = add_cast (m_limb_type, ms);
3179 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3180 && !integer_zerop (n3))
3182 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3183 if_then (g, profile_probability::likely (), edge_true, edge_false);
3184 m_data_cnt = 0;
3185 t1 = handle_operand (rhs1, idx);
3186 g = gimple_build_assign (make_ssa_name (m_limb_type),
3187 RSHIFT_EXPR, t1, n1);
3188 insert_before (g);
3189 t1 = gimple_assign_lhs (g);
3190 g = gimple_build_assign (make_ssa_name (m_limb_type),
3191 LSHIFT_EXPR, ext, n4);
3192 insert_before (g);
3193 tree t2 = gimple_assign_lhs (g);
3194 g = gimple_build_assign (make_ssa_name (m_limb_type),
3195 BIT_IOR_EXPR, t1, t2);
3196 insert_before (g);
3197 t1 = gimple_assign_lhs (g);
3198 idxmn2 = make_ssa_name (sizetype);
3199 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3200 insert_before (g);
3201 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3202 g = gimple_build_assign (l, t1);
3203 insert_before (g);
3204 idx_next = make_ssa_name (sizetype);
3205 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3206 insert_before (g);
3207 m_gsi = gsi_for_stmt (final_stmt);
3208 tree nidx = make_ssa_name (sizetype);
3209 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3210 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3211 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3212 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3213 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3214 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3215 idx = nidx;
3217 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3218 insert_before (g);
3219 idx = gimple_assign_lhs (g);
3220 tree sext = ext;
3221 if (!TYPE_UNSIGNED (type))
3222 sext = add_cast (signed_type_for (m_limb_type), ext);
3223 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3224 RSHIFT_EXPR, sext, n1);
3225 insert_before (g);
3226 t1 = gimple_assign_lhs (g);
3227 if (!TYPE_UNSIGNED (type))
3229 t1 = add_cast (m_limb_type, t1);
3230 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3231 RSHIFT_EXPR, sext,
3232 build_int_cst (TREE_TYPE (n),
3233 limb_prec - 1));
3234 insert_before (g);
3235 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3237 else
3238 ext = build_zero_cst (m_limb_type);
3239 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3240 g = gimple_build_assign (l, t1);
3241 insert_before (g);
3242 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3243 size_one_node);
3244 insert_before (g);
3245 idx = gimple_assign_lhs (g);
3246 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3247 if_then (g, profile_probability::likely (), edge_true, edge_false);
3248 idx = create_loop (idx, &idx_next);
3249 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3250 g = gimple_build_assign (l, ext);
3251 insert_before (g);
3252 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3253 insert_before (g);
3254 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3255 insert_before (g);
3257 else
3259 /* Lower
3260 dst = src << n;
3262 unsigned n1 = n % limb_prec;
3263 size_t n2 = n / limb_prec;
3264 size_t n3 = n1 != 0;
3265 unsigned n4 = (limb_prec - n1) % limb_prec;
3266 size_t idx;
3267 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3268 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3269 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3270 if (n1)
3272 dst[idx] = src[idx - n2] << n1;
3273 --idx;
3275 for (; (ssize_t) idx >= 0; --idx)
3276 dst[idx] = 0; */
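/* Illustrative example (editorial, not part of the original source):
   _BitInt(256) << 100 with 64-bit limbs has p = 3, n1 = 36, n2 = 1,
   n3 = 1 and n4 = 28, and the pseudocode above yields

     dst[3] = (src[2] << 36) | (src[1] >> 28);
     dst[2] = (src[1] << 36) | (src[0] >> 28);
     dst[1] = src[0] << 36;
     dst[0] = 0;  */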
3277 tree n2pn3;
3278 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3279 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3280 else
3282 n2pn3 = make_ssa_name (sizetype);
3283 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3284 insert_before (g);
3286 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3287 idx even to access the most significant partial limb. */
3288 m_var_msb = true;
3289 if (integer_zerop (n3))
3290 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3291 counts. Emit an if (true) condition that can be optimized away later. */
3292 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3293 NULL_TREE, NULL_TREE);
3294 else
3295 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3296 edge edge_true, edge_false;
3297 if_then (g, profile_probability::likely (), edge_true, edge_false);
3298 tree idx_next;
3299 tree idx = create_loop (p, &idx_next);
3300 tree idxmn2 = make_ssa_name (sizetype);
3301 tree idxmn2mn3 = make_ssa_name (sizetype);
3302 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3303 insert_before (g);
3304 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3305 insert_before (g);
3306 m_data_cnt = 0;
3307 tree t1 = handle_operand (rhs1, idxmn2);
3308 m_first = false;
3309 g = gimple_build_assign (make_ssa_name (m_limb_type),
3310 LSHIFT_EXPR, t1, n1);
3311 insert_before (g);
3312 t1 = gimple_assign_lhs (g);
3313 if (!integer_zerop (n3))
3315 m_data_cnt = 0;
3316 tree t2 = handle_operand (rhs1, idxmn2mn3);
3317 g = gimple_build_assign (make_ssa_name (m_limb_type),
3318 RSHIFT_EXPR, t2, n4);
3319 insert_before (g);
3320 t2 = gimple_assign_lhs (g);
3321 g = gimple_build_assign (make_ssa_name (m_limb_type),
3322 BIT_IOR_EXPR, t1, t2);
3323 insert_before (g);
3324 t1 = gimple_assign_lhs (g);
3326 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3327 g = gimple_build_assign (l, t1);
3328 insert_before (g);
3329 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3330 insert_before (g);
3331 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3332 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3333 NULL_TREE, NULL_TREE);
3334 insert_before (g);
3335 idx = make_ssa_name (sizetype);
3336 m_gsi = gsi_for_stmt (final_stmt);
3337 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3338 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3339 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3340 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3341 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3342 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3343 m_data_cnt = 0;
3344 if (!integer_zerop (n3))
3346 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3347 NULL_TREE, NULL_TREE);
3348 if_then (g, profile_probability::likely (), edge_true, edge_false);
3349 idxmn2 = make_ssa_name (sizetype);
3350 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3351 insert_before (g);
3352 m_data_cnt = 0;
3353 t1 = handle_operand (rhs1, idxmn2);
3354 g = gimple_build_assign (make_ssa_name (m_limb_type),
3355 LSHIFT_EXPR, t1, n1);
3356 insert_before (g);
3357 t1 = gimple_assign_lhs (g);
3358 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3359 g = gimple_build_assign (l, t1);
3360 insert_before (g);
3361 idx_next = make_ssa_name (sizetype);
3362 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3363 insert_before (g);
3364 m_gsi = gsi_for_stmt (final_stmt);
3365 tree nidx = make_ssa_name (sizetype);
3366 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3367 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3368 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3369 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3370 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3371 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3372 idx = nidx;
3374 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3375 ssize_int (0), NULL_TREE, NULL_TREE);
3376 if_then (g, profile_probability::likely (), edge_true, edge_false);
3377 idx = create_loop (idx, &idx_next);
3378 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3379 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3380 insert_before (g);
3381 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3382 insert_before (g);
3383 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3384 ssize_int (0), NULL_TREE, NULL_TREE);
3385 insert_before (g);
3389 /* Lower large/huge _BitInt multiplication or division. */
3391 void
3392 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3394 tree rhs1 = gimple_assign_rhs1 (stmt);
3395 tree rhs2 = gimple_assign_rhs2 (stmt);
3396 tree lhs = gimple_assign_lhs (stmt);
3397 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3398 tree type = TREE_TYPE (rhs1);
3399 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3400 && bitint_precision_kind (type) >= bitint_prec_large);
3401 int prec = TYPE_PRECISION (type), prec1, prec2;
3402 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3403 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3404 if (obj == NULL_TREE)
3406 int part = var_to_partition (m_map, lhs);
3407 gcc_assert (m_vars[part] != NULL_TREE);
3408 obj = m_vars[part];
3409 lhs = build_fold_addr_expr (obj);
3411 else
3413 lhs = build_fold_addr_expr (obj);
3414 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3415 NULL_TREE, true, GSI_SAME_STMT);
3417 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3418 gimple *g;
3419 switch (rhs_code)
3421 case MULT_EXPR:
3422 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3423 lhs, build_int_cst (sitype, prec),
3424 rhs1, build_int_cst (sitype, prec1),
3425 rhs2, build_int_cst (sitype, prec2));
3426 insert_before (g);
3427 break;
3428 case TRUNC_DIV_EXPR:
3429 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3430 lhs, build_int_cst (sitype, prec),
3431 null_pointer_node,
3432 build_int_cst (sitype, 0),
3433 rhs1, build_int_cst (sitype, prec1),
3434 rhs2, build_int_cst (sitype, prec2));
3435 if (!stmt_ends_bb_p (stmt))
3436 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3437 insert_before (g);
3438 break;
3439 case TRUNC_MOD_EXPR:
3440 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3441 build_int_cst (sitype, 0),
3442 lhs, build_int_cst (sitype, prec),
3443 rhs1, build_int_cst (sitype, prec1),
3444 rhs2, build_int_cst (sitype, prec2));
3445 if (!stmt_ends_bb_p (stmt))
3446 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3447 insert_before (g);
3448 break;
3449 default:
3450 gcc_unreachable ();
3452 if (stmt_ends_bb_p (stmt))
3454 maybe_duplicate_eh_stmt (g, stmt);
3455 edge e1;
3456 edge_iterator ei;
3457 basic_block bb = gimple_bb (stmt);
3459 FOR_EACH_EDGE (e1, ei, bb->succs)
3460 if (e1->flags & EDGE_EH)
3461 break;
3462 if (e1)
3464 edge e2 = split_block (gsi_bb (m_gsi), g);
3465 m_gsi = gsi_after_labels (e2->dest);
3466 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3467 = profile_probability::very_unlikely ();
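/* Illustrative sketch (editorial, not part of the original source):
   an unsigned _BitInt(256) multiplication c = a * b; is lowered above
   to roughly

     .MULBITINT (&c, 256, &a, 256, &b, 256);

   where handle_operand_addr may narrow the precision arguments (or
   negate them to request sign extension) when value ranges allow, and
   TRUNC_DIV_EXPR/TRUNC_MOD_EXPR similarly become .DIVMODBITINT calls
   passing a null pointer and 0 precision for the unneeded quotient or
   remainder.  */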
3472 /* Lower large/huge _BitInt conversion to/from floating point. */
3474 void
3475 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3477 tree rhs1 = gimple_assign_rhs1 (stmt);
3478 tree lhs = gimple_assign_lhs (stmt);
3479 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3480 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3481 gimple *g;
3482 if (rhs_code == FIX_TRUNC_EXPR)
3484 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3485 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3486 prec = -prec;
3487 if (obj == NULL_TREE)
3489 int part = var_to_partition (m_map, lhs);
3490 gcc_assert (m_vars[part] != NULL_TREE);
3491 obj = m_vars[part];
3492 lhs = build_fold_addr_expr (obj);
3494 else
3496 lhs = build_fold_addr_expr (obj);
3497 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3498 NULL_TREE, true, GSI_SAME_STMT);
3500 scalar_mode from_mode
3501 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3502 #ifdef HAVE_SFmode
3503 /* IEEE single is a full superset of both the IEEE half and
3504 bfloat formats, so convert to float first and then to _BitInt
3505 to avoid the need for another 2 library routines. */
3506 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3507 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3508 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3510 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3511 if (type)
3512 rhs1 = add_cast (type, rhs1);
3514 #endif
3515 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3516 lhs, build_int_cst (sitype, prec),
3517 rhs1);
3518 insert_before (g);
3520 else
3522 int prec;
3523 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3524 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3525 rhs1, build_int_cst (sitype, prec));
3526 gimple_call_set_lhs (g, lhs);
3527 if (!stmt_ends_bb_p (stmt))
3528 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3529 gsi_replace (&m_gsi, g, true);
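/* Illustrative sketch (editorial, not part of the original source):
   for unsigned _BitInt(256) x, a conversion double d = x; is lowered
   above to roughly d = .BITINTTOFLOAT (&x, 256); and the reverse
   x = (unsigned _BitInt(256)) d; to .FLOATTOBITINT (&x, 256, d);
   signed _BitInt operands are encoded by negative precision arguments
   instead.  */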
3533 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3534 If check_zero is true, the caller wants to check whether all bits in
3535 [start, end) are zero; otherwise, whether bits in [start, end) are either
3536 all zero or all ones. L is the limb with index LIMB; START and END are
3537 measured in bits. */
3539 tree
3540 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3541 unsigned int end, tree l,
3542 unsigned int limb,
3543 bool check_zero)
3545 unsigned startlimb = start / limb_prec;
3546 unsigned endlimb = (end - 1) / limb_prec;
3547 gimple *g;
3549 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3550 return l;
3551 if (startlimb == endlimb && limb == startlimb)
3553 if (check_zero)
3555 wide_int w = wi::shifted_mask (start % limb_prec,
3556 end - start, false, limb_prec);
3557 g = gimple_build_assign (make_ssa_name (m_limb_type),
3558 BIT_AND_EXPR, l,
3559 wide_int_to_tree (m_limb_type, w));
3560 insert_before (g);
3561 return gimple_assign_lhs (g);
3563 unsigned int shift = start % limb_prec;
3564 if ((end % limb_prec) != 0)
3566 unsigned int lshift = (-end) % limb_prec;
3567 shift += lshift;
3568 g = gimple_build_assign (make_ssa_name (m_limb_type),
3569 LSHIFT_EXPR, l,
3570 build_int_cst (unsigned_type_node,
3571 lshift));
3572 insert_before (g);
3573 l = gimple_assign_lhs (g);
3575 l = add_cast (signed_type_for (m_limb_type), l);
3576 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3577 RSHIFT_EXPR, l,
3578 build_int_cst (unsigned_type_node, shift));
3579 insert_before (g);
3580 return add_cast (m_limb_type, gimple_assign_lhs (g));
3582 else if (limb == startlimb)
3584 if ((start % limb_prec) == 0)
3585 return l;
3586 if (!check_zero)
3587 l = add_cast (signed_type_for (m_limb_type), l);
3588 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3589 RSHIFT_EXPR, l,
3590 build_int_cst (unsigned_type_node,
3591 start % limb_prec));
3592 insert_before (g);
3593 l = gimple_assign_lhs (g);
3594 if (!check_zero)
3595 l = add_cast (m_limb_type, l);
3596 return l;
3598 else if (limb == endlimb)
3600 if ((end % limb_prec) == 0)
3601 return l;
3602 if (check_zero)
3604 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3605 g = gimple_build_assign (make_ssa_name (m_limb_type),
3606 BIT_AND_EXPR, l,
3607 wide_int_to_tree (m_limb_type, w));
3608 insert_before (g);
3609 return gimple_assign_lhs (g);
3611 unsigned int shift = (-end) % limb_prec;
3612 g = gimple_build_assign (make_ssa_name (m_limb_type),
3613 LSHIFT_EXPR, l,
3614 build_int_cst (unsigned_type_node, shift));
3615 insert_before (g);
3616 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3617 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3618 RSHIFT_EXPR, l,
3619 build_int_cst (unsigned_type_node, shift));
3620 insert_before (g);
3621 return add_cast (m_limb_type, gimple_assign_lhs (g));
3623 return l;
3626 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3627 result including overflow flag into the right locations. */
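/* A sketch of the cases handled below (illustrative only): for a
   small/middle result type the call is replaced by
     lhs = COMPLEX_EXPR <res, (restype) ovf>;
   assembling RES from one or two limbs of VAR.  For large/huge
   results the limbs of VAR are block-copied into OBJ, OVF is stored
   into the first limb of the imaginary part and the rest of that part
   is zeroed.  For the .UBSAN_CHECK_* variants, where LHS is not a
   _Complex, OVF instead guards an unlikely branch calling the
   sanitizer runtime built by ubsan_build_overflow_builtin.  */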
3629 void
3630 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3631 tree ovf, tree lhs, tree orig_obj,
3632 gimple *stmt, tree_code code)
3634 gimple *g;
3636 if (obj == NULL_TREE
3637 && (TREE_CODE (type) != BITINT_TYPE
3638 || bitint_precision_kind (type) < bitint_prec_large))
3640 /* Add support for 3 or more limbs filled in from a normal integral
3641 type if this assert fails. If no target chooses a limb mode smaller
3642 than half of the largest supported normal integral type, this will
3643 not be needed. */
3644 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3645 tree lhs_type = type;
3646 if (TREE_CODE (type) == BITINT_TYPE
3647 && bitint_precision_kind (type) == bitint_prec_middle)
3648 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3649 TYPE_UNSIGNED (type));
3650 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3651 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3652 insert_before (g);
3653 r1 = gimple_assign_lhs (g);
3654 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3655 r1 = add_cast (lhs_type, r1);
3656 if (TYPE_PRECISION (lhs_type) > limb_prec)
3658 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3659 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3660 insert_before (g);
3661 r2 = gimple_assign_lhs (g);
3662 r2 = add_cast (lhs_type, r2);
3663 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3664 build_int_cst (unsigned_type_node,
3665 limb_prec));
3666 insert_before (g);
3667 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3668 gimple_assign_lhs (g));
3669 insert_before (g);
3670 r1 = gimple_assign_lhs (g);
3672 if (lhs_type != type)
3673 r1 = add_cast (type, r1);
3674 ovf = add_cast (lhs_type, ovf);
3675 if (lhs_type != type)
3676 ovf = add_cast (type, ovf);
3677 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3678 m_gsi = gsi_for_stmt (stmt);
3679 gsi_replace (&m_gsi, g, true);
3681 else
3683 unsigned HOST_WIDE_INT nelts = 0;
3684 tree atype = NULL_TREE;
3685 if (obj)
3687 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3688 if (orig_obj == NULL_TREE)
3689 nelts >>= 1;
3690 atype = build_array_type_nelts (m_limb_type, nelts);
3692 if (var && obj)
3694 tree v1, v2;
3695 tree zero;
3696 if (orig_obj == NULL_TREE)
3698 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3699 v1 = build2 (MEM_REF, atype,
3700 build_fold_addr_expr (unshare_expr (obj)), zero);
3702 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3703 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3704 else
3705 v1 = unshare_expr (obj);
3706 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3707 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3708 g = gimple_build_assign (v1, v2);
3709 insert_before (g);
3711 if (orig_obj == NULL_TREE && obj)
3713 ovf = add_cast (m_limb_type, ovf);
3714 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3715 g = gimple_build_assign (l, ovf);
3716 insert_before (g);
3717 if (nelts > 1)
3719 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3720 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3721 (nelts + 1) * m_limb_size);
3722 tree v1 = build2 (MEM_REF, atype,
3723 build_fold_addr_expr (unshare_expr (obj)),
3724 off);
3725 g = gimple_build_assign (v1, build_zero_cst (atype));
3726 insert_before (g);
3729 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3731 imm_use_iterator ui;
3732 use_operand_p use_p;
3733 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3735 g = USE_STMT (use_p);
3736 if (!is_gimple_assign (g)
3737 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3738 continue;
3739 tree lhs2 = gimple_assign_lhs (g);
3740 gimple *use_stmt;
3741 single_imm_use (lhs2, &use_p, &use_stmt);
3742 lhs2 = gimple_assign_lhs (use_stmt);
3743 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3744 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3745 g = gimple_build_assign (lhs2, ovf);
3746 else
3747 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3748 gsi_replace (&gsi, g, true);
3749 if (gsi_stmt (m_gsi) == use_stmt)
3750 m_gsi = gsi_for_stmt (g);
3751 break;
3754 else if (ovf != boolean_false_node)
3756 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3757 NULL_TREE, NULL_TREE);
3758 edge edge_true, edge_false;
3759 if_then (g, profile_probability::very_unlikely (),
3760 edge_true, edge_false);
3761 tree zero = build_zero_cst (TREE_TYPE (lhs));
3762 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3763 TREE_TYPE (lhs),
3764 zero, zero, NULL);
3765 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3766 true, GSI_SAME_STMT);
3767 m_gsi = gsi_after_labels (edge_true->dest);
3770 if (var)
3772 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3773 g = gimple_build_assign (var, clobber);
3774 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3778 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3779 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3780 argument 1 precision PREC1 and minimum precision for the result
3781 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
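/* Worked example (illustrative): for unsigned _BitInt(256) r = a + b
   with both operands known unsigned 256-bit (PREC == 256,
   PREC0 == PREC1 == 256), PREC2 is 257, so *START == 256 and
   *END == 257: the addition overflows iff the carry out of bit 255
   is set.  For signed _BitInt(256) r = a - b with signed operands
   (PREC0 == PREC1 == -256), PREC2 is again 257 > PREC, so
   *START == 255, *END == 257 and *CHECK_ZERO == false: there is no
   overflow iff bits [255, 257) of the infinitely precise result are
   all zeros or all ones.  */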
3783 static tree
3784 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3785 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3787 *start = 0;
3788 *end = 0;
3789 *check_zero = true;
3790 /* Ignore this special rule for subtraction: even if both
3791 prec0 >= 0 and prec1 >= 0, their difference can be negative
3792 in infinite precision. */
3793 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3795 /* The result in [0, prec2) is unsigned; if prec > prec2,
3796 all bits above it will be zero. */
3797 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3798 return boolean_false_node;
3799 else
3801 /* ovf if any of bits in [start, end) is non-zero. */
3802 *start = prec - !TYPE_UNSIGNED (type);
3803 *end = prec2;
3806 else if (TYPE_UNSIGNED (type))
3808 /* If the result in [0, prec2) is signed and prec > prec2,
3809 all bits above it will be sign bit copies. */
3810 if (prec >= prec2)
3812 /* ovf if bit prec - 1 is non-zero. */
3813 *start = prec - 1;
3814 *end = prec;
3816 else
3818 /* ovf if any of bits in [start, end) is non-zero. */
3819 *start = prec;
3820 *end = prec2;
3823 else if (prec >= prec2)
3824 return boolean_false_node;
3825 else
3827 /* ovf if [start, end) bits aren't all zeros or all ones. */
3828 *start = prec - 1;
3829 *end = prec2;
3830 *check_zero = false;
3832 return NULL_TREE;
3835 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3836 argument or with a _Complex large/huge _BitInt return type. */
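/* For instance (illustrative), for
     unsigned _BitInt(512) a, b, r;
     int ovf = __builtin_add_overflow (a, b, &r);
   the code below emits a loop handling two limbs per iteration
   (see m_upwards_2limb), adds corresponding limbs with carry through
   handle_plus_minus, stores the low PREC bits into the result object
   and ORs into OVF the checks computed by arith_overflow_extract_bits
   on the bits above PREC.  */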
3838 void
3839 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3841 tree arg0 = gimple_call_arg (stmt, 0);
3842 tree arg1 = gimple_call_arg (stmt, 1);
3843 tree lhs = gimple_call_lhs (stmt);
3844 gimple *g;
3846 if (!lhs)
3848 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3849 gsi_remove (&gsi, true);
3850 return;
3852 gimple *final_stmt = gsi_stmt (m_gsi);
3853 tree type = TREE_TYPE (lhs);
3854 if (TREE_CODE (type) == COMPLEX_TYPE)
3855 type = TREE_TYPE (type);
3856 int prec = TYPE_PRECISION (type);
3857 int prec0 = range_to_prec (arg0, stmt);
3858 int prec1 = range_to_prec (arg1, stmt);
3859 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3860 the minimum unsigned precision of any possible operation's
3861 result, otherwise it is the minimum signed precision.
3862 Some examples:
3863 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3864 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3865 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3866 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3867 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3868 8 + 8 [0, 0x1fe] 9 UNSIGNED
3869 8 + 10 [0, 0x4fe] 11 UNSIGNED
3870 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3871 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3872 8 + -8 [-0x80, 0x17e] 10 SIGNED
3873 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3874 10 + -8 [-0x80, 0x47e] 12 SIGNED
3875 8 - 8 [-0xff, 0xff] 9 SIGNED
3876 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3877 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3878 -8 - -8 [-0xff, 0xff] 9 SIGNED
3879 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3880 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3881 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3882 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3883 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3884 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3885 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3886 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3887 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3888 prec1 < 0 ? -prec1 : prec1);
3889 /* If operands are either both signed or both unsigned,
3890 we need just one additional bit. */
3891 prec2 = (((prec0 < 0) == (prec1 < 0)
3892 /* If one operand is signed and one unsigned and
3893 the signed one has larger precision, we need
3894 just one extra bit, otherwise two. */
3895 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3896 : (prec2 == -prec1 && prec2 != prec0)))
3897 ? prec2 + 1 : prec2 + 2);
3898 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3899 prec1 < 0 ? -prec1 : prec1);
3900 prec3 = MAX (prec3, prec);
3901 tree var = NULL_TREE;
3902 tree orig_obj = obj;
3903 if (obj == NULL_TREE
3904 && TREE_CODE (type) == BITINT_TYPE
3905 && bitint_precision_kind (type) >= bitint_prec_large
3906 && m_names
3907 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3909 int part = var_to_partition (m_map, lhs);
3910 gcc_assert (m_vars[part] != NULL_TREE);
3911 obj = m_vars[part];
3912 if (TREE_TYPE (lhs) == type)
3913 orig_obj = obj;
3915 if (TREE_CODE (type) != BITINT_TYPE
3916 || bitint_precision_kind (type) < bitint_prec_large)
3918 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3919 tree atype = build_array_type_nelts (m_limb_type, nelts);
3920 var = create_tmp_var (atype);
3923 enum tree_code code;
3924 switch (gimple_call_internal_fn (stmt))
3926 case IFN_ADD_OVERFLOW:
3927 case IFN_UBSAN_CHECK_ADD:
3928 code = PLUS_EXPR;
3929 break;
3930 case IFN_SUB_OVERFLOW:
3931 case IFN_UBSAN_CHECK_SUB:
3932 code = MINUS_EXPR;
3933 break;
3934 default:
3935 gcc_unreachable ();
3937 unsigned start, end;
3938 bool check_zero;
3939 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3940 &start, &end, &check_zero);
3942 unsigned startlimb, endlimb;
3943 if (ovf)
3945 startlimb = ~0U;
3946 endlimb = ~0U;
3948 else
3950 startlimb = start / limb_prec;
3951 endlimb = (end - 1) / limb_prec;
3954 int prec4 = ovf != NULL_TREE ? prec : prec3;
3955 bitint_prec_kind kind = bitint_precision_kind (prec4);
3956 unsigned cnt, rem = 0, fin = 0;
3957 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3958 bool last_ovf = (ovf == NULL_TREE
3959 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3960 if (kind != bitint_prec_huge)
3961 cnt = CEIL (prec4, limb_prec) + last_ovf;
3962 else
3964 rem = (prec4 % (2 * limb_prec));
3965 fin = (prec4 - rem) / limb_prec;
3966 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3967 idx = idx_first = create_loop (size_zero_node, &idx_next);
3970 if (kind == bitint_prec_huge)
3971 m_upwards_2limb = fin;
3972 m_upwards = true;
3974 tree type0 = TREE_TYPE (arg0);
3975 tree type1 = TREE_TYPE (arg1);
3976 int prec5 = prec3;
3977 if (bitint_precision_kind (prec5) < bitint_prec_large)
3978 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3979 if (TYPE_PRECISION (type0) < prec5)
3981 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3982 if (TREE_CODE (arg0) == INTEGER_CST)
3983 arg0 = fold_convert (type0, arg0);
3985 if (TYPE_PRECISION (type1) < prec5)
3987 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3988 if (TREE_CODE (arg1) == INTEGER_CST)
3989 arg1 = fold_convert (type1, arg1);
3991 unsigned int data_cnt = 0;
3992 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3993 tree cmp = build_zero_cst (m_limb_type);
3994 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3995 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3996 for (unsigned i = 0; i < cnt; i++)
3998 m_data_cnt = 0;
3999 tree rhs1, rhs2;
4000 if (kind != bitint_prec_huge)
4001 idx = size_int (i);
4002 else if (i >= 2)
4003 idx = size_int (fin + (i > 2));
4004 if (!last_ovf || i < cnt - 1)
4006 if (type0 != TREE_TYPE (arg0))
4007 rhs1 = handle_cast (type0, arg0, idx);
4008 else
4009 rhs1 = handle_operand (arg0, idx);
4010 if (type1 != TREE_TYPE (arg1))
4011 rhs2 = handle_cast (type1, arg1, idx);
4012 else
4013 rhs2 = handle_operand (arg1, idx);
4014 if (i == 0)
4015 data_cnt = m_data_cnt;
4016 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4017 rhs1 = add_cast (m_limb_type, rhs1);
4018 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4019 rhs2 = add_cast (m_limb_type, rhs2);
4020 last_rhs1 = rhs1;
4021 last_rhs2 = rhs2;
4023 else
4025 m_data_cnt = data_cnt;
4026 if (TYPE_UNSIGNED (type0))
4027 rhs1 = build_zero_cst (m_limb_type);
4028 else
4030 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4031 if (TREE_CODE (rhs1) == INTEGER_CST)
4032 rhs1 = build_int_cst (m_limb_type,
4033 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4034 else
4036 tree lpm1 = build_int_cst (unsigned_type_node,
4037 limb_prec - 1);
4038 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4039 RSHIFT_EXPR, rhs1, lpm1);
4040 insert_before (g);
4041 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4044 if (TYPE_UNSIGNED (type1))
4045 rhs2 = build_zero_cst (m_limb_type);
4046 else
4048 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4049 if (TREE_CODE (rhs2) == INTEGER_CST)
4050 rhs2 = build_int_cst (m_limb_type,
4051 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4052 else
4054 tree lpm1 = build_int_cst (unsigned_type_node,
4055 limb_prec - 1);
4056 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4057 RSHIFT_EXPR, rhs2, lpm1);
4058 insert_before (g);
4059 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4063 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4064 if (ovf != boolean_false_node)
4066 if (tree_fits_uhwi_p (idx))
4068 unsigned limb = tree_to_uhwi (idx);
4069 if (limb >= startlimb && limb <= endlimb)
4071 tree l = arith_overflow_extract_bits (start, end, rhs,
4072 limb, check_zero);
4073 tree this_ovf = make_ssa_name (boolean_type_node);
4074 if (ovf == NULL_TREE && !check_zero)
4076 cmp = l;
4077 g = gimple_build_assign (make_ssa_name (m_limb_type),
4078 PLUS_EXPR, l,
4079 build_int_cst (m_limb_type, 1));
4080 insert_before (g);
4081 g = gimple_build_assign (this_ovf, GT_EXPR,
4082 gimple_assign_lhs (g),
4083 build_int_cst (m_limb_type, 1));
4085 else
4086 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4087 insert_before (g);
4088 if (ovf == NULL_TREE)
4089 ovf = this_ovf;
4090 else
4092 tree b = make_ssa_name (boolean_type_node);
4093 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4094 insert_before (g);
4095 ovf = b;
4099 else if (startlimb < fin)
4101 if (m_first && startlimb + 2 < fin)
4103 tree data_out;
4104 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4105 ovf_out = m_data.pop ();
4106 m_data.pop ();
4107 if (!check_zero)
4109 cmp = prepare_data_in_out (cmp, idx, &data_out);
4110 cmp_out = m_data.pop ();
4111 m_data.pop ();
4114 if (i != 0 || startlimb != fin - 1)
4116 tree_code cmp_code;
4117 bool single_comparison
4118 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4119 if (!single_comparison)
4121 cmp_code = GE_EXPR;
4122 if (!check_zero && (start % limb_prec) == 0)
4123 single_comparison = true;
4125 else if ((startlimb & 1) == (i & 1))
4126 cmp_code = EQ_EXPR;
4127 else
4128 cmp_code = GT_EXPR;
4129 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4130 NULL_TREE, NULL_TREE);
4131 edge edge_true_true, edge_true_false, edge_false;
4132 gimple *g2 = NULL;
4133 if (!single_comparison)
4134 g2 = gimple_build_cond (NE_EXPR, idx,
4135 size_int (startlimb), NULL_TREE,
4136 NULL_TREE);
4137 if_then_if_then_else (g, g2, profile_probability::likely (),
4138 profile_probability::likely (),
4139 edge_true_true, edge_true_false,
4140 edge_false);
4141 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4142 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4143 check_zero);
4144 tree this_ovf = make_ssa_name (boolean_type_node);
4145 if (cmp_code != GT_EXPR && !check_zero)
4147 g = gimple_build_assign (make_ssa_name (m_limb_type),
4148 PLUS_EXPR, l,
4149 build_int_cst (m_limb_type, 1));
4150 insert_before (g);
4151 g = gimple_build_assign (this_ovf, GT_EXPR,
4152 gimple_assign_lhs (g),
4153 build_int_cst (m_limb_type, 1));
4155 else
4156 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4157 insert_before (g);
4158 if (cmp_code == GT_EXPR)
4160 tree t = make_ssa_name (boolean_type_node);
4161 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4162 insert_before (g);
4163 this_ovf = t;
4165 tree this_ovf2 = NULL_TREE;
4166 if (!single_comparison)
4168 m_gsi = gsi_after_labels (edge_true_true->src);
4169 tree t = make_ssa_name (boolean_type_node);
4170 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4171 insert_before (g);
4172 this_ovf2 = make_ssa_name (boolean_type_node);
4173 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4174 ovf, t);
4175 insert_before (g);
4177 m_gsi = gsi_after_labels (edge_true_false->dest);
4178 tree t;
4179 if (i == 1 && ovf_out)
4180 t = ovf_out;
4181 else
4182 t = make_ssa_name (boolean_type_node);
4183 gphi *phi = create_phi_node (t, edge_true_false->dest);
4184 add_phi_arg (phi, this_ovf, edge_true_false,
4185 UNKNOWN_LOCATION);
4186 add_phi_arg (phi, ovf ? ovf
4187 : boolean_false_node, edge_false,
4188 UNKNOWN_LOCATION);
4189 if (edge_true_true)
4190 add_phi_arg (phi, this_ovf2, edge_true_true,
4191 UNKNOWN_LOCATION);
4192 ovf = t;
4193 if (!check_zero && cmp_code != GT_EXPR)
4195 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4196 phi = create_phi_node (t, edge_true_false->dest);
4197 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4198 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4199 if (edge_true_true)
4200 add_phi_arg (phi, cmp, edge_true_true,
4201 UNKNOWN_LOCATION);
4202 cmp = t;
4208 if (var || obj)
4210 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4212 else if (!tree_fits_uhwi_p (idx)
4213 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4215 bool single_comparison
4216 = (((unsigned) prec % limb_prec) == 0
4217 || prec_limbs + 1 >= fin
4218 || (prec_limbs & 1) == (i & 1));
4219 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4220 NULL_TREE, NULL_TREE);
4221 gimple *g2 = NULL;
4222 if (!single_comparison)
4223 g2 = gimple_build_cond (LT_EXPR, idx,
4224 size_int (prec_limbs - 1),
4225 NULL_TREE, NULL_TREE);
4226 edge edge_true_true, edge_true_false, edge_false;
4227 if_then_if_then_else (g, g2, profile_probability::likely (),
4228 profile_probability::likely (),
4229 edge_true_true, edge_true_false,
4230 edge_false);
4231 tree l = limb_access (type, var ? var : obj, idx, true);
4232 g = gimple_build_assign (l, rhs);
4233 insert_before (g);
4234 if (!single_comparison)
4236 m_gsi = gsi_after_labels (edge_true_true->src);
4237 l = limb_access (type, var ? var : obj,
4238 size_int (prec_limbs - 1), true);
4239 if (!useless_type_conversion_p (TREE_TYPE (l),
4240 TREE_TYPE (rhs)))
4241 rhs = add_cast (TREE_TYPE (l), rhs);
4242 g = gimple_build_assign (l, rhs);
4243 insert_before (g);
4245 m_gsi = gsi_after_labels (edge_true_false->dest);
4247 else
4249 tree l = limb_access (type, var ? var : obj, idx, true);
4250 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4251 rhs = add_cast (TREE_TYPE (l), rhs);
4252 g = gimple_build_assign (l, rhs);
4253 insert_before (g);
4256 m_first = false;
4257 if (kind == bitint_prec_huge && i <= 1)
4259 if (i == 0)
4261 idx = make_ssa_name (sizetype);
4262 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4263 size_one_node);
4264 insert_before (g);
4266 else
4268 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4269 size_int (2));
4270 insert_before (g);
4271 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4272 NULL_TREE, NULL_TREE);
4273 insert_before (g);
4274 m_gsi = gsi_for_stmt (final_stmt);
4275 m_bb = NULL;
4280 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4283 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4284 argument or with a _Complex large/huge _BitInt return type. */
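/* For instance (illustrative), __builtin_mul_overflow on two
   256-bit operands with a 256-bit result is lowered to
     .MULBITINT (&buf, 512, &a, 256, &b, 256);
   computing the full 512-bit product into a temporary buffer, after
   which the limbs covering bits [START, END) are inspected (with a
   small loop when the range spans several limbs) to derive the
   overflow flag.  */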
4286 void
4287 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4289 tree arg0 = gimple_call_arg (stmt, 0);
4290 tree arg1 = gimple_call_arg (stmt, 1);
4291 tree lhs = gimple_call_lhs (stmt);
4292 if (!lhs)
4294 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4295 gsi_remove (&gsi, true);
4296 return;
4298 gimple *final_stmt = gsi_stmt (m_gsi);
4299 tree type = TREE_TYPE (lhs);
4300 if (TREE_CODE (type) == COMPLEX_TYPE)
4301 type = TREE_TYPE (type);
4302 int prec = TYPE_PRECISION (type), prec0, prec1;
4303 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4304 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4305 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4306 + (prec1 < 0 ? -prec1 : prec1));
4307 if (prec0 == 1 || prec1 == 1)
4308 --prec2;
4309 tree var = NULL_TREE;
4310 tree orig_obj = obj;
4311 bool force_var = false;
4312 if (obj == NULL_TREE
4313 && TREE_CODE (type) == BITINT_TYPE
4314 && bitint_precision_kind (type) >= bitint_prec_large
4315 && m_names
4316 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4318 int part = var_to_partition (m_map, lhs);
4319 gcc_assert (m_vars[part] != NULL_TREE);
4320 obj = m_vars[part];
4321 if (TREE_TYPE (lhs) == type)
4322 orig_obj = obj;
4324 else if (obj != NULL_TREE && DECL_P (obj))
4326 for (int i = 0; i < 2; ++i)
4328 tree arg = i ? arg1 : arg0;
4329 if (TREE_CODE (arg) == ADDR_EXPR)
4330 arg = TREE_OPERAND (arg, 0);
4331 if (get_base_address (arg) == obj)
4333 force_var = true;
4334 break;
4338 if (obj == NULL_TREE
4339 || force_var
4340 || TREE_CODE (type) != BITINT_TYPE
4341 || bitint_precision_kind (type) < bitint_prec_large
4342 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4344 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4345 tree atype = build_array_type_nelts (m_limb_type, nelts);
4346 var = create_tmp_var (atype);
4348 tree addr = build_fold_addr_expr (var ? var : obj);
4349 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4350 NULL_TREE, true, GSI_SAME_STMT);
4351 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4352 gimple *g
4353 = gimple_build_call_internal (IFN_MULBITINT, 6,
4354 addr, build_int_cst (sitype,
4355 MAX (prec2, prec)),
4356 arg0, build_int_cst (sitype, prec0),
4357 arg1, build_int_cst (sitype, prec1));
4358 insert_before (g);
4360 unsigned start, end;
4361 bool check_zero;
4362 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4363 &start, &end, &check_zero);
4364 if (ovf == NULL_TREE)
4366 unsigned startlimb = start / limb_prec;
4367 unsigned endlimb = (end - 1) / limb_prec;
4368 unsigned cnt;
4369 bool use_loop = false;
4370 if (startlimb == endlimb)
4371 cnt = 1;
4372 else if (startlimb + 1 == endlimb)
4373 cnt = 2;
4374 else if ((end % limb_prec) == 0)
4376 cnt = 2;
4377 use_loop = true;
4379 else
4381 cnt = 3;
4382 use_loop = startlimb + 2 < endlimb;
4384 if (cnt == 1)
4386 tree l = limb_access (NULL_TREE, var ? var : obj,
4387 size_int (startlimb), true);
4388 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4389 insert_before (g);
4390 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4391 startlimb, check_zero);
4392 ovf = make_ssa_name (boolean_type_node);
4393 if (check_zero)
4394 g = gimple_build_assign (ovf, NE_EXPR, l,
4395 build_zero_cst (m_limb_type));
4396 else
4398 g = gimple_build_assign (make_ssa_name (m_limb_type),
4399 PLUS_EXPR, l,
4400 build_int_cst (m_limb_type, 1));
4401 insert_before (g);
4402 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4403 build_int_cst (m_limb_type, 1));
4405 insert_before (g);
4407 else
4409 basic_block edge_bb = NULL;
4410 gimple_stmt_iterator gsi = m_gsi;
4411 gsi_prev (&gsi);
4412 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4413 edge_bb = e->src;
4414 m_gsi = gsi_end_bb (edge_bb);
4416 tree cmp = build_zero_cst (m_limb_type);
4417 for (unsigned i = 0; i < cnt; i++)
4419 tree idx, idx_next = NULL_TREE;
4420 if (i == 0)
4421 idx = size_int (startlimb);
4422 else if (i == 2)
4423 idx = size_int (endlimb);
4424 else if (use_loop)
4425 idx = create_loop (size_int (startlimb + 1), &idx_next);
4426 else
4427 idx = size_int (startlimb + 1);
4428 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4429 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4430 insert_before (g);
4431 l = gimple_assign_lhs (g);
4432 if (i == 0 || i == 2)
4433 l = arith_overflow_extract_bits (start, end, l,
4434 tree_to_uhwi (idx),
4435 check_zero);
4436 if (i == 0 && !check_zero)
4438 cmp = l;
4439 g = gimple_build_assign (make_ssa_name (m_limb_type),
4440 PLUS_EXPR, l,
4441 build_int_cst (m_limb_type, 1));
4442 insert_before (g);
4443 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4444 build_int_cst (m_limb_type, 1),
4445 NULL_TREE, NULL_TREE);
4447 else
4448 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4449 insert_before (g);
4450 edge e1 = split_block (gsi_bb (m_gsi), g);
4451 e1->flags = EDGE_FALSE_VALUE;
4452 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4453 EDGE_TRUE_VALUE);
4454 e1->probability = profile_probability::likely ();
4455 e2->probability = e1->probability.invert ();
4456 if (i == 0)
4457 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4458 m_gsi = gsi_after_labels (e1->dest);
4459 if (i == 1 && use_loop)
4461 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4462 size_one_node);
4463 insert_before (g);
4464 g = gimple_build_cond (NE_EXPR, idx_next,
4465 size_int (endlimb + (cnt == 1)),
4466 NULL_TREE, NULL_TREE);
4467 insert_before (g);
4468 edge true_edge, false_edge;
4469 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4470 &true_edge,
4471 &false_edge);
4472 m_gsi = gsi_after_labels (false_edge->dest);
4473 m_bb = NULL;
4477 ovf = make_ssa_name (boolean_type_node);
4478 basic_block bb = gimple_bb (final_stmt);
4479 gphi *phi = create_phi_node (ovf, bb);
4480 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4481 edge_iterator ei;
4482 FOR_EACH_EDGE (e, ei, bb->preds)
4484 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4485 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4487 m_gsi = gsi_for_stmt (final_stmt);
4491 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4494 /* Lower a REALPART_EXPR or IMAGPART_EXPR stmt extracting part of the
4495 result of a .{ADD,SUB,MUL}_OVERFLOW call. */
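/* E.g. (illustrative) for
     r = REALPART_EXPR <c>;
   where C holds the _Complex _BitInt result of an arith overflow
   call, the lowering block-copies the first half (or, for
   IMAGPART_EXPR, the second half) of the limb array backing C into
   the limbs backing R.  */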
4497 void
4498 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4500 tree rhs1 = gimple_assign_rhs1 (stmt);
4501 rhs1 = TREE_OPERAND (rhs1, 0);
4502 if (obj == NULL_TREE)
4504 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4505 gcc_assert (m_vars[part] != NULL_TREE);
4506 obj = m_vars[part];
4508 if (TREE_CODE (rhs1) == SSA_NAME
4509 && (m_names == NULL
4510 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4512 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4513 return;
4515 int part = var_to_partition (m_map, rhs1);
4516 gcc_assert (m_vars[part] != NULL_TREE);
4517 tree var = m_vars[part];
4518 unsigned HOST_WIDE_INT nelts
4519 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4520 tree atype = build_array_type_nelts (m_limb_type, nelts);
4521 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4522 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4523 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4524 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4525 ? 0 : nelts * m_limb_size);
4526 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4527 gimple *g = gimple_build_assign (obj, v2);
4528 insert_before (g);
4531 /* Lower COMPLEX_EXPR stmt. */
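/* E.g. (illustrative)
     c = COMPLEX_EXPR <a, b>;
   becomes two limb-array block copies, A into the first NELTS limbs
   backing C and B into the following NELTS limbs, with constant
   operands either zero-initialized or spilled to an initialized
   constant via tree_output_constant_def.  */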
4533 void
4534 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4536 tree lhs = gimple_assign_lhs (stmt);
4537 tree rhs1 = gimple_assign_rhs1 (stmt);
4538 tree rhs2 = gimple_assign_rhs2 (stmt);
4539 int part = var_to_partition (m_map, lhs);
4540 gcc_assert (m_vars[part] != NULL_TREE);
4541 lhs = m_vars[part];
4542 unsigned HOST_WIDE_INT nelts
4543 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4544 tree atype = build_array_type_nelts (m_limb_type, nelts);
4545 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4546 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4547 tree v2;
4548 if (TREE_CODE (rhs1) == SSA_NAME)
4550 part = var_to_partition (m_map, rhs1);
4551 gcc_assert (m_vars[part] != NULL_TREE);
4552 v2 = m_vars[part];
4554 else if (integer_zerop (rhs1))
4555 v2 = build_zero_cst (atype);
4556 else
4557 v2 = tree_output_constant_def (rhs1);
4558 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4559 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4560 gimple *g = gimple_build_assign (v1, v2);
4561 insert_before (g);
4562 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4563 TYPE_SIZE_UNIT (atype));
4564 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4565 if (TREE_CODE (rhs2) == SSA_NAME)
4567 part = var_to_partition (m_map, rhs2);
4568 gcc_assert (m_vars[part] != NULL_TREE);
4569 v2 = m_vars[part];
4571 else if (integer_zerop (rhs2))
4572 v2 = build_zero_cst (atype);
4573 else
4574 v2 = tree_output_constant_def (rhs2);
4575 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4576 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4577 g = gimple_build_assign (v1, v2);
4578 insert_before (g);
4581 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4582 argument. */
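/* Sketch of the strategy (illustrative): .POPCOUNT on _BitInt(512)
   with 64-bit limbs becomes a loop summing __builtin_popcountll of
   each of the 8 limbs; .PARITY similarly XORs the limbs together and
   calls the builtin once at the end.  .CTZ/.FFS scan the limbs
   upwards and branch out at the first non-zero limb, while
   .CLZ/.CLRSB scan downwards from the most significant limb; in
   those cases the word-size builtin is invoked once, on the limb
   that decides the result.  */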
4584 void
4585 bitint_large_huge::lower_bit_query (gimple *stmt)
4587 tree arg0 = gimple_call_arg (stmt, 0);
4588 tree arg1 = (gimple_call_num_args (stmt) == 2
4589 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4590 tree lhs = gimple_call_lhs (stmt);
4591 gimple *g;
4593 if (!lhs)
4595 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4596 gsi_remove (&gsi, true);
4597 return;
4599 tree type = TREE_TYPE (arg0);
4600 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4601 bitint_prec_kind kind = bitint_precision_kind (type);
4602 gcc_assert (kind >= bitint_prec_large);
4603 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4604 enum built_in_function fcode = END_BUILTINS;
4605 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4606 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4607 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4608 switch (ifn)
4610 case IFN_CLZ:
4611 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4612 fcode = BUILT_IN_CLZ;
4613 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4614 fcode = BUILT_IN_CLZL;
4615 else
4616 fcode = BUILT_IN_CLZLL;
4617 break;
4618 case IFN_FFS:
4619 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4620 the + 1 is folded into the addend rather than added at the end. */
4621 arg1 = integer_zero_node;
4622 /* FALLTHRU */
4623 case IFN_CTZ:
4624 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4625 fcode = BUILT_IN_CTZ;
4626 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4627 fcode = BUILT_IN_CTZL;
4628 else
4629 fcode = BUILT_IN_CTZLL;
4630 m_upwards = true;
4631 break;
4632 case IFN_CLRSB:
4633 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4634 fcode = BUILT_IN_CLRSB;
4635 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4636 fcode = BUILT_IN_CLRSBL;
4637 else
4638 fcode = BUILT_IN_CLRSBLL;
4639 break;
4640 case IFN_PARITY:
4641 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4642 fcode = BUILT_IN_PARITY;
4643 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4644 fcode = BUILT_IN_PARITYL;
4645 else
4646 fcode = BUILT_IN_PARITYLL;
4647 m_upwards = true;
4648 break;
4649 case IFN_POPCOUNT:
4650 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4651 fcode = BUILT_IN_POPCOUNT;
4652 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4653 fcode = BUILT_IN_POPCOUNTL;
4654 else
4655 fcode = BUILT_IN_POPCOUNTLL;
4656 m_upwards = true;
4657 break;
4658 default:
4659 gcc_unreachable ();
4661 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4662 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4663 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4664 basic_block edge_bb = NULL;
4665 if (m_upwards)
4667 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4668 if (kind == bitint_prec_large)
4669 cnt = CEIL (prec, limb_prec);
4670 else
4672 rem = (prec % (2 * limb_prec));
4673 end = (prec - rem) / limb_prec;
4674 cnt = 2 + CEIL (rem, limb_prec);
4675 idx = idx_first = create_loop (size_zero_node, &idx_next);
4678 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4680 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4681 gsi_prev (&gsi);
4682 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4683 edge_bb = e->src;
4684 if (kind == bitint_prec_large)
4685 m_gsi = gsi_end_bb (edge_bb);
4686 bqp = XALLOCAVEC (struct bq_details, cnt);
4688 else
4689 m_after_stmt = stmt;
4690 if (kind != bitint_prec_large)
4691 m_upwards_2limb = end;
4693 for (unsigned i = 0; i < cnt; i++)
4695 m_data_cnt = 0;
4696 if (kind == bitint_prec_large)
4697 idx = size_int (i);
4698 else if (i >= 2)
4699 idx = size_int (end + (i > 2));
4701 tree rhs1 = handle_operand (arg0, idx);
4702 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4704 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4705 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4706 rhs1 = add_cast (m_limb_type, rhs1);
4709 tree in, out, tem;
4710 if (ifn == IFN_PARITY)
4711 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4712 else if (ifn == IFN_FFS)
4713 in = prepare_data_in_out (integer_one_node, idx, &out);
4714 else
4715 in = prepare_data_in_out (integer_zero_node, idx, &out);
4717 switch (ifn)
4719 case IFN_CTZ:
4720 case IFN_FFS:
4721 g = gimple_build_cond (NE_EXPR, rhs1,
4722 build_zero_cst (m_limb_type),
4723 NULL_TREE, NULL_TREE);
4724 insert_before (g);
4725 edge e1, e2;
4726 e1 = split_block (gsi_bb (m_gsi), g);
4727 e1->flags = EDGE_FALSE_VALUE;
4728 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4729 e1->probability = profile_probability::unlikely ();
4730 e2->probability = e1->probability.invert ();
4731 if (i == 0)
4732 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4733 m_gsi = gsi_after_labels (e1->dest);
4734 bqp[i].e = e2;
4735 bqp[i].val = rhs1;
4736 if (tree_fits_uhwi_p (idx))
4737 bqp[i].addend
4738 = build_int_cst (integer_type_node,
4739 tree_to_uhwi (idx) * limb_prec
4740 + (ifn == IFN_FFS));
4741 else
4743 bqp[i].addend = in;
4744 if (i == 1)
4745 res = out;
4746 else
4747 res = make_ssa_name (integer_type_node);
4748 g = gimple_build_assign (res, PLUS_EXPR, in,
4749 build_int_cst (integer_type_node,
4750 limb_prec));
4751 insert_before (g);
4752 m_data[m_data_cnt] = res;
4754 break;
4755 case IFN_PARITY:
4756 if (!integer_zerop (in))
4758 if (kind == bitint_prec_huge && i == 1)
4759 res = out;
4760 else
4761 res = make_ssa_name (m_limb_type);
4762 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4763 insert_before (g);
4765 else
4766 res = rhs1;
4767 m_data[m_data_cnt] = res;
4768 break;
4769 case IFN_POPCOUNT:
4770 g = gimple_build_call (fndecl, 1, rhs1);
4771 tem = make_ssa_name (integer_type_node);
4772 gimple_call_set_lhs (g, tem);
4773 insert_before (g);
4774 if (!integer_zerop (in))
4776 if (kind == bitint_prec_huge && i == 1)
4777 res = out;
4778 else
4779 res = make_ssa_name (integer_type_node);
4780 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4781 insert_before (g);
4783 else
4784 res = tem;
4785 m_data[m_data_cnt] = res;
4786 break;
4787 default:
4788 gcc_unreachable ();
4791 m_first = false;
4792 if (kind == bitint_prec_huge && i <= 1)
4794 if (i == 0)
4796 idx = make_ssa_name (sizetype);
4797 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4798 size_one_node);
4799 insert_before (g);
4801 else
4803 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4804 size_int (2));
4805 insert_before (g);
4806 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4807 NULL_TREE, NULL_TREE);
4808 insert_before (g);
4809 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4810 m_gsi = gsi_after_labels (edge_bb);
4811 else
4812 m_gsi = gsi_for_stmt (stmt);
4813 m_bb = NULL;
4818 else
4820 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4821 int sub_one = 0;
4822 if (kind == bitint_prec_large)
4823 cnt = CEIL (prec, limb_prec);
4824 else
4826 rem = prec % limb_prec;
4827 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4828 rem = limb_prec;
4829 end = (prec - rem) / limb_prec;
4830 cnt = 1 + (rem != 0);
4831 if (ifn == IFN_CLRSB)
4832 sub_one = 1;
4835 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4836 gsi_prev (&gsi);
4837 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4838 edge_bb = e->src;
4839 m_gsi = gsi_end_bb (edge_bb);
4841 if (ifn == IFN_CLZ)
4842 bqp = XALLOCAVEC (struct bq_details, cnt);
4843 else
4845 gsi = gsi_for_stmt (stmt);
4846 gsi_prev (&gsi);
4847 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4848 edge_bb = e->src;
4849 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4852 for (unsigned i = 0; i < cnt; i++)
4854 m_data_cnt = 0;
4855 if (kind == bitint_prec_large)
4856 idx = size_int (cnt - i - 1);
4857 else if (i == cnt - 1)
4858 idx = create_loop (size_int (end - 1), &idx_next);
4859 else
4860 idx = size_int (end);
4862 tree rhs1 = handle_operand (arg0, idx);
4863 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4865 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4866 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4867 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4868 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4869 rhs1 = add_cast (m_limb_type, rhs1);
4872 if (ifn == IFN_CLZ)
4874 g = gimple_build_cond (NE_EXPR, rhs1,
4875 build_zero_cst (m_limb_type),
4876 NULL_TREE, NULL_TREE);
4877 insert_before (g);
4878 edge e1 = split_block (gsi_bb (m_gsi), g);
4879 e1->flags = EDGE_FALSE_VALUE;
4880 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4881 e1->probability = profile_probability::unlikely ();
4882 e2->probability = e1->probability.invert ();
4883 if (i == 0)
4884 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4885 m_gsi = gsi_after_labels (e1->dest);
4886 bqp[i].e = e2;
4887 bqp[i].val = rhs1;
4889 else
4891 if (i == 0)
4893 first = rhs1;
4894 g = gimple_build_assign (make_ssa_name (m_limb_type),
4895 PLUS_EXPR, rhs1,
4896 build_int_cst (m_limb_type, 1));
4897 insert_before (g);
4898 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4899 build_int_cst (m_limb_type, 1),
4900 NULL_TREE, NULL_TREE);
4901 insert_before (g);
4903 else
4905 g = gimple_build_assign (make_ssa_name (m_limb_type),
4906 BIT_XOR_EXPR, rhs1, first);
4907 insert_before (g);
4908 tree stype = signed_type_for (m_limb_type);
4909 g = gimple_build_cond (LT_EXPR,
4910 add_cast (stype,
4911 gimple_assign_lhs (g)),
4912 build_zero_cst (stype),
4913 NULL_TREE, NULL_TREE);
4914 insert_before (g);
4915 edge e1 = split_block (gsi_bb (m_gsi), g);
4916 e1->flags = EDGE_FALSE_VALUE;
4917 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4918 EDGE_TRUE_VALUE);
4919 e1->probability = profile_probability::unlikely ();
4920 e2->probability = e1->probability.invert ();
4921 if (i == 1)
4922 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4923 e2->src);
4924 m_gsi = gsi_after_labels (e1->dest);
4925 bqp[2 * i].e = e2;
4926 g = gimple_build_cond (NE_EXPR, rhs1, first,
4927 NULL_TREE, NULL_TREE);
4928 insert_before (g);
4930 edge e1 = split_block (gsi_bb (m_gsi), g);
4931 e1->flags = EDGE_FALSE_VALUE;
4932 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4933 e1->probability = profile_probability::unlikely ();
4934 e2->probability = e1->probability.invert ();
4935 if (i == 0)
4936 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4937 m_gsi = gsi_after_labels (e1->dest);
4938 bqp[2 * i + 1].e = e2;
4939 bqp[i].val = rhs1;
4941 if (tree_fits_uhwi_p (idx))
4942 bqp[i].addend
4943 = build_int_cst (integer_type_node,
4944 (int) prec
4945 - (((int) tree_to_uhwi (idx) + 1)
4946 * limb_prec) - sub_one);
4947 else
4949 tree in, out;
4950 in = build_int_cst (integer_type_node, rem - sub_one);
4951 m_first = true;
4952 in = prepare_data_in_out (in, idx, &out);
4953 out = m_data[m_data_cnt + 1];
4954 bqp[i].addend = in;
4955 g = gimple_build_assign (out, PLUS_EXPR, in,
4956 build_int_cst (integer_type_node,
4957 limb_prec));
4958 insert_before (g);
4959 m_data[m_data_cnt] = out;
4962 m_first = false;
4963 if (kind == bitint_prec_huge && i == cnt - 1)
4965 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4966 size_int (-1));
4967 insert_before (g);
4968 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4969 NULL_TREE, NULL_TREE);
4970 insert_before (g);
4971 edge true_edge, false_edge;
4972 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4973 &true_edge, &false_edge);
4974 m_gsi = gsi_after_labels (false_edge->dest);
4975 m_bb = NULL;
4979 switch (ifn)
4981 case IFN_CLZ:
4982 case IFN_CTZ:
4983 case IFN_FFS:
4984 gphi *phi1, *phi2, *phi3;
4985 basic_block bb;
4986 bb = gsi_bb (m_gsi);
4987 remove_edge (find_edge (bb, gimple_bb (stmt)));
4988 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4989 gimple_bb (stmt));
4990 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4991 gimple_bb (stmt));
4992 for (unsigned i = 0; i < cnt; i++)
4994 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4995 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4997 if (arg1 == NULL_TREE)
4999 g = gimple_build_builtin_unreachable (m_loc);
5000 insert_before (g);
5002 m_gsi = gsi_for_stmt (stmt);
5003 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
5004 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5005 insert_before (g);
5006 if (arg1 == NULL_TREE)
5007 g = gimple_build_assign (lhs, PLUS_EXPR,
5008 gimple_phi_result (phi2),
5009 gimple_call_lhs (g));
5010 else
5012 g = gimple_build_assign (make_ssa_name (integer_type_node),
5013 PLUS_EXPR, gimple_phi_result (phi2),
5014 gimple_call_lhs (g));
5015 insert_before (g);
5016 edge e1 = split_block (gimple_bb (stmt), g);
5017 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5018 e2->probability = profile_probability::always ();
5019 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5020 get_immediate_dominator (CDI_DOMINATORS,
5021 e1->src));
5022 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5023 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5024 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5025 m_gsi = gsi_for_stmt (stmt);
5026 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5028 gsi_replace (&m_gsi, g, true);
5029 break;
5030 case IFN_CLRSB:
5031 bb = gsi_bb (m_gsi);
5032 remove_edge (find_edge (bb, edge_bb));
5033 edge e;
5034 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5035 e->probability = profile_probability::always ();
5036 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5037 get_immediate_dominator (CDI_DOMINATORS,
5038 edge_bb));
5039 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5040 edge_bb);
5041 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5042 edge_bb);
5043 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5044 gimple_bb (stmt));
5045 for (unsigned i = 0; i < cnt; i++)
5047 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5048 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5049 UNKNOWN_LOCATION);
5050 tree a = bqp[i].addend;
5051 if (i && kind == bitint_prec_large)
5052 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5053 if (i)
5054 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5056 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5057 UNKNOWN_LOCATION);
5058 m_gsi = gsi_after_labels (edge_bb);
5059 g = gimple_build_call (fndecl, 1,
5060 add_cast (signed_type_for (m_limb_type),
5061 gimple_phi_result (phi1)));
5062 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5063 insert_before (g);
5064 g = gimple_build_assign (make_ssa_name (integer_type_node),
5065 PLUS_EXPR, gimple_call_lhs (g),
5066 gimple_phi_result (phi2));
5067 insert_before (g);
5068 if (kind != bitint_prec_large)
5070 g = gimple_build_assign (make_ssa_name (integer_type_node),
5071 PLUS_EXPR, gimple_assign_lhs (g),
5072 integer_one_node);
5073 insert_before (g);
5075 add_phi_arg (phi3, gimple_assign_lhs (g),
5076 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5077 m_gsi = gsi_for_stmt (stmt);
5078 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5079 gsi_replace (&m_gsi, g, true);
5080 break;
5081 case IFN_PARITY:
5082 g = gimple_build_call (fndecl, 1, res);
5083 gimple_call_set_lhs (g, lhs);
5084 gsi_replace (&m_gsi, g, true);
5085 break;
5086 case IFN_POPCOUNT:
5087 g = gimple_build_assign (lhs, res);
5088 gsi_replace (&m_gsi, g, true);
5089 break;
5090 default:
5091 gcc_unreachable ();
5095 /* Lower a call statement with one or more large/huge _BitInt
5096 arguments or a large/huge _BitInt return value. */
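/* Illustrative: for a plain call foo (x) with X a huge _BitInt
   SSA_NAME, the code below rewrites the argument to a fresh SSA_NAME
   copied (through a VIEW_CONVERT_EXPR if needed) from the limb-array
   variable of X's partition and records it in m_preserved so
   coalescing keeps it live; a large/huge _BitInt lhs is redirected
   to its partition variable instead.  */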
5098 void
5099 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5101 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5102 unsigned int nargs = gimple_call_num_args (stmt);
5103 if (gimple_call_internal_p (stmt))
5104 switch (gimple_call_internal_fn (stmt))
5106 case IFN_ADD_OVERFLOW:
5107 case IFN_SUB_OVERFLOW:
5108 case IFN_UBSAN_CHECK_ADD:
5109 case IFN_UBSAN_CHECK_SUB:
5110 lower_addsub_overflow (obj, stmt);
5111 return;
5112 case IFN_MUL_OVERFLOW:
5113 case IFN_UBSAN_CHECK_MUL:
5114 lower_mul_overflow (obj, stmt);
5115 return;
5116 case IFN_CLZ:
5117 case IFN_CTZ:
5118 case IFN_CLRSB:
5119 case IFN_FFS:
5120 case IFN_PARITY:
5121 case IFN_POPCOUNT:
5122 lower_bit_query (stmt);
5123 return;
5124 default:
5125 break;
5127 for (unsigned int i = 0; i < nargs; ++i)
5129 tree arg = gimple_call_arg (stmt, i);
5130 if (TREE_CODE (arg) != SSA_NAME
5131 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5132 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5133 continue;
5134 if (SSA_NAME_IS_DEFAULT_DEF (arg)
5135 && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
5137 tree var = create_tmp_reg (TREE_TYPE (arg));
5138 arg = get_or_create_ssa_default_def (cfun, var);
5140 else
5142 int p = var_to_partition (m_map, arg);
5143 tree v = m_vars[p];
5144 gcc_assert (v != NULL_TREE);
5145 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5146 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5147 arg = make_ssa_name (TREE_TYPE (arg));
5148 gimple *g = gimple_build_assign (arg, v);
5149 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5151 gimple_call_set_arg (stmt, i, arg);
5152 if (m_preserved == NULL)
5153 m_preserved = BITMAP_ALLOC (NULL);
5154 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5156 tree lhs = gimple_call_lhs (stmt);
5157 if (lhs
5158 && TREE_CODE (lhs) == SSA_NAME
5159 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5160 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5162 int p = var_to_partition (m_map, lhs);
5163 tree v = m_vars[p];
5164 gcc_assert (v != NULL_TREE);
5165 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5166 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5167 gimple_call_set_lhs (stmt, v);
5168 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5170 update_stmt (stmt);
5173 /* Lower __asm STMT which involves large/huge _BitInt values. */
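/* Illustrative: for something like
     __asm ("..." : "=m" (y) : "m" (x));
   large/huge _BitInt output and input operands are rewritten to refer
   to the VAR_DECLs backing their partitions (or, for an otherwise
   unused default-def input, a fresh addressable temporary), so the
   asm operates on the in-memory limb representation.  */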
5175 void
5176 bitint_large_huge::lower_asm (gimple *stmt)
5178 gasm *g = as_a <gasm *> (stmt);
5179 unsigned noutputs = gimple_asm_noutputs (g);
5180 unsigned ninputs = gimple_asm_ninputs (g);
5182 for (unsigned i = 0; i < noutputs; ++i)
5184 tree t = gimple_asm_output_op (g, i);
5185 tree s = TREE_VALUE (t);
5186 if (TREE_CODE (s) == SSA_NAME
5187 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5188 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5190 int part = var_to_partition (m_map, s);
5191 gcc_assert (m_vars[part] != NULL_TREE);
5192 TREE_VALUE (t) = m_vars[part];
5195 for (unsigned i = 0; i < ninputs; ++i)
5197 tree t = gimple_asm_input_op (g, i);
5198 tree s = TREE_VALUE (t);
5199 if (TREE_CODE (s) == SSA_NAME
5200 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5201 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5203 if (SSA_NAME_IS_DEFAULT_DEF (s)
5204 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
5206 TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
5207 mark_addressable (TREE_VALUE (t));
5209 else
5211 int part = var_to_partition (m_map, s);
5212 gcc_assert (m_vars[part] != NULL_TREE);
5213 TREE_VALUE (t) = m_vars[part];
5217 update_stmt (stmt);
5220 /* Lower statement STMT which involves large/huge _BitInt values
5221 into code accessing individual limbs. */
5223 void
5224 bitint_large_huge::lower_stmt (gimple *stmt)
5226 m_first = true;
5227 m_lhs = NULL_TREE;
5228 m_data.truncate (0);
5229 m_data_cnt = 0;
5230 m_gsi = gsi_for_stmt (stmt);
5231 m_after_stmt = NULL;
5232 m_bb = NULL;
5233 m_init_gsi = m_gsi;
5234 gsi_prev (&m_init_gsi);
5235 m_preheader_bb = NULL;
5236 m_upwards_2limb = 0;
5237 m_upwards = false;
5238 m_var_msb = false;
5239 m_cast_conditional = false;
5240 m_bitfld_load = 0;
5241 m_loc = gimple_location (stmt);
5242 if (is_gimple_call (stmt))
5244 lower_call (NULL_TREE, stmt);
5245 return;
5247 if (gimple_code (stmt) == GIMPLE_ASM)
5249 lower_asm (stmt);
5250 return;
5252 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5253 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5254 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5255 bool mergeable_cast_p = false;
5256 bool final_cast_p = false;
5257 if (gimple_assign_cast_p (stmt))
5259 lhs = gimple_assign_lhs (stmt);
5260 tree rhs1 = gimple_assign_rhs1 (stmt);
5261 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5262 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5263 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5264 mergeable_cast_p = true;
5265 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5266 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5267 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5268 || POINTER_TYPE_P (TREE_TYPE (lhs))))
5270 final_cast_p = true;
5271 if (TREE_CODE (rhs1) == SSA_NAME
5272 && (m_names == NULL
5273 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5275 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5276 if (is_gimple_assign (g)
5277 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5279 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5280 if (TREE_CODE (rhs2) == SSA_NAME
5281 && (m_names == NULL
5282 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5284 g = SSA_NAME_DEF_STMT (rhs2);
5285 int ovf = optimizable_arith_overflow (g);
5286 if (ovf == 2)
5287 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5288 and IMAGPART_EXPR uses, where the latter is cast to
5289 non-_BitInt, it will be optimized when handling
5290 the REALPART_EXPR. */
5291 return;
5292 if (ovf == 1)
5294 lower_call (NULL_TREE, g);
5295 return;
5302 if (gimple_store_p (stmt))
5304 tree rhs1 = gimple_assign_rhs1 (stmt);
5305 if (TREE_CODE (rhs1) == SSA_NAME
5306 && (m_names == NULL
5307 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5309 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5310 m_loc = gimple_location (g);
5311 lhs = gimple_assign_lhs (stmt);
5312 if (is_gimple_assign (g) && !mergeable_op (g))
5313 switch (gimple_assign_rhs_code (g))
5315 case LSHIFT_EXPR:
5316 case RSHIFT_EXPR:
5317 lower_shift_stmt (lhs, g);
5318 handled:
5319 m_gsi = gsi_for_stmt (stmt);
5320 unlink_stmt_vdef (stmt);
5321 release_ssa_name (gimple_vdef (stmt));
5322 gsi_remove (&m_gsi, true);
5323 return;
5324 case MULT_EXPR:
5325 case TRUNC_DIV_EXPR:
5326 case TRUNC_MOD_EXPR:
5327 lower_muldiv_stmt (lhs, g);
5328 goto handled;
5329 case FIX_TRUNC_EXPR:
5330 lower_float_conv_stmt (lhs, g);
5331 goto handled;
5332 case REALPART_EXPR:
5333 case IMAGPART_EXPR:
5334 lower_cplxpart_stmt (lhs, g);
5335 goto handled;
5336 default:
5337 break;
5339 else if (optimizable_arith_overflow (g) == 3)
5341 lower_call (lhs, g);
5342 goto handled;
5344 m_loc = gimple_location (stmt);
5347 if (mergeable_op (stmt)
5348 || gimple_store_p (stmt)
5349 || gimple_assign_load_p (stmt)
5350 || eq_p
5351 || mergeable_cast_p)
5353 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5354 if (!eq_p)
5355 return;
5357 else if (cmp_code != ERROR_MARK)
5358 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5359 if (cmp_code != ERROR_MARK)
5361 if (gimple_code (stmt) == GIMPLE_COND)
5363 gcond *cstmt = as_a <gcond *> (stmt);
5364 gimple_cond_set_lhs (cstmt, lhs);
5365 gimple_cond_set_rhs (cstmt, boolean_false_node);
5366 gimple_cond_set_code (cstmt, cmp_code);
5367 update_stmt (stmt);
5368 return;
5370 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5372 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5373 boolean_false_node);
5374 gimple_assign_set_rhs1 (stmt, cond);
5375 lhs = gimple_assign_lhs (stmt);
5376 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5377 || (bitint_precision_kind (TREE_TYPE (lhs))
5378 <= bitint_prec_middle));
5379 update_stmt (stmt);
5380 return;
5382 gimple_assign_set_rhs1 (stmt, lhs);
5383 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5384 gimple_assign_set_rhs_code (stmt, cmp_code);
5385 update_stmt (stmt);
5386 return;
5388 if (final_cast_p)
5390 tree lhs_type = TREE_TYPE (lhs);
5391 /* Add support for 3 or more limbs filled in from a normal integral
5392 type if this assert fails. If no target chooses a limb mode smaller
5393 than half of the largest supported normal integral type, this will
5394 not be needed. */
5395 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5396 gimple *g;
5397 if ((TREE_CODE (lhs_type) == BITINT_TYPE
5398 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5399 || POINTER_TYPE_P (lhs_type))
5400 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5401 TYPE_UNSIGNED (lhs_type));
5402 m_data_cnt = 0;
5403 tree rhs1 = gimple_assign_rhs1 (stmt);
5404 tree r1 = handle_operand (rhs1, size_int (0));
5405 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5406 r1 = add_cast (lhs_type, r1);
5407 if (TYPE_PRECISION (lhs_type) > limb_prec)
5409 m_data_cnt = 0;
5410 m_first = false;
5411 tree r2 = handle_operand (rhs1, size_int (1));
5412 r2 = add_cast (lhs_type, r2);
5413 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5414 build_int_cst (unsigned_type_node,
5415 limb_prec));
5416 insert_before (g);
5417 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5418 gimple_assign_lhs (g));
5419 insert_before (g);
5420 r1 = gimple_assign_lhs (g);
5422 if (lhs_type != TREE_TYPE (lhs))
5423 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5424 else
5425 g = gimple_build_assign (lhs, r1);
5426 gsi_replace (&m_gsi, g, true);
5427 return;
5429 if (is_gimple_assign (stmt))
5430 switch (gimple_assign_rhs_code (stmt))
5432 case LSHIFT_EXPR:
5433 case RSHIFT_EXPR:
5434 lower_shift_stmt (NULL_TREE, stmt);
5435 return;
5436 case MULT_EXPR:
5437 case TRUNC_DIV_EXPR:
5438 case TRUNC_MOD_EXPR:
5439 lower_muldiv_stmt (NULL_TREE, stmt);
5440 return;
5441 case FIX_TRUNC_EXPR:
5442 case FLOAT_EXPR:
5443 lower_float_conv_stmt (NULL_TREE, stmt);
5444 return;
5445 case REALPART_EXPR:
5446 case IMAGPART_EXPR:
5447 lower_cplxpart_stmt (NULL_TREE, stmt);
5448 return;
5449 case COMPLEX_EXPR:
5450 lower_complexexpr_stmt (stmt);
5451 return;
5452 default:
5453 break;
5455 gcc_unreachable ();
5458 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5459 the desired memory state. */
5461 void *
5462 vuse_eq (ao_ref *, tree vuse1, void *data)
5464 tree vuse2 = (tree) data;
5465 if (vuse1 == vuse2)
5466 return data;
5468 return NULL;
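/* Usage sketch (added commentary): bitint_dom_walker below calls

     walk_non_aliased_vuses (&ref, vuse, false, vuse_eq, NULL, NULL,
			     limit, lvop);

   which succeeds (returns non-NULL) only if VUSE can be walked back to
   LVOP, the DATA argument here, without crossing a store that may alias
   REF, i.e. the loaded memory is provably unchanged in between.  */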
5471 /* Return true if STMT uses a library function and needs to take
5472 address of its inputs. We need to avoid bit-fields in those
5473 cases. Similarly, we need to avoid overlap between destination
5474 and source limb arrays. */
5476 bool
5477 stmt_needs_operand_addr (gimple *stmt)
5479 if (is_gimple_assign (stmt))
5480 switch (gimple_assign_rhs_code (stmt))
5482 case MULT_EXPR:
5483 case TRUNC_DIV_EXPR:
5484 case TRUNC_MOD_EXPR:
5485 case FLOAT_EXPR:
5486 return true;
5487 default:
5488 break;
5490 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5491 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5492 return true;
5493 return false;
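/* Added example (commentary; the libgcc helper name is an assumption
   from the _BitInt runtime, e.g. __mulbitint3): for _BitInt(512)
   operands,

     c = a * b;

   is emitted as a library call taking addresses and limb counts of the
   operands, so a and b must be readable as whole limb arrays: no
   bit-field sources, and no overlap with the destination array.  */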
5496 /* Dominator walker used to discover which large/huge _BitInt
5497 loads could be sunk into all their uses. */
5499 class bitint_dom_walker : public dom_walker
5501 public:
5502 bitint_dom_walker (bitmap names, bitmap loads)
5503 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5505 edge before_dom_children (basic_block) final override;
5507 private:
5508 bitmap m_names, m_loads;
5511 edge
5512 bitint_dom_walker::before_dom_children (basic_block bb)
5514 gphi *phi = get_virtual_phi (bb);
5515 tree vop;
5516 if (phi)
5517 vop = gimple_phi_result (phi);
5518 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5519 vop = NULL_TREE;
5520 else
5521 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5523 auto_vec<tree, 16> worklist;
5524 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5525 !gsi_end_p (gsi); gsi_next (&gsi))
5527 gimple *stmt = gsi_stmt (gsi);
5528 if (is_gimple_debug (stmt))
5529 continue;
5531 if (!vop && gimple_vuse (stmt))
5532 vop = gimple_vuse (stmt);
5534 tree cvop = vop;
5535 if (gimple_vdef (stmt))
5536 vop = gimple_vdef (stmt);
5538 tree lhs = gimple_get_lhs (stmt);
5539 if (lhs
5540 && TREE_CODE (lhs) == SSA_NAME
5541 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5542 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5543 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5544 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5545 it means it will be handled in a loop or straight line code
5546 at the location of its (ultimate) immediate use, so for
5547 vop checking purposes check these only at the ultimate
5548 immediate use. */
5549 continue;
5551 ssa_op_iter oi;
5552 use_operand_p use_p;
5553 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5555 tree s = USE_FROM_PTR (use_p);
5556 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5557 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5558 worklist.safe_push (s);
5561 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5562 while (worklist.length () > 0)
5564 tree s = worklist.pop ();
5566 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5568 gimple *g = SSA_NAME_DEF_STMT (s);
5569 needs_operand_addr |= stmt_needs_operand_addr (g);
5570 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5572 tree s2 = USE_FROM_PTR (use_p);
5573 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5574 && (bitint_precision_kind (TREE_TYPE (s2))
5575 >= bitint_prec_large))
5576 worklist.safe_push (s2);
5578 continue;
5580 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5581 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5583 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5584 if (TREE_CODE (rhs) == SSA_NAME
5585 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5586 s = rhs;
5587 else
5588 continue;
5590 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5591 continue;
5593 gimple *g = SSA_NAME_DEF_STMT (s);
5594 tree rhs1 = gimple_assign_rhs1 (g);
5595 if (needs_operand_addr
5596 && TREE_CODE (rhs1) == COMPONENT_REF
5597 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5599 tree fld = TREE_OPERAND (rhs1, 1);
5600 /* For little-endian, we can allow as inputs bit-fields
5601 which start at a limb boundary. */
5602 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5603 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5604 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5605 % limb_prec) == 0)
5606 ;
5607 else
5609 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5610 continue;
5614 ao_ref ref;
5615 ao_ref_init (&ref, rhs1);
5616 tree lvop = gimple_vuse (g);
5617 unsigned limit = 64;
5618 tree vuse = cvop;
5619 if (vop != cvop
5620 && is_gimple_assign (stmt)
5621 && gimple_store_p (stmt)
5622 && (needs_operand_addr
5623 || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
5624 vuse = vop;
5625 if (vuse != lvop
5626 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5627 NULL, NULL, limit, lvop) == NULL)
5628 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5632 bb->aux = (void *) vop;
5633 return NULL;
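/* Added illustration of what the walk above proves: in

     _BitInt(512) a = s->x;
     s->y = 0;		/* might alias s->x */
     ... = a;

   the load may be redone limb by limb at the use only when no store
   between the two points can alias the loaded memory; otherwise the
   bit for A is cleared in M_LOADS and the name keeps its own backing
   variable.  */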
5638 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5639 build_ssa_conflict_graph.
5640 The differences are:
5641 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5642 2) for large/huge _BitInt multiplication/division/modulo process def
5643 only after processing uses rather than before to make uses conflict
5644 with the definition
5645 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5646 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5647 the final statement. */
5649 void
5650 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5651 ssa_conflicts *graph, bitmap names,
5652 void (*def) (live_track *, tree,
5653 ssa_conflicts *),
5654 void (*use) (live_track *, tree))
5656 bool muldiv_p = false;
5657 tree lhs = NULL_TREE;
5658 if (is_gimple_assign (stmt))
5660 lhs = gimple_assign_lhs (stmt);
5661 if (TREE_CODE (lhs) == SSA_NAME
5662 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5663 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5665 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5666 return;
5667 switch (gimple_assign_rhs_code (stmt))
5669 case MULT_EXPR:
5670 case TRUNC_DIV_EXPR:
5671 case TRUNC_MOD_EXPR:
5672 muldiv_p = true;
5673 default:
5674 break;
5679 ssa_op_iter iter;
5680 tree var;
5681 if (!muldiv_p)
5683 /* For stmts with more than one SSA_NAME definition pretend all the
5684 SSA_NAME outputs but the first one are live at this point, so
5685 that conflicts are added in between all those even when they are
5686 actually not really live after the asm, because expansion might
5687 copy those into pseudos after the asm and if multiple outputs
5688 share the same partition, it might overwrite those that should
5689 be live. E.g.
5690 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
5691 return a;
5692 See PR70593. */
5693 bool first = true;
5694 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5695 if (first)
5696 first = false;
5697 else
5698 use (live, var);
5700 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5701 def (live, var, graph);
5704 auto_vec<tree, 16> worklist;
5705 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5706 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5707 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5709 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5710 use (live, var);
5711 else
5712 worklist.safe_push (var);
5715 while (worklist.length () > 0)
5717 tree s = worklist.pop ();
5718 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5719 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5720 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5722 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5723 use (live, var);
5724 else
5725 worklist.safe_push (var);
5729 if (muldiv_p)
5730 def (live, lhs, graph);
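/* Added note: because the definition is processed after the uses for
   multiplication/division/modulo (muldiv_p above), the lhs conflicts
   with all of its operands, so coalescing can never place the result
   in the same limb array as an input; the library routines may write
   destination limbs before reading all source limbs, so overlap would
   be unsafe.  */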
5733 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5734 return the largest bitint_prec_kind of them, otherwise return
5735 bitint_prec_small. */
5737 static bitint_prec_kind
5738 arith_overflow_arg_kind (gimple *stmt)
5740 bitint_prec_kind ret = bitint_prec_small;
5741 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5742 switch (gimple_call_internal_fn (stmt))
5744 case IFN_ADD_OVERFLOW:
5745 case IFN_SUB_OVERFLOW:
5746 case IFN_MUL_OVERFLOW:
5747 for (int i = 0; i < 2; ++i)
5749 tree a = gimple_call_arg (stmt, i);
5750 if (TREE_CODE (a) == INTEGER_CST
5751 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5753 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5754 ret = MAX (ret, kind);
5757 break;
5758 default:
5759 break;
5761 return ret;
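/* Added example (assumes a 64-bit limb): in

     _BitInt(256) r;
     bool ovf = __builtin_add_overflow ((_BitInt(256)) 1, y, &r);

   the first .ADD_OVERFLOW argument is a large/huge INTEGER_CST, so the
   function above reports that even when the statement defines no
   large/huge SSA_NAME directly.  */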
5764 /* Entry point for _BitInt(N) operation lowering during optimization. */
5766 static unsigned int
5767 gimple_lower_bitint (void)
5769 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5770 limb_prec = 0;
5772 unsigned int i;
5773 for (i = 0; i < num_ssa_names; ++i)
5775 tree s = ssa_name (i);
5776 if (s == NULL)
5777 continue;
5778 tree type = TREE_TYPE (s);
5779 if (TREE_CODE (type) == COMPLEX_TYPE)
5781 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5782 != bitint_prec_small)
5783 break;
5784 type = TREE_TYPE (type);
5786 if (TREE_CODE (type) == BITINT_TYPE
5787 && bitint_precision_kind (type) != bitint_prec_small)
5788 break;
5789 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5790 into memory. Such functions could have no large/huge SSA_NAMEs. */
5791 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5793 gimple *g = SSA_NAME_DEF_STMT (s);
5794 if (is_gimple_assign (g) && gimple_store_p (g))
5796 tree t = gimple_assign_rhs1 (g);
5797 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5798 && (bitint_precision_kind (TREE_TYPE (t))
5799 >= bitint_prec_large))
5800 break;
5803 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5804 to floating point types need to be rewritten. */
5805 else if (SCALAR_FLOAT_TYPE_P (type))
5807 gimple *g = SSA_NAME_DEF_STMT (s);
5808 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5810 tree t = gimple_assign_rhs1 (g);
5811 if (TREE_CODE (t) == INTEGER_CST
5812 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5813 && (bitint_precision_kind (TREE_TYPE (t))
5814 != bitint_prec_small))
5815 break;
5819 if (i == num_ssa_names)
5820 return 0;
5822 basic_block bb;
5823 auto_vec<gimple *, 4> switch_statements;
5824 FOR_EACH_BB_FN (bb, cfun)
5826 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5828 tree idx = gimple_switch_index (swtch);
5829 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5830 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5831 continue;
5833 if (optimize)
5834 group_case_labels_stmt (swtch);
5835 if (gimple_switch_num_labels (swtch) == 1)
5837 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5838 gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
5839 gsi_remove (&gsi, true);
5841 else
5842 switch_statements.safe_push (swtch);
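/* Added note (cf. PR113737 in the commit subject): after
   group_case_labels_stmt merges all cases into the default label, the
   switch has a single label and is removed above instead of being fed
   to the decision-tree expansion below, which does not expect such a
   degenerate switch.  */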
5846 if (!switch_statements.is_empty ())
5848 bool expanded = false;
5849 gimple *stmt;
5850 unsigned int j;
5851 i = 0;
5852 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5854 gswitch *swtch = as_a<gswitch *> (stmt);
5855 tree_switch_conversion::switch_decision_tree dt (swtch);
5856 expanded |= dt.analyze_switch_statement ();
5859 if (expanded)
5861 free_dominance_info (CDI_DOMINATORS);
5862 free_dominance_info (CDI_POST_DOMINATORS);
5863 mark_virtual_operands_for_renaming (cfun);
5864 cleanup_tree_cfg (TODO_update_ssa);
5868 struct bitint_large_huge large_huge;
5869 bool has_large_huge_parm_result = false;
5870 bool has_large_huge = false;
5871 unsigned int ret = 0, first_large_huge = ~0U;
5872 bool edge_insertions = false;
5873 for (; i < num_ssa_names; ++i)
5875 tree s = ssa_name (i);
5876 if (s == NULL)
5877 continue;
5878 tree type = TREE_TYPE (s);
5879 if (TREE_CODE (type) == COMPLEX_TYPE)
5881 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5882 >= bitint_prec_large)
5883 has_large_huge = true;
5884 type = TREE_TYPE (type);
5886 if (TREE_CODE (type) == BITINT_TYPE
5887 && bitint_precision_kind (type) >= bitint_prec_large)
5889 if (first_large_huge == ~0U)
5890 first_large_huge = i;
5891 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5892 gimple_stmt_iterator gsi;
5893 tree_code rhs_code;
5894 /* Unoptimize certain constructs to simpler alternatives to
5895 avoid having to lower all of them. */
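/* Added sketch of the rewrites that follow (ux stands for rhs1 viewed
   as the corresponding unsigned _BitInt type):

     lhs = x r<< n;   -->   lhs = (ux << n) | (ux >> (prec - n));

   and ABS/ABSU/MIN/MAX/COND_EXPR become explicit control flow with a
   PHI node, leaving only shifts, IOR, negation and comparisons for the
   real lowering.  */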
5896 if (is_gimple_assign (stmt) && gimple_bb (stmt))
5897 switch (rhs_code = gimple_assign_rhs_code (stmt))
5899 default:
5900 break;
5901 case LROTATE_EXPR:
5902 case RROTATE_EXPR:
5904 first_large_huge = 0;
5905 location_t loc = gimple_location (stmt);
5906 gsi = gsi_for_stmt (stmt);
5907 tree rhs1 = gimple_assign_rhs1 (stmt);
5908 tree type = TREE_TYPE (rhs1);
5909 tree n = gimple_assign_rhs2 (stmt), m;
5910 tree p = build_int_cst (TREE_TYPE (n),
5911 TYPE_PRECISION (type));
5912 if (TREE_CODE (n) == INTEGER_CST)
5913 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5914 else
5916 m = make_ssa_name (TREE_TYPE (n));
5917 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5918 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5919 gimple_set_location (g, loc);
5921 if (!TYPE_UNSIGNED (type))
5923 tree utype = build_bitint_type (TYPE_PRECISION (type),
5924 1);
5925 if (TREE_CODE (rhs1) == INTEGER_CST)
5926 rhs1 = fold_convert (utype, rhs1);
5927 else
5929 tree t = make_ssa_name (utype);
5930 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5931 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5932 gimple_set_location (g, loc);
5933 rhs1 = t;
5935 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5936 rhs_code == LROTATE_EXPR
5937 ? LSHIFT_EXPR : RSHIFT_EXPR,
5938 rhs1, n);
5939 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5940 gimple_set_location (g, loc);
5941 tree op1 = gimple_assign_lhs (g);
5942 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5943 rhs_code == LROTATE_EXPR
5944 ? RSHIFT_EXPR : LSHIFT_EXPR,
5945 rhs1, m);
5946 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5947 gimple_set_location (g, loc);
5948 tree op2 = gimple_assign_lhs (g);
5949 tree lhs = gimple_assign_lhs (stmt);
5950 if (!TYPE_UNSIGNED (type))
5952 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5953 BIT_IOR_EXPR, op1, op2);
5954 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5955 gimple_set_location (g, loc);
5956 g = gimple_build_assign (lhs, NOP_EXPR,
5957 gimple_assign_lhs (g));
5959 else
5960 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5961 gsi_replace (&gsi, g, true);
5962 gimple_set_location (g, loc);
5964 break;
5965 case ABS_EXPR:
5966 case ABSU_EXPR:
5967 case MIN_EXPR:
5968 case MAX_EXPR:
5969 case COND_EXPR:
5970 first_large_huge = 0;
5971 gsi = gsi_for_stmt (stmt);
5972 tree lhs = gimple_assign_lhs (stmt);
5973 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5974 location_t loc = gimple_location (stmt);
5975 if (rhs_code == ABS_EXPR)
5976 g = gimple_build_cond (LT_EXPR, rhs1,
5977 build_zero_cst (TREE_TYPE (rhs1)),
5978 NULL_TREE, NULL_TREE);
5979 else if (rhs_code == ABSU_EXPR)
5981 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5982 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5983 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5984 gimple_set_location (g, loc);
5985 g = gimple_build_cond (LT_EXPR, rhs1,
5986 build_zero_cst (TREE_TYPE (rhs1)),
5987 NULL_TREE, NULL_TREE);
5988 rhs1 = rhs2;
5990 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5992 rhs2 = gimple_assign_rhs2 (stmt);
5993 if (TREE_CODE (rhs1) == INTEGER_CST)
5994 std::swap (rhs1, rhs2);
5995 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5996 NULL_TREE, NULL_TREE);
5997 if (rhs_code == MAX_EXPR)
5998 std::swap (rhs1, rhs2);
6000 else
6002 g = gimple_build_cond (NE_EXPR, rhs1,
6003 build_zero_cst (TREE_TYPE (rhs1)),
6004 NULL_TREE, NULL_TREE);
6005 rhs1 = gimple_assign_rhs2 (stmt);
6006 rhs2 = gimple_assign_rhs3 (stmt);
6008 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6009 gimple_set_location (g, loc);
6010 edge e1 = split_block (gsi_bb (gsi), g);
6011 edge e2 = split_block (e1->dest, (gimple *) NULL);
6012 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
6013 e3->probability = profile_probability::even ();
6014 e1->flags = EDGE_TRUE_VALUE;
6015 e1->probability = e3->probability.invert ();
6016 if (dom_info_available_p (CDI_DOMINATORS))
6017 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
6018 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
6020 gsi = gsi_after_labels (e1->dest);
6021 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6022 NEGATE_EXPR, rhs1);
6023 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6024 gimple_set_location (g, loc);
6025 rhs2 = gimple_assign_lhs (g);
6026 std::swap (rhs1, rhs2);
6028 gsi = gsi_for_stmt (stmt);
6029 gsi_remove (&gsi, true);
6030 gphi *phi = create_phi_node (lhs, e2->dest);
6031 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
6032 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
6033 break;
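/* Added illustration: the rewrite above turns e.g.
     lhs = MAX_EXPR <a, b>;
   into a branch on a < b whose two arms feed
     lhs = PHI <b (true path), a (false path)>;
   so only the _BitInt comparison itself remains to be lowered.  */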
6036 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6037 into memory. Such functions could have no large/huge SSA_NAMEs. */
6038 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6040 gimple *g = SSA_NAME_DEF_STMT (s);
6041 if (is_gimple_assign (g) && gimple_store_p (g))
6043 tree t = gimple_assign_rhs1 (g);
6044 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6045 && (bitint_precision_kind (TREE_TYPE (t))
6046 >= bitint_prec_large))
6047 has_large_huge = true;
6050 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6051 to floating point types need to be rewritten. */
6052 else if (SCALAR_FLOAT_TYPE_P (type))
6054 gimple *g = SSA_NAME_DEF_STMT (s);
6055 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6057 tree t = gimple_assign_rhs1 (g);
6058 if (TREE_CODE (t) == INTEGER_CST
6059 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6060 && (bitint_precision_kind (TREE_TYPE (t))
6061 >= bitint_prec_large))
6062 has_large_huge = true;
6066 for (i = first_large_huge; i < num_ssa_names; ++i)
6068 tree s = ssa_name (i);
6069 if (s == NULL)
6070 continue;
6071 tree type = TREE_TYPE (s);
6072 if (TREE_CODE (type) == COMPLEX_TYPE)
6073 type = TREE_TYPE (type);
6074 if (TREE_CODE (type) == BITINT_TYPE
6075 && bitint_precision_kind (type) >= bitint_prec_large)
6077 use_operand_p use_p;
6078 gimple *use_stmt;
6079 has_large_huge = true;
6080 if (optimize
6081 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
6082 continue;
6083 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6084 the same bb and could be handled in the same loop with the
6085 immediate use. */
6086 if (optimize
6087 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6088 && single_imm_use (s, &use_p, &use_stmt)
6089 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
6091 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
6093 if (mergeable_op (use_stmt))
6094 continue;
6095 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6096 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6097 continue;
6098 if (gimple_assign_cast_p (use_stmt))
6100 tree lhs = gimple_assign_lhs (use_stmt);
6101 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6102 continue;
6104 else if (gimple_store_p (use_stmt)
6105 && is_gimple_assign (use_stmt)
6106 && !gimple_has_volatile_ops (use_stmt)
6107 && !stmt_ends_bb_p (use_stmt))
6108 continue;
6110 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6112 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6113 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6114 && ((is_gimple_assign (use_stmt)
6115 && (gimple_assign_rhs_code (use_stmt)
6116 != COMPLEX_EXPR))
6117 || gimple_code (use_stmt) == GIMPLE_COND)
6118 && (!gimple_store_p (use_stmt)
6119 || (is_gimple_assign (use_stmt)
6120 && !gimple_has_volatile_ops (use_stmt)
6121 && !stmt_ends_bb_p (use_stmt)))
6122 && (TREE_CODE (rhs1) != SSA_NAME
6123 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6125 if (is_gimple_assign (use_stmt))
6126 switch (gimple_assign_rhs_code (use_stmt))
6128 case TRUNC_DIV_EXPR:
6129 case TRUNC_MOD_EXPR:
6130 case FLOAT_EXPR:
6131 /* For division, modulo and casts to floating
6132 point, avoid representing unsigned operands
6133 using negative prec if they were sign-extended
6134 from narrower precision. */
6135 if (TYPE_UNSIGNED (TREE_TYPE (s))
6136 && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
6137 && (TYPE_PRECISION (TREE_TYPE (s))
6138 > TYPE_PRECISION (TREE_TYPE (rhs1))))
6139 goto force_name;
6140 /* FALLTHRU */
6141 case MULT_EXPR:
6142 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6143 || (bitint_precision_kind (TREE_TYPE (rhs1))
6144 < bitint_prec_large))
6145 continue;
6146 /* Uses which use handle_operand_addr can't
6147 deal with nested casts. */
6148 if (TREE_CODE (rhs1) == SSA_NAME
6149 && gimple_assign_cast_p
6150 (SSA_NAME_DEF_STMT (rhs1))
6151 && has_single_use (rhs1)
6152 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6153 == gimple_bb (SSA_NAME_DEF_STMT (s))))
6154 goto force_name;
6155 break;
6156 default:
6157 break;
6159 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6160 || (bitint_precision_kind (TREE_TYPE (rhs1))
6161 < bitint_prec_large))
6162 continue;
6163 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6164 >= TYPE_PRECISION (TREE_TYPE (s)))
6165 && mergeable_op (use_stmt))
6166 continue;
6167 /* Prevent merging a widening non-mergeable cast
6168 on result of some narrower mergeable op
6169 together with later mergeable operations. E.g.
6170 result of _BitInt(223) addition shouldn't be
6171 sign-extended to _BitInt(513) and have another
6172 _BitInt(513) added to it, as handle_plus_minus
6173 with its PHI node handling inside of handle_cast
6174 will not work correctly. An exception is if
6175 use_stmt is a store, this is handled directly
6176 in lower_mergeable_stmt. */
6177 if (TREE_CODE (rhs1) != SSA_NAME
6178 || !has_single_use (rhs1)
6179 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6180 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6181 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6182 || gimple_store_p (use_stmt))
6183 continue;
6184 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6185 < TYPE_PRECISION (TREE_TYPE (s)))
6186 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6188 /* Another exception is if the widening cast is
6189 from mergeable same precision cast from something
6190 not mergeable. */
6191 tree rhs2
6192 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6193 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6194 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6195 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6197 if (TREE_CODE (rhs2) != SSA_NAME
6198 || !has_single_use (rhs2)
6199 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6200 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6201 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6202 continue;
6207 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6208 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6210 case IMAGPART_EXPR:
6212 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6213 rhs1 = TREE_OPERAND (rhs1, 0);
6214 if (TREE_CODE (rhs1) == SSA_NAME)
6216 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6217 if (optimizable_arith_overflow (g))
6218 continue;
6221 /* FALLTHRU */
6222 case LSHIFT_EXPR:
6223 case RSHIFT_EXPR:
6224 case MULT_EXPR:
6225 case TRUNC_DIV_EXPR:
6226 case TRUNC_MOD_EXPR:
6227 case FIX_TRUNC_EXPR:
6228 case REALPART_EXPR:
6229 if (gimple_store_p (use_stmt)
6230 && is_gimple_assign (use_stmt)
6231 && !gimple_has_volatile_ops (use_stmt)
6232 && !stmt_ends_bb_p (use_stmt))
6234 tree lhs = gimple_assign_lhs (use_stmt);
6235 /* As multiply/division passes address of the lhs
6236 to library function and that assumes it can extend
6237 it to whole number of limbs, avoid merging those
6238 with bit-field stores. Don't allow it for
6239 shifts etc. either, so that the bit-field store
6240 handling doesn't have to be done everywhere. */
6241 if (TREE_CODE (lhs) == COMPONENT_REF
6242 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6243 break;
6244 continue;
6246 break;
6247 default:
6248 break;
6252 /* Also ignore uninitialized uses. */
6253 if (SSA_NAME_IS_DEFAULT_DEF (s)
6254 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6255 continue;
6257 force_name:
6258 if (!large_huge.m_names)
6259 large_huge.m_names = BITMAP_ALLOC (NULL);
6260 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6261 if (has_single_use (s))
6263 if (!large_huge.m_single_use_names)
6264 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6265 bitmap_set_bit (large_huge.m_single_use_names,
6266 SSA_NAME_VERSION (s));
6268 if (SSA_NAME_VAR (s)
6269 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6270 && SSA_NAME_IS_DEFAULT_DEF (s))
6271 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6272 has_large_huge_parm_result = true;
6273 if (optimize
6274 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6275 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6276 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6277 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6279 use_operand_p use_p;
6280 imm_use_iterator iter;
6281 bool optimizable_load = true;
6282 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6284 gimple *use_stmt = USE_STMT (use_p);
6285 if (is_gimple_debug (use_stmt))
6286 continue;
6287 if (gimple_code (use_stmt) == GIMPLE_PHI
6288 || is_gimple_call (use_stmt)
6289 || gimple_code (use_stmt) == GIMPLE_ASM)
6291 optimizable_load = false;
6292 break;
6296 ssa_op_iter oi;
6297 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6298 oi, SSA_OP_USE)
6300 tree s2 = USE_FROM_PTR (use_p);
6301 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6303 optimizable_load = false;
6304 break;
6308 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6310 if (!large_huge.m_loads)
6311 large_huge.m_loads = BITMAP_ALLOC (NULL);
6312 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6316 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6317 into memory. Such functions could have no large/huge SSA_NAMEs. */
6318 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6320 gimple *g = SSA_NAME_DEF_STMT (s);
6321 if (is_gimple_assign (g) && gimple_store_p (g))
6323 tree t = gimple_assign_rhs1 (g);
6324 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6325 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6326 has_large_huge = true;
6331 if (large_huge.m_names || has_large_huge)
6333 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6334 calculate_dominance_info (CDI_DOMINATORS);
6335 if (optimize)
6336 enable_ranger (cfun);
6337 if (large_huge.m_loads)
6339 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6340 entry->aux = NULL;
6341 bitint_dom_walker (large_huge.m_names,
6342 large_huge.m_loads).walk (entry);
6343 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6344 clear_aux_for_blocks ();
6345 BITMAP_FREE (large_huge.m_loads);
6347 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6348 large_huge.m_limb_size
6349 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6351 if (large_huge.m_names)
6353 large_huge.m_map
6354 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6355 coalesce_ssa_name (large_huge.m_map);
6356 partition_view_normal (large_huge.m_map);
6357 if (dump_file && (dump_flags & TDF_DETAILS))
6359 fprintf (dump_file, "After Coalescing:\n");
6360 dump_var_map (dump_file, large_huge.m_map);
6362 large_huge.m_vars
6363 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6364 bitmap_iterator bi;
6365 if (has_large_huge_parm_result)
6366 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6368 tree s = ssa_name (i);
6369 if (SSA_NAME_VAR (s)
6370 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6371 && SSA_NAME_IS_DEFAULT_DEF (s))
6372 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6374 int p = var_to_partition (large_huge.m_map, s);
6375 if (large_huge.m_vars[p] == NULL_TREE)
6377 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6378 mark_addressable (SSA_NAME_VAR (s));
6382 tree atype = NULL_TREE;
6383 if (dump_file && (dump_flags & TDF_DETAILS))
6384 fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
6385 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6387 tree s = ssa_name (i);
6388 int p = var_to_partition (large_huge.m_map, s);
6389 if (large_huge.m_vars[p] == NULL_TREE)
6391 if (atype == NULL_TREE
6392 || !tree_int_cst_equal (TYPE_SIZE (atype),
6393 TYPE_SIZE (TREE_TYPE (s))))
6395 unsigned HOST_WIDE_INT nelts
6396 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6397 atype = build_array_type_nelts (large_huge.m_limb_type,
6398 nelts);
6400 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6401 mark_addressable (large_huge.m_vars[p]);
6403 if (dump_file && (dump_flags & TDF_DETAILS))
6405 print_generic_expr (dump_file, s, TDF_SLIM);
6406 fprintf (dump_file, " -> ");
6407 print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
6408 fprintf (dump_file, "\n");
6413 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6415 gimple_stmt_iterator prev;
6416 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6417 gsi = prev)
6419 prev = gsi;
6420 gsi_prev (&prev);
6421 ssa_op_iter iter;
6422 gimple *stmt = gsi_stmt (gsi);
6423 if (is_gimple_debug (stmt))
6424 continue;
6425 bitint_prec_kind kind = bitint_prec_small;
6426 tree t;
6427 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6428 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6430 bitint_prec_kind this_kind
6431 = bitint_precision_kind (TREE_TYPE (t));
6432 kind = MAX (kind, this_kind);
6434 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6436 t = gimple_assign_rhs1 (stmt);
6437 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6439 bitint_prec_kind this_kind
6440 = bitint_precision_kind (TREE_TYPE (t));
6441 kind = MAX (kind, this_kind);
6444 if (is_gimple_assign (stmt)
6445 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6447 t = gimple_assign_rhs1 (stmt);
6448 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6449 && TREE_CODE (t) == INTEGER_CST)
6451 bitint_prec_kind this_kind
6452 = bitint_precision_kind (TREE_TYPE (t));
6453 kind = MAX (kind, this_kind);
6456 if (is_gimple_call (stmt))
6458 t = gimple_call_lhs (stmt);
6459 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6461 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6462 kind = MAX (kind, this_kind);
6463 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6465 this_kind
6466 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6467 kind = MAX (kind, this_kind);
6471 if (kind == bitint_prec_small)
6472 continue;
6473 switch (gimple_code (stmt))
6475 case GIMPLE_CALL:
6476 /* For now. We'll need to handle some internal functions and
6477 perhaps some builtins. */
6478 if (kind == bitint_prec_middle)
6479 continue;
6480 break;
6481 case GIMPLE_ASM:
6482 if (kind == bitint_prec_middle)
6483 continue;
6484 break;
6485 case GIMPLE_RETURN:
6486 continue;
6487 case GIMPLE_ASSIGN:
6488 if (gimple_clobber_p (stmt))
6489 continue;
6490 if (kind >= bitint_prec_large)
6491 break;
6492 if (gimple_assign_single_p (stmt))
6493 /* No need to lower copies, loads or stores. */
6494 continue;
6495 if (gimple_assign_cast_p (stmt))
6497 tree lhs = gimple_assign_lhs (stmt);
6498 tree rhs = gimple_assign_rhs1 (stmt);
6499 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6500 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6501 && (TYPE_PRECISION (TREE_TYPE (lhs))
6502 == TYPE_PRECISION (TREE_TYPE (rhs))))
6503 /* No need to lower casts to same precision. */
6504 continue;
6506 break;
6507 default:
6508 break;
6511 if (kind == bitint_prec_middle)
6513 tree type = NULL_TREE;
6514 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6515 with the same precision and back. */
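/* Added example (assuming _BitInt(100) is bitint_prec_middle on the
   target): the addition

     c = a + b;	  // all _BitInt(100)

   is rewritten below roughly as

     a.0 = (uint100) a;  b.0 = (uint100) b;
     c.0 = a.0 + b.0;    c = (_BitInt(100)) c.0;

   where uint100 stands for build_nonstandard_integer_type (100, uns),
   a type the rest of the compiler already knows how to expand.  */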
6516 unsigned int nops = gimple_num_ops (stmt);
6517 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6518 i < nops; ++i)
6519 if (tree op = gimple_op (stmt, i))
6521 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6522 if (nop != op)
6523 gimple_set_op (stmt, i, nop);
6524 else if (COMPARISON_CLASS_P (op))
6526 TREE_OPERAND (op, 0)
6527 = maybe_cast_middle_bitint (&gsi,
6528 TREE_OPERAND (op, 0),
6529 type);
6530 TREE_OPERAND (op, 1)
6531 = maybe_cast_middle_bitint (&gsi,
6532 TREE_OPERAND (op, 1),
6533 type);
6535 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6537 CASE_LOW (op)
6538 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6539 type);
6540 CASE_HIGH (op)
6541 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6542 type);
6545 if (tree lhs = gimple_get_lhs (stmt))
6546 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6547 && (bitint_precision_kind (TREE_TYPE (lhs))
6548 == bitint_prec_middle))
6550 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6551 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6552 type = build_nonstandard_integer_type (prec, uns);
6553 tree lhs2 = make_ssa_name (type);
6554 gimple_set_lhs (stmt, lhs2);
6555 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6556 if (stmt_ends_bb_p (stmt))
6558 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6559 gsi_insert_on_edge_immediate (e, g);
6561 else
6562 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6564 update_stmt (stmt);
6565 continue;
6568 if (tree lhs = gimple_get_lhs (stmt))
6569 if (TREE_CODE (lhs) == SSA_NAME)
6571 tree type = TREE_TYPE (lhs);
6572 if (TREE_CODE (type) == COMPLEX_TYPE)
6573 type = TREE_TYPE (type);
6574 if (TREE_CODE (type) == BITINT_TYPE
6575 && bitint_precision_kind (type) >= bitint_prec_large
6576 && (large_huge.m_names == NULL
6577 || !bitmap_bit_p (large_huge.m_names,
6578 SSA_NAME_VERSION (lhs))))
6579 continue;
6582 large_huge.lower_stmt (stmt);
6585 tree atype = NULL_TREE;
6586 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6587 gsi_next (&gsi))
6589 gphi *phi = gsi.phi ();
6590 tree lhs = gimple_phi_result (phi);
6591 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6592 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6593 continue;
6594 int p1 = var_to_partition (large_huge.m_map, lhs);
6595 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6596 tree v1 = large_huge.m_vars[p1];
6597 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6599 tree arg = gimple_phi_arg_def (phi, i);
6600 edge e = gimple_phi_arg_edge (phi, i);
6601 gimple *g;
6602 switch (TREE_CODE (arg))
6604 case INTEGER_CST:
6605 if (integer_zerop (arg) && VAR_P (v1))
6607 tree zero = build_zero_cst (TREE_TYPE (v1));
6608 g = gimple_build_assign (v1, zero);
6609 gsi_insert_on_edge (e, g);
6610 edge_insertions = true;
6611 break;
6613 int ext;
6614 unsigned int min_prec, prec, rem;
6615 tree c;
6616 prec = TYPE_PRECISION (TREE_TYPE (arg));
6617 rem = prec % (2 * limb_prec);
6618 min_prec = bitint_min_cst_precision (arg, ext);
6619 if (min_prec > prec - rem - 2 * limb_prec
6620 && min_prec > (unsigned) limb_prec)
6621 /* Constant which has enough significant bits that it
6622 isn't worth trying to save .rodata space by extending
6623 from smaller number. */
6624 min_prec = prec;
6625 else
6626 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6627 if (min_prec == 0)
6628 c = NULL_TREE;
6629 else if (min_prec == prec)
6630 c = tree_output_constant_def (arg);
6631 else if (min_prec == (unsigned) limb_prec)
6632 c = fold_convert (large_huge.m_limb_type, arg);
6633 else
6635 tree ctype = build_bitint_type (min_prec, 1);
6636 c = tree_output_constant_def (fold_convert (ctype, arg));
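/* Added example (64-bit limb assumed): for a PHI argument of
   (_BitInt(512)) -1, bitint_min_cst_precision returns a tiny MIN_PREC
   with EXT == -1, so C above is a single all-ones limb; the remaining
   limbs of V1 are then filled by the memset (-1) path below instead of
   emitting all eight limbs into .rodata.  A zero-extended constant
   (EXT == 0) gets its trailing limbs cleared by a zero store.  */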
6638 if (c)
6640 if (VAR_P (v1) && min_prec == prec)
6642 tree v2 = build1 (VIEW_CONVERT_EXPR,
6643 TREE_TYPE (v1), c);
6644 g = gimple_build_assign (v1, v2);
6645 gsi_insert_on_edge (e, g);
6646 edge_insertions = true;
6647 break;
6649 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6650 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6651 TREE_TYPE (c), v1),
6652 c);
6653 else
6655 unsigned HOST_WIDE_INT nelts
6656 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6657 / limb_prec;
6658 tree vtype
6659 = build_array_type_nelts (large_huge.m_limb_type,
6660 nelts);
6661 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6662 vtype, v1),
6663 build1 (VIEW_CONVERT_EXPR,
6664 vtype, c));
6666 gsi_insert_on_edge (e, g);
6668 if (ext == 0)
6670 unsigned HOST_WIDE_INT nelts
6671 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6672 - min_prec) / limb_prec;
6673 tree vtype
6674 = build_array_type_nelts (large_huge.m_limb_type,
6675 nelts);
6676 tree ptype = build_pointer_type (TREE_TYPE (v1));
6677 tree off;
6678 if (c)
6679 off = fold_convert (ptype,
6680 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6681 else
6682 off = build_zero_cst (ptype);
6683 tree vd = build2 (MEM_REF, vtype,
6684 build_fold_addr_expr (v1), off);
6685 g = gimple_build_assign (vd, build_zero_cst (vtype));
6687 else
6689 tree vd = v1;
6690 if (c)
6692 tree ptype = build_pointer_type (TREE_TYPE (v1));
6693 tree off
6694 = fold_convert (ptype,
6695 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6696 vd = build2 (MEM_REF, large_huge.m_limb_type,
6697 build_fold_addr_expr (v1), off);
6699 vd = build_fold_addr_expr (vd);
6700 unsigned HOST_WIDE_INT nbytes
6701 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6702 if (c)
6703 nbytes
6704 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6705 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6706 g = gimple_build_call (fn, 3, vd,
6707 integer_minus_one_node,
6708 build_int_cst (sizetype,
6709 nbytes));
6711 gsi_insert_on_edge (e, g);
6712 edge_insertions = true;
6713 break;
6714 default:
6715 gcc_unreachable ();
6716 case SSA_NAME:
6717 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6719 if (large_huge.m_names == NULL
6720 || !bitmap_bit_p (large_huge.m_names,
6721 SSA_NAME_VERSION (arg)))
6722 continue;
6724 int p2 = var_to_partition (large_huge.m_map, arg);
6725 if (p1 == p2)
6726 continue;
6727 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6728 tree v2 = large_huge.m_vars[p2];
6729 if (VAR_P (v1) && VAR_P (v2))
6730 g = gimple_build_assign (v1, v2);
6731 else if (VAR_P (v1))
6732 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6733 TREE_TYPE (v1), v2));
6734 else if (VAR_P (v2))
6735 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6736 TREE_TYPE (v2), v1), v2);
6737 else
6739 if (atype == NULL_TREE
6740 || !tree_int_cst_equal (TYPE_SIZE (atype),
6741 TYPE_SIZE (TREE_TYPE (lhs))))
6743 unsigned HOST_WIDE_INT nelts
6744 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6745 / limb_prec;
6746 atype
6747 = build_array_type_nelts (large_huge.m_limb_type,
6748 nelts);
6750 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6751 atype, v1),
6752 build1 (VIEW_CONVERT_EXPR,
6753 atype, v2));
6755 gsi_insert_on_edge (e, g);
6756 edge_insertions = true;
6757 break;
6763 if (large_huge.m_names || has_large_huge)
6765 gimple *nop = NULL;
6766 for (i = 0; i < num_ssa_names; ++i)
6768 tree s = ssa_name (i);
6769 if (s == NULL_TREE)
6770 continue;
6771 tree type = TREE_TYPE (s);
6772 if (TREE_CODE (type) == COMPLEX_TYPE)
6773 type = TREE_TYPE (type);
6774 if (TREE_CODE (type) == BITINT_TYPE
6775 && bitint_precision_kind (type) >= bitint_prec_large)
6777 if (large_huge.m_preserved
6778 && bitmap_bit_p (large_huge.m_preserved,
6779 SSA_NAME_VERSION (s)))
6780 continue;
6781 gimple *g = SSA_NAME_DEF_STMT (s);
6782 if (gimple_code (g) == GIMPLE_NOP)
6784 if (SSA_NAME_VAR (s))
6785 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6786 release_ssa_name (s);
6787 continue;
6789 if (gimple_bb (g) == NULL)
6791 release_ssa_name (s);
6792 continue;
6794 if (gimple_code (g) != GIMPLE_ASM)
6796 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6797 bool save_vta = flag_var_tracking_assignments;
6798 flag_var_tracking_assignments = false;
6799 gsi_remove (&gsi, true);
6800 flag_var_tracking_assignments = save_vta;
6802 if (nop == NULL)
6803 nop = gimple_build_nop ();
6804 SSA_NAME_DEF_STMT (s) = nop;
6805 release_ssa_name (s);
6808 if (optimize)
6809 disable_ranger (cfun);
6812 if (edge_insertions)
6813 gsi_commit_edge_inserts ();
6815 return ret;
6818 namespace {
6820 const pass_data pass_data_lower_bitint =
6822 GIMPLE_PASS, /* type */
6823 "bitintlower", /* name */
6824 OPTGROUP_NONE, /* optinfo_flags */
6825 TV_NONE, /* tv_id */
6826 PROP_ssa, /* properties_required */
6827 PROP_gimple_lbitint, /* properties_provided */
6828 0, /* properties_destroyed */
6829 0, /* todo_flags_start */
6830 0, /* todo_flags_finish */
6833 class pass_lower_bitint : public gimple_opt_pass
6835 public:
6836 pass_lower_bitint (gcc::context *ctxt)
6837 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6840 /* opt_pass methods: */
6841 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6842 unsigned int execute (function *) final override
6844 return gimple_lower_bitint ();
6847 }; // class pass_lower_bitint
6849 } // anon namespace
6851 gimple_opt_pass *
6852 make_pass_lower_bitint (gcc::context *ctxt)
6854 return new pass_lower_bitint (ctxt);
6858 namespace {
6860 const pass_data pass_data_lower_bitint_O0 =
6862 GIMPLE_PASS, /* type */
6863 "bitintlower0", /* name */
6864 OPTGROUP_NONE, /* optinfo_flags */
6865 TV_NONE, /* tv_id */
6866 PROP_cfg, /* properties_required */
6867 PROP_gimple_lbitint, /* properties_provided */
6868 0, /* properties_destroyed */
6869 0, /* todo_flags_start */
6870 0, /* todo_flags_finish */
6873 class pass_lower_bitint_O0 : public gimple_opt_pass
6875 public:
6876 pass_lower_bitint_O0 (gcc::context *ctxt)
6877 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6880 /* opt_pass methods: */
6881 bool gate (function *fun) final override
6883 /* With errors, normal optimization passes are not run. If we don't
6884 lower bitint operations at all, rtl expansion will abort. */
6885 return !(fun->curr_properties & PROP_gimple_lbitint);
6888 unsigned int execute (function *) final override
6890 return gimple_lower_bitint ();
6893 }; // class pass_lower_bitint_O0
6895 } // anon namespace
6897 gimple_opt_pass *
6898 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6900 return new pass_lower_bitint_O0 (ctxt);