Daily bump.
[official-gcc.git] / gcc / gimple-lower-bitint.cc
blob8993a6136e7dc119b078dcd983212c7dd1460d4e
1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "fold-const.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "cfgloop.h"
37 #include "cfganal.h"
38 #include "target.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
41 #include "domwalk.h"
42 #include "memmodel.h"
43 #include "optabs.h"
44 #include "varasm.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
50 #include "tree-eh.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
56 #include "ubsan.h"
57 #include "gimple-lower-bitint.h"
59 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
60 target hook says it is a single limb, middle _BitInt which per ABI
61 does not, but there is some INTEGER_TYPE in which arithmetics can be
62 performed (operations on such _BitInt are lowered to casts to that
63 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
64 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
65 ones), large _BitInt which should by straight line code and
66 finally huge _BitInt which should be handled by loops over the limbs. */
68 enum bitint_prec_kind {
69 bitint_prec_small,
70 bitint_prec_middle,
71 bitint_prec_large,
72 bitint_prec_huge
75 /* Caches to speed up bitint_precision_kind. */
77 static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
78 static int limb_prec;
80 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
82 static bitint_prec_kind
83 bitint_precision_kind (int prec)
85 if (prec <= small_max_prec)
86 return bitint_prec_small;
87 if (huge_min_prec && prec >= huge_min_prec)
88 return bitint_prec_huge;
89 if (large_min_prec && prec >= large_min_prec)
90 return bitint_prec_large;
91 if (mid_min_prec && prec >= mid_min_prec)
92 return bitint_prec_middle;
94 struct bitint_info info;
95 bool ok = targetm.c.bitint_type_info (prec, &info);
96 gcc_assert (ok);
97 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
98 if (prec <= GET_MODE_PRECISION (limb_mode))
100 small_max_prec = prec;
101 return bitint_prec_small;
103 if (!large_min_prec
104 && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
105 large_min_prec = MAX_FIXED_MODE_SIZE + 1;
106 if (!limb_prec)
107 limb_prec = GET_MODE_PRECISION (limb_mode);
108 if (!huge_min_prec)
110 if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
111 huge_min_prec = 4 * limb_prec;
112 else
113 huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
115 if (prec <= MAX_FIXED_MODE_SIZE)
117 if (!mid_min_prec || prec < mid_min_prec)
118 mid_min_prec = prec;
119 return bitint_prec_middle;
121 if (large_min_prec && prec <= large_min_prec)
122 return bitint_prec_large;
123 return bitint_prec_huge;
126 /* Same for a TYPE. */
128 static bitint_prec_kind
129 bitint_precision_kind (tree type)
131 return bitint_precision_kind (TYPE_PRECISION (type));
134 /* Return minimum precision needed to describe INTEGER_CST
135 CST. All bits above that precision up to precision of
136 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
137 if EXT is set to -1. */
139 static unsigned
140 bitint_min_cst_precision (tree cst, int &ext)
142 ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
143 wide_int w = wi::to_wide (cst);
144 unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
145 /* For signed values, we don't need to count the sign bit,
146 we'll use constant 0 or -1 for the upper bits. */
147 if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
148 --min_prec;
149 else
151 /* For unsigned values, also try signed min_precision
152 in case the constant has lots of most significant bits set. */
153 unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
154 if (min_prec2 < min_prec)
156 ext = -1;
157 return min_prec2;
160 return min_prec;
163 namespace {
165 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
166 cached in TYPE and return it. */
168 tree
169 maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
171 if (op == NULL_TREE
172 || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
173 || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
174 return op;
176 int prec = TYPE_PRECISION (TREE_TYPE (op));
177 int uns = TYPE_UNSIGNED (TREE_TYPE (op));
178 if (type == NULL_TREE
179 || TYPE_PRECISION (type) != prec
180 || TYPE_UNSIGNED (type) != uns)
181 type = build_nonstandard_integer_type (prec, uns);
183 if (TREE_CODE (op) != SSA_NAME)
185 tree nop = fold_convert (type, op);
186 if (is_gimple_val (nop))
187 return nop;
190 tree nop = make_ssa_name (type);
191 gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
192 gsi_insert_before (gsi, g, GSI_SAME_STMT);
193 return nop;
196 /* Return true if STMT can be handled in a loop from least to most
197 significant limb together with its dependencies. */
199 bool
200 mergeable_op (gimple *stmt)
202 if (!is_gimple_assign (stmt))
203 return false;
204 switch (gimple_assign_rhs_code (stmt))
206 case PLUS_EXPR:
207 case MINUS_EXPR:
208 case NEGATE_EXPR:
209 case BIT_AND_EXPR:
210 case BIT_IOR_EXPR:
211 case BIT_XOR_EXPR:
212 case BIT_NOT_EXPR:
213 case SSA_NAME:
214 case INTEGER_CST:
215 return true;
216 case LSHIFT_EXPR:
218 tree cnt = gimple_assign_rhs2 (stmt);
219 if (tree_fits_uhwi_p (cnt)
220 && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
221 return true;
223 break;
224 CASE_CONVERT:
225 case VIEW_CONVERT_EXPR:
227 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
228 tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
229 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
230 && TREE_CODE (lhs_type) == BITINT_TYPE
231 && TREE_CODE (rhs_type) == BITINT_TYPE
232 && bitint_precision_kind (lhs_type) >= bitint_prec_large
233 && bitint_precision_kind (rhs_type) >= bitint_prec_large
234 && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
236 if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
237 return true;
238 if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
239 return true;
240 if (bitint_precision_kind (lhs_type) == bitint_prec_large)
241 return true;
243 break;
245 default:
246 break;
248 return false;
251 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
252 _Complex large/huge _BitInt lhs which has at most two immediate uses,
253 at most one use in REALPART_EXPR stmt in the same bb and exactly one
254 IMAGPART_EXPR use in the same bb with a single use which casts it to
255 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
256 return 2. Such cases (most common uses of those builtins) can be
257 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
258 of REALPART_EXPR as not needed to be backed up by a stack variable.
259 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
262 optimizable_arith_overflow (gimple *stmt)
264 bool is_ubsan = false;
265 if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
266 return false;
267 switch (gimple_call_internal_fn (stmt))
269 case IFN_ADD_OVERFLOW:
270 case IFN_SUB_OVERFLOW:
271 case IFN_MUL_OVERFLOW:
272 break;
273 case IFN_UBSAN_CHECK_ADD:
274 case IFN_UBSAN_CHECK_SUB:
275 case IFN_UBSAN_CHECK_MUL:
276 is_ubsan = true;
277 break;
278 default:
279 return 0;
281 tree lhs = gimple_call_lhs (stmt);
282 if (!lhs)
283 return 0;
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
285 return 0;
286 tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
287 if (TREE_CODE (type) != BITINT_TYPE
288 || bitint_precision_kind (type) < bitint_prec_large)
289 return 0;
291 if (is_ubsan)
293 use_operand_p use_p;
294 gimple *use_stmt;
295 if (!single_imm_use (lhs, &use_p, &use_stmt)
296 || gimple_bb (use_stmt) != gimple_bb (stmt)
297 || !gimple_store_p (use_stmt)
298 || !is_gimple_assign (use_stmt)
299 || gimple_has_volatile_ops (use_stmt)
300 || stmt_ends_bb_p (use_stmt))
301 return 0;
302 return 3;
305 imm_use_iterator ui;
306 use_operand_p use_p;
307 int seen = 0;
308 gimple *realpart = NULL, *cast = NULL;
309 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
311 gimple *g = USE_STMT (use_p);
312 if (is_gimple_debug (g))
313 continue;
314 if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
315 return 0;
316 if (gimple_assign_rhs_code (g) == REALPART_EXPR)
318 if ((seen & 1) != 0)
319 return 0;
320 seen |= 1;
321 realpart = g;
323 else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
325 if ((seen & 2) != 0)
326 return 0;
327 seen |= 2;
329 use_operand_p use2_p;
330 gimple *use_stmt;
331 tree lhs2 = gimple_assign_lhs (g);
332 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
333 return 0;
334 if (!single_imm_use (lhs2, &use2_p, &use_stmt)
335 || gimple_bb (use_stmt) != gimple_bb (stmt)
336 || !gimple_assign_cast_p (use_stmt))
337 return 0;
339 lhs2 = gimple_assign_lhs (use_stmt);
340 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
341 || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
342 return 0;
343 cast = use_stmt;
345 else
346 return 0;
348 if ((seen & 2) == 0)
349 return 0;
350 if (seen == 3)
352 /* Punt if the cast stmt appears before realpart stmt, because
353 if both appear, the lowering wants to emit all the code
354 at the location of realpart stmt. */
355 gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
356 unsigned int cnt = 0;
359 gsi_prev_nondebug (&gsi);
360 if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
361 return 0;
362 if (gsi_stmt (gsi) == stmt)
363 return 2;
364 /* If realpart is too far from stmt, punt as well.
365 Usually it will appear right after it. */
366 if (++cnt == 32)
367 return 0;
369 while (1);
371 return 1;
374 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
375 comparing large/huge _BitInt types, return the comparison code and if
376 non-NULL fill in the comparison operands to *POP1 and *POP2. */
378 tree_code
379 comparison_op (gimple *stmt, tree *pop1, tree *pop2)
381 tree op1 = NULL_TREE, op2 = NULL_TREE;
382 tree_code code = ERROR_MARK;
383 if (gimple_code (stmt) == GIMPLE_COND)
385 code = gimple_cond_code (stmt);
386 op1 = gimple_cond_lhs (stmt);
387 op2 = gimple_cond_rhs (stmt);
389 else if (is_gimple_assign (stmt))
391 code = gimple_assign_rhs_code (stmt);
392 op1 = gimple_assign_rhs1 (stmt);
393 if (TREE_CODE_CLASS (code) == tcc_comparison
394 || TREE_CODE_CLASS (code) == tcc_binary)
395 op2 = gimple_assign_rhs2 (stmt);
397 if (TREE_CODE_CLASS (code) != tcc_comparison)
398 return ERROR_MARK;
399 tree type = TREE_TYPE (op1);
400 if (TREE_CODE (type) != BITINT_TYPE
401 || bitint_precision_kind (type) < bitint_prec_large)
402 return ERROR_MARK;
403 if (pop1)
405 *pop1 = op1;
406 *pop2 = op2;
408 return code;
411 /* Class used during large/huge _BitInt lowering containing all the
412 state for the methods. */
414 struct bitint_large_huge
416 bitint_large_huge ()
417 : m_names (NULL), m_loads (NULL), m_preserved (NULL),
418 m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
419 m_limb_type (NULL_TREE), m_data (vNULL) {}
421 ~bitint_large_huge ();
423 void insert_before (gimple *);
424 tree limb_access_type (tree, tree);
425 tree limb_access (tree, tree, tree, bool);
426 void if_then (gimple *, profile_probability, edge &, edge &);
427 void if_then_else (gimple *, profile_probability, edge &, edge &);
428 void if_then_if_then_else (gimple *g, gimple *,
429 profile_probability, profile_probability,
430 edge &, edge &, edge &);
431 tree handle_operand (tree, tree);
432 tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
433 tree add_cast (tree, tree);
434 tree handle_plus_minus (tree_code, tree, tree, tree);
435 tree handle_lshift (tree, tree, tree);
436 tree handle_cast (tree, tree, tree);
437 tree handle_load (gimple *, tree);
438 tree handle_stmt (gimple *, tree);
439 tree handle_operand_addr (tree, gimple *, int *, int *);
440 tree create_loop (tree, tree *);
441 tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
442 tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
443 void lower_shift_stmt (tree, gimple *);
444 void lower_muldiv_stmt (tree, gimple *);
445 void lower_float_conv_stmt (tree, gimple *);
446 tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
447 unsigned int, bool);
448 void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
449 tree_code);
450 void lower_addsub_overflow (tree, gimple *);
451 void lower_mul_overflow (tree, gimple *);
452 void lower_cplxpart_stmt (tree, gimple *);
453 void lower_complexexpr_stmt (gimple *);
454 void lower_bit_query (gimple *);
455 void lower_call (tree, gimple *);
456 void lower_asm (gimple *);
457 void lower_stmt (gimple *);
459 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
460 merged with their uses. */
461 bitmap m_names;
462 /* Subset of those for lhs of load statements. These will be
463 cleared in m_names if the loads will be mergeable with all
464 their uses. */
465 bitmap m_loads;
466 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
467 to later passes (arguments or return values of calls). */
468 bitmap m_preserved;
469 /* Subset of m_names which have a single use. As the lowering
470 can replace various original statements with their lowered
471 form even before it is done iterating over all basic blocks,
472 testing has_single_use for the purpose of emitting clobbers
473 doesn't work properly. */
474 bitmap m_single_use_names;
475 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
476 set in m_names. */
477 var_map m_map;
478 /* Mapping of the partitions to corresponding decls. */
479 tree *m_vars;
480 /* Unsigned integer type with limb precision. */
481 tree m_limb_type;
482 /* Its TYPE_SIZE_UNIT. */
483 unsigned HOST_WIDE_INT m_limb_size;
484 /* Location of a gimple stmt which is being currently lowered. */
485 location_t m_loc;
486 /* Current stmt iterator where code is being lowered currently. */
487 gimple_stmt_iterator m_gsi;
488 /* Statement after which any clobbers should be added if non-NULL. */
489 gimple *m_after_stmt;
490 /* Set when creating loops to the loop header bb and its preheader. */
491 basic_block m_bb, m_preheader_bb;
492 /* Stmt iterator after which initialization statements should be emitted. */
493 gimple_stmt_iterator m_init_gsi;
494 /* Decl into which a mergeable statement stores result. */
495 tree m_lhs;
496 /* handle_operand/handle_stmt can be invoked in various ways.
498 lower_mergeable_stmt for large _BitInt calls those with constant
499 idx only, expanding to straight line code, for huge _BitInt
500 emits a loop from least significant limb upwards, where each loop
501 iteration handles 2 limbs, plus there can be up to one full limb
502 and one partial limb processed after the loop, where handle_operand
503 and/or handle_stmt are called with constant idx. m_upwards_2limb
504 is set for this case, false otherwise. m_upwards is true if it
505 is either large or huge _BitInt handled by lower_mergeable_stmt,
506 i.e. indexes always increase.
508 Another way is used by lower_comparison_stmt, which walks limbs
509 from most significant to least significant, partial limb if any
510 processed first with constant idx and then loop processing a single
511 limb per iteration with non-constant idx.
513 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
514 destination limbs are processed from most significant to least
515 significant or for RSHIFT_EXPR the other way around, in loops or
516 straight line code, but idx usually is non-constant (so from
517 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
518 handling there can access even partial limbs using non-constant
519 idx (then m_var_msb should be true, for all the other cases
520 including lower_mergeable_stmt/lower_comparison_stmt that is
521 not the case and so m_var_msb should be false.
523 m_first should be set the first time handle_operand/handle_stmt
524 is called and clear when it is called for some other limb with
525 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
526 or statement (e.g. +/-/<< with < limb_prec constant) needs some
527 state between the different calls, when m_first is true it should
528 push some trees to m_data vector and also make sure m_data_cnt is
529 incremented by how many trees were pushed, and when m_first is
530 false, it can use the m_data[m_data_cnt] etc. data or update them,
531 just needs to bump m_data_cnt by the same amount as when it was
532 called with m_first set. The toplevel calls to
533 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
534 m_data vector when setting m_first to true.
536 m_cast_conditional and m_bitfld_load are used when handling a
537 bit-field load inside of a widening cast. handle_cast sometimes
538 needs to do runtime comparisons and handle_operand only conditionally
539 or even in two separate conditional blocks for one idx (once with
540 constant index after comparing the runtime one for equality with the
541 constant). In these cases, m_cast_conditional is set to true and
542 the bit-field load then communicates its m_data_cnt to handle_cast
543 using m_bitfld_load. */
544 bool m_first;
545 bool m_var_msb;
546 unsigned m_upwards_2limb;
547 bool m_upwards;
548 bool m_cast_conditional;
549 unsigned m_bitfld_load;
550 vec<tree> m_data;
551 unsigned int m_data_cnt;
554 bitint_large_huge::~bitint_large_huge ()
556 BITMAP_FREE (m_names);
557 BITMAP_FREE (m_loads);
558 BITMAP_FREE (m_preserved);
559 BITMAP_FREE (m_single_use_names);
560 if (m_map)
561 delete_var_map (m_map);
562 XDELETEVEC (m_vars);
563 m_data.release ();
566 /* Insert gimple statement G before current location
567 and set its gimple_location. */
569 void
570 bitint_large_huge::insert_before (gimple *g)
572 gimple_set_location (g, m_loc);
573 gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
576 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
577 This is normally m_limb_type, except for a partial most
578 significant limb if any. */
580 tree
581 bitint_large_huge::limb_access_type (tree type, tree idx)
583 if (type == NULL_TREE)
584 return m_limb_type;
585 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
586 unsigned int prec = TYPE_PRECISION (type);
587 gcc_assert (i * limb_prec < prec);
588 if ((i + 1) * limb_prec <= prec)
589 return m_limb_type;
590 else
591 return build_nonstandard_integer_type (prec % limb_prec,
592 TYPE_UNSIGNED (type));
595 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
596 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
598 tree
599 bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
601 tree atype = (tree_fits_uhwi_p (idx)
602 ? limb_access_type (type, idx) : m_limb_type);
603 tree ret;
604 if (DECL_P (var) && tree_fits_uhwi_p (idx))
606 tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
607 unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
608 ret = build2 (MEM_REF, m_limb_type,
609 build_fold_addr_expr (var),
610 build_int_cst (ptype, off));
611 TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
612 TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
614 else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
617 = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
618 size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
619 build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
620 tree_to_uhwi (idx)
621 * m_limb_size)));
622 TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
623 TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
624 TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
626 else
628 var = unshare_expr (var);
629 if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
630 || !useless_type_conversion_p (m_limb_type,
631 TREE_TYPE (TREE_TYPE (var))))
633 unsigned HOST_WIDE_INT nelts
634 = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
635 tree atype = build_array_type_nelts (m_limb_type, nelts);
636 var = build1 (VIEW_CONVERT_EXPR, atype, var);
638 ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
640 if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
642 gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
643 insert_before (g);
644 ret = gimple_assign_lhs (g);
645 ret = build1 (NOP_EXPR, atype, ret);
647 return ret;
650 /* Emit a half diamond,
651 if (COND)
655 | new_bb1
659 or if (COND) new_bb1;
660 PROB is the probability that the condition is true.
661 Updates m_gsi to start of new_bb1.
662 Sets EDGE_TRUE to edge from new_bb1 to successor and
663 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
665 void
666 bitint_large_huge::if_then (gimple *cond, profile_probability prob,
667 edge &edge_true, edge &edge_false)
669 insert_before (cond);
670 edge e1 = split_block (gsi_bb (m_gsi), cond);
671 edge e2 = split_block (e1->dest, (gimple *) NULL);
672 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
673 e1->flags = EDGE_TRUE_VALUE;
674 e1->probability = prob;
675 e3->probability = prob.invert ();
676 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
677 edge_true = e2;
678 edge_false = e3;
679 m_gsi = gsi_after_labels (e1->dest);
682 /* Emit a full diamond,
683 if (COND)
687 new_bb1 new_bb2
691 or if (COND) new_bb2; else new_bb1;
692 PROB is the probability that the condition is true.
693 Updates m_gsi to start of new_bb2.
694 Sets EDGE_TRUE to edge from new_bb1 to successor and
695 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
697 void
698 bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
699 edge &edge_true, edge &edge_false)
701 insert_before (cond);
702 edge e1 = split_block (gsi_bb (m_gsi), cond);
703 edge e2 = split_block (e1->dest, (gimple *) NULL);
704 basic_block bb = create_empty_bb (e1->dest);
705 add_bb_to_loop (bb, e1->dest->loop_father);
706 edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
707 e1->flags = EDGE_FALSE_VALUE;
708 e3->probability = prob;
709 e1->probability = prob.invert ();
710 bb->count = e1->src->count.apply_probability (prob);
711 set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
712 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
713 edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
714 edge_false = e2;
715 m_gsi = gsi_after_labels (bb);
718 /* Emit a half diamond with full diamond in it
719 if (COND1)
723 | if (COND2)
724 | / \
725 | / \
726 |new_bb1 new_bb2
727 | | /
728 \ | /
729 \ | /
730 \ | /
732 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
733 PROB1 is the probability that the condition 1 is true.
734 PROB2 is the probability that the condition 2 is true.
735 Updates m_gsi to start of new_bb1.
736 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
737 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
738 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
739 If COND2 is NULL, this is equivalent to
740 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
741 EDGE_TRUE_TRUE = NULL; */
743 void
744 bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
745 profile_probability prob1,
746 profile_probability prob2,
747 edge &edge_true_true,
748 edge &edge_true_false,
749 edge &edge_false)
751 edge e2, e3, e4 = NULL;
752 if_then (cond1, prob1, e2, e3);
753 if (cond2 == NULL)
755 edge_true_true = NULL;
756 edge_true_false = e2;
757 edge_false = e3;
758 return;
760 insert_before (cond2);
761 e2 = split_block (gsi_bb (m_gsi), cond2);
762 basic_block bb = create_empty_bb (e2->dest);
763 add_bb_to_loop (bb, e2->dest->loop_father);
764 e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
765 set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
766 e4->probability = prob2;
767 e2->flags = EDGE_FALSE_VALUE;
768 e2->probability = prob2.invert ();
769 bb->count = e2->src->count.apply_probability (prob2);
770 e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
771 e2 = find_edge (e2->dest, e3->dest);
772 edge_true_true = e4;
773 edge_true_false = e2;
774 edge_false = e3;
775 m_gsi = gsi_after_labels (e2->src);
778 /* Emit code to access limb IDX from OP. */
780 tree
781 bitint_large_huge::handle_operand (tree op, tree idx)
783 switch (TREE_CODE (op))
785 case SSA_NAME:
786 if (m_names == NULL
787 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
789 if (SSA_NAME_IS_DEFAULT_DEF (op))
791 if (m_first)
793 tree v = create_tmp_reg (m_limb_type);
794 if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
796 DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
797 DECL_SOURCE_LOCATION (v)
798 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
800 v = get_or_create_ssa_default_def (cfun, v);
801 m_data.safe_push (v);
803 tree ret = m_data[m_data_cnt];
804 m_data_cnt++;
805 if (tree_fits_uhwi_p (idx))
807 tree type = limb_access_type (TREE_TYPE (op), idx);
808 ret = add_cast (type, ret);
810 return ret;
812 location_t loc_save = m_loc;
813 m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
814 tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
815 m_loc = loc_save;
816 return ret;
818 int p;
819 gimple *g;
820 tree t;
821 p = var_to_partition (m_map, op);
822 gcc_assert (m_vars[p] != NULL_TREE);
823 t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
824 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
825 insert_before (g);
826 t = gimple_assign_lhs (g);
827 if (m_first
828 && m_single_use_names
829 && m_vars[p] != m_lhs
830 && m_after_stmt
831 && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
833 tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
834 CLOBBER_STORAGE_END);
835 g = gimple_build_assign (m_vars[p], clobber);
836 gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
837 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
839 return t;
840 case INTEGER_CST:
841 if (tree_fits_uhwi_p (idx))
843 tree c, type = limb_access_type (TREE_TYPE (op), idx);
844 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
845 if (m_first)
847 m_data.safe_push (NULL_TREE);
848 m_data.safe_push (NULL_TREE);
850 if (limb_prec != HOST_BITS_PER_WIDE_INT)
852 wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
853 TYPE_SIGN (TREE_TYPE (op)));
854 c = wide_int_to_tree (type,
855 wide_int::from (w, TYPE_PRECISION (type),
856 UNSIGNED));
858 else if (i >= TREE_INT_CST_EXT_NUNITS (op))
859 c = build_int_cst (type,
860 tree_int_cst_sgn (op) < 0 ? -1 : 0);
861 else
862 c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
863 m_data_cnt += 2;
864 return c;
866 if (m_first
867 || (m_data[m_data_cnt] == NULL_TREE
868 && m_data[m_data_cnt + 1] == NULL_TREE))
870 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
871 unsigned int rem = prec % (2 * limb_prec);
872 int ext;
873 unsigned min_prec = bitint_min_cst_precision (op, ext);
874 if (m_first)
876 m_data.safe_push (NULL_TREE);
877 m_data.safe_push (NULL_TREE);
879 if (integer_zerop (op))
881 tree c = build_zero_cst (m_limb_type);
882 m_data[m_data_cnt] = c;
883 m_data[m_data_cnt + 1] = c;
885 else if (integer_all_onesp (op))
887 tree c = build_all_ones_cst (m_limb_type);
888 m_data[m_data_cnt] = c;
889 m_data[m_data_cnt + 1] = c;
891 else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
893 /* Single limb constant. Use a phi with that limb from
894 the preheader edge and 0 or -1 constant from the other edge
895 and for the second limb in the loop. */
896 tree out;
897 gcc_assert (m_first);
898 m_data.pop ();
899 m_data.pop ();
900 prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
901 build_int_cst (m_limb_type, ext));
903 else if (min_prec > prec - rem - 2 * limb_prec)
905 /* Constant which has enough significant bits that it isn't
906 worth trying to save .rodata space by extending from smaller
907 number. */
908 tree type;
909 if (m_var_msb)
910 type = TREE_TYPE (op);
911 else
912 /* If we have a guarantee the most significant partial limb
913 (if any) will be only accessed through handle_operand
914 with INTEGER_CST idx, we don't need to include the partial
915 limb in .rodata. */
916 type = build_bitint_type (prec - rem, 1);
917 tree c = tree_output_constant_def (fold_convert (type, op));
918 m_data[m_data_cnt] = c;
919 m_data[m_data_cnt + 1] = NULL_TREE;
921 else if (m_upwards_2limb)
923 /* Constant with smaller number of bits. Trade conditional
924 code for .rodata space by extending from smaller number. */
925 min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
926 tree type = build_bitint_type (min_prec, 1);
927 tree c = tree_output_constant_def (fold_convert (type, op));
928 tree idx2 = make_ssa_name (sizetype);
929 g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
930 insert_before (g);
931 g = gimple_build_cond (LT_EXPR, idx,
932 size_int (min_prec / limb_prec),
933 NULL_TREE, NULL_TREE);
934 edge edge_true, edge_false;
935 if_then (g, (min_prec >= (prec - rem) / 2
936 ? profile_probability::likely ()
937 : profile_probability::unlikely ()),
938 edge_true, edge_false);
939 tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
940 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
941 insert_before (g);
942 c1 = gimple_assign_lhs (g);
943 tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
944 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
945 insert_before (g);
946 c2 = gimple_assign_lhs (g);
947 tree c3 = build_int_cst (m_limb_type, ext);
948 m_gsi = gsi_after_labels (edge_true->dest);
949 m_data[m_data_cnt] = make_ssa_name (m_limb_type);
950 m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
951 gphi *phi = create_phi_node (m_data[m_data_cnt],
952 edge_true->dest);
953 add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
954 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
955 phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
956 add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
957 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
959 else
961 /* Constant with smaller number of bits. Trade conditional
962 code for .rodata space by extending from smaller number.
963 Version for loops with random access to the limbs or
964 downwards loops. */
965 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
966 tree c;
967 if (min_prec <= (unsigned) limb_prec)
968 c = fold_convert (m_limb_type, op);
969 else
971 tree type = build_bitint_type (min_prec, 1);
972 c = tree_output_constant_def (fold_convert (type, op));
974 m_data[m_data_cnt] = c;
975 m_data[m_data_cnt + 1] = integer_type_node;
977 t = m_data[m_data_cnt];
978 if (m_data[m_data_cnt + 1] == NULL_TREE)
980 t = limb_access (TREE_TYPE (op), t, idx, false);
981 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
982 insert_before (g);
983 t = gimple_assign_lhs (g);
986 else if (m_data[m_data_cnt + 1] == NULL_TREE)
988 t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
989 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
990 insert_before (g);
991 t = gimple_assign_lhs (g);
993 else
994 t = m_data[m_data_cnt + 1];
995 if (m_data[m_data_cnt + 1] == integer_type_node)
997 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
998 unsigned rem = prec % (2 * limb_prec);
999 int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
1000 tree c = m_data[m_data_cnt];
1001 unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
1002 g = gimple_build_cond (LT_EXPR, idx,
1003 size_int (min_prec / limb_prec),
1004 NULL_TREE, NULL_TREE);
1005 edge edge_true, edge_false;
1006 if_then (g, (min_prec >= (prec - rem) / 2
1007 ? profile_probability::likely ()
1008 : profile_probability::unlikely ()),
1009 edge_true, edge_false);
1010 if (min_prec > (unsigned) limb_prec)
1012 c = limb_access (TREE_TYPE (op), c, idx, false);
1013 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
1014 insert_before (g);
1015 c = gimple_assign_lhs (g);
1017 tree c2 = build_int_cst (m_limb_type, ext);
1018 m_gsi = gsi_after_labels (edge_true->dest);
1019 t = make_ssa_name (m_limb_type);
1020 gphi *phi = create_phi_node (t, edge_true->dest);
1021 add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
1022 add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
1024 m_data_cnt += 2;
1025 return t;
1026 default:
1027 gcc_unreachable ();
1031 /* Helper method, add a PHI node with VAL from preheader edge if
1032 inside of a loop and m_first. Keep state in a pair of m_data
1033 elements. If VAL_OUT is non-NULL, use that as PHI argument from
1034 the latch edge, otherwise create a new SSA_NAME for it and let
1035 caller initialize it. */
1037 tree
1038 bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
1039 tree val_out)
1041 if (!m_first)
1043 *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
1044 return m_data[m_data_cnt];
1047 *data_out = NULL_TREE;
1048 if (tree_fits_uhwi_p (idx))
1050 m_data.safe_push (val);
1051 m_data.safe_push (NULL_TREE);
1052 return val;
1055 tree in = make_ssa_name (TREE_TYPE (val));
1056 gphi *phi = create_phi_node (in, m_bb);
1057 edge e1 = find_edge (m_preheader_bb, m_bb);
1058 edge e2 = EDGE_PRED (m_bb, 0);
1059 if (e1 == e2)
1060 e2 = EDGE_PRED (m_bb, 1);
1061 add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
1062 tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
1063 add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
1064 m_data.safe_push (in);
1065 m_data.safe_push (out);
1066 return in;
1069 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1070 convert it without emitting any code, otherwise emit
1071 the conversion statement before the current location. */
1073 tree
1074 bitint_large_huge::add_cast (tree type, tree val)
1076 if (TREE_CODE (val) == INTEGER_CST)
1077 return fold_convert (type, val);
1079 tree lhs = make_ssa_name (type);
1080 gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
1081 insert_before (g);
1082 return lhs;
1085 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1087 tree
1088 bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
1089 tree idx)
1091 tree lhs, data_out, ctype;
1092 tree rhs1_type = TREE_TYPE (rhs1);
1093 gimple *g;
1094 tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
1095 &data_out);
1097 if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
1098 TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
1100 ctype = build_complex_type (m_limb_type);
1101 if (!types_compatible_p (rhs1_type, m_limb_type))
1103 if (!TYPE_UNSIGNED (rhs1_type))
1105 tree type = unsigned_type_for (rhs1_type);
1106 rhs1 = add_cast (type, rhs1);
1107 rhs2 = add_cast (type, rhs2);
1109 rhs1 = add_cast (m_limb_type, rhs1);
1110 rhs2 = add_cast (m_limb_type, rhs2);
1112 lhs = make_ssa_name (ctype);
1113 g = gimple_build_call_internal (code == PLUS_EXPR
1114 ? IFN_UADDC : IFN_USUBC,
1115 3, rhs1, rhs2, data_in);
1116 gimple_call_set_lhs (g, lhs);
1117 insert_before (g);
1118 if (data_out == NULL_TREE)
1119 data_out = make_ssa_name (m_limb_type);
1120 g = gimple_build_assign (data_out, IMAGPART_EXPR,
1121 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1122 insert_before (g);
1124 else if (types_compatible_p (rhs1_type, m_limb_type))
1126 ctype = build_complex_type (m_limb_type);
1127 lhs = make_ssa_name (ctype);
1128 g = gimple_build_call_internal (code == PLUS_EXPR
1129 ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
1130 2, rhs1, rhs2);
1131 gimple_call_set_lhs (g, lhs);
1132 insert_before (g);
1133 if (data_out == NULL_TREE)
1134 data_out = make_ssa_name (m_limb_type);
1135 if (!integer_zerop (data_in))
1137 rhs1 = make_ssa_name (m_limb_type);
1138 g = gimple_build_assign (rhs1, REALPART_EXPR,
1139 build1 (REALPART_EXPR, m_limb_type, lhs));
1140 insert_before (g);
1141 rhs2 = make_ssa_name (m_limb_type);
1142 g = gimple_build_assign (rhs2, IMAGPART_EXPR,
1143 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1144 insert_before (g);
1145 lhs = make_ssa_name (ctype);
1146 g = gimple_build_call_internal (code == PLUS_EXPR
1147 ? IFN_ADD_OVERFLOW
1148 : IFN_SUB_OVERFLOW,
1149 2, rhs1, data_in);
1150 gimple_call_set_lhs (g, lhs);
1151 insert_before (g);
1152 data_in = make_ssa_name (m_limb_type);
1153 g = gimple_build_assign (data_in, IMAGPART_EXPR,
1154 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1155 insert_before (g);
1156 g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
1157 insert_before (g);
1159 else
1161 g = gimple_build_assign (data_out, IMAGPART_EXPR,
1162 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1163 insert_before (g);
1166 else
1168 tree in = add_cast (rhs1_type, data_in);
1169 lhs = make_ssa_name (rhs1_type);
1170 g = gimple_build_assign (lhs, code, rhs1, rhs2);
1171 insert_before (g);
1172 rhs1 = make_ssa_name (rhs1_type);
1173 g = gimple_build_assign (rhs1, code, lhs, in);
1174 insert_before (g);
1175 m_data[m_data_cnt] = NULL_TREE;
1176 m_data_cnt += 2;
1177 return rhs1;
1179 rhs1 = make_ssa_name (m_limb_type);
1180 g = gimple_build_assign (rhs1, REALPART_EXPR,
1181 build1 (REALPART_EXPR, m_limb_type, lhs));
1182 insert_before (g);
1183 if (!types_compatible_p (rhs1_type, m_limb_type))
1184 rhs1 = add_cast (rhs1_type, rhs1);
1185 m_data[m_data_cnt] = data_out;
1186 m_data_cnt += 2;
1187 return rhs1;
1190 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1191 count in [0, limb_prec - 1] range. */
1193 tree
1194 bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
1196 unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
1197 gcc_checking_assert (cnt < (unsigned) limb_prec);
1198 if (cnt == 0)
1199 return rhs1;
1201 tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
1202 gimple *g;
1203 tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
1204 &data_out);
1206 if (!integer_zerop (data_in))
1208 lhs = make_ssa_name (m_limb_type);
1209 g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
1210 build_int_cst (unsigned_type_node,
1211 limb_prec - cnt));
1212 insert_before (g);
1213 if (!types_compatible_p (rhs1_type, m_limb_type))
1214 lhs = add_cast (rhs1_type, lhs);
1215 data_in = lhs;
1217 if (types_compatible_p (rhs1_type, m_limb_type))
1219 if (data_out == NULL_TREE)
1220 data_out = make_ssa_name (m_limb_type);
1221 g = gimple_build_assign (data_out, rhs1);
1222 insert_before (g);
1224 if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
1226 lhs = make_ssa_name (rhs1_type);
1227 g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
1228 insert_before (g);
1229 if (!integer_zerop (data_in))
1231 rhs1 = lhs;
1232 lhs = make_ssa_name (rhs1_type);
1233 g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
1234 insert_before (g);
1237 else
1238 lhs = data_in;
1239 m_data[m_data_cnt] = data_out;
1240 m_data_cnt += 2;
1241 return lhs;
1244 /* Helper function for handle_stmt method, handle an integral
1245 to integral conversion. */
1247 tree
1248 bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
1250 tree rhs_type = TREE_TYPE (rhs1);
1251 gimple *g;
1252 if (TREE_CODE (rhs1) == SSA_NAME
1253 && TREE_CODE (lhs_type) == BITINT_TYPE
1254 && TREE_CODE (rhs_type) == BITINT_TYPE
1255 && bitint_precision_kind (lhs_type) >= bitint_prec_large
1256 && bitint_precision_kind (rhs_type) >= bitint_prec_large)
1258 if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
1259 /* If lhs has bigger precision than rhs, we can use
1260 the simple case only if there is a guarantee that
1261 the most significant limb is handled in straight
1262 line code. If m_var_msb (on left shifts) or
1263 if m_upwards_2limb * limb_prec is equal to
1264 lhs precision that is not the case. */
1265 || (!m_var_msb
1266 && tree_int_cst_equal (TYPE_SIZE (rhs_type),
1267 TYPE_SIZE (lhs_type))
1268 && (!m_upwards_2limb
1269 || (m_upwards_2limb * limb_prec
1270 < TYPE_PRECISION (lhs_type)))))
1272 rhs1 = handle_operand (rhs1, idx);
1273 if (tree_fits_uhwi_p (idx))
1275 tree type = limb_access_type (lhs_type, idx);
1276 if (!types_compatible_p (type, TREE_TYPE (rhs1)))
1277 rhs1 = add_cast (type, rhs1);
1279 return rhs1;
1281 tree t;
1282 /* Indexes lower than this don't need any special processing. */
1283 unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
1284 - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
1285 /* Indexes >= than this always contain an extension. */
1286 unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
1287 bool save_first = m_first;
1288 if (m_first)
1290 m_data.safe_push (NULL_TREE);
1291 m_data.safe_push (NULL_TREE);
1292 m_data.safe_push (NULL_TREE);
1293 if (TYPE_UNSIGNED (rhs_type))
1294 /* No need to keep state between iterations. */
1296 else if (m_upwards && !m_upwards_2limb)
1297 /* We need to keep state between iterations, but
1298 not within any loop, everything is straight line
1299 code with only increasing indexes. */
1301 else if (!m_upwards_2limb)
1303 unsigned save_data_cnt = m_data_cnt;
1304 gimple_stmt_iterator save_gsi = m_gsi;
1305 m_gsi = m_init_gsi;
1306 if (gsi_end_p (m_gsi))
1307 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1308 else
1309 gsi_next (&m_gsi);
1310 m_data_cnt = save_data_cnt + 3;
1311 t = handle_operand (rhs1, size_int (low));
1312 m_first = false;
1313 m_data[save_data_cnt + 2]
1314 = build_int_cst (NULL_TREE, m_data_cnt);
1315 m_data_cnt = save_data_cnt;
1316 t = add_cast (signed_type_for (m_limb_type), t);
1317 tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
1318 tree n = make_ssa_name (TREE_TYPE (t));
1319 g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
1320 insert_before (g);
1321 m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
1322 m_init_gsi = m_gsi;
1323 if (gsi_end_p (m_init_gsi))
1324 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1325 else
1326 gsi_prev (&m_init_gsi);
1327 m_gsi = save_gsi;
1329 else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
1330 /* We need to keep state between iterations, but
1331 fortunately not within the loop, only afterwards. */
1333 else
1335 tree out;
1336 m_data.truncate (m_data_cnt);
1337 prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
1338 m_data.safe_push (NULL_TREE);
1342 unsigned save_data_cnt = m_data_cnt;
1343 m_data_cnt += 3;
1344 if (!tree_fits_uhwi_p (idx))
1346 if (m_upwards_2limb
1347 && (m_upwards_2limb * limb_prec
1348 <= ((unsigned) TYPE_PRECISION (rhs_type)
1349 - !TYPE_UNSIGNED (rhs_type))))
1351 rhs1 = handle_operand (rhs1, idx);
1352 if (m_first)
1353 m_data[save_data_cnt + 2]
1354 = build_int_cst (NULL_TREE, m_data_cnt);
1355 m_first = save_first;
1356 return rhs1;
1358 bool single_comparison
1359 = low == high || (m_upwards_2limb && (low & 1) == m_first);
1360 g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
1361 idx, size_int (low), NULL_TREE, NULL_TREE);
1362 edge edge_true_true, edge_true_false, edge_false;
1363 if_then_if_then_else (g, (single_comparison ? NULL
1364 : gimple_build_cond (EQ_EXPR, idx,
1365 size_int (low),
1366 NULL_TREE,
1367 NULL_TREE)),
1368 profile_probability::likely (),
1369 profile_probability::unlikely (),
1370 edge_true_true, edge_true_false, edge_false);
1371 bool save_cast_conditional = m_cast_conditional;
1372 m_cast_conditional = true;
1373 m_bitfld_load = 0;
1374 tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
1375 if (m_first)
1376 m_data[save_data_cnt + 2]
1377 = build_int_cst (NULL_TREE, m_data_cnt);
1378 tree ext = NULL_TREE;
1379 tree bitfld = NULL_TREE;
1380 if (!single_comparison)
1382 m_gsi = gsi_after_labels (edge_true_true->src);
1383 m_first = false;
1384 m_data_cnt = save_data_cnt + 3;
1385 if (m_bitfld_load)
1387 bitfld = m_data[m_bitfld_load];
1388 m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
1389 m_bitfld_load = 0;
1391 t2 = handle_operand (rhs1, size_int (low));
1392 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
1393 t2 = add_cast (m_limb_type, t2);
1394 if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
1396 ext = add_cast (signed_type_for (m_limb_type), t2);
1397 tree lpm1 = build_int_cst (unsigned_type_node,
1398 limb_prec - 1);
1399 tree n = make_ssa_name (TREE_TYPE (ext));
1400 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1401 insert_before (g);
1402 ext = add_cast (m_limb_type, n);
1405 tree t3;
1406 if (TYPE_UNSIGNED (rhs_type))
1407 t3 = build_zero_cst (m_limb_type);
1408 else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
1409 t3 = m_data[save_data_cnt];
1410 else
1411 t3 = m_data[save_data_cnt + 1];
1412 m_gsi = gsi_after_labels (edge_true_false->dest);
1413 t = make_ssa_name (m_limb_type);
1414 gphi *phi = create_phi_node (t, edge_true_false->dest);
1415 add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
1416 add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
1417 if (edge_true_true)
1418 add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
1419 if (ext)
1421 tree t4 = make_ssa_name (m_limb_type);
1422 phi = create_phi_node (t4, edge_true_false->dest);
1423 add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
1424 UNKNOWN_LOCATION);
1425 add_phi_arg (phi, m_data[save_data_cnt], edge_false,
1426 UNKNOWN_LOCATION);
1427 add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
1428 if (!save_cast_conditional)
1430 g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
1431 insert_before (g);
1433 else
1434 for (basic_block bb = gsi_bb (m_gsi);;)
1436 edge e1 = single_succ_edge (bb);
1437 edge e2 = find_edge (e1->dest, m_bb), e3;
1438 tree t5 = (e2 ? m_data[save_data_cnt + 1]
1439 : make_ssa_name (m_limb_type));
1440 phi = create_phi_node (t5, e1->dest);
1441 edge_iterator ei;
1442 FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1443 add_phi_arg (phi, (e3 == e1 ? t4
1444 : build_zero_cst (m_limb_type)),
1445 e3, UNKNOWN_LOCATION);
1446 if (e2)
1447 break;
1448 t4 = t5;
1449 bb = e1->dest;
1452 if (m_bitfld_load)
1454 tree t4;
1455 if (!m_first)
1456 t4 = m_data[m_bitfld_load + 1];
1457 else
1458 t4 = make_ssa_name (m_limb_type);
1459 phi = create_phi_node (t4, edge_true_false->dest);
1460 add_phi_arg (phi,
1461 edge_true_true ? bitfld : m_data[m_bitfld_load],
1462 edge_true_false, UNKNOWN_LOCATION);
1463 add_phi_arg (phi, m_data[m_bitfld_load + 2],
1464 edge_false, UNKNOWN_LOCATION);
1465 if (edge_true_true)
1466 add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
1467 UNKNOWN_LOCATION);
1468 m_data[m_bitfld_load] = t4;
1469 m_data[m_bitfld_load + 2] = t4;
1470 m_bitfld_load = 0;
1472 m_cast_conditional = save_cast_conditional;
1473 m_first = save_first;
1474 return t;
1476 else
1478 if (tree_to_uhwi (idx) < low)
1480 t = handle_operand (rhs1, idx);
1481 if (m_first)
1482 m_data[save_data_cnt + 2]
1483 = build_int_cst (NULL_TREE, m_data_cnt);
1485 else if (tree_to_uhwi (idx) < high)
1487 t = handle_operand (rhs1, size_int (low));
1488 if (m_first)
1489 m_data[save_data_cnt + 2]
1490 = build_int_cst (NULL_TREE, m_data_cnt);
1491 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
1492 t = add_cast (m_limb_type, t);
1493 tree ext = NULL_TREE;
1494 if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
1496 ext = add_cast (signed_type_for (m_limb_type), t);
1497 tree lpm1 = build_int_cst (unsigned_type_node,
1498 limb_prec - 1);
1499 tree n = make_ssa_name (TREE_TYPE (ext));
1500 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1501 insert_before (g);
1502 ext = add_cast (m_limb_type, n);
1503 m_data[save_data_cnt + 1] = ext;
1506 else
1508 if (TYPE_UNSIGNED (rhs_type) && m_first)
1510 handle_operand (rhs1, size_zero_node);
1511 m_data[save_data_cnt + 2]
1512 = build_int_cst (NULL_TREE, m_data_cnt);
1514 else
1515 m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
1516 if (TYPE_UNSIGNED (rhs_type))
1517 t = build_zero_cst (m_limb_type);
1518 else if (m_bb && m_data[save_data_cnt])
1519 t = m_data[save_data_cnt];
1520 else
1521 t = m_data[save_data_cnt + 1];
1523 tree type = limb_access_type (lhs_type, idx);
1524 if (!useless_type_conversion_p (type, m_limb_type))
1525 t = add_cast (type, t);
1526 m_first = save_first;
1527 return t;
1530 else if (TREE_CODE (lhs_type) == BITINT_TYPE
1531 && bitint_precision_kind (lhs_type) >= bitint_prec_large
1532 && INTEGRAL_TYPE_P (rhs_type))
1534 /* Add support for 3 or more limbs filled in from normal integral
1535 type if this assert fails. If no target chooses limb mode smaller
1536 than half of largest supported normal integral type, this will not
1537 be needed. */
1538 gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
1539 tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
1540 if (m_first)
1542 gimple_stmt_iterator save_gsi = m_gsi;
1543 m_gsi = m_init_gsi;
1544 if (gsi_end_p (m_gsi))
1545 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1546 else
1547 gsi_next (&m_gsi);
1548 if (TREE_CODE (rhs_type) == BITINT_TYPE
1549 && bitint_precision_kind (rhs_type) == bitint_prec_middle)
1551 tree type = NULL_TREE;
1552 rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
1553 rhs_type = TREE_TYPE (rhs1);
1555 r1 = rhs1;
1556 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
1557 r1 = add_cast (m_limb_type, rhs1);
1558 if (TYPE_PRECISION (rhs_type) > limb_prec)
1560 g = gimple_build_assign (make_ssa_name (rhs_type),
1561 RSHIFT_EXPR, rhs1,
1562 build_int_cst (unsigned_type_node,
1563 limb_prec));
1564 insert_before (g);
1565 r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
1567 if (TYPE_UNSIGNED (rhs_type))
1568 rext = build_zero_cst (m_limb_type);
1569 else
1571 rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
1572 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
1573 RSHIFT_EXPR, rext,
1574 build_int_cst (unsigned_type_node,
1575 limb_prec - 1));
1576 insert_before (g);
1577 rext = add_cast (m_limb_type, gimple_assign_lhs (g));
1579 m_init_gsi = m_gsi;
1580 if (gsi_end_p (m_init_gsi))
1581 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1582 else
1583 gsi_prev (&m_init_gsi);
1584 m_gsi = save_gsi;
1586 tree t;
1587 if (m_upwards_2limb)
1589 if (m_first)
1591 tree out1, out2;
1592 prepare_data_in_out (r1, idx, &out1, rext);
1593 if (TYPE_PRECISION (rhs_type) > limb_prec)
1595 prepare_data_in_out (r2, idx, &out2, rext);
1596 m_data.pop ();
1597 t = m_data.pop ();
1598 m_data[m_data_cnt + 1] = t;
1600 else
1601 m_data[m_data_cnt + 1] = rext;
1602 m_data.safe_push (rext);
1603 t = m_data[m_data_cnt];
1605 else if (!tree_fits_uhwi_p (idx))
1606 t = m_data[m_data_cnt + 1];
1607 else
1609 tree type = limb_access_type (lhs_type, idx);
1610 t = m_data[m_data_cnt + 2];
1611 if (!useless_type_conversion_p (type, m_limb_type))
1612 t = add_cast (type, t);
1614 m_data_cnt += 3;
1615 return t;
1617 else if (m_first)
1619 m_data.safe_push (r1);
1620 m_data.safe_push (r2);
1621 m_data.safe_push (rext);
1623 if (tree_fits_uhwi_p (idx))
1625 tree type = limb_access_type (lhs_type, idx);
1626 if (integer_zerop (idx))
1627 t = m_data[m_data_cnt];
1628 else if (TYPE_PRECISION (rhs_type) > limb_prec
1629 && integer_onep (idx))
1630 t = m_data[m_data_cnt + 1];
1631 else
1632 t = m_data[m_data_cnt + 2];
1633 if (!useless_type_conversion_p (type, m_limb_type))
1634 t = add_cast (type, t);
1635 m_data_cnt += 3;
1636 return t;
1638 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
1639 NULL_TREE, NULL_TREE);
1640 edge e2, e3, e4 = NULL;
1641 if_then (g, profile_probability::likely (), e2, e3);
1642 if (m_data[m_data_cnt + 1])
1644 g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
1645 NULL_TREE, NULL_TREE);
1646 insert_before (g);
1647 edge e5 = split_block (gsi_bb (m_gsi), g);
1648 e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
1649 e2 = find_edge (e5->dest, e2->dest);
1650 e4->probability = profile_probability::unlikely ();
1651 e5->flags = EDGE_FALSE_VALUE;
1652 e5->probability = e4->probability.invert ();
1654 m_gsi = gsi_after_labels (e2->dest);
1655 t = make_ssa_name (m_limb_type);
1656 gphi *phi = create_phi_node (t, e2->dest);
1657 add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
1658 add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
1659 if (e4)
1660 add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
1661 m_data_cnt += 3;
1662 return t;
1664 return NULL_TREE;
1667 /* Helper function for handle_stmt method, handle a load from memory. */
1669 tree
1670 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1672 tree rhs1 = gimple_assign_rhs1 (stmt);
1673 tree rhs_type = TREE_TYPE (rhs1);
1674 bool eh = stmt_ends_bb_p (stmt);
1675 edge eh_edge = NULL;
1676 gimple *g;
1678 if (eh)
1680 edge_iterator ei;
1681 basic_block bb = gimple_bb (stmt);
1683 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1684 if (eh_edge->flags & EDGE_EH)
1685 break;
1688 if (TREE_CODE (rhs1) == COMPONENT_REF
1689 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1691 tree fld = TREE_OPERAND (rhs1, 1);
1692 /* For little-endian, we can allow as inputs bit-fields
1693 which start at a limb boundary. */
1694 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1695 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1696 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1697 goto normal_load;
1698 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1699 handle it normally for now. */
1700 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1701 goto normal_load;
1702 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1703 poly_int64 bitoffset;
1704 poly_uint64 field_offset, repr_offset;
1705 bool var_field_off = false;
1706 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1707 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1708 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1709 else
1711 bitoffset = 0;
1712 var_field_off = true;
1714 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1715 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1716 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1717 TREE_OPERAND (rhs1, 0), repr,
1718 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1719 HOST_WIDE_INT bo = bitoffset.to_constant ();
1720 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1721 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1722 if (m_first)
1724 if (m_upwards)
1726 gimple_stmt_iterator save_gsi = m_gsi;
1727 m_gsi = m_init_gsi;
1728 if (gsi_end_p (m_gsi))
1729 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1730 else
1731 gsi_next (&m_gsi);
1732 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1733 tree iv = make_ssa_name (m_limb_type);
1734 g = gimple_build_assign (iv, t);
1735 insert_before (g);
1736 if (eh)
1738 maybe_duplicate_eh_stmt (g, stmt);
1739 if (eh_edge)
1741 edge e = split_block (gsi_bb (m_gsi), g);
1742 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1743 = profile_probability::very_unlikely ();
1744 m_gsi = gsi_after_labels (e->dest);
1745 if (gsi_bb (save_gsi) == e->src)
1747 if (gsi_end_p (save_gsi))
1748 save_gsi = gsi_end_bb (e->dest);
1749 else
1750 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1752 if (m_preheader_bb == e->src)
1753 m_preheader_bb = e->dest;
1756 m_init_gsi = m_gsi;
1757 if (gsi_end_p (m_init_gsi))
1758 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1759 else
1760 gsi_prev (&m_init_gsi);
1761 m_gsi = save_gsi;
1762 tree out;
1763 prepare_data_in_out (iv, idx, &out);
1764 out = m_data[m_data_cnt];
1765 m_data.safe_push (out);
1767 else
1769 m_data.safe_push (NULL_TREE);
1770 m_data.safe_push (NULL_TREE);
1771 m_data.safe_push (NULL_TREE);
1775 tree nidx0 = NULL_TREE, nidx1;
1776 tree iv = m_data[m_data_cnt];
1777 if (m_cast_conditional && iv)
1779 gcc_assert (!m_bitfld_load);
1780 m_bitfld_load = m_data_cnt;
1782 if (tree_fits_uhwi_p (idx))
1784 unsigned prec = TYPE_PRECISION (rhs_type);
1785 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1786 gcc_assert (i * limb_prec < prec);
1787 nidx1 = size_int (i + bo_idx + 1);
1788 if ((i + 1) * limb_prec > prec)
1790 prec %= limb_prec;
1791 if (prec + bo_bit <= (unsigned) limb_prec)
1792 nidx1 = NULL_TREE;
1794 if (!iv)
1795 nidx0 = size_int (i + bo_idx);
1797 else
1799 if (!iv)
1801 if (bo_idx == 0)
1802 nidx0 = idx;
1803 else
1805 nidx0 = make_ssa_name (sizetype);
1806 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1807 size_int (bo_idx));
1808 insert_before (g);
1811 nidx1 = make_ssa_name (sizetype);
1812 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1813 size_int (bo_idx + 1));
1814 insert_before (g);
1817 tree iv2 = NULL_TREE;
1818 if (nidx0)
1820 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1821 iv = make_ssa_name (m_limb_type);
1822 g = gimple_build_assign (iv, t);
1823 insert_before (g);
1824 gcc_assert (!eh);
1826 if (nidx1)
1828 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1829 unsigned prec = TYPE_PRECISION (rhs_type);
1830 if (conditional)
1832 if ((prec % limb_prec) == 0
1833 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1834 conditional = false;
1836 edge edge_true = NULL, edge_false = NULL;
1837 if (conditional)
1839 g = gimple_build_cond (NE_EXPR, idx,
1840 size_int (prec / limb_prec),
1841 NULL_TREE, NULL_TREE);
1842 if_then (g, profile_probability::likely (),
1843 edge_true, edge_false);
1845 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1846 if (m_upwards_2limb
1847 && !m_first
1848 && !m_bitfld_load
1849 && !tree_fits_uhwi_p (idx))
1850 iv2 = m_data[m_data_cnt + 1];
1851 else
1852 iv2 = make_ssa_name (m_limb_type);
1853 g = gimple_build_assign (iv2, t);
1854 insert_before (g);
1855 if (eh)
1857 maybe_duplicate_eh_stmt (g, stmt);
1858 if (eh_edge)
1860 edge e = split_block (gsi_bb (m_gsi), g);
1861 m_gsi = gsi_after_labels (e->dest);
1862 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1863 = profile_probability::very_unlikely ();
1866 if (conditional)
1868 tree iv3 = make_ssa_name (m_limb_type);
1869 if (eh)
1870 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1871 gphi *phi = create_phi_node (iv3, edge_true->dest);
1872 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1873 add_phi_arg (phi, build_zero_cst (m_limb_type),
1874 edge_false, UNKNOWN_LOCATION);
1875 m_gsi = gsi_after_labels (edge_true->dest);
1878 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1879 iv, build_int_cst (unsigned_type_node, bo_bit));
1880 insert_before (g);
1881 iv = gimple_assign_lhs (g);
1882 if (iv2)
1884 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1885 iv2, build_int_cst (unsigned_type_node,
1886 limb_prec - bo_bit));
1887 insert_before (g);
1888 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1889 gimple_assign_lhs (g), iv);
1890 insert_before (g);
1891 iv = gimple_assign_lhs (g);
1892 if (m_data[m_data_cnt])
1893 m_data[m_data_cnt] = iv2;
1895 if (tree_fits_uhwi_p (idx))
1897 tree atype = limb_access_type (rhs_type, idx);
1898 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1899 iv = add_cast (atype, iv);
1901 m_data_cnt += 3;
1902 return iv;
1905 normal_load:
1906 /* Use write_p = true for loads with EH edges to make
1907 sure limb_access doesn't add a cast as separate
1908 statement after it. */
1909 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1910 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1911 g = gimple_build_assign (ret, rhs1);
1912 insert_before (g);
1913 if (eh)
1915 maybe_duplicate_eh_stmt (g, stmt);
1916 if (eh_edge)
1918 edge e = split_block (gsi_bb (m_gsi), g);
1919 m_gsi = gsi_after_labels (e->dest);
1920 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1921 = profile_probability::very_unlikely ();
1923 if (tree_fits_uhwi_p (idx))
1925 tree atype = limb_access_type (rhs_type, idx);
1926 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1927 ret = add_cast (atype, ret);
1930 return ret;
1933 /* Return a limb IDX from a mergeable statement STMT. */
1935 tree
1936 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1938 tree lhs, rhs1, rhs2 = NULL_TREE;
1939 gimple *g;
1940 switch (gimple_code (stmt))
1942 case GIMPLE_ASSIGN:
1943 if (gimple_assign_load_p (stmt))
1944 return handle_load (stmt, idx);
1945 switch (gimple_assign_rhs_code (stmt))
1947 case BIT_AND_EXPR:
1948 case BIT_IOR_EXPR:
1949 case BIT_XOR_EXPR:
1950 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1951 /* FALLTHRU */
1952 case BIT_NOT_EXPR:
1953 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1954 lhs = make_ssa_name (TREE_TYPE (rhs1));
1955 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1956 rhs1, rhs2);
1957 insert_before (g);
1958 return lhs;
1959 case PLUS_EXPR:
1960 case MINUS_EXPR:
1961 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1962 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1963 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1964 rhs1, rhs2, idx);
1965 case NEGATE_EXPR:
1966 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1967 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1968 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1969 case LSHIFT_EXPR:
1970 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1971 idx),
1972 gimple_assign_rhs2 (stmt), idx);
1973 case SSA_NAME:
1974 case INTEGER_CST:
1975 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1976 CASE_CONVERT:
1977 case VIEW_CONVERT_EXPR:
1978 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1979 gimple_assign_rhs1 (stmt), idx);
1980 default:
1981 break;
1983 break;
1984 default:
1985 break;
1987 gcc_unreachable ();
1990 /* Return minimum precision of OP at STMT.
1991 Positive value is minimum precision above which all bits
1992 are zero, negative means all bits above negation of the
1993 value are copies of the sign bit. */
1995 static int
1996 range_to_prec (tree op, gimple *stmt)
1998 int_range_max r;
1999 wide_int w;
2000 tree type = TREE_TYPE (op);
2001 unsigned int prec = TYPE_PRECISION (type);
2003 if (!optimize
2004 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2005 || r.undefined_p ())
2007 if (TYPE_UNSIGNED (type))
2008 return prec;
2009 else
2010 return MIN ((int) -prec, -2);
2013 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2015 w = r.lower_bound ();
2016 if (wi::neg_p (w))
2018 int min_prec1 = wi::min_precision (w, SIGNED);
2019 w = r.upper_bound ();
2020 int min_prec2 = wi::min_precision (w, SIGNED);
2021 int min_prec = MAX (min_prec1, min_prec2);
2022 return MIN (-min_prec, -2);
2026 w = r.upper_bound ();
2027 int min_prec = wi::min_precision (w, UNSIGNED);
2028 return MAX (min_prec, 1);
2031 /* Return address of the first limb of OP and write into *PREC
2032 its precision. If positive, the operand is zero extended
2033 from that precision, if it is negative, the operand is sign-extended
2034 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2035 otherwise *PREC_STORED is prec from the innermost call without
2036 range optimizations. */
2038 tree
2039 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2040 int *prec_stored, int *prec)
2042 wide_int w;
2043 location_t loc_save = m_loc;
2044 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2045 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2046 && TREE_CODE (op) != INTEGER_CST)
2048 do_int:
2049 *prec = range_to_prec (op, stmt);
2050 bitint_prec_kind kind = bitint_prec_small;
2051 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2052 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2053 kind = bitint_precision_kind (TREE_TYPE (op));
2054 if (kind == bitint_prec_middle)
2056 tree type = NULL_TREE;
2057 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2059 tree op_type = TREE_TYPE (op);
2060 unsigned HOST_WIDE_INT nelts
2061 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2062 /* Add support for 3 or more limbs filled in from normal
2063 integral type if this assert fails. If no target chooses
2064 limb mode smaller than half of largest supported normal
2065 integral type, this will not be needed. */
2066 gcc_assert (nelts <= 2);
2067 if (prec_stored)
2068 *prec_stored = (TYPE_UNSIGNED (op_type)
2069 ? TYPE_PRECISION (op_type)
2070 : -TYPE_PRECISION (op_type));
2071 if (*prec <= limb_prec && *prec >= -limb_prec)
2073 nelts = 1;
2074 if (prec_stored)
2076 if (TYPE_UNSIGNED (op_type))
2078 if (*prec_stored > limb_prec)
2079 *prec_stored = limb_prec;
2081 else if (*prec_stored < -limb_prec)
2082 *prec_stored = -limb_prec;
2085 tree atype = build_array_type_nelts (m_limb_type, nelts);
2086 tree var = create_tmp_var (atype);
2087 tree t1 = op;
2088 if (!useless_type_conversion_p (m_limb_type, op_type))
2089 t1 = add_cast (m_limb_type, t1);
2090 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2091 NULL_TREE, NULL_TREE);
2092 gimple *g = gimple_build_assign (v, t1);
2093 insert_before (g);
2094 if (nelts > 1)
2096 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2097 g = gimple_build_assign (make_ssa_name (op_type),
2098 RSHIFT_EXPR, op, lp);
2099 insert_before (g);
2100 tree t2 = gimple_assign_lhs (g);
2101 t2 = add_cast (m_limb_type, t2);
2102 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2103 NULL_TREE, NULL_TREE);
2104 g = gimple_build_assign (v, t2);
2105 insert_before (g);
2107 tree ret = build_fold_addr_expr (var);
2108 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2110 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2111 g = gimple_build_assign (var, clobber);
2112 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2114 m_loc = loc_save;
2115 return ret;
2117 switch (TREE_CODE (op))
2119 case SSA_NAME:
2120 if (m_names == NULL
2121 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2123 gimple *g = SSA_NAME_DEF_STMT (op);
2124 tree ret;
2125 m_loc = gimple_location (g);
2126 if (gimple_assign_load_p (g))
2128 *prec = range_to_prec (op, NULL);
2129 if (prec_stored)
2130 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2131 ? TYPE_PRECISION (TREE_TYPE (op))
2132 : -TYPE_PRECISION (TREE_TYPE (op)));
2133 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2134 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2135 NULL_TREE, true, GSI_SAME_STMT);
2137 else if (gimple_code (g) == GIMPLE_NOP)
2139 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2140 if (prec_stored)
2141 *prec_stored = *prec;
2142 tree var = create_tmp_var (m_limb_type);
2143 TREE_ADDRESSABLE (var) = 1;
2144 ret = build_fold_addr_expr (var);
2145 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2147 tree clobber = build_clobber (m_limb_type,
2148 CLOBBER_STORAGE_END);
2149 g = gimple_build_assign (var, clobber);
2150 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2153 else
2155 gcc_assert (gimple_assign_cast_p (g));
2156 tree rhs1 = gimple_assign_rhs1 (g);
2157 bitint_prec_kind kind = bitint_prec_small;
2158 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2159 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2160 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2161 if (kind >= bitint_prec_large)
2163 tree lhs_type = TREE_TYPE (op);
2164 tree rhs_type = TREE_TYPE (rhs1);
2165 int prec_stored_val = 0;
2166 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2167 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2169 if (TYPE_UNSIGNED (lhs_type)
2170 && !TYPE_UNSIGNED (rhs_type))
2171 gcc_assert (*prec >= 0 || prec_stored == NULL);
2173 else
2175 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2177 else if (TYPE_UNSIGNED (lhs_type))
2179 gcc_assert (*prec > 0
2180 || prec_stored_val > 0
2181 || (-prec_stored_val
2182 >= TYPE_PRECISION (lhs_type)));
2183 *prec = TYPE_PRECISION (lhs_type);
2185 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2187 else
2188 *prec = -TYPE_PRECISION (lhs_type);
2191 else
2193 op = rhs1;
2194 stmt = g;
2195 goto do_int;
2198 m_loc = loc_save;
2199 return ret;
2201 else
2203 int p = var_to_partition (m_map, op);
2204 gcc_assert (m_vars[p] != NULL_TREE);
2205 *prec = range_to_prec (op, stmt);
2206 if (prec_stored)
2207 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2208 ? TYPE_PRECISION (TREE_TYPE (op))
2209 : -TYPE_PRECISION (TREE_TYPE (op)));
2210 return build_fold_addr_expr (m_vars[p]);
2212 case INTEGER_CST:
2213 unsigned int min_prec, mp;
2214 tree type;
2215 w = wi::to_wide (op);
2216 if (tree_int_cst_sgn (op) >= 0)
2218 min_prec = wi::min_precision (w, UNSIGNED);
2219 *prec = MAX (min_prec, 1);
2221 else
2223 min_prec = wi::min_precision (w, SIGNED);
2224 *prec = MIN ((int) -min_prec, -2);
2226 mp = CEIL (min_prec, limb_prec) * limb_prec;
2227 if (mp == 0)
2228 mp = 1;
2229 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2230 type = TREE_TYPE (op);
2231 else
2232 type = build_bitint_type (mp, 1);
2233 if (TREE_CODE (type) != BITINT_TYPE
2234 || bitint_precision_kind (type) == bitint_prec_small)
2236 if (TYPE_PRECISION (type) <= limb_prec)
2237 type = m_limb_type;
2238 else
2239 /* This case is for targets which e.g. have 64-bit
2240 limb but categorize up to 128-bits _BitInts as
2241 small. We could use type of m_limb_type[2] and
2242 similar instead to save space. */
2243 type = build_bitint_type (mid_min_prec, 1);
2245 if (prec_stored)
2247 if (tree_int_cst_sgn (op) >= 0)
2248 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2249 else
2250 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2252 op = tree_output_constant_def (fold_convert (type, op));
2253 return build_fold_addr_expr (op);
2254 default:
2255 gcc_unreachable ();
2259 /* Helper function, create a loop before the current location,
2260 start with sizetype INIT value from the preheader edge. Return
2261 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2262 from the latch edge. */
2264 tree
2265 bitint_large_huge::create_loop (tree init, tree *idx_next)
2267 if (!gsi_end_p (m_gsi))
2268 gsi_prev (&m_gsi);
2269 else
2270 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2271 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2272 edge e2 = split_block (e1->dest, (gimple *) NULL);
2273 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2274 e3->probability = profile_probability::very_unlikely ();
2275 e2->flags = EDGE_FALSE_VALUE;
2276 e2->probability = e3->probability.invert ();
2277 tree idx = make_ssa_name (sizetype);
2278 gphi *phi = create_phi_node (idx, e1->dest);
2279 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2280 *idx_next = make_ssa_name (sizetype);
2281 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2282 m_gsi = gsi_after_labels (e1->dest);
2283 m_bb = e1->dest;
2284 m_preheader_bb = e1->src;
2285 class loop *loop = alloc_loop ();
2286 loop->header = e1->dest;
2287 add_loop (loop, e1->src->loop_father);
2288 return idx;
2291 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2292 lowered using iteration from the least significant limb up to the most
2293 significant limb. For large _BitInt it is emitted as straight line code
2294 before current location, for huge _BitInt as a loop handling two limbs
2295 at once, followed by handling up to limbs in straight line code (at most
2296 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2297 comparisons, in that case CMP_CODE should be the comparison code and
2298 CMP_OP1/CMP_OP2 the comparison operands. */
2300 tree
2301 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2302 tree cmp_op1, tree cmp_op2)
2304 bool eq_p = cmp_code != ERROR_MARK;
2305 tree type;
2306 if (eq_p)
2307 type = TREE_TYPE (cmp_op1);
2308 else
2309 type = TREE_TYPE (gimple_assign_lhs (stmt));
2310 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2311 bitint_prec_kind kind = bitint_precision_kind (type);
2312 gcc_assert (kind >= bitint_prec_large);
2313 gimple *g;
2314 tree lhs = gimple_get_lhs (stmt);
2315 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2316 if (lhs
2317 && TREE_CODE (lhs) == SSA_NAME
2318 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2319 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2321 int p = var_to_partition (m_map, lhs);
2322 gcc_assert (m_vars[p] != NULL_TREE);
2323 m_lhs = lhs = m_vars[p];
2325 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2326 bool sext = false;
2327 tree ext = NULL_TREE, store_operand = NULL_TREE;
2328 bool eh = false;
2329 basic_block eh_pad = NULL;
2330 tree nlhs = NULL_TREE;
2331 unsigned HOST_WIDE_INT bo_idx = 0;
2332 unsigned HOST_WIDE_INT bo_bit = 0;
2333 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2334 if (gimple_store_p (stmt))
2336 store_operand = gimple_assign_rhs1 (stmt);
2337 eh = stmt_ends_bb_p (stmt);
2338 if (eh)
2340 edge e;
2341 edge_iterator ei;
2342 basic_block bb = gimple_bb (stmt);
2344 FOR_EACH_EDGE (e, ei, bb->succs)
2345 if (e->flags & EDGE_EH)
2347 eh_pad = e->dest;
2348 break;
2351 if (TREE_CODE (lhs) == COMPONENT_REF
2352 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2354 tree fld = TREE_OPERAND (lhs, 1);
2355 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2356 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2357 poly_int64 bitoffset;
2358 poly_uint64 field_offset, repr_offset;
2359 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2360 nlhs = lhs;
2361 else
2363 bool var_field_off = false;
2364 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2365 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2366 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2367 else
2369 bitoffset = 0;
2370 var_field_off = true;
2372 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2373 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2374 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2375 TREE_OPERAND (lhs, 0), repr,
2376 var_field_off
2377 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2378 HOST_WIDE_INT bo = bitoffset.to_constant ();
2379 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2380 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2384 if ((store_operand
2385 && TREE_CODE (store_operand) == SSA_NAME
2386 && (m_names == NULL
2387 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2388 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2389 || gimple_assign_cast_p (stmt))
2391 rhs1 = gimple_assign_rhs1 (store_operand
2392 ? SSA_NAME_DEF_STMT (store_operand)
2393 : stmt);
2394 /* Optimize mergeable ops ending with widening cast to _BitInt
2395 (or followed by store). We can lower just the limbs of the
2396 cast operand and widen afterwards. */
2397 if (TREE_CODE (rhs1) == SSA_NAME
2398 && (m_names == NULL
2399 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2400 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2401 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2402 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2403 limb_prec) < CEIL (prec, limb_prec)
2404 || (kind == bitint_prec_huge
2405 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2407 store_operand = rhs1;
2408 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2409 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2410 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2411 sext = true;
2414 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2415 if (kind == bitint_prec_large)
2416 cnt = CEIL (prec, limb_prec);
2417 else
2419 rem = (prec % (2 * limb_prec));
2420 end = (prec - rem) / limb_prec;
2421 cnt = 2 + CEIL (rem, limb_prec);
2422 idx = idx_first = create_loop (size_zero_node, &idx_next);
2425 basic_block edge_bb = NULL;
2426 if (eq_p)
2428 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2429 gsi_prev (&gsi);
2430 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2431 edge_bb = e->src;
2432 if (kind == bitint_prec_large)
2433 m_gsi = gsi_end_bb (edge_bb);
2435 else
2436 m_after_stmt = stmt;
2437 if (kind != bitint_prec_large)
2438 m_upwards_2limb = end;
2439 m_upwards = true;
2441 bool separate_ext
2442 = (prec != (unsigned) TYPE_PRECISION (type)
2443 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2444 > CEIL (prec, limb_prec)));
2446 for (unsigned i = 0; i < cnt; i++)
2448 m_data_cnt = 0;
2449 if (kind == bitint_prec_large)
2450 idx = size_int (i);
2451 else if (i >= 2)
2452 idx = size_int (end + (i > 2));
2453 if (eq_p)
2455 rhs1 = handle_operand (cmp_op1, idx);
2456 tree rhs2 = handle_operand (cmp_op2, idx);
2457 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2458 insert_before (g);
2459 edge e1 = split_block (gsi_bb (m_gsi), g);
2460 e1->flags = EDGE_FALSE_VALUE;
2461 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2462 e1->probability = profile_probability::unlikely ();
2463 e2->probability = e1->probability.invert ();
2464 if (i == 0)
2465 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2466 m_gsi = gsi_after_labels (e1->dest);
2468 else
2470 if (store_operand)
2471 rhs1 = handle_operand (store_operand, idx);
2472 else
2473 rhs1 = handle_stmt (stmt, idx);
2474 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2475 rhs1 = add_cast (m_limb_type, rhs1);
2476 if (sext && i == cnt - 1)
2477 ext = rhs1;
2478 tree nidx = idx;
2479 if (bo_idx)
2481 if (tree_fits_uhwi_p (idx))
2482 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2483 else
2485 nidx = make_ssa_name (sizetype);
2486 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2487 size_int (bo_idx));
2488 insert_before (g);
2491 bool done = false;
2492 basic_block new_bb = NULL;
2493 /* Handle stores into bit-fields. */
2494 if (bo_bit)
2496 if (i == 0)
2498 edge e2 = NULL;
2499 if (kind != bitint_prec_large)
2501 prepare_data_in_out (build_zero_cst (m_limb_type),
2502 idx, &bf_next);
2503 bf_next = m_data.pop ();
2504 bf_cur = m_data.pop ();
2505 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2506 NULL_TREE, NULL_TREE);
2507 edge edge_true;
2508 if_then_else (g, profile_probability::unlikely (),
2509 edge_true, e2);
2510 new_bb = e2->dest;
2512 tree ftype
2513 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2514 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2515 bitsize_int (limb_prec - bo_bit),
2516 bitsize_int (bo_idx * limb_prec + bo_bit));
2517 tree t = add_cast (ftype, rhs1);
2518 g = gimple_build_assign (bfr, t);
2519 insert_before (g);
2520 if (eh)
2522 maybe_duplicate_eh_stmt (g, stmt);
2523 if (eh_pad)
2525 edge e = split_block (gsi_bb (m_gsi), g);
2526 m_gsi = gsi_after_labels (e->dest);
2527 make_edge (e->src, eh_pad, EDGE_EH)->probability
2528 = profile_probability::very_unlikely ();
2531 if (kind == bitint_prec_large)
2533 bf_cur = rhs1;
2534 done = true;
2536 else if (e2)
2537 m_gsi = gsi_after_labels (e2->src);
2539 if (!done)
2541 tree t1 = make_ssa_name (m_limb_type);
2542 tree t2 = make_ssa_name (m_limb_type);
2543 tree t3 = make_ssa_name (m_limb_type);
2544 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2545 build_int_cst (unsigned_type_node,
2546 limb_prec - bo_bit));
2547 insert_before (g);
2548 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2549 build_int_cst (unsigned_type_node,
2550 bo_bit));
2551 insert_before (g);
2552 bf_cur = rhs1;
2553 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2554 insert_before (g);
2555 rhs1 = t3;
2556 if (bf_next && i == 1)
2558 g = gimple_build_assign (bf_next, bf_cur);
2559 insert_before (g);
2563 if (!done)
2565 /* Handle bit-field access to partial last limb if needed. */
2566 if (nlhs
2567 && i == cnt - 1
2568 && !separate_ext
2569 && tree_fits_uhwi_p (idx))
2571 unsigned int tprec = TYPE_PRECISION (type);
2572 unsigned int rprec = tprec % limb_prec;
2573 if (rprec + bo_bit < (unsigned) limb_prec)
2575 tree ftype
2576 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2577 tree bfr = build3 (BIT_FIELD_REF, ftype,
2578 unshare_expr (nlhs),
2579 bitsize_int (rprec + bo_bit),
2580 bitsize_int ((bo_idx
2581 + tprec / limb_prec)
2582 * limb_prec));
2583 tree t = add_cast (ftype, rhs1);
2584 g = gimple_build_assign (bfr, t);
2585 done = true;
2586 bf_cur = NULL_TREE;
2588 else if (rprec + bo_bit == (unsigned) limb_prec)
2589 bf_cur = NULL_TREE;
2591 /* Otherwise, stores to any other lhs. */
2592 if (!done)
2594 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2595 nidx, true);
2596 g = gimple_build_assign (l, rhs1);
2598 insert_before (g);
2599 if (eh)
2601 maybe_duplicate_eh_stmt (g, stmt);
2602 if (eh_pad)
2604 edge e = split_block (gsi_bb (m_gsi), g);
2605 m_gsi = gsi_after_labels (e->dest);
2606 make_edge (e->src, eh_pad, EDGE_EH)->probability
2607 = profile_probability::very_unlikely ();
2610 if (new_bb)
2611 m_gsi = gsi_after_labels (new_bb);
2614 m_first = false;
2615 if (kind == bitint_prec_huge && i <= 1)
2617 if (i == 0)
2619 idx = make_ssa_name (sizetype);
2620 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2621 size_one_node);
2622 insert_before (g);
2624 else
2626 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2627 size_int (2));
2628 insert_before (g);
2629 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2630 NULL_TREE, NULL_TREE);
2631 insert_before (g);
2632 if (eq_p)
2633 m_gsi = gsi_after_labels (edge_bb);
2634 else
2635 m_gsi = gsi_for_stmt (stmt);
2636 m_bb = NULL;
2641 if (separate_ext)
2643 if (sext)
2645 ext = add_cast (signed_type_for (m_limb_type), ext);
2646 tree lpm1 = build_int_cst (unsigned_type_node,
2647 limb_prec - 1);
2648 tree n = make_ssa_name (TREE_TYPE (ext));
2649 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2650 insert_before (g);
2651 ext = add_cast (m_limb_type, n);
2653 else
2654 ext = build_zero_cst (m_limb_type);
2655 kind = bitint_precision_kind (type);
2656 unsigned start = CEIL (prec, limb_prec);
2657 prec = TYPE_PRECISION (type);
2658 idx = idx_first = idx_next = NULL_TREE;
2659 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2660 kind = bitint_prec_large;
2661 if (kind == bitint_prec_large)
2662 cnt = CEIL (prec, limb_prec) - start;
2663 else
2665 rem = prec % limb_prec;
2666 end = (prec - rem) / limb_prec;
2667 cnt = (bo_bit != 0) + 1 + (rem != 0);
2669 for (unsigned i = 0; i < cnt; i++)
2671 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2672 idx = size_int (start + i);
2673 else if (i == cnt - 1 && (rem != 0))
2674 idx = size_int (end);
2675 else if (i == (bo_bit != 0))
2676 idx = create_loop (size_int (start + i), &idx_next);
2677 rhs1 = ext;
2678 if (bf_cur != NULL_TREE && bf_cur != ext)
2680 tree t1 = make_ssa_name (m_limb_type);
2681 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2682 build_int_cst (unsigned_type_node,
2683 limb_prec - bo_bit));
2684 insert_before (g);
2685 if (integer_zerop (ext))
2686 rhs1 = t1;
2687 else
2689 tree t2 = make_ssa_name (m_limb_type);
2690 rhs1 = make_ssa_name (m_limb_type);
2691 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2692 build_int_cst (unsigned_type_node,
2693 bo_bit));
2694 insert_before (g);
2695 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2696 insert_before (g);
2698 bf_cur = ext;
2700 tree nidx = idx;
2701 if (bo_idx)
2703 if (tree_fits_uhwi_p (idx))
2704 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2705 else
2707 nidx = make_ssa_name (sizetype);
2708 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2709 size_int (bo_idx));
2710 insert_before (g);
2713 bool done = false;
2714 /* Handle bit-field access to partial last limb if needed. */
2715 if (nlhs && i == cnt - 1)
2717 unsigned int tprec = TYPE_PRECISION (type);
2718 unsigned int rprec = tprec % limb_prec;
2719 if (rprec + bo_bit < (unsigned) limb_prec)
2721 tree ftype
2722 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2723 tree bfr = build3 (BIT_FIELD_REF, ftype,
2724 unshare_expr (nlhs),
2725 bitsize_int (rprec + bo_bit),
2726 bitsize_int ((bo_idx + tprec / limb_prec)
2727 * limb_prec));
2728 tree t = add_cast (ftype, rhs1);
2729 g = gimple_build_assign (bfr, t);
2730 done = true;
2731 bf_cur = NULL_TREE;
2733 else if (rprec + bo_bit == (unsigned) limb_prec)
2734 bf_cur = NULL_TREE;
2736 /* Otherwise, stores to any other lhs. */
2737 if (!done)
2739 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2740 g = gimple_build_assign (l, rhs1);
2742 insert_before (g);
2743 if (eh)
2745 maybe_duplicate_eh_stmt (g, stmt);
2746 if (eh_pad)
2748 edge e = split_block (gsi_bb (m_gsi), g);
2749 m_gsi = gsi_after_labels (e->dest);
2750 make_edge (e->src, eh_pad, EDGE_EH)->probability
2751 = profile_probability::very_unlikely ();
2754 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2756 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2757 size_one_node);
2758 insert_before (g);
2759 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2760 NULL_TREE, NULL_TREE);
2761 insert_before (g);
2762 m_gsi = gsi_for_stmt (stmt);
2763 m_bb = NULL;
2767 if (bf_cur != NULL_TREE)
2769 unsigned int tprec = TYPE_PRECISION (type);
2770 unsigned int rprec = tprec % limb_prec;
2771 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2772 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2773 bitsize_int (rprec + bo_bit),
2774 bitsize_int ((bo_idx + tprec / limb_prec)
2775 * limb_prec));
2776 rhs1 = bf_cur;
2777 if (bf_cur != ext)
2779 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2780 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2781 build_int_cst (unsigned_type_node,
2782 limb_prec - bo_bit));
2783 insert_before (g);
2785 rhs1 = add_cast (ftype, rhs1);
2786 g = gimple_build_assign (bfr, rhs1);
2787 insert_before (g);
2788 if (eh)
2790 maybe_duplicate_eh_stmt (g, stmt);
2791 if (eh_pad)
2793 edge e = split_block (gsi_bb (m_gsi), g);
2794 m_gsi = gsi_after_labels (e->dest);
2795 make_edge (e->src, eh_pad, EDGE_EH)->probability
2796 = profile_probability::very_unlikely ();
2801 if (gimple_store_p (stmt))
2803 unlink_stmt_vdef (stmt);
2804 release_ssa_name (gimple_vdef (stmt));
2805 gsi_remove (&m_gsi, true);
2807 if (eq_p)
2809 lhs = make_ssa_name (boolean_type_node);
2810 basic_block bb = gimple_bb (stmt);
2811 gphi *phi = create_phi_node (lhs, bb);
2812 edge e = find_edge (gsi_bb (m_gsi), bb);
2813 unsigned int n = EDGE_COUNT (bb->preds);
2814 for (unsigned int i = 0; i < n; i++)
2816 edge e2 = EDGE_PRED (bb, i);
2817 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2818 e2, UNKNOWN_LOCATION);
2820 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2821 return lhs;
2823 else
2824 return NULL_TREE;
2827 /* Handle a large/huge _BitInt comparison statement STMT other than
2828 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2829 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2830 lowered by iteration from the most significant limb downwards to
2831 the least significant one, for large _BitInt in straight line code,
2832 otherwise with most significant limb handled in
2833 straight line code followed by a loop handling one limb at a time.
2834 Comparisons with unsigned huge _BitInt with precisions which are
2835 multiples of limb precision can use just the loop and don't need to
2836 handle most significant limb before the loop. The loop or straight
2837 line code jumps to final basic block if a particular pair of limbs
2838 is not equal. */
2840 tree
2841 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2842 tree cmp_op1, tree cmp_op2)
2844 tree type = TREE_TYPE (cmp_op1);
2845 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2846 bitint_prec_kind kind = bitint_precision_kind (type);
2847 gcc_assert (kind >= bitint_prec_large);
2848 gimple *g;
2849 if (!TYPE_UNSIGNED (type)
2850 && integer_zerop (cmp_op2)
2851 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2853 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2854 tree idx = size_int (end);
2855 m_data_cnt = 0;
2856 tree rhs1 = handle_operand (cmp_op1, idx);
2857 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2859 tree stype = signed_type_for (TREE_TYPE (rhs1));
2860 rhs1 = add_cast (stype, rhs1);
2862 tree lhs = make_ssa_name (boolean_type_node);
2863 g = gimple_build_assign (lhs, cmp_code, rhs1,
2864 build_zero_cst (TREE_TYPE (rhs1)));
2865 insert_before (g);
2866 cmp_code = NE_EXPR;
2867 return lhs;
2870 unsigned cnt, rem = 0, end = 0;
2871 tree idx = NULL_TREE, idx_next = NULL_TREE;
2872 if (kind == bitint_prec_large)
2873 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2874 else
2876 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2877 if (rem == 0 && !TYPE_UNSIGNED (type))
2878 rem = limb_prec;
2879 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2880 cnt = 1 + (rem != 0);
2883 basic_block edge_bb = NULL;
2884 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2885 gsi_prev (&gsi);
2886 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2887 edge_bb = e->src;
2888 m_gsi = gsi_end_bb (edge_bb);
2890 edge *edges = XALLOCAVEC (edge, cnt * 2);
2891 for (unsigned i = 0; i < cnt; i++)
2893 m_data_cnt = 0;
2894 if (kind == bitint_prec_large)
2895 idx = size_int (cnt - i - 1);
2896 else if (i == cnt - 1)
2897 idx = create_loop (size_int (end - 1), &idx_next);
2898 else
2899 idx = size_int (end);
2900 tree rhs1 = handle_operand (cmp_op1, idx);
2901 tree rhs2 = handle_operand (cmp_op2, idx);
2902 if (i == 0
2903 && !TYPE_UNSIGNED (type)
2904 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2906 tree stype = signed_type_for (TREE_TYPE (rhs1));
2907 rhs1 = add_cast (stype, rhs1);
2908 rhs2 = add_cast (stype, rhs2);
2910 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2911 insert_before (g);
2912 edge e1 = split_block (gsi_bb (m_gsi), g);
2913 e1->flags = EDGE_FALSE_VALUE;
2914 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2915 e1->probability = profile_probability::likely ();
2916 e2->probability = e1->probability.invert ();
2917 if (i == 0)
2918 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2919 m_gsi = gsi_after_labels (e1->dest);
2920 edges[2 * i] = e2;
2921 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2922 insert_before (g);
2923 e1 = split_block (gsi_bb (m_gsi), g);
2924 e1->flags = EDGE_FALSE_VALUE;
2925 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2926 e1->probability = profile_probability::unlikely ();
2927 e2->probability = e1->probability.invert ();
2928 m_gsi = gsi_after_labels (e1->dest);
2929 edges[2 * i + 1] = e2;
2930 m_first = false;
2931 if (kind == bitint_prec_huge && i == cnt - 1)
2933 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2934 insert_before (g);
2935 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2936 NULL_TREE, NULL_TREE);
2937 insert_before (g);
2938 edge true_edge, false_edge;
2939 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2940 &true_edge, &false_edge);
2941 m_gsi = gsi_after_labels (false_edge->dest);
2942 m_bb = NULL;
2946 tree lhs = make_ssa_name (boolean_type_node);
2947 basic_block bb = gimple_bb (stmt);
2948 gphi *phi = create_phi_node (lhs, bb);
2949 for (unsigned int i = 0; i < cnt * 2; i++)
2951 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2952 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2953 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2955 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2956 ? boolean_true_node : boolean_false_node,
2957 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2958 cmp_code = NE_EXPR;
2959 return lhs;
2962 /* Lower large/huge _BitInt left and right shift except for left
2963 shift by < limb_prec constant. */
2965 void
2966 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2968 tree rhs1 = gimple_assign_rhs1 (stmt);
2969 tree lhs = gimple_assign_lhs (stmt);
2970 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2971 tree type = TREE_TYPE (rhs1);
2972 gimple *final_stmt = gsi_stmt (m_gsi);
2973 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2974 && bitint_precision_kind (type) >= bitint_prec_large);
2975 int prec = TYPE_PRECISION (type);
2976 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2977 gimple *g;
2978 if (obj == NULL_TREE)
2980 int part = var_to_partition (m_map, lhs);
2981 gcc_assert (m_vars[part] != NULL_TREE);
2982 obj = m_vars[part];
2984 /* Preparation code common for both left and right shifts.
2985 unsigned n1 = n % limb_prec;
2986 size_t n2 = n / limb_prec;
2987 size_t n3 = n1 != 0;
2988 unsigned n4 = (limb_prec - n1) % limb_prec;
2989 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
2990 if (TREE_CODE (n) == INTEGER_CST)
2992 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2993 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2994 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2995 n3 = size_int (!integer_zerop (n1));
2996 n4 = int_const_binop (TRUNC_MOD_EXPR,
2997 int_const_binop (MINUS_EXPR, lp, n1), lp);
2999 else
3001 n1 = make_ssa_name (TREE_TYPE (n));
3002 n2 = make_ssa_name (sizetype);
3003 n3 = make_ssa_name (sizetype);
3004 n4 = make_ssa_name (TREE_TYPE (n));
3005 if (pow2p_hwi (limb_prec))
3007 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3008 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3009 insert_before (g);
3010 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3011 TREE_TYPE (n))
3012 ? n2 : make_ssa_name (TREE_TYPE (n)),
3013 RSHIFT_EXPR, n,
3014 build_int_cst (TREE_TYPE (n),
3015 exact_log2 (limb_prec)));
3016 insert_before (g);
3017 if (gimple_assign_lhs (g) != n2)
3019 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3020 insert_before (g);
3022 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3023 NEGATE_EXPR, n1);
3024 insert_before (g);
3025 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3026 lpm1);
3027 insert_before (g);
3029 else
3031 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3032 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3033 insert_before (g);
3034 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3035 TREE_TYPE (n))
3036 ? n2 : make_ssa_name (TREE_TYPE (n)),
3037 TRUNC_DIV_EXPR, n, lp);
3038 insert_before (g);
3039 if (gimple_assign_lhs (g) != n2)
3041 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3042 insert_before (g);
3044 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3045 MINUS_EXPR, lp, n1);
3046 insert_before (g);
3047 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3048 lp);
3049 insert_before (g);
3051 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3052 build_zero_cst (TREE_TYPE (n)));
3053 insert_before (g);
3054 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3055 insert_before (g);
3057 tree p = build_int_cst (sizetype,
3058 prec / limb_prec - (prec % limb_prec == 0));
3059 if (rhs_code == RSHIFT_EXPR)
3061 /* Lower
3062 dst = src >> n;
3064 unsigned n1 = n % limb_prec;
3065 size_t n2 = n / limb_prec;
3066 size_t n3 = n1 != 0;
3067 unsigned n4 = (limb_prec - n1) % limb_prec;
3068 size_t idx;
3069 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3070 int signed_p = (typeof (src) -1) < 0;
3071 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3072 ? p : p - n3); ++idx)
3073 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3074 limb_type ext;
3075 if (prec % limb_prec == 0)
3076 ext = src[p];
3077 else if (signed_p)
3078 ext = ((signed limb_type) (src[p] << (limb_prec
3079 - (prec % limb_prec))))
3080 >> (limb_prec - (prec % limb_prec));
3081 else
3082 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3083 if (!signed_p && (prec % limb_prec == 0))
3085 else if (idx < prec / 64)
3087 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3088 ++idx;
3090 idx -= n2;
3091 if (signed_p)
3093 dst[idx] = ((signed limb_type) ext) >> n1;
3094 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3096 else
3098 dst[idx] = ext >> n1;
3099 ext = 0;
3101 for (++idx; idx <= p; ++idx)
3102 dst[idx] = ext; */
3103 tree pmn3;
3104 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3105 pmn3 = p;
3106 else if (TREE_CODE (n3) == INTEGER_CST)
3107 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3108 else
3110 pmn3 = make_ssa_name (sizetype);
3111 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3112 insert_before (g);
3114 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3115 edge edge_true, edge_false;
3116 if_then (g, profile_probability::likely (), edge_true, edge_false);
3117 tree idx_next;
3118 tree idx = create_loop (n2, &idx_next);
3119 tree idxmn2 = make_ssa_name (sizetype);
3120 tree idxpn3 = make_ssa_name (sizetype);
3121 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3122 insert_before (g);
3123 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3124 insert_before (g);
3125 m_data_cnt = 0;
3126 tree t1 = handle_operand (rhs1, idx);
3127 m_first = false;
3128 g = gimple_build_assign (make_ssa_name (m_limb_type),
3129 RSHIFT_EXPR, t1, n1);
3130 insert_before (g);
3131 t1 = gimple_assign_lhs (g);
3132 if (!integer_zerop (n3))
3134 m_data_cnt = 0;
3135 tree t2 = handle_operand (rhs1, idxpn3);
3136 g = gimple_build_assign (make_ssa_name (m_limb_type),
3137 LSHIFT_EXPR, t2, n4);
3138 insert_before (g);
3139 t2 = gimple_assign_lhs (g);
3140 g = gimple_build_assign (make_ssa_name (m_limb_type),
3141 BIT_IOR_EXPR, t1, t2);
3142 insert_before (g);
3143 t1 = gimple_assign_lhs (g);
3145 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3146 g = gimple_build_assign (l, t1);
3147 insert_before (g);
3148 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3149 insert_before (g);
3150 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3151 insert_before (g);
3152 idx = make_ssa_name (sizetype);
3153 m_gsi = gsi_for_stmt (final_stmt);
3154 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3155 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3156 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3157 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3158 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3159 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3160 m_data_cnt = 0;
3161 tree ms = handle_operand (rhs1, p);
3162 tree ext = ms;
3163 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3164 ext = add_cast (m_limb_type, ms);
3165 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3166 && !integer_zerop (n3))
3168 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3169 if_then (g, profile_probability::likely (), edge_true, edge_false);
3170 m_data_cnt = 0;
3171 t1 = handle_operand (rhs1, idx);
3172 g = gimple_build_assign (make_ssa_name (m_limb_type),
3173 RSHIFT_EXPR, t1, n1);
3174 insert_before (g);
3175 t1 = gimple_assign_lhs (g);
3176 g = gimple_build_assign (make_ssa_name (m_limb_type),
3177 LSHIFT_EXPR, ext, n4);
3178 insert_before (g);
3179 tree t2 = gimple_assign_lhs (g);
3180 g = gimple_build_assign (make_ssa_name (m_limb_type),
3181 BIT_IOR_EXPR, t1, t2);
3182 insert_before (g);
3183 t1 = gimple_assign_lhs (g);
3184 idxmn2 = make_ssa_name (sizetype);
3185 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3186 insert_before (g);
3187 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3188 g = gimple_build_assign (l, t1);
3189 insert_before (g);
3190 idx_next = make_ssa_name (sizetype);
3191 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3192 insert_before (g);
3193 m_gsi = gsi_for_stmt (final_stmt);
3194 tree nidx = make_ssa_name (sizetype);
3195 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3196 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3197 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3198 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3199 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3200 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3201 idx = nidx;
3203 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3204 insert_before (g);
3205 idx = gimple_assign_lhs (g);
3206 tree sext = ext;
3207 if (!TYPE_UNSIGNED (type))
3208 sext = add_cast (signed_type_for (m_limb_type), ext);
3209 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3210 RSHIFT_EXPR, sext, n1);
3211 insert_before (g);
3212 t1 = gimple_assign_lhs (g);
3213 if (!TYPE_UNSIGNED (type))
3215 t1 = add_cast (m_limb_type, t1);
3216 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3217 RSHIFT_EXPR, sext,
3218 build_int_cst (TREE_TYPE (n),
3219 limb_prec - 1));
3220 insert_before (g);
3221 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3223 else
3224 ext = build_zero_cst (m_limb_type);
3225 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3226 g = gimple_build_assign (l, t1);
3227 insert_before (g);
3228 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3229 size_one_node);
3230 insert_before (g);
3231 idx = gimple_assign_lhs (g);
3232 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3233 if_then (g, profile_probability::likely (), edge_true, edge_false);
3234 idx = create_loop (idx, &idx_next);
3235 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3236 g = gimple_build_assign (l, ext);
3237 insert_before (g);
3238 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3239 insert_before (g);
3240 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3241 insert_before (g);
3243 else
3245 /* Lower
3246 dst = src << n;
3248 unsigned n1 = n % limb_prec;
3249 size_t n2 = n / limb_prec;
3250 size_t n3 = n1 != 0;
3251 unsigned n4 = (limb_prec - n1) % limb_prec;
3252 size_t idx;
3253 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3254 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3255 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3256 if (n1)
3258 dst[idx] = src[idx - n2] << n1;
3259 --idx;
3261 for (; (ssize_t) idx >= 0; --idx)
3262 dst[idx] = 0; */
3263 tree n2pn3;
3264 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3265 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3266 else
3268 n2pn3 = make_ssa_name (sizetype);
3269 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3270 insert_before (g);
3272 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3273 idx even to access the most significant partial limb. */
3274 m_var_msb = true;
3275 if (integer_zerop (n3))
3276 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3277 counts. Emit if (true) condition that can be optimized later. */
3278 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3279 NULL_TREE, NULL_TREE);
3280 else
3281 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3282 edge edge_true, edge_false;
3283 if_then (g, profile_probability::likely (), edge_true, edge_false);
3284 tree idx_next;
3285 tree idx = create_loop (p, &idx_next);
3286 tree idxmn2 = make_ssa_name (sizetype);
3287 tree idxmn2mn3 = make_ssa_name (sizetype);
3288 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3289 insert_before (g);
3290 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3291 insert_before (g);
3292 m_data_cnt = 0;
3293 tree t1 = handle_operand (rhs1, idxmn2);
3294 m_first = false;
3295 g = gimple_build_assign (make_ssa_name (m_limb_type),
3296 LSHIFT_EXPR, t1, n1);
3297 insert_before (g);
3298 t1 = gimple_assign_lhs (g);
3299 if (!integer_zerop (n3))
3301 m_data_cnt = 0;
3302 tree t2 = handle_operand (rhs1, idxmn2mn3);
3303 g = gimple_build_assign (make_ssa_name (m_limb_type),
3304 RSHIFT_EXPR, t2, n4);
3305 insert_before (g);
3306 t2 = gimple_assign_lhs (g);
3307 g = gimple_build_assign (make_ssa_name (m_limb_type),
3308 BIT_IOR_EXPR, t1, t2);
3309 insert_before (g);
3310 t1 = gimple_assign_lhs (g);
3312 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3313 g = gimple_build_assign (l, t1);
3314 insert_before (g);
3315 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3316 insert_before (g);
3317 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3318 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3319 NULL_TREE, NULL_TREE);
3320 insert_before (g);
3321 idx = make_ssa_name (sizetype);
3322 m_gsi = gsi_for_stmt (final_stmt);
3323 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3324 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3325 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3326 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3327 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3328 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3329 m_data_cnt = 0;
3330 if (!integer_zerop (n3))
3332 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3333 NULL_TREE, NULL_TREE);
3334 if_then (g, profile_probability::likely (), edge_true, edge_false);
3335 idxmn2 = make_ssa_name (sizetype);
3336 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3337 insert_before (g);
3338 m_data_cnt = 0;
3339 t1 = handle_operand (rhs1, idxmn2);
3340 g = gimple_build_assign (make_ssa_name (m_limb_type),
3341 LSHIFT_EXPR, t1, n1);
3342 insert_before (g);
3343 t1 = gimple_assign_lhs (g);
3344 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3345 g = gimple_build_assign (l, t1);
3346 insert_before (g);
3347 idx_next = make_ssa_name (sizetype);
3348 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3349 insert_before (g);
3350 m_gsi = gsi_for_stmt (final_stmt);
3351 tree nidx = make_ssa_name (sizetype);
3352 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3353 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3354 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3355 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3356 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3357 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3358 idx = nidx;
3360 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3361 ssize_int (0), NULL_TREE, NULL_TREE);
3362 if_then (g, profile_probability::likely (), edge_true, edge_false);
3363 idx = create_loop (idx, &idx_next);
3364 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3365 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3366 insert_before (g);
3367 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3368 insert_before (g);
3369 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3370 ssize_int (0), NULL_TREE, NULL_TREE);
3371 insert_before (g);
3375 /* Lower large/huge _BitInt multiplication or division. */
3377 void
3378 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3380 tree rhs1 = gimple_assign_rhs1 (stmt);
3381 tree rhs2 = gimple_assign_rhs2 (stmt);
3382 tree lhs = gimple_assign_lhs (stmt);
3383 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3384 tree type = TREE_TYPE (rhs1);
3385 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3386 && bitint_precision_kind (type) >= bitint_prec_large);
3387 int prec = TYPE_PRECISION (type), prec1, prec2;
3388 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3389 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3390 if (obj == NULL_TREE)
3392 int part = var_to_partition (m_map, lhs);
3393 gcc_assert (m_vars[part] != NULL_TREE);
3394 obj = m_vars[part];
3395 lhs = build_fold_addr_expr (obj);
3397 else
3399 lhs = build_fold_addr_expr (obj);
3400 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3401 NULL_TREE, true, GSI_SAME_STMT);
3403 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3404 gimple *g;
3405 switch (rhs_code)
3407 case MULT_EXPR:
3408 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3409 lhs, build_int_cst (sitype, prec),
3410 rhs1, build_int_cst (sitype, prec1),
3411 rhs2, build_int_cst (sitype, prec2));
3412 insert_before (g);
3413 break;
3414 case TRUNC_DIV_EXPR:
3415 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3416 lhs, build_int_cst (sitype, prec),
3417 null_pointer_node,
3418 build_int_cst (sitype, 0),
3419 rhs1, build_int_cst (sitype, prec1),
3420 rhs2, build_int_cst (sitype, prec2));
3421 if (!stmt_ends_bb_p (stmt))
3422 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3423 insert_before (g);
3424 break;
3425 case TRUNC_MOD_EXPR:
3426 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3427 build_int_cst (sitype, 0),
3428 lhs, build_int_cst (sitype, prec),
3429 rhs1, build_int_cst (sitype, prec1),
3430 rhs2, build_int_cst (sitype, prec2));
3431 if (!stmt_ends_bb_p (stmt))
3432 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3433 insert_before (g);
3434 break;
3435 default:
3436 gcc_unreachable ();
3438 if (stmt_ends_bb_p (stmt))
3440 maybe_duplicate_eh_stmt (g, stmt);
3441 edge e1;
3442 edge_iterator ei;
3443 basic_block bb = gimple_bb (stmt);
3445 FOR_EACH_EDGE (e1, ei, bb->succs)
3446 if (e1->flags & EDGE_EH)
3447 break;
3448 if (e1)
3450 edge e2 = split_block (gsi_bb (m_gsi), g);
3451 m_gsi = gsi_after_labels (e2->dest);
3452 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3453 = profile_probability::very_unlikely ();
3458 /* Lower large/huge _BitInt conversion to/from floating point. */
3460 void
3461 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3463 tree rhs1 = gimple_assign_rhs1 (stmt);
3464 tree lhs = gimple_assign_lhs (stmt);
3465 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3466 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3467 gimple *g;
3468 if (rhs_code == FIX_TRUNC_EXPR)
3470 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3471 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3472 prec = -prec;
3473 if (obj == NULL_TREE)
3475 int part = var_to_partition (m_map, lhs);
3476 gcc_assert (m_vars[part] != NULL_TREE);
3477 obj = m_vars[part];
3478 lhs = build_fold_addr_expr (obj);
3480 else
3482 lhs = build_fold_addr_expr (obj);
3483 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3484 NULL_TREE, true, GSI_SAME_STMT);
3486 scalar_mode from_mode
3487 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3488 #ifdef HAVE_SFmode
3489 /* IEEE single is a full superset of both IEEE half and
3490 bfloat formats, convert to float first and then to _BitInt
3491 to avoid the need of another 2 library routines. */
3492 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3493 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3494 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3496 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3497 if (type)
3498 rhs1 = add_cast (type, rhs1);
3500 #endif
3501 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3502 lhs, build_int_cst (sitype, prec),
3503 rhs1);
3504 insert_before (g);
3506 else
3508 int prec;
3509 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3510 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3511 rhs1, build_int_cst (sitype, prec));
3512 gimple_call_set_lhs (g, lhs);
3513 if (!stmt_ends_bb_p (stmt))
3514 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3515 gsi_replace (&m_gsi, g, true);
3519 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3520 If check_zero is true, caller wants to check if all bits in [start, end)
3521 are zero, otherwise if bits in [start, end) are either all zero or
3522 all ones. L is the limb with index LIMB, START and END are measured
3523 in bits. */
3525 tree
3526 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3527 unsigned int end, tree l,
3528 unsigned int limb,
3529 bool check_zero)
3531 unsigned startlimb = start / limb_prec;
3532 unsigned endlimb = (end - 1) / limb_prec;
3533 gimple *g;
3535 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3536 return l;
3537 if (startlimb == endlimb && limb == startlimb)
3539 if (check_zero)
3541 wide_int w = wi::shifted_mask (start % limb_prec,
3542 end - start, false, limb_prec);
3543 g = gimple_build_assign (make_ssa_name (m_limb_type),
3544 BIT_AND_EXPR, l,
3545 wide_int_to_tree (m_limb_type, w));
3546 insert_before (g);
3547 return gimple_assign_lhs (g);
3549 unsigned int shift = start % limb_prec;
3550 if ((end % limb_prec) != 0)
3552 unsigned int lshift = (-end) % limb_prec;
3553 shift += lshift;
3554 g = gimple_build_assign (make_ssa_name (m_limb_type),
3555 LSHIFT_EXPR, l,
3556 build_int_cst (unsigned_type_node,
3557 lshift));
3558 insert_before (g);
3559 l = gimple_assign_lhs (g);
3561 l = add_cast (signed_type_for (m_limb_type), l);
3562 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3563 RSHIFT_EXPR, l,
3564 build_int_cst (unsigned_type_node, shift));
3565 insert_before (g);
3566 return add_cast (m_limb_type, gimple_assign_lhs (g));
3568 else if (limb == startlimb)
3570 if ((start % limb_prec) == 0)
3571 return l;
3572 if (!check_zero)
3573 l = add_cast (signed_type_for (m_limb_type), l);
3574 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3575 RSHIFT_EXPR, l,
3576 build_int_cst (unsigned_type_node,
3577 start % limb_prec));
3578 insert_before (g);
3579 l = gimple_assign_lhs (g);
3580 if (!check_zero)
3581 l = add_cast (m_limb_type, l);
3582 return l;
3584 else if (limb == endlimb)
3586 if ((end % limb_prec) == 0)
3587 return l;
3588 if (check_zero)
3590 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3591 g = gimple_build_assign (make_ssa_name (m_limb_type),
3592 BIT_AND_EXPR, l,
3593 wide_int_to_tree (m_limb_type, w));
3594 insert_before (g);
3595 return gimple_assign_lhs (g);
3597 unsigned int shift = (-end) % limb_prec;
3598 g = gimple_build_assign (make_ssa_name (m_limb_type),
3599 LSHIFT_EXPR, l,
3600 build_int_cst (unsigned_type_node, shift));
3601 insert_before (g);
3602 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3603 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3604 RSHIFT_EXPR, l,
3605 build_int_cst (unsigned_type_node, shift));
3606 insert_before (g);
3607 return add_cast (m_limb_type, gimple_assign_lhs (g));
3609 return l;
3612 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3613 result including overflow flag into the right locations. */
3615 void
3616 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3617 tree ovf, tree lhs, tree orig_obj,
3618 gimple *stmt, tree_code code)
3620 gimple *g;
3622 if (obj == NULL_TREE
3623 && (TREE_CODE (type) != BITINT_TYPE
3624 || bitint_precision_kind (type) < bitint_prec_large))
3626 /* Add support for 3 or more limbs filled in from normal integral
3627 type if this assert fails. If no target chooses limb mode smaller
3628 than half of largest supported normal integral type, this will not
3629 be needed. */
3630 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3631 tree lhs_type = type;
3632 if (TREE_CODE (type) == BITINT_TYPE
3633 && bitint_precision_kind (type) == bitint_prec_middle)
3634 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3635 TYPE_UNSIGNED (type));
3636 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3637 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3638 insert_before (g);
3639 r1 = gimple_assign_lhs (g);
3640 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3641 r1 = add_cast (lhs_type, r1);
3642 if (TYPE_PRECISION (lhs_type) > limb_prec)
3644 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3645 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3646 insert_before (g);
3647 r2 = gimple_assign_lhs (g);
3648 r2 = add_cast (lhs_type, r2);
3649 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3650 build_int_cst (unsigned_type_node,
3651 limb_prec));
3652 insert_before (g);
3653 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3654 gimple_assign_lhs (g));
3655 insert_before (g);
3656 r1 = gimple_assign_lhs (g);
3658 if (lhs_type != type)
3659 r1 = add_cast (type, r1);
3660 ovf = add_cast (lhs_type, ovf);
3661 if (lhs_type != type)
3662 ovf = add_cast (type, ovf);
3663 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3664 m_gsi = gsi_for_stmt (stmt);
3665 gsi_replace (&m_gsi, g, true);
3667 else
3669 unsigned HOST_WIDE_INT nelts = 0;
3670 tree atype = NULL_TREE;
3671 if (obj)
3673 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3674 if (orig_obj == NULL_TREE)
3675 nelts >>= 1;
3676 atype = build_array_type_nelts (m_limb_type, nelts);
3678 if (var && obj)
3680 tree v1, v2;
3681 tree zero;
3682 if (orig_obj == NULL_TREE)
3684 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3685 v1 = build2 (MEM_REF, atype,
3686 build_fold_addr_expr (unshare_expr (obj)), zero);
3688 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3689 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3690 else
3691 v1 = unshare_expr (obj);
3692 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3693 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3694 g = gimple_build_assign (v1, v2);
3695 insert_before (g);
3697 if (orig_obj == NULL_TREE && obj)
3699 ovf = add_cast (m_limb_type, ovf);
3700 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3701 g = gimple_build_assign (l, ovf);
3702 insert_before (g);
3703 if (nelts > 1)
3705 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3706 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3707 (nelts + 1) * m_limb_size);
3708 tree v1 = build2 (MEM_REF, atype,
3709 build_fold_addr_expr (unshare_expr (obj)),
3710 off);
3711 g = gimple_build_assign (v1, build_zero_cst (atype));
3712 insert_before (g);
3715 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3717 imm_use_iterator ui;
3718 use_operand_p use_p;
3719 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3721 g = USE_STMT (use_p);
3722 if (!is_gimple_assign (g)
3723 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3724 continue;
3725 tree lhs2 = gimple_assign_lhs (g);
3726 gimple *use_stmt;
3727 single_imm_use (lhs2, &use_p, &use_stmt);
3728 lhs2 = gimple_assign_lhs (use_stmt);
3729 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3730 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3731 g = gimple_build_assign (lhs2, ovf);
3732 else
3733 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3734 gsi_replace (&gsi, g, true);
3735 if (gsi_stmt (m_gsi) == use_stmt)
3736 m_gsi = gsi_for_stmt (g);
3737 break;
3740 else if (ovf != boolean_false_node)
3742 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3743 NULL_TREE, NULL_TREE);
3744 edge edge_true, edge_false;
3745 if_then (g, profile_probability::very_unlikely (),
3746 edge_true, edge_false);
3747 tree zero = build_zero_cst (TREE_TYPE (lhs));
3748 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3749 TREE_TYPE (lhs),
3750 zero, zero, NULL);
3751 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3752 true, GSI_SAME_STMT);
3753 m_gsi = gsi_after_labels (edge_true->dest);
3756 if (var)
3758 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3759 g = gimple_build_assign (var, clobber);
3760 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3764 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3765 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3766 argument 1 precision PREC1 and minimum precision for the result
3767 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3769 static tree
3770 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3771 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3773 *start = 0;
3774 *end = 0;
3775 *check_zero = true;
3776 /* Ignore this special rule for subtraction, even if both
3777 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3778 in infinite precision. */
3779 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3781 /* Result in [0, prec2) is unsigned, if prec > prec2,
3782 all bits above it will be zero. */
3783 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3784 return boolean_false_node;
3785 else
3787 /* ovf if any of bits in [start, end) is non-zero. */
3788 *start = prec - !TYPE_UNSIGNED (type);
3789 *end = prec2;
3792 else if (TYPE_UNSIGNED (type))
3794 /* If result in [0, prec2) is signed and if prec > prec2,
3795 all bits above it will be sign bit copies. */
3796 if (prec >= prec2)
3798 /* ovf if bit prec - 1 is non-zero. */
3799 *start = prec - 1;
3800 *end = prec;
3802 else
3804 /* ovf if any of bits in [start, end) is non-zero. */
3805 *start = prec;
3806 *end = prec2;
3809 else if (prec >= prec2)
3810 return boolean_false_node;
3811 else
3813 /* ovf if [start, end) bits aren't all zeros or all ones. */
3814 *start = prec - 1;
3815 *end = prec2;
3816 *check_zero = false;
3818 return NULL_TREE;
3821 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3822 argument or return type _Complex large/huge _BitInt. */
3824 void
3825 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3827 tree arg0 = gimple_call_arg (stmt, 0);
3828 tree arg1 = gimple_call_arg (stmt, 1);
3829 tree lhs = gimple_call_lhs (stmt);
3830 gimple *g;
3832 if (!lhs)
3834 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3835 gsi_remove (&gsi, true);
3836 return;
3838 gimple *final_stmt = gsi_stmt (m_gsi);
3839 tree type = TREE_TYPE (lhs);
3840 if (TREE_CODE (type) == COMPLEX_TYPE)
3841 type = TREE_TYPE (type);
3842 int prec = TYPE_PRECISION (type);
3843 int prec0 = range_to_prec (arg0, stmt);
3844 int prec1 = range_to_prec (arg1, stmt);
3845 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3846 the be minimum unsigned precision of any possible operation's
3847 result, otherwise it is minimum signed precision.
3848 Some examples:
3849 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3850 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3851 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3852 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3853 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3854 8 + 8 [0, 0x1fe] 9 UNSIGNED
3855 8 + 10 [0, 0x4fe] 11 UNSIGNED
3856 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3857 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3858 8 + -8 [-0x80, 0x17e] 10 SIGNED
3859 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3860 10 + -8 [-0x80, 0x47e] 12 SIGNED
3861 8 - 8 [-0xff, 0xff] 9 SIGNED
3862 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3863 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3864 -8 - -8 [-0xff, 0xff] 9 SIGNED
3865 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3866 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3867 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3868 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3869 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3870 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3871 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3872 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3873 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3874 prec1 < 0 ? -prec1 : prec1);
3875 /* If operands are either both signed or both unsigned,
3876 we need just one additional bit. */
3877 prec2 = (((prec0 < 0) == (prec1 < 0)
3878 /* If one operand is signed and one unsigned and
3879 the signed one has larger precision, we need
3880 just one extra bit, otherwise two. */
3881 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3882 : (prec2 == -prec1 && prec2 != prec0)))
3883 ? prec2 + 1 : prec2 + 2);
3884 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3885 prec1 < 0 ? -prec1 : prec1);
3886 prec3 = MAX (prec3, prec);
3887 tree var = NULL_TREE;
3888 tree orig_obj = obj;
3889 if (obj == NULL_TREE
3890 && TREE_CODE (type) == BITINT_TYPE
3891 && bitint_precision_kind (type) >= bitint_prec_large
3892 && m_names
3893 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3895 int part = var_to_partition (m_map, lhs);
3896 gcc_assert (m_vars[part] != NULL_TREE);
3897 obj = m_vars[part];
3898 if (TREE_TYPE (lhs) == type)
3899 orig_obj = obj;
3901 if (TREE_CODE (type) != BITINT_TYPE
3902 || bitint_precision_kind (type) < bitint_prec_large)
3904 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3905 tree atype = build_array_type_nelts (m_limb_type, nelts);
3906 var = create_tmp_var (atype);
3909 enum tree_code code;
3910 switch (gimple_call_internal_fn (stmt))
3912 case IFN_ADD_OVERFLOW:
3913 case IFN_UBSAN_CHECK_ADD:
3914 code = PLUS_EXPR;
3915 break;
3916 case IFN_SUB_OVERFLOW:
3917 case IFN_UBSAN_CHECK_SUB:
3918 code = MINUS_EXPR;
3919 break;
3920 default:
3921 gcc_unreachable ();
3923 unsigned start, end;
3924 bool check_zero;
3925 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3926 &start, &end, &check_zero);
3928 unsigned startlimb, endlimb;
3929 if (ovf)
3931 startlimb = ~0U;
3932 endlimb = ~0U;
3934 else
3936 startlimb = start / limb_prec;
3937 endlimb = (end - 1) / limb_prec;
3940 int prec4 = ovf != NULL_TREE ? prec : prec3;
3941 bitint_prec_kind kind = bitint_precision_kind (prec4);
3942 unsigned cnt, rem = 0, fin = 0;
3943 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3944 bool last_ovf = (ovf == NULL_TREE
3945 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3946 if (kind != bitint_prec_huge)
3947 cnt = CEIL (prec4, limb_prec) + last_ovf;
3948 else
3950 rem = (prec4 % (2 * limb_prec));
3951 fin = (prec4 - rem) / limb_prec;
3952 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3953 idx = idx_first = create_loop (size_zero_node, &idx_next);
3956 if (kind == bitint_prec_huge)
3957 m_upwards_2limb = fin;
3958 m_upwards = true;
3960 tree type0 = TREE_TYPE (arg0);
3961 tree type1 = TREE_TYPE (arg1);
3962 int prec5 = prec3;
3963 if (bitint_precision_kind (prec5) < bitint_prec_large)
3964 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3965 if (TYPE_PRECISION (type0) < prec5)
3967 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3968 if (TREE_CODE (arg0) == INTEGER_CST)
3969 arg0 = fold_convert (type0, arg0);
3971 if (TYPE_PRECISION (type1) < prec5)
3973 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3974 if (TREE_CODE (arg1) == INTEGER_CST)
3975 arg1 = fold_convert (type1, arg1);
3977 unsigned int data_cnt = 0;
3978 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3979 tree cmp = build_zero_cst (m_limb_type);
3980 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3981 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3982 for (unsigned i = 0; i < cnt; i++)
3984 m_data_cnt = 0;
3985 tree rhs1, rhs2;
3986 if (kind != bitint_prec_huge)
3987 idx = size_int (i);
3988 else if (i >= 2)
3989 idx = size_int (fin + (i > 2));
3990 if (!last_ovf || i < cnt - 1)
3992 if (type0 != TREE_TYPE (arg0))
3993 rhs1 = handle_cast (type0, arg0, idx);
3994 else
3995 rhs1 = handle_operand (arg0, idx);
3996 if (type1 != TREE_TYPE (arg1))
3997 rhs2 = handle_cast (type1, arg1, idx);
3998 else
3999 rhs2 = handle_operand (arg1, idx);
4000 if (i == 0)
4001 data_cnt = m_data_cnt;
4002 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4003 rhs1 = add_cast (m_limb_type, rhs1);
4004 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4005 rhs2 = add_cast (m_limb_type, rhs2);
4006 last_rhs1 = rhs1;
4007 last_rhs2 = rhs2;
4009 else
4011 m_data_cnt = data_cnt;
4012 if (TYPE_UNSIGNED (type0))
4013 rhs1 = build_zero_cst (m_limb_type);
4014 else
4016 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4017 if (TREE_CODE (rhs1) == INTEGER_CST)
4018 rhs1 = build_int_cst (m_limb_type,
4019 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4020 else
4022 tree lpm1 = build_int_cst (unsigned_type_node,
4023 limb_prec - 1);
4024 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4025 RSHIFT_EXPR, rhs1, lpm1);
4026 insert_before (g);
4027 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4030 if (TYPE_UNSIGNED (type1))
4031 rhs2 = build_zero_cst (m_limb_type);
4032 else
4034 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4035 if (TREE_CODE (rhs2) == INTEGER_CST)
4036 rhs2 = build_int_cst (m_limb_type,
4037 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4038 else
4040 tree lpm1 = build_int_cst (unsigned_type_node,
4041 limb_prec - 1);
4042 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4043 RSHIFT_EXPR, rhs2, lpm1);
4044 insert_before (g);
4045 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4049 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4050 if (ovf != boolean_false_node)
4052 if (tree_fits_uhwi_p (idx))
4054 unsigned limb = tree_to_uhwi (idx);
4055 if (limb >= startlimb && limb <= endlimb)
4057 tree l = arith_overflow_extract_bits (start, end, rhs,
4058 limb, check_zero);
4059 tree this_ovf = make_ssa_name (boolean_type_node);
4060 if (ovf == NULL_TREE && !check_zero)
4062 cmp = l;
4063 g = gimple_build_assign (make_ssa_name (m_limb_type),
4064 PLUS_EXPR, l,
4065 build_int_cst (m_limb_type, 1));
4066 insert_before (g);
4067 g = gimple_build_assign (this_ovf, GT_EXPR,
4068 gimple_assign_lhs (g),
4069 build_int_cst (m_limb_type, 1));
4071 else
4072 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4073 insert_before (g);
4074 if (ovf == NULL_TREE)
4075 ovf = this_ovf;
4076 else
4078 tree b = make_ssa_name (boolean_type_node);
4079 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4080 insert_before (g);
4081 ovf = b;
4085 else if (startlimb < fin)
4087 if (m_first && startlimb + 2 < fin)
4089 tree data_out;
4090 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4091 ovf_out = m_data.pop ();
4092 m_data.pop ();
4093 if (!check_zero)
4095 cmp = prepare_data_in_out (cmp, idx, &data_out);
4096 cmp_out = m_data.pop ();
4097 m_data.pop ();
4100 if (i != 0 || startlimb != fin - 1)
4102 tree_code cmp_code;
4103 bool single_comparison
4104 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4105 if (!single_comparison)
4107 cmp_code = GE_EXPR;
4108 if (!check_zero && (start % limb_prec) == 0)
4109 single_comparison = true;
4111 else if ((startlimb & 1) == (i & 1))
4112 cmp_code = EQ_EXPR;
4113 else
4114 cmp_code = GT_EXPR;
4115 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4116 NULL_TREE, NULL_TREE);
4117 edge edge_true_true, edge_true_false, edge_false;
4118 gimple *g2 = NULL;
4119 if (!single_comparison)
4120 g2 = gimple_build_cond (NE_EXPR, idx,
4121 size_int (startlimb), NULL_TREE,
4122 NULL_TREE);
4123 if_then_if_then_else (g, g2, profile_probability::likely (),
4124 profile_probability::likely (),
4125 edge_true_true, edge_true_false,
4126 edge_false);
4127 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4128 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4129 check_zero);
4130 tree this_ovf = make_ssa_name (boolean_type_node);
4131 if (cmp_code != GT_EXPR && !check_zero)
4133 g = gimple_build_assign (make_ssa_name (m_limb_type),
4134 PLUS_EXPR, l,
4135 build_int_cst (m_limb_type, 1));
4136 insert_before (g);
4137 g = gimple_build_assign (this_ovf, GT_EXPR,
4138 gimple_assign_lhs (g),
4139 build_int_cst (m_limb_type, 1));
4141 else
4142 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4143 insert_before (g);
4144 if (cmp_code == GT_EXPR)
4146 tree t = make_ssa_name (boolean_type_node);
4147 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4148 insert_before (g);
4149 this_ovf = t;
4151 tree this_ovf2 = NULL_TREE;
4152 if (!single_comparison)
4154 m_gsi = gsi_after_labels (edge_true_true->src);
4155 tree t = make_ssa_name (boolean_type_node);
4156 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4157 insert_before (g);
4158 this_ovf2 = make_ssa_name (boolean_type_node);
4159 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4160 ovf, t);
4161 insert_before (g);
4163 m_gsi = gsi_after_labels (edge_true_false->dest);
4164 tree t;
4165 if (i == 1 && ovf_out)
4166 t = ovf_out;
4167 else
4168 t = make_ssa_name (boolean_type_node);
4169 gphi *phi = create_phi_node (t, edge_true_false->dest);
4170 add_phi_arg (phi, this_ovf, edge_true_false,
4171 UNKNOWN_LOCATION);
4172 add_phi_arg (phi, ovf ? ovf
4173 : boolean_false_node, edge_false,
4174 UNKNOWN_LOCATION);
4175 if (edge_true_true)
4176 add_phi_arg (phi, this_ovf2, edge_true_true,
4177 UNKNOWN_LOCATION);
4178 ovf = t;
4179 if (!check_zero && cmp_code != GT_EXPR)
4181 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4182 phi = create_phi_node (t, edge_true_false->dest);
4183 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4184 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4185 if (edge_true_true)
4186 add_phi_arg (phi, cmp, edge_true_true,
4187 UNKNOWN_LOCATION);
4188 cmp = t;
4194 if (var || obj)
4196 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4198 else if (!tree_fits_uhwi_p (idx)
4199 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4201 bool single_comparison
4202 = (((unsigned) prec % limb_prec) == 0
4203 || prec_limbs + 1 >= fin
4204 || (prec_limbs & 1) == (i & 1));
4205 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4206 NULL_TREE, NULL_TREE);
4207 gimple *g2 = NULL;
4208 if (!single_comparison)
4209 g2 = gimple_build_cond (LT_EXPR, idx,
4210 size_int (prec_limbs - 1),
4211 NULL_TREE, NULL_TREE);
4212 edge edge_true_true, edge_true_false, edge_false;
4213 if_then_if_then_else (g, g2, profile_probability::likely (),
4214 profile_probability::likely (),
4215 edge_true_true, edge_true_false,
4216 edge_false);
4217 tree l = limb_access (type, var ? var : obj, idx, true);
4218 g = gimple_build_assign (l, rhs);
4219 insert_before (g);
4220 if (!single_comparison)
4222 m_gsi = gsi_after_labels (edge_true_true->src);
4223 l = limb_access (type, var ? var : obj,
4224 size_int (prec_limbs - 1), true);
4225 if (!useless_type_conversion_p (TREE_TYPE (l),
4226 TREE_TYPE (rhs)))
4227 rhs = add_cast (TREE_TYPE (l), rhs);
4228 g = gimple_build_assign (l, rhs);
4229 insert_before (g);
4231 m_gsi = gsi_after_labels (edge_true_false->dest);
4233 else
4235 tree l = limb_access (type, var ? var : obj, idx, true);
4236 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4237 rhs = add_cast (TREE_TYPE (l), rhs);
4238 g = gimple_build_assign (l, rhs);
4239 insert_before (g);
4242 m_first = false;
4243 if (kind == bitint_prec_huge && i <= 1)
4245 if (i == 0)
4247 idx = make_ssa_name (sizetype);
4248 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4249 size_one_node);
4250 insert_before (g);
4252 else
4254 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4255 size_int (2));
4256 insert_before (g);
4257 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4258 NULL_TREE, NULL_TREE);
4259 insert_before (g);
4260 m_gsi = gsi_for_stmt (final_stmt);
4261 m_bb = NULL;
4266 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4269 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4270 argument or return type _Complex large/huge _BitInt. */
4272 void
4273 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4275 tree arg0 = gimple_call_arg (stmt, 0);
4276 tree arg1 = gimple_call_arg (stmt, 1);
4277 tree lhs = gimple_call_lhs (stmt);
4278 if (!lhs)
4280 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4281 gsi_remove (&gsi, true);
4282 return;
4284 gimple *final_stmt = gsi_stmt (m_gsi);
4285 tree type = TREE_TYPE (lhs);
4286 if (TREE_CODE (type) == COMPLEX_TYPE)
4287 type = TREE_TYPE (type);
4288 int prec = TYPE_PRECISION (type), prec0, prec1;
4289 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4290 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4291 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4292 + (prec1 < 0 ? -prec1 : prec1));
4293 if (prec0 == 1 || prec1 == 1)
4294 --prec2;
4295 tree var = NULL_TREE;
4296 tree orig_obj = obj;
4297 bool force_var = false;
4298 if (obj == NULL_TREE
4299 && TREE_CODE (type) == BITINT_TYPE
4300 && bitint_precision_kind (type) >= bitint_prec_large
4301 && m_names
4302 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4304 int part = var_to_partition (m_map, lhs);
4305 gcc_assert (m_vars[part] != NULL_TREE);
4306 obj = m_vars[part];
4307 if (TREE_TYPE (lhs) == type)
4308 orig_obj = obj;
4310 else if (obj != NULL_TREE && DECL_P (obj))
4312 for (int i = 0; i < 2; ++i)
4314 tree arg = i ? arg1 : arg0;
4315 if (TREE_CODE (arg) == ADDR_EXPR)
4316 arg = TREE_OPERAND (arg, 0);
4317 if (get_base_address (arg) == obj)
4319 force_var = true;
4320 break;
4324 if (obj == NULL_TREE
4325 || force_var
4326 || TREE_CODE (type) != BITINT_TYPE
4327 || bitint_precision_kind (type) < bitint_prec_large
4328 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4330 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4331 tree atype = build_array_type_nelts (m_limb_type, nelts);
4332 var = create_tmp_var (atype);
4334 tree addr = build_fold_addr_expr (var ? var : obj);
4335 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4336 NULL_TREE, true, GSI_SAME_STMT);
4337 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4338 gimple *g
4339 = gimple_build_call_internal (IFN_MULBITINT, 6,
4340 addr, build_int_cst (sitype,
4341 MAX (prec2, prec)),
4342 arg0, build_int_cst (sitype, prec0),
4343 arg1, build_int_cst (sitype, prec1));
4344 insert_before (g);
4346 unsigned start, end;
4347 bool check_zero;
4348 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4349 &start, &end, &check_zero);
4350 if (ovf == NULL_TREE)
4352 unsigned startlimb = start / limb_prec;
4353 unsigned endlimb = (end - 1) / limb_prec;
4354 unsigned cnt;
4355 bool use_loop = false;
4356 if (startlimb == endlimb)
4357 cnt = 1;
4358 else if (startlimb + 1 == endlimb)
4359 cnt = 2;
4360 else if ((end % limb_prec) == 0)
4362 cnt = 2;
4363 use_loop = true;
4365 else
4367 cnt = 3;
4368 use_loop = startlimb + 2 < endlimb;
4370 if (cnt == 1)
4372 tree l = limb_access (NULL_TREE, var ? var : obj,
4373 size_int (startlimb), true);
4374 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4375 insert_before (g);
4376 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4377 startlimb, check_zero);
4378 ovf = make_ssa_name (boolean_type_node);
4379 if (check_zero)
4380 g = gimple_build_assign (ovf, NE_EXPR, l,
4381 build_zero_cst (m_limb_type));
4382 else
4384 g = gimple_build_assign (make_ssa_name (m_limb_type),
4385 PLUS_EXPR, l,
4386 build_int_cst (m_limb_type, 1));
4387 insert_before (g);
4388 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4389 build_int_cst (m_limb_type, 1));
4391 insert_before (g);
4393 else
4395 basic_block edge_bb = NULL;
4396 gimple_stmt_iterator gsi = m_gsi;
4397 gsi_prev (&gsi);
4398 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4399 edge_bb = e->src;
4400 m_gsi = gsi_end_bb (edge_bb);
4402 tree cmp = build_zero_cst (m_limb_type);
4403 for (unsigned i = 0; i < cnt; i++)
4405 tree idx, idx_next = NULL_TREE;
4406 if (i == 0)
4407 idx = size_int (startlimb);
4408 else if (i == 2)
4409 idx = size_int (endlimb);
4410 else if (use_loop)
4411 idx = create_loop (size_int (startlimb + 1), &idx_next);
4412 else
4413 idx = size_int (startlimb + 1);
4414 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4415 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4416 insert_before (g);
4417 l = gimple_assign_lhs (g);
4418 if (i == 0 || i == 2)
4419 l = arith_overflow_extract_bits (start, end, l,
4420 tree_to_uhwi (idx),
4421 check_zero);
4422 if (i == 0 && !check_zero)
4424 cmp = l;
4425 g = gimple_build_assign (make_ssa_name (m_limb_type),
4426 PLUS_EXPR, l,
4427 build_int_cst (m_limb_type, 1));
4428 insert_before (g);
4429 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4430 build_int_cst (m_limb_type, 1),
4431 NULL_TREE, NULL_TREE);
4433 else
4434 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4435 insert_before (g);
4436 edge e1 = split_block (gsi_bb (m_gsi), g);
4437 e1->flags = EDGE_FALSE_VALUE;
4438 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4439 EDGE_TRUE_VALUE);
4440 e1->probability = profile_probability::likely ();
4441 e2->probability = e1->probability.invert ();
4442 if (i == 0)
4443 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4444 m_gsi = gsi_after_labels (e1->dest);
4445 if (i == 1 && use_loop)
4447 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4448 size_one_node);
4449 insert_before (g);
4450 g = gimple_build_cond (NE_EXPR, idx_next,
4451 size_int (endlimb + (cnt == 1)),
4452 NULL_TREE, NULL_TREE);
4453 insert_before (g);
4454 edge true_edge, false_edge;
4455 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4456 &true_edge,
4457 &false_edge);
4458 m_gsi = gsi_after_labels (false_edge->dest);
4459 m_bb = NULL;
4463 ovf = make_ssa_name (boolean_type_node);
4464 basic_block bb = gimple_bb (final_stmt);
4465 gphi *phi = create_phi_node (ovf, bb);
4466 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4467 edge_iterator ei;
4468 FOR_EACH_EDGE (e, ei, bb->preds)
4470 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4471 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4473 m_gsi = gsi_for_stmt (final_stmt);
4477 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4480 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4481 .{ADD,SUB,MUL}_OVERFLOW call. */
4483 void
4484 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4486 tree rhs1 = gimple_assign_rhs1 (stmt);
4487 rhs1 = TREE_OPERAND (rhs1, 0);
4488 if (obj == NULL_TREE)
4490 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4491 gcc_assert (m_vars[part] != NULL_TREE);
4492 obj = m_vars[part];
4494 if (TREE_CODE (rhs1) == SSA_NAME
4495 && (m_names == NULL
4496 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4498 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4499 return;
4501 int part = var_to_partition (m_map, rhs1);
4502 gcc_assert (m_vars[part] != NULL_TREE);
4503 tree var = m_vars[part];
4504 unsigned HOST_WIDE_INT nelts
4505 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4506 tree atype = build_array_type_nelts (m_limb_type, nelts);
4507 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4508 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4509 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4510 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4511 ? 0 : nelts * m_limb_size);
4512 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4513 gimple *g = gimple_build_assign (obj, v2);
4514 insert_before (g);
4517 /* Lower COMPLEX_EXPR stmt. */
4519 void
4520 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4522 tree lhs = gimple_assign_lhs (stmt);
4523 tree rhs1 = gimple_assign_rhs1 (stmt);
4524 tree rhs2 = gimple_assign_rhs2 (stmt);
4525 int part = var_to_partition (m_map, lhs);
4526 gcc_assert (m_vars[part] != NULL_TREE);
4527 lhs = m_vars[part];
4528 unsigned HOST_WIDE_INT nelts
4529 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4530 tree atype = build_array_type_nelts (m_limb_type, nelts);
4531 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4532 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4533 tree v2;
4534 if (TREE_CODE (rhs1) == SSA_NAME)
4536 part = var_to_partition (m_map, rhs1);
4537 gcc_assert (m_vars[part] != NULL_TREE);
4538 v2 = m_vars[part];
4540 else if (integer_zerop (rhs1))
4541 v2 = build_zero_cst (atype);
4542 else
4543 v2 = tree_output_constant_def (rhs1);
4544 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4545 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4546 gimple *g = gimple_build_assign (v1, v2);
4547 insert_before (g);
4548 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4549 TYPE_SIZE_UNIT (atype));
4550 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4551 if (TREE_CODE (rhs2) == SSA_NAME)
4553 part = var_to_partition (m_map, rhs2);
4554 gcc_assert (m_vars[part] != NULL_TREE);
4555 v2 = m_vars[part];
4557 else if (integer_zerop (rhs2))
4558 v2 = build_zero_cst (atype);
4559 else
4560 v2 = tree_output_constant_def (rhs2);
4561 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4562 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4563 g = gimple_build_assign (v1, v2);
4564 insert_before (g);
4567 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4568 argument. */
4570 void
4571 bitint_large_huge::lower_bit_query (gimple *stmt)
4573 tree arg0 = gimple_call_arg (stmt, 0);
4574 tree arg1 = (gimple_call_num_args (stmt) == 2
4575 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4576 tree lhs = gimple_call_lhs (stmt);
4577 gimple *g;
4579 if (!lhs)
4581 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4582 gsi_remove (&gsi, true);
4583 return;
4585 tree type = TREE_TYPE (arg0);
4586 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4587 bitint_prec_kind kind = bitint_precision_kind (type);
4588 gcc_assert (kind >= bitint_prec_large);
4589 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4590 enum built_in_function fcode = END_BUILTINS;
4591 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4592 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4593 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4594 switch (ifn)
4596 case IFN_CLZ:
4597 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4598 fcode = BUILT_IN_CLZ;
4599 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4600 fcode = BUILT_IN_CLZL;
4601 else
4602 fcode = BUILT_IN_CLZLL;
4603 break;
4604 case IFN_FFS:
4605 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4606 we don't add the addend at the end. */
4607 arg1 = integer_zero_node;
4608 /* FALLTHRU */
4609 case IFN_CTZ:
4610 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4611 fcode = BUILT_IN_CTZ;
4612 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4613 fcode = BUILT_IN_CTZL;
4614 else
4615 fcode = BUILT_IN_CTZLL;
4616 m_upwards = true;
4617 break;
4618 case IFN_CLRSB:
4619 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4620 fcode = BUILT_IN_CLRSB;
4621 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4622 fcode = BUILT_IN_CLRSBL;
4623 else
4624 fcode = BUILT_IN_CLRSBLL;
4625 break;
4626 case IFN_PARITY:
4627 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4628 fcode = BUILT_IN_PARITY;
4629 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4630 fcode = BUILT_IN_PARITYL;
4631 else
4632 fcode = BUILT_IN_PARITYLL;
4633 m_upwards = true;
4634 break;
4635 case IFN_POPCOUNT:
4636 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4637 fcode = BUILT_IN_POPCOUNT;
4638 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4639 fcode = BUILT_IN_POPCOUNTL;
4640 else
4641 fcode = BUILT_IN_POPCOUNTLL;
4642 m_upwards = true;
4643 break;
4644 default:
4645 gcc_unreachable ();
4647 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4648 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4649 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4650 basic_block edge_bb = NULL;
4651 if (m_upwards)
4653 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4654 if (kind == bitint_prec_large)
4655 cnt = CEIL (prec, limb_prec);
4656 else
4658 rem = (prec % (2 * limb_prec));
4659 end = (prec - rem) / limb_prec;
4660 cnt = 2 + CEIL (rem, limb_prec);
4661 idx = idx_first = create_loop (size_zero_node, &idx_next);
4664 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4666 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4667 gsi_prev (&gsi);
4668 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4669 edge_bb = e->src;
4670 if (kind == bitint_prec_large)
4671 m_gsi = gsi_end_bb (edge_bb);
4672 bqp = XALLOCAVEC (struct bq_details, cnt);
4674 else
4675 m_after_stmt = stmt;
4676 if (kind != bitint_prec_large)
4677 m_upwards_2limb = end;
4679 for (unsigned i = 0; i < cnt; i++)
4681 m_data_cnt = 0;
4682 if (kind == bitint_prec_large)
4683 idx = size_int (i);
4684 else if (i >= 2)
4685 idx = size_int (end + (i > 2));
4687 tree rhs1 = handle_operand (arg0, idx);
4688 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4690 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4691 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4692 rhs1 = add_cast (m_limb_type, rhs1);
4695 tree in, out, tem;
4696 if (ifn == IFN_PARITY)
4697 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4698 else if (ifn == IFN_FFS)
4699 in = prepare_data_in_out (integer_one_node, idx, &out);
4700 else
4701 in = prepare_data_in_out (integer_zero_node, idx, &out);
4703 switch (ifn)
4705 case IFN_CTZ:
4706 case IFN_FFS:
4707 g = gimple_build_cond (NE_EXPR, rhs1,
4708 build_zero_cst (m_limb_type),
4709 NULL_TREE, NULL_TREE);
4710 insert_before (g);
4711 edge e1, e2;
4712 e1 = split_block (gsi_bb (m_gsi), g);
4713 e1->flags = EDGE_FALSE_VALUE;
4714 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4715 e1->probability = profile_probability::unlikely ();
4716 e2->probability = e1->probability.invert ();
4717 if (i == 0)
4718 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4719 m_gsi = gsi_after_labels (e1->dest);
4720 bqp[i].e = e2;
4721 bqp[i].val = rhs1;
4722 if (tree_fits_uhwi_p (idx))
4723 bqp[i].addend
4724 = build_int_cst (integer_type_node,
4725 tree_to_uhwi (idx) * limb_prec
4726 + (ifn == IFN_FFS));
4727 else
4729 bqp[i].addend = in;
4730 if (i == 1)
4731 res = out;
4732 else
4733 res = make_ssa_name (integer_type_node);
4734 g = gimple_build_assign (res, PLUS_EXPR, in,
4735 build_int_cst (integer_type_node,
4736 limb_prec));
4737 insert_before (g);
4738 m_data[m_data_cnt] = res;
4740 break;
4741 case IFN_PARITY:
4742 if (!integer_zerop (in))
4744 if (kind == bitint_prec_huge && i == 1)
4745 res = out;
4746 else
4747 res = make_ssa_name (m_limb_type);
4748 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4749 insert_before (g);
4751 else
4752 res = rhs1;
4753 m_data[m_data_cnt] = res;
4754 break;
4755 case IFN_POPCOUNT:
4756 g = gimple_build_call (fndecl, 1, rhs1);
4757 tem = make_ssa_name (integer_type_node);
4758 gimple_call_set_lhs (g, tem);
4759 insert_before (g);
4760 if (!integer_zerop (in))
4762 if (kind == bitint_prec_huge && i == 1)
4763 res = out;
4764 else
4765 res = make_ssa_name (integer_type_node);
4766 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4767 insert_before (g);
4769 else
4770 res = tem;
4771 m_data[m_data_cnt] = res;
4772 break;
4773 default:
4774 gcc_unreachable ();
4777 m_first = false;
4778 if (kind == bitint_prec_huge && i <= 1)
4780 if (i == 0)
4782 idx = make_ssa_name (sizetype);
4783 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4784 size_one_node);
4785 insert_before (g);
4787 else
4789 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4790 size_int (2));
4791 insert_before (g);
4792 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4793 NULL_TREE, NULL_TREE);
4794 insert_before (g);
4795 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4796 m_gsi = gsi_after_labels (edge_bb);
4797 else
4798 m_gsi = gsi_for_stmt (stmt);
4799 m_bb = NULL;
4804 else
4806 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4807 int sub_one = 0;
4808 if (kind == bitint_prec_large)
4809 cnt = CEIL (prec, limb_prec);
4810 else
4812 rem = prec % limb_prec;
4813 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4814 rem = limb_prec;
4815 end = (prec - rem) / limb_prec;
4816 cnt = 1 + (rem != 0);
4817 if (ifn == IFN_CLRSB)
4818 sub_one = 1;
4821 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4822 gsi_prev (&gsi);
4823 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4824 edge_bb = e->src;
4825 m_gsi = gsi_end_bb (edge_bb);
4827 if (ifn == IFN_CLZ)
4828 bqp = XALLOCAVEC (struct bq_details, cnt);
4829 else
4831 gsi = gsi_for_stmt (stmt);
4832 gsi_prev (&gsi);
4833 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4834 edge_bb = e->src;
4835 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4838 for (unsigned i = 0; i < cnt; i++)
4840 m_data_cnt = 0;
4841 if (kind == bitint_prec_large)
4842 idx = size_int (cnt - i - 1);
4843 else if (i == cnt - 1)
4844 idx = create_loop (size_int (end - 1), &idx_next);
4845 else
4846 idx = size_int (end);
4848 tree rhs1 = handle_operand (arg0, idx);
4849 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4851 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4852 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4853 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4854 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4855 rhs1 = add_cast (m_limb_type, rhs1);
4858 if (ifn == IFN_CLZ)
4860 g = gimple_build_cond (NE_EXPR, rhs1,
4861 build_zero_cst (m_limb_type),
4862 NULL_TREE, NULL_TREE);
4863 insert_before (g);
4864 edge e1 = split_block (gsi_bb (m_gsi), g);
4865 e1->flags = EDGE_FALSE_VALUE;
4866 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4867 e1->probability = profile_probability::unlikely ();
4868 e2->probability = e1->probability.invert ();
4869 if (i == 0)
4870 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4871 m_gsi = gsi_after_labels (e1->dest);
4872 bqp[i].e = e2;
4873 bqp[i].val = rhs1;
4875 else
4877 if (i == 0)
4879 first = rhs1;
4880 g = gimple_build_assign (make_ssa_name (m_limb_type),
4881 PLUS_EXPR, rhs1,
4882 build_int_cst (m_limb_type, 1));
4883 insert_before (g);
4884 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4885 build_int_cst (m_limb_type, 1),
4886 NULL_TREE, NULL_TREE);
4887 insert_before (g);
4889 else
4891 g = gimple_build_assign (make_ssa_name (m_limb_type),
4892 BIT_XOR_EXPR, rhs1, first);
4893 insert_before (g);
4894 tree stype = signed_type_for (m_limb_type);
4895 g = gimple_build_cond (LT_EXPR,
4896 add_cast (stype,
4897 gimple_assign_lhs (g)),
4898 build_zero_cst (stype),
4899 NULL_TREE, NULL_TREE);
4900 insert_before (g);
4901 edge e1 = split_block (gsi_bb (m_gsi), g);
4902 e1->flags = EDGE_FALSE_VALUE;
4903 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4904 EDGE_TRUE_VALUE);
4905 e1->probability = profile_probability::unlikely ();
4906 e2->probability = e1->probability.invert ();
4907 if (i == 1)
4908 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4909 e2->src);
4910 m_gsi = gsi_after_labels (e1->dest);
4911 bqp[2 * i].e = e2;
4912 g = gimple_build_cond (NE_EXPR, rhs1, first,
4913 NULL_TREE, NULL_TREE);
4914 insert_before (g);
4916 edge e1 = split_block (gsi_bb (m_gsi), g);
4917 e1->flags = EDGE_FALSE_VALUE;
4918 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4919 e1->probability = profile_probability::unlikely ();
4920 e2->probability = e1->probability.invert ();
4921 if (i == 0)
4922 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4923 m_gsi = gsi_after_labels (e1->dest);
4924 bqp[2 * i + 1].e = e2;
4925 bqp[i].val = rhs1;
4927 if (tree_fits_uhwi_p (idx))
4928 bqp[i].addend
4929 = build_int_cst (integer_type_node,
4930 (int) prec
4931 - (((int) tree_to_uhwi (idx) + 1)
4932 * limb_prec) - sub_one);
4933 else
4935 tree in, out;
4936 in = build_int_cst (integer_type_node, rem - sub_one);
4937 m_first = true;
4938 in = prepare_data_in_out (in, idx, &out);
4939 out = m_data[m_data_cnt + 1];
4940 bqp[i].addend = in;
4941 g = gimple_build_assign (out, PLUS_EXPR, in,
4942 build_int_cst (integer_type_node,
4943 limb_prec));
4944 insert_before (g);
4945 m_data[m_data_cnt] = out;
4948 m_first = false;
4949 if (kind == bitint_prec_huge && i == cnt - 1)
4951 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4952 size_int (-1));
4953 insert_before (g);
4954 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4955 NULL_TREE, NULL_TREE);
4956 insert_before (g);
4957 edge true_edge, false_edge;
4958 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4959 &true_edge, &false_edge);
4960 m_gsi = gsi_after_labels (false_edge->dest);
4961 m_bb = NULL;
4965 switch (ifn)
4967 case IFN_CLZ:
4968 case IFN_CTZ:
4969 case IFN_FFS:
4970 gphi *phi1, *phi2, *phi3;
4971 basic_block bb;
4972 bb = gsi_bb (m_gsi);
4973 remove_edge (find_edge (bb, gimple_bb (stmt)));
4974 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4975 gimple_bb (stmt));
4976 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4977 gimple_bb (stmt));
4978 for (unsigned i = 0; i < cnt; i++)
4980 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4981 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4983 if (arg1 == NULL_TREE)
4985 g = gimple_build_builtin_unreachable (m_loc);
4986 insert_before (g);
4988 m_gsi = gsi_for_stmt (stmt);
4989 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4990 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4991 insert_before (g);
4992 if (arg1 == NULL_TREE)
4993 g = gimple_build_assign (lhs, PLUS_EXPR,
4994 gimple_phi_result (phi2),
4995 gimple_call_lhs (g));
4996 else
4998 g = gimple_build_assign (make_ssa_name (integer_type_node),
4999 PLUS_EXPR, gimple_phi_result (phi2),
5000 gimple_call_lhs (g));
5001 insert_before (g);
5002 edge e1 = split_block (gimple_bb (stmt), g);
5003 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5004 e2->probability = profile_probability::always ();
5005 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5006 get_immediate_dominator (CDI_DOMINATORS,
5007 e1->src));
5008 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5009 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5010 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5011 m_gsi = gsi_for_stmt (stmt);
5012 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5014 gsi_replace (&m_gsi, g, true);
5015 break;
5016 case IFN_CLRSB:
5017 bb = gsi_bb (m_gsi);
5018 remove_edge (find_edge (bb, edge_bb));
5019 edge e;
5020 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5021 e->probability = profile_probability::always ();
5022 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5023 get_immediate_dominator (CDI_DOMINATORS,
5024 edge_bb));
5025 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5026 edge_bb);
5027 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5028 edge_bb);
5029 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5030 gimple_bb (stmt));
5031 for (unsigned i = 0; i < cnt; i++)
5033 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5034 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5035 UNKNOWN_LOCATION);
5036 tree a = bqp[i].addend;
5037 if (i && kind == bitint_prec_large)
5038 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5039 if (i)
5040 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5042 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5043 UNKNOWN_LOCATION);
5044 m_gsi = gsi_after_labels (edge_bb);
5045 g = gimple_build_call (fndecl, 1,
5046 add_cast (signed_type_for (m_limb_type),
5047 gimple_phi_result (phi1)));
5048 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5049 insert_before (g);
5050 g = gimple_build_assign (make_ssa_name (integer_type_node),
5051 PLUS_EXPR, gimple_call_lhs (g),
5052 gimple_phi_result (phi2));
5053 insert_before (g);
5054 if (kind != bitint_prec_large)
5056 g = gimple_build_assign (make_ssa_name (integer_type_node),
5057 PLUS_EXPR, gimple_assign_lhs (g),
5058 integer_one_node);
5059 insert_before (g);
5061 add_phi_arg (phi3, gimple_assign_lhs (g),
5062 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5063 m_gsi = gsi_for_stmt (stmt);
5064 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5065 gsi_replace (&m_gsi, g, true);
5066 break;
5067 case IFN_PARITY:
5068 g = gimple_build_call (fndecl, 1, res);
5069 gimple_call_set_lhs (g, lhs);
5070 gsi_replace (&m_gsi, g, true);
5071 break;
5072 case IFN_POPCOUNT:
5073 g = gimple_build_assign (lhs, res);
5074 gsi_replace (&m_gsi, g, true);
5075 break;
5076 default:
5077 gcc_unreachable ();
5081 /* Lower a call statement with one or more large/huge _BitInt
5082 arguments or large/huge _BitInt return value. */
5084 void
5085 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5087 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5088 unsigned int nargs = gimple_call_num_args (stmt);
5089 if (gimple_call_internal_p (stmt))
5090 switch (gimple_call_internal_fn (stmt))
5092 case IFN_ADD_OVERFLOW:
5093 case IFN_SUB_OVERFLOW:
5094 case IFN_UBSAN_CHECK_ADD:
5095 case IFN_UBSAN_CHECK_SUB:
5096 lower_addsub_overflow (obj, stmt);
5097 return;
5098 case IFN_MUL_OVERFLOW:
5099 case IFN_UBSAN_CHECK_MUL:
5100 lower_mul_overflow (obj, stmt);
5101 return;
5102 case IFN_CLZ:
5103 case IFN_CTZ:
5104 case IFN_CLRSB:
5105 case IFN_FFS:
5106 case IFN_PARITY:
5107 case IFN_POPCOUNT:
5108 lower_bit_query (stmt);
5109 return;
5110 default:
5111 break;
5113 for (unsigned int i = 0; i < nargs; ++i)
5115 tree arg = gimple_call_arg (stmt, i);
5116 if (TREE_CODE (arg) != SSA_NAME
5117 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5118 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5119 continue;
5120 int p = var_to_partition (m_map, arg);
5121 tree v = m_vars[p];
5122 gcc_assert (v != NULL_TREE);
5123 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5124 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5125 arg = make_ssa_name (TREE_TYPE (arg));
5126 gimple *g = gimple_build_assign (arg, v);
5127 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5128 gimple_call_set_arg (stmt, i, arg);
5129 if (m_preserved == NULL)
5130 m_preserved = BITMAP_ALLOC (NULL);
5131 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5133 tree lhs = gimple_call_lhs (stmt);
5134 if (lhs
5135 && TREE_CODE (lhs) == SSA_NAME
5136 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5137 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5139 int p = var_to_partition (m_map, lhs);
5140 tree v = m_vars[p];
5141 gcc_assert (v != NULL_TREE);
5142 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5143 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5144 gimple_call_set_lhs (stmt, v);
5145 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5147 update_stmt (stmt);
5150 /* Lower __asm STMT which involves large/huge _BitInt values. */
5152 void
5153 bitint_large_huge::lower_asm (gimple *stmt)
5155 gasm *g = as_a <gasm *> (stmt);
5156 unsigned noutputs = gimple_asm_noutputs (g);
5157 unsigned ninputs = gimple_asm_ninputs (g);
5159 for (unsigned i = 0; i < noutputs; ++i)
5161 tree t = gimple_asm_output_op (g, i);
5162 tree s = TREE_VALUE (t);
5163 if (TREE_CODE (s) == SSA_NAME
5164 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5165 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5167 int part = var_to_partition (m_map, s);
5168 gcc_assert (m_vars[part] != NULL_TREE);
5169 TREE_VALUE (t) = m_vars[part];
5172 for (unsigned i = 0; i < ninputs; ++i)
5174 tree t = gimple_asm_input_op (g, i);
5175 tree s = TREE_VALUE (t);
5176 if (TREE_CODE (s) == SSA_NAME
5177 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5178 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5180 int part = var_to_partition (m_map, s);
5181 gcc_assert (m_vars[part] != NULL_TREE);
5182 TREE_VALUE (t) = m_vars[part];
5185 update_stmt (stmt);
5188 /* Lower statement STMT which involves large/huge _BitInt values
5189 into code accessing individual limbs. */
5191 void
5192 bitint_large_huge::lower_stmt (gimple *stmt)
5194 m_first = true;
5195 m_lhs = NULL_TREE;
5196 m_data.truncate (0);
5197 m_data_cnt = 0;
5198 m_gsi = gsi_for_stmt (stmt);
5199 m_after_stmt = NULL;
5200 m_bb = NULL;
5201 m_init_gsi = m_gsi;
5202 gsi_prev (&m_init_gsi);
5203 m_preheader_bb = NULL;
5204 m_upwards_2limb = 0;
5205 m_upwards = false;
5206 m_var_msb = false;
5207 m_cast_conditional = false;
5208 m_bitfld_load = 0;
5209 m_loc = gimple_location (stmt);
5210 if (is_gimple_call (stmt))
5212 lower_call (NULL_TREE, stmt);
5213 return;
5215 if (gimple_code (stmt) == GIMPLE_ASM)
5217 lower_asm (stmt);
5218 return;
5220 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5221 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5222 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5223 bool mergeable_cast_p = false;
5224 bool final_cast_p = false;
5225 if (gimple_assign_cast_p (stmt))
5227 lhs = gimple_assign_lhs (stmt);
5228 tree rhs1 = gimple_assign_rhs1 (stmt);
5229 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5230 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5231 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5232 mergeable_cast_p = true;
5233 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5234 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5235 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5237 final_cast_p = true;
5238 if (TREE_CODE (rhs1) == SSA_NAME
5239 && (m_names == NULL
5240 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5242 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5243 if (is_gimple_assign (g)
5244 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5246 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5247 if (TREE_CODE (rhs2) == SSA_NAME
5248 && (m_names == NULL
5249 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5251 g = SSA_NAME_DEF_STMT (rhs2);
5252 int ovf = optimizable_arith_overflow (g);
5253 if (ovf == 2)
5254 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5255 and IMAGPART_EXPR uses, where the latter is cast to
5256 non-_BitInt, it will be optimized when handling
5257 the REALPART_EXPR. */
5258 return;
5259 if (ovf == 1)
5261 lower_call (NULL_TREE, g);
5262 return;
5269 if (gimple_store_p (stmt))
5271 tree rhs1 = gimple_assign_rhs1 (stmt);
5272 if (TREE_CODE (rhs1) == SSA_NAME
5273 && (m_names == NULL
5274 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5276 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5277 m_loc = gimple_location (g);
5278 lhs = gimple_assign_lhs (stmt);
5279 if (is_gimple_assign (g) && !mergeable_op (g))
5280 switch (gimple_assign_rhs_code (g))
5282 case LSHIFT_EXPR:
5283 case RSHIFT_EXPR:
5284 lower_shift_stmt (lhs, g);
5285 handled:
5286 m_gsi = gsi_for_stmt (stmt);
5287 unlink_stmt_vdef (stmt);
5288 release_ssa_name (gimple_vdef (stmt));
5289 gsi_remove (&m_gsi, true);
5290 return;
5291 case MULT_EXPR:
5292 case TRUNC_DIV_EXPR:
5293 case TRUNC_MOD_EXPR:
5294 lower_muldiv_stmt (lhs, g);
5295 goto handled;
5296 case FIX_TRUNC_EXPR:
5297 lower_float_conv_stmt (lhs, g);
5298 goto handled;
5299 case REALPART_EXPR:
5300 case IMAGPART_EXPR:
5301 lower_cplxpart_stmt (lhs, g);
5302 goto handled;
5303 default:
5304 break;
5306 else if (optimizable_arith_overflow (g) == 3)
5308 lower_call (lhs, g);
5309 goto handled;
5311 m_loc = gimple_location (stmt);
5314 if (mergeable_op (stmt)
5315 || gimple_store_p (stmt)
5316 || gimple_assign_load_p (stmt)
5317 || eq_p
5318 || mergeable_cast_p)
5320 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5321 if (!eq_p)
5322 return;
5324 else if (cmp_code != ERROR_MARK)
5325 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5326 if (cmp_code != ERROR_MARK)
5328 if (gimple_code (stmt) == GIMPLE_COND)
5330 gcond *cstmt = as_a <gcond *> (stmt);
5331 gimple_cond_set_lhs (cstmt, lhs);
5332 gimple_cond_set_rhs (cstmt, boolean_false_node);
5333 gimple_cond_set_code (cstmt, cmp_code);
5334 update_stmt (stmt);
5335 return;
5337 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5339 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5340 boolean_false_node);
5341 gimple_assign_set_rhs1 (stmt, cond);
5342 lhs = gimple_assign_lhs (stmt);
5343 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5344 || (bitint_precision_kind (TREE_TYPE (lhs))
5345 <= bitint_prec_middle));
5346 update_stmt (stmt);
5347 return;
5349 gimple_assign_set_rhs1 (stmt, lhs);
5350 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5351 gimple_assign_set_rhs_code (stmt, cmp_code);
5352 update_stmt (stmt);
5353 return;
5355 if (final_cast_p)
5357 tree lhs_type = TREE_TYPE (lhs);
5358 /* Add support for 3 or more limbs filled in from normal integral
5359 type if this assert fails. If no target chooses limb mode smaller
5360 than half of largest supported normal integral type, this will not
5361 be needed. */
5362 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5363 gimple *g;
5364 if (TREE_CODE (lhs_type) == BITINT_TYPE
5365 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5366 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5367 TYPE_UNSIGNED (lhs_type));
5368 m_data_cnt = 0;
5369 tree rhs1 = gimple_assign_rhs1 (stmt);
5370 tree r1 = handle_operand (rhs1, size_int (0));
5371 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5372 r1 = add_cast (lhs_type, r1);
5373 if (TYPE_PRECISION (lhs_type) > limb_prec)
5375 m_data_cnt = 0;
5376 m_first = false;
5377 tree r2 = handle_operand (rhs1, size_int (1));
5378 r2 = add_cast (lhs_type, r2);
5379 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5380 build_int_cst (unsigned_type_node,
5381 limb_prec));
5382 insert_before (g);
5383 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5384 gimple_assign_lhs (g));
5385 insert_before (g);
5386 r1 = gimple_assign_lhs (g);
5388 if (lhs_type != TREE_TYPE (lhs))
5389 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5390 else
5391 g = gimple_build_assign (lhs, r1);
5392 gsi_replace (&m_gsi, g, true);
5393 return;
5395 if (is_gimple_assign (stmt))
5396 switch (gimple_assign_rhs_code (stmt))
5398 case LSHIFT_EXPR:
5399 case RSHIFT_EXPR:
5400 lower_shift_stmt (NULL_TREE, stmt);
5401 return;
5402 case MULT_EXPR:
5403 case TRUNC_DIV_EXPR:
5404 case TRUNC_MOD_EXPR:
5405 lower_muldiv_stmt (NULL_TREE, stmt);
5406 return;
5407 case FIX_TRUNC_EXPR:
5408 case FLOAT_EXPR:
5409 lower_float_conv_stmt (NULL_TREE, stmt);
5410 return;
5411 case REALPART_EXPR:
5412 case IMAGPART_EXPR:
5413 lower_cplxpart_stmt (NULL_TREE, stmt);
5414 return;
5415 case COMPLEX_EXPR:
5416 lower_complexexpr_stmt (stmt);
5417 return;
5418 default:
5419 break;
5421 gcc_unreachable ();
5424 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5425 the desired memory state. */
5427 void *
5428 vuse_eq (ao_ref *, tree vuse1, void *data)
5430 tree vuse2 = (tree) data;
5431 if (vuse1 == vuse2)
5432 return data;
5434 return NULL;
5437 /* Return true if STMT uses a library function and needs to take
5438 address of its inputs. We need to avoid bit-fields in those
5439 cases. */
5441 bool
5442 stmt_needs_operand_addr (gimple *stmt)
5444 if (is_gimple_assign (stmt))
5445 switch (gimple_assign_rhs_code (stmt))
5447 case MULT_EXPR:
5448 case TRUNC_DIV_EXPR:
5449 case TRUNC_MOD_EXPR:
5450 case FLOAT_EXPR:
5451 return true;
5452 default:
5453 break;
5455 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5456 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5457 return true;
5458 return false;
5461 /* Dominator walker used to discover which large/huge _BitInt
5462 loads could be sunk into all their uses. */
5464 class bitint_dom_walker : public dom_walker
5466 public:
5467 bitint_dom_walker (bitmap names, bitmap loads)
5468 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5470 edge before_dom_children (basic_block) final override;
5472 private:
5473 bitmap m_names, m_loads;
5476 edge
5477 bitint_dom_walker::before_dom_children (basic_block bb)
5479 gphi *phi = get_virtual_phi (bb);
5480 tree vop;
5481 if (phi)
5482 vop = gimple_phi_result (phi);
5483 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5484 vop = NULL_TREE;
5485 else
5486 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5488 auto_vec<tree, 16> worklist;
5489 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5490 !gsi_end_p (gsi); gsi_next (&gsi))
5492 gimple *stmt = gsi_stmt (gsi);
5493 if (is_gimple_debug (stmt))
5494 continue;
5496 if (!vop && gimple_vuse (stmt))
5497 vop = gimple_vuse (stmt);
5499 tree cvop = vop;
5500 if (gimple_vdef (stmt))
5501 vop = gimple_vdef (stmt);
5503 tree lhs = gimple_get_lhs (stmt);
5504 if (lhs
5505 && TREE_CODE (lhs) == SSA_NAME
5506 && TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5507 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5508 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5509 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5510 it means it will be handled in a loop or straight line code
5511 at the location of its (ultimate) immediate use, so for
5512 vop checking purposes check these only at the ultimate
5513 immediate use. */
5514 continue;
5516 ssa_op_iter oi;
5517 use_operand_p use_p;
5518 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5520 tree s = USE_FROM_PTR (use_p);
5521 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5522 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5523 worklist.safe_push (s);
5526 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5527 while (worklist.length () > 0)
5529 tree s = worklist.pop ();
5531 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5533 gimple *g = SSA_NAME_DEF_STMT (s);
5534 needs_operand_addr |= stmt_needs_operand_addr (g);
5535 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5537 tree s2 = USE_FROM_PTR (use_p);
5538 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5539 && (bitint_precision_kind (TREE_TYPE (s2))
5540 >= bitint_prec_large))
5541 worklist.safe_push (s2);
5543 continue;
5545 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5546 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5548 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5549 if (TREE_CODE (rhs) == SSA_NAME
5550 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5551 s = rhs;
5552 else
5553 continue;
5555 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5556 continue;
5558 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5559 if (needs_operand_addr
5560 && TREE_CODE (rhs1) == COMPONENT_REF
5561 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5563 tree fld = TREE_OPERAND (rhs1, 1);
5564 /* For little-endian, we can allow as inputs bit-fields
5565 which start at a limb boundary. */
5566 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5567 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5568 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5569 % limb_prec) == 0)
5571 else
5573 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5574 continue;
5578 ao_ref ref;
5579 ao_ref_init (&ref, rhs1);
5580 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5581 unsigned limit = 64;
5582 tree vuse = cvop;
5583 if (vop != cvop
5584 && is_gimple_assign (stmt)
5585 && gimple_store_p (stmt)
5586 && !operand_equal_p (lhs,
5587 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5589 vuse = vop;
5590 if (vuse != lvop
5591 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5592 NULL, NULL, limit, lvop) == NULL)
5593 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5597 bb->aux = (void *) vop;
5598 return NULL;
5603 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5604 build_ssa_conflict_graph.
5605 The differences are:
5606 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5607 2) for large/huge _BitInt multiplication/division/modulo process def
5608 only after processing uses rather than before to make uses conflict
5609 with the definition
5610 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5611 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5612 the final statement. */
5614 void
5615 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5616 ssa_conflicts *graph, bitmap names,
5617 void (*def) (live_track *, tree,
5618 ssa_conflicts *),
5619 void (*use) (live_track *, tree))
5621 bool muldiv_p = false;
5622 tree lhs = NULL_TREE;
5623 if (is_gimple_assign (stmt))
5625 lhs = gimple_assign_lhs (stmt);
5626 if (TREE_CODE (lhs) == SSA_NAME
5627 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5628 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5630 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5631 return;
5632 switch (gimple_assign_rhs_code (stmt))
5634 case MULT_EXPR:
5635 case TRUNC_DIV_EXPR:
5636 case TRUNC_MOD_EXPR:
5637 muldiv_p = true;
5638 default:
5639 break;
5644 ssa_op_iter iter;
5645 tree var;
5646 if (!muldiv_p)
5648 /* For stmts with more than one SSA_NAME definition pretend all the
5649 SSA_NAME outputs but the first one are live at this point, so
5650 that conflicts are added in between all those even when they are
5651 actually not really live after the asm, because expansion might
5652 copy those into pseudos after the asm and if multiple outputs
5653 share the same partition, it might overwrite those that should
5654 be live. E.g.
5655 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5656 return a;
5657 See PR70593. */
5658 bool first = true;
5659 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5660 if (first)
5661 first = false;
5662 else
5663 use (live, var);
5665 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5666 def (live, var, graph);
5669 auto_vec<tree, 16> worklist;
5670 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5671 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5672 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5674 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5675 use (live, var);
5676 else
5677 worklist.safe_push (var);
5680 while (worklist.length () > 0)
5682 tree s = worklist.pop ();
5683 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5684 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5685 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5687 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5688 use (live, var);
5689 else
5690 worklist.safe_push (var);
5694 if (muldiv_p)
5695 def (live, lhs, graph);
5698 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5699 return the largest bitint_prec_kind of them, otherwise return
5700 bitint_prec_small. */
5702 static bitint_prec_kind
5703 arith_overflow_arg_kind (gimple *stmt)
5705 bitint_prec_kind ret = bitint_prec_small;
5706 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5707 switch (gimple_call_internal_fn (stmt))
5709 case IFN_ADD_OVERFLOW:
5710 case IFN_SUB_OVERFLOW:
5711 case IFN_MUL_OVERFLOW:
5712 for (int i = 0; i < 2; ++i)
5714 tree a = gimple_call_arg (stmt, i);
5715 if (TREE_CODE (a) == INTEGER_CST
5716 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5718 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5719 ret = MAX (ret, kind);
5722 break;
5723 default:
5724 break;
5726 return ret;
5729 /* Entry point for _BitInt(N) operation lowering during optimization. */
5731 static unsigned int
5732 gimple_lower_bitint (void)
5734 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5735 limb_prec = 0;
5737 unsigned int i;
5738 for (i = 0; i < num_ssa_names; ++i)
5740 tree s = ssa_name (i);
5741 if (s == NULL)
5742 continue;
5743 tree type = TREE_TYPE (s);
5744 if (TREE_CODE (type) == COMPLEX_TYPE)
5746 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5747 != bitint_prec_small)
5748 break;
5749 type = TREE_TYPE (type);
5751 if (TREE_CODE (type) == BITINT_TYPE
5752 && bitint_precision_kind (type) != bitint_prec_small)
5753 break;
5754 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5755 into memory. Such functions could have no large/huge SSA_NAMEs. */
5756 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5758 gimple *g = SSA_NAME_DEF_STMT (s);
5759 if (is_gimple_assign (g) && gimple_store_p (g))
5761 tree t = gimple_assign_rhs1 (g);
5762 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5763 && (bitint_precision_kind (TREE_TYPE (t))
5764 >= bitint_prec_large))
5765 break;
5768 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5769 to floating point types need to be rewritten. */
5770 else if (SCALAR_FLOAT_TYPE_P (type))
5772 gimple *g = SSA_NAME_DEF_STMT (s);
5773 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5775 tree t = gimple_assign_rhs1 (g);
5776 if (TREE_CODE (t) == INTEGER_CST
5777 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5778 && (bitint_precision_kind (TREE_TYPE (t))
5779 != bitint_prec_small))
5780 break;
5784 if (i == num_ssa_names)
5785 return 0;
5787 basic_block bb;
5788 auto_vec<gimple *, 4> switch_statements;
5789 FOR_EACH_BB_FN (bb, cfun)
5791 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5793 tree idx = gimple_switch_index (swtch);
5794 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5795 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5796 continue;
5798 if (optimize)
5799 group_case_labels_stmt (swtch);
5800 switch_statements.safe_push (swtch);
5804 if (!switch_statements.is_empty ())
5806 bool expanded = false;
5807 gimple *stmt;
5808 unsigned int j;
5809 i = 0;
5810 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5812 gswitch *swtch = as_a<gswitch *> (stmt);
5813 tree_switch_conversion::switch_decision_tree dt (swtch);
5814 expanded |= dt.analyze_switch_statement ();
5817 if (expanded)
5819 free_dominance_info (CDI_DOMINATORS);
5820 free_dominance_info (CDI_POST_DOMINATORS);
5821 mark_virtual_operands_for_renaming (cfun);
5822 cleanup_tree_cfg (TODO_update_ssa);
5826 struct bitint_large_huge large_huge;
5827 bool has_large_huge_parm_result = false;
5828 bool has_large_huge = false;
5829 unsigned int ret = 0, first_large_huge = ~0U;
5830 bool edge_insertions = false;
5831 for (; i < num_ssa_names; ++i)
5833 tree s = ssa_name (i);
5834 if (s == NULL)
5835 continue;
5836 tree type = TREE_TYPE (s);
5837 if (TREE_CODE (type) == COMPLEX_TYPE)
5839 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5840 >= bitint_prec_large)
5841 has_large_huge = true;
5842 type = TREE_TYPE (type);
5844 if (TREE_CODE (type) == BITINT_TYPE
5845 && bitint_precision_kind (type) >= bitint_prec_large)
5847 if (first_large_huge == ~0U)
5848 first_large_huge = i;
5849 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5850 gimple_stmt_iterator gsi;
5851 tree_code rhs_code;
5852 /* Unoptimize certain constructs to simpler alternatives to
5853 avoid having to lower all of them. */
5854 if (is_gimple_assign (stmt) && gimple_bb (stmt))
5855 switch (rhs_code = gimple_assign_rhs_code (stmt))
5857 default:
5858 break;
5859 case LROTATE_EXPR:
5860 case RROTATE_EXPR:
5862 first_large_huge = 0;
5863 location_t loc = gimple_location (stmt);
5864 gsi = gsi_for_stmt (stmt);
5865 tree rhs1 = gimple_assign_rhs1 (stmt);
5866 tree type = TREE_TYPE (rhs1);
5867 tree n = gimple_assign_rhs2 (stmt), m;
5868 tree p = build_int_cst (TREE_TYPE (n),
5869 TYPE_PRECISION (type));
5870 if (TREE_CODE (n) == INTEGER_CST)
5871 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5872 else
5874 m = make_ssa_name (TREE_TYPE (n));
5875 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5876 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5877 gimple_set_location (g, loc);
5879 if (!TYPE_UNSIGNED (type))
5881 tree utype = build_bitint_type (TYPE_PRECISION (type),
5883 if (TREE_CODE (rhs1) == INTEGER_CST)
5884 rhs1 = fold_convert (utype, rhs1);
5885 else
5887 tree t = make_ssa_name (type);
5888 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5889 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5890 gimple_set_location (g, loc);
5893 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5894 rhs_code == LROTATE_EXPR
5895 ? LSHIFT_EXPR : RSHIFT_EXPR,
5896 rhs1, n);
5897 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5898 gimple_set_location (g, loc);
5899 tree op1 = gimple_assign_lhs (g);
5900 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5901 rhs_code == LROTATE_EXPR
5902 ? RSHIFT_EXPR : LSHIFT_EXPR,
5903 rhs1, m);
5904 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5905 gimple_set_location (g, loc);
5906 tree op2 = gimple_assign_lhs (g);
5907 tree lhs = gimple_assign_lhs (stmt);
5908 if (!TYPE_UNSIGNED (type))
5910 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5911 BIT_IOR_EXPR, op1, op2);
5912 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5913 gimple_set_location (g, loc);
5914 g = gimple_build_assign (lhs, NOP_EXPR,
5915 gimple_assign_lhs (g));
5917 else
5918 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5919 gsi_replace (&gsi, g, true);
5920 gimple_set_location (g, loc);
5922 break;
5923 case ABS_EXPR:
5924 case ABSU_EXPR:
5925 case MIN_EXPR:
5926 case MAX_EXPR:
5927 case COND_EXPR:
5928 first_large_huge = 0;
5929 gsi = gsi_for_stmt (stmt);
5930 tree lhs = gimple_assign_lhs (stmt);
5931 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5932 location_t loc = gimple_location (stmt);
5933 if (rhs_code == ABS_EXPR)
5934 g = gimple_build_cond (LT_EXPR, rhs1,
5935 build_zero_cst (TREE_TYPE (rhs1)),
5936 NULL_TREE, NULL_TREE);
5937 else if (rhs_code == ABSU_EXPR)
5939 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5940 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5941 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5942 gimple_set_location (g, loc);
5943 g = gimple_build_cond (LT_EXPR, rhs1,
5944 build_zero_cst (TREE_TYPE (rhs1)),
5945 NULL_TREE, NULL_TREE);
5946 rhs1 = rhs2;
5948 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5950 rhs2 = gimple_assign_rhs2 (stmt);
5951 if (TREE_CODE (rhs1) == INTEGER_CST)
5952 std::swap (rhs1, rhs2);
5953 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5954 NULL_TREE, NULL_TREE);
5955 if (rhs_code == MAX_EXPR)
5956 std::swap (rhs1, rhs2);
5958 else
5960 g = gimple_build_cond (NE_EXPR, rhs1,
5961 build_zero_cst (TREE_TYPE (rhs1)),
5962 NULL_TREE, NULL_TREE);
5963 rhs1 = gimple_assign_rhs2 (stmt);
5964 rhs2 = gimple_assign_rhs3 (stmt);
5966 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5967 gimple_set_location (g, loc);
5968 edge e1 = split_block (gsi_bb (gsi), g);
5969 edge e2 = split_block (e1->dest, (gimple *) NULL);
5970 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5971 e3->probability = profile_probability::even ();
5972 e1->flags = EDGE_TRUE_VALUE;
5973 e1->probability = e3->probability.invert ();
5974 if (dom_info_available_p (CDI_DOMINATORS))
5975 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5976 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5978 gsi = gsi_after_labels (e1->dest);
5979 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5980 NEGATE_EXPR, rhs1);
5981 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5982 gimple_set_location (g, loc);
5983 rhs2 = gimple_assign_lhs (g);
5984 std::swap (rhs1, rhs2);
5986 gsi = gsi_for_stmt (stmt);
5987 gsi_remove (&gsi, true);
5988 gphi *phi = create_phi_node (lhs, e2->dest);
5989 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5990 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5991 break;
5994 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5995 into memory. Such functions could have no large/huge SSA_NAMEs. */
5996 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5998 gimple *g = SSA_NAME_DEF_STMT (s);
5999 if (is_gimple_assign (g) && gimple_store_p (g))
6001 tree t = gimple_assign_rhs1 (g);
6002 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6003 && (bitint_precision_kind (TREE_TYPE (t))
6004 >= bitint_prec_large))
6005 has_large_huge = true;
6008 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6009 to floating point types need to be rewritten. */
6010 else if (SCALAR_FLOAT_TYPE_P (type))
6012 gimple *g = SSA_NAME_DEF_STMT (s);
6013 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6015 tree t = gimple_assign_rhs1 (g);
6016 if (TREE_CODE (t) == INTEGER_CST
6017 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6018 && (bitint_precision_kind (TREE_TYPE (t))
6019 >= bitint_prec_large))
6020 has_large_huge = true;
6024 for (i = first_large_huge; i < num_ssa_names; ++i)
6026 tree s = ssa_name (i);
6027 if (s == NULL)
6028 continue;
6029 tree type = TREE_TYPE (s);
6030 if (TREE_CODE (type) == COMPLEX_TYPE)
6031 type = TREE_TYPE (type);
6032 if (TREE_CODE (type) == BITINT_TYPE
6033 && bitint_precision_kind (type) >= bitint_prec_large)
6035 use_operand_p use_p;
6036 gimple *use_stmt;
6037 has_large_huge = true;
6038 if (optimize
6039 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
6040 continue;
6041 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6042 the same bb and could be handled in the same loop with the
6043 immediate use. */
6044 if (optimize
6045 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6046 && single_imm_use (s, &use_p, &use_stmt)
6047 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
6049 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
6051 if (mergeable_op (use_stmt))
6052 continue;
6053 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6054 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6055 continue;
6056 if (gimple_assign_cast_p (use_stmt))
6058 tree lhs = gimple_assign_lhs (use_stmt);
6059 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6060 continue;
6062 else if (gimple_store_p (use_stmt)
6063 && is_gimple_assign (use_stmt)
6064 && !gimple_has_volatile_ops (use_stmt)
6065 && !stmt_ends_bb_p (use_stmt))
6066 continue;
6068 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6070 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6071 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6072 && ((is_gimple_assign (use_stmt)
6073 && (gimple_assign_rhs_code (use_stmt)
6074 != COMPLEX_EXPR))
6075 || gimple_code (use_stmt) == GIMPLE_COND)
6076 && (!gimple_store_p (use_stmt)
6077 || (is_gimple_assign (use_stmt)
6078 && !gimple_has_volatile_ops (use_stmt)
6079 && !stmt_ends_bb_p (use_stmt)))
6080 && (TREE_CODE (rhs1) != SSA_NAME
6081 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6083 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6084 || (bitint_precision_kind (TREE_TYPE (rhs1))
6085 < bitint_prec_large))
6086 continue;
6087 if (is_gimple_assign (use_stmt))
6088 switch (gimple_assign_rhs_code (use_stmt))
6090 case MULT_EXPR:
6091 case TRUNC_DIV_EXPR:
6092 case TRUNC_MOD_EXPR:
6093 case FLOAT_EXPR:
6094 /* Uses which use handle_operand_addr can't
6095 deal with nested casts. */
6096 if (TREE_CODE (rhs1) == SSA_NAME
6097 && gimple_assign_cast_p
6098 (SSA_NAME_DEF_STMT (rhs1))
6099 && has_single_use (rhs1)
6100 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6101 == gimple_bb (SSA_NAME_DEF_STMT (s))))
6102 goto force_name;
6103 break;
6104 default:
6105 break;
6107 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6108 >= TYPE_PRECISION (TREE_TYPE (s)))
6109 && mergeable_op (use_stmt))
6110 continue;
6111 /* Prevent merging a widening non-mergeable cast
6112 on result of some narrower mergeable op
6113 together with later mergeable operations. E.g.
6114 result of _BitInt(223) addition shouldn't be
6115 sign-extended to _BitInt(513) and have another
6116 _BitInt(513) added to it, as handle_plus_minus
6117 with its PHI node handling inside of handle_cast
6118 will not work correctly. An exception is if
6119 use_stmt is a store, this is handled directly
6120 in lower_mergeable_stmt. */
6121 if (TREE_CODE (rhs1) != SSA_NAME
6122 || !has_single_use (rhs1)
6123 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6124 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6125 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6126 || gimple_store_p (use_stmt))
6127 continue;
6128 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6129 < TYPE_PRECISION (TREE_TYPE (s)))
6130 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6132 /* Another exception is if the widening cast is
6133 from mergeable same precision cast from something
6134 not mergeable. */
6135 tree rhs2
6136 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6137 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6138 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6139 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6141 if (TREE_CODE (rhs2) != SSA_NAME
6142 || !has_single_use (rhs2)
6143 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6144 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6145 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6146 continue;
6151 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6152 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6154 case IMAGPART_EXPR:
6156 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6157 rhs1 = TREE_OPERAND (rhs1, 0);
6158 if (TREE_CODE (rhs1) == SSA_NAME)
6160 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6161 if (optimizable_arith_overflow (g))
6162 continue;
6165 /* FALLTHRU */
6166 case LSHIFT_EXPR:
6167 case RSHIFT_EXPR:
6168 case MULT_EXPR:
6169 case TRUNC_DIV_EXPR:
6170 case TRUNC_MOD_EXPR:
6171 case FIX_TRUNC_EXPR:
6172 case REALPART_EXPR:
6173 if (gimple_store_p (use_stmt)
6174 && is_gimple_assign (use_stmt)
6175 && !gimple_has_volatile_ops (use_stmt)
6176 && !stmt_ends_bb_p (use_stmt))
6178 tree lhs = gimple_assign_lhs (use_stmt);
6179 /* As multiply/division passes address of the lhs
6180 to library function and that assumes it can extend
6181 it to whole number of limbs, avoid merging those
6182 with bit-field stores. Don't allow it for
6183 shifts etc. either, so that the bit-field store
6184 handling doesn't have to be done everywhere. */
6185 if (TREE_CODE (lhs) == COMPONENT_REF
6186 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6187 break;
6188 continue;
6190 break;
6191 default:
6192 break;
6196 /* Also ignore uninitialized uses. */
6197 if (SSA_NAME_IS_DEFAULT_DEF (s)
6198 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6199 continue;
6201 force_name:
6202 if (!large_huge.m_names)
6203 large_huge.m_names = BITMAP_ALLOC (NULL);
6204 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6205 if (has_single_use (s))
6207 if (!large_huge.m_single_use_names)
6208 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6209 bitmap_set_bit (large_huge.m_single_use_names,
6210 SSA_NAME_VERSION (s));
6212 if (SSA_NAME_VAR (s)
6213 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6214 && SSA_NAME_IS_DEFAULT_DEF (s))
6215 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6216 has_large_huge_parm_result = true;
6217 if (optimize
6218 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6219 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6220 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6221 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6223 use_operand_p use_p;
6224 imm_use_iterator iter;
6225 bool optimizable_load = true;
6226 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6228 gimple *use_stmt = USE_STMT (use_p);
6229 if (is_gimple_debug (use_stmt))
6230 continue;
6231 if (gimple_code (use_stmt) == GIMPLE_PHI
6232 || is_gimple_call (use_stmt))
6234 optimizable_load = false;
6235 break;
6239 ssa_op_iter oi;
6240 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6241 oi, SSA_OP_USE)
6243 tree s2 = USE_FROM_PTR (use_p);
6244 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6246 optimizable_load = false;
6247 break;
6251 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6253 if (!large_huge.m_loads)
6254 large_huge.m_loads = BITMAP_ALLOC (NULL);
6255 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6259 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6260 into memory. Such functions could have no large/huge SSA_NAMEs. */
6261 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6263 gimple *g = SSA_NAME_DEF_STMT (s);
6264 if (is_gimple_assign (g) && gimple_store_p (g))
6266 tree t = gimple_assign_rhs1 (g);
6267 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6268 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6269 has_large_huge = true;
6274 if (large_huge.m_names || has_large_huge)
6276 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6277 calculate_dominance_info (CDI_DOMINATORS);
6278 if (optimize)
6279 enable_ranger (cfun);
6280 if (large_huge.m_loads)
6282 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6283 entry->aux = NULL;
6284 bitint_dom_walker (large_huge.m_names,
6285 large_huge.m_loads).walk (entry);
6286 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6287 clear_aux_for_blocks ();
6288 BITMAP_FREE (large_huge.m_loads);
6290 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6291 large_huge.m_limb_size
6292 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6294 if (large_huge.m_names)
6296 large_huge.m_map
6297 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6298 coalesce_ssa_name (large_huge.m_map);
6299 partition_view_normal (large_huge.m_map);
6300 if (dump_file && (dump_flags & TDF_DETAILS))
6302 fprintf (dump_file, "After Coalescing:\n");
6303 dump_var_map (dump_file, large_huge.m_map);
6305 large_huge.m_vars
6306 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6307 bitmap_iterator bi;
6308 if (has_large_huge_parm_result)
6309 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6311 tree s = ssa_name (i);
6312 if (SSA_NAME_VAR (s)
6313 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6314 && SSA_NAME_IS_DEFAULT_DEF (s))
6315 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6317 int p = var_to_partition (large_huge.m_map, s);
6318 if (large_huge.m_vars[p] == NULL_TREE)
6320 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6321 mark_addressable (SSA_NAME_VAR (s));
6325 tree atype = NULL_TREE;
6326 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6328 tree s = ssa_name (i);
6329 int p = var_to_partition (large_huge.m_map, s);
6330 if (large_huge.m_vars[p] != NULL_TREE)
6331 continue;
6332 if (atype == NULL_TREE
6333 || !tree_int_cst_equal (TYPE_SIZE (atype),
6334 TYPE_SIZE (TREE_TYPE (s))))
6336 unsigned HOST_WIDE_INT nelts
6337 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6338 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6340 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6341 mark_addressable (large_huge.m_vars[p]);
6345 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6347 gimple_stmt_iterator prev;
6348 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6349 gsi = prev)
6351 prev = gsi;
6352 gsi_prev (&prev);
6353 ssa_op_iter iter;
6354 gimple *stmt = gsi_stmt (gsi);
6355 if (is_gimple_debug (stmt))
6356 continue;
6357 bitint_prec_kind kind = bitint_prec_small;
6358 tree t;
6359 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6360 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6362 bitint_prec_kind this_kind
6363 = bitint_precision_kind (TREE_TYPE (t));
6364 kind = MAX (kind, this_kind);
6366 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6368 t = gimple_assign_rhs1 (stmt);
6369 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6371 bitint_prec_kind this_kind
6372 = bitint_precision_kind (TREE_TYPE (t));
6373 kind = MAX (kind, this_kind);
6376 if (is_gimple_assign (stmt)
6377 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6379 t = gimple_assign_rhs1 (stmt);
6380 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6381 && TREE_CODE (t) == INTEGER_CST)
6383 bitint_prec_kind this_kind
6384 = bitint_precision_kind (TREE_TYPE (t));
6385 kind = MAX (kind, this_kind);
6388 if (is_gimple_call (stmt))
6390 t = gimple_call_lhs (stmt);
6391 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6393 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6394 kind = MAX (kind, this_kind);
6395 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6397 this_kind
6398 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6399 kind = MAX (kind, this_kind);
6403 if (kind == bitint_prec_small)
6404 continue;
6405 switch (gimple_code (stmt))
6407 case GIMPLE_CALL:
6408 /* For now. We'll need to handle some internal functions and
6409 perhaps some builtins. */
6410 if (kind == bitint_prec_middle)
6411 continue;
6412 break;
6413 case GIMPLE_ASM:
6414 if (kind == bitint_prec_middle)
6415 continue;
6416 break;
6417 case GIMPLE_RETURN:
6418 continue;
6419 case GIMPLE_ASSIGN:
6420 if (gimple_clobber_p (stmt))
6421 continue;
6422 if (kind >= bitint_prec_large)
6423 break;
6424 if (gimple_assign_single_p (stmt))
6425 /* No need to lower copies, loads or stores. */
6426 continue;
6427 if (gimple_assign_cast_p (stmt))
6429 tree lhs = gimple_assign_lhs (stmt);
6430 tree rhs = gimple_assign_rhs1 (stmt);
6431 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6432 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6433 && (TYPE_PRECISION (TREE_TYPE (lhs))
6434 == TYPE_PRECISION (TREE_TYPE (rhs))))
6435 /* No need to lower casts to same precision. */
6436 continue;
6438 break;
6439 default:
6440 break;
6443 if (kind == bitint_prec_middle)
6445 tree type = NULL_TREE;
6446 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6447 with the same precision and back. */
6448 unsigned int nops = gimple_num_ops (stmt);
6449 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6450 i < nops; ++i)
6451 if (tree op = gimple_op (stmt, i))
6453 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6454 if (nop != op)
6455 gimple_set_op (stmt, i, nop);
6456 else if (COMPARISON_CLASS_P (op))
6458 TREE_OPERAND (op, 0)
6459 = maybe_cast_middle_bitint (&gsi,
6460 TREE_OPERAND (op, 0),
6461 type);
6462 TREE_OPERAND (op, 1)
6463 = maybe_cast_middle_bitint (&gsi,
6464 TREE_OPERAND (op, 1),
6465 type);
6467 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6469 CASE_LOW (op)
6470 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6471 type);
6472 CASE_HIGH (op)
6473 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6474 type);
6477 if (tree lhs = gimple_get_lhs (stmt))
6478 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6479 && (bitint_precision_kind (TREE_TYPE (lhs))
6480 == bitint_prec_middle))
6482 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6483 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6484 type = build_nonstandard_integer_type (prec, uns);
6485 tree lhs2 = make_ssa_name (type);
6486 gimple_set_lhs (stmt, lhs2);
6487 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6488 if (stmt_ends_bb_p (stmt))
6490 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6491 gsi_insert_on_edge_immediate (e, g);
6493 else
6494 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6496 update_stmt (stmt);
6497 continue;
6500 if (tree lhs = gimple_get_lhs (stmt))
6501 if (TREE_CODE (lhs) == SSA_NAME)
6503 tree type = TREE_TYPE (lhs);
6504 if (TREE_CODE (type) == COMPLEX_TYPE)
6505 type = TREE_TYPE (type);
6506 if (TREE_CODE (type) == BITINT_TYPE
6507 && bitint_precision_kind (type) >= bitint_prec_large
6508 && (large_huge.m_names == NULL
6509 || !bitmap_bit_p (large_huge.m_names,
6510 SSA_NAME_VERSION (lhs))))
6511 continue;
6514 large_huge.lower_stmt (stmt);
6517 tree atype = NULL_TREE;
6518 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6519 gsi_next (&gsi))
6521 gphi *phi = gsi.phi ();
6522 tree lhs = gimple_phi_result (phi);
6523 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6524 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6525 continue;
6526 int p1 = var_to_partition (large_huge.m_map, lhs);
6527 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6528 tree v1 = large_huge.m_vars[p1];
6529 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6531 tree arg = gimple_phi_arg_def (phi, i);
6532 edge e = gimple_phi_arg_edge (phi, i);
6533 gimple *g;
6534 switch (TREE_CODE (arg))
6536 case INTEGER_CST:
6537 if (integer_zerop (arg) && VAR_P (v1))
6539 tree zero = build_zero_cst (TREE_TYPE (v1));
6540 g = gimple_build_assign (v1, zero);
6541 gsi_insert_on_edge (e, g);
6542 edge_insertions = true;
6543 break;
6545 int ext;
6546 unsigned int min_prec, prec, rem;
6547 tree c;
6548 prec = TYPE_PRECISION (TREE_TYPE (arg));
6549 rem = prec % (2 * limb_prec);
6550 min_prec = bitint_min_cst_precision (arg, ext);
6551 if (min_prec > prec - rem - 2 * limb_prec
6552 && min_prec > (unsigned) limb_prec)
6553 /* Constant which has enough significant bits that it
6554 isn't worth trying to save .rodata space by extending
6555 from smaller number. */
6556 min_prec = prec;
6557 else
6558 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6559 if (min_prec == 0)
6560 c = NULL_TREE;
6561 else if (min_prec == prec)
6562 c = tree_output_constant_def (arg);
6563 else if (min_prec == (unsigned) limb_prec)
6564 c = fold_convert (large_huge.m_limb_type, arg);
6565 else
6567 tree ctype = build_bitint_type (min_prec, 1);
6568 c = tree_output_constant_def (fold_convert (ctype, arg));
6570 if (c)
6572 if (VAR_P (v1) && min_prec == prec)
6574 tree v2 = build1 (VIEW_CONVERT_EXPR,
6575 TREE_TYPE (v1), c);
6576 g = gimple_build_assign (v1, v2);
6577 gsi_insert_on_edge (e, g);
6578 edge_insertions = true;
6579 break;
6581 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6582 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6583 TREE_TYPE (c), v1),
6585 else
6587 unsigned HOST_WIDE_INT nelts
6588 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6589 / limb_prec;
6590 tree vtype
6591 = build_array_type_nelts (large_huge.m_limb_type,
6592 nelts);
6593 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6594 vtype, v1),
6595 build1 (VIEW_CONVERT_EXPR,
6596 vtype, c));
6598 gsi_insert_on_edge (e, g);
6600 if (ext == 0)
6602 unsigned HOST_WIDE_INT nelts
6603 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6604 - min_prec) / limb_prec;
6605 tree vtype
6606 = build_array_type_nelts (large_huge.m_limb_type,
6607 nelts);
6608 tree ptype = build_pointer_type (TREE_TYPE (v1));
6609 tree off;
6610 if (c)
6611 off = fold_convert (ptype,
6612 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6613 else
6614 off = build_zero_cst (ptype);
6615 tree vd = build2 (MEM_REF, vtype,
6616 build_fold_addr_expr (v1), off);
6617 g = gimple_build_assign (vd, build_zero_cst (vtype));
6619 else
6621 tree vd = v1;
6622 if (c)
6624 tree ptype = build_pointer_type (TREE_TYPE (v1));
6625 tree off
6626 = fold_convert (ptype,
6627 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6628 vd = build2 (MEM_REF, large_huge.m_limb_type,
6629 build_fold_addr_expr (v1), off);
6631 vd = build_fold_addr_expr (vd);
6632 unsigned HOST_WIDE_INT nbytes
6633 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6634 if (c)
6635 nbytes
6636 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6637 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6638 g = gimple_build_call (fn, 3, vd,
6639 integer_minus_one_node,
6640 build_int_cst (sizetype,
6641 nbytes));
6643 gsi_insert_on_edge (e, g);
6644 edge_insertions = true;
6645 break;
6646 default:
6647 gcc_unreachable ();
6648 case SSA_NAME:
6649 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6651 if (large_huge.m_names == NULL
6652 || !bitmap_bit_p (large_huge.m_names,
6653 SSA_NAME_VERSION (arg)))
6654 continue;
6656 int p2 = var_to_partition (large_huge.m_map, arg);
6657 if (p1 == p2)
6658 continue;
6659 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6660 tree v2 = large_huge.m_vars[p2];
6661 if (VAR_P (v1) && VAR_P (v2))
6662 g = gimple_build_assign (v1, v2);
6663 else if (VAR_P (v1))
6664 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6665 TREE_TYPE (v1), v2));
6666 else if (VAR_P (v2))
6667 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6668 TREE_TYPE (v2), v1), v2);
6669 else
6671 if (atype == NULL_TREE
6672 || !tree_int_cst_equal (TYPE_SIZE (atype),
6673 TYPE_SIZE (TREE_TYPE (lhs))))
6675 unsigned HOST_WIDE_INT nelts
6676 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6677 / limb_prec;
6678 atype
6679 = build_array_type_nelts (large_huge.m_limb_type,
6680 nelts);
6682 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6683 atype, v1),
6684 build1 (VIEW_CONVERT_EXPR,
6685 atype, v2));
6687 gsi_insert_on_edge (e, g);
6688 edge_insertions = true;
6689 break;
6695 if (large_huge.m_names || has_large_huge)
6697 gimple *nop = NULL;
6698 for (i = 0; i < num_ssa_names; ++i)
6700 tree s = ssa_name (i);
6701 if (s == NULL_TREE)
6702 continue;
6703 tree type = TREE_TYPE (s);
6704 if (TREE_CODE (type) == COMPLEX_TYPE)
6705 type = TREE_TYPE (type);
6706 if (TREE_CODE (type) == BITINT_TYPE
6707 && bitint_precision_kind (type) >= bitint_prec_large)
6709 if (large_huge.m_preserved
6710 && bitmap_bit_p (large_huge.m_preserved,
6711 SSA_NAME_VERSION (s)))
6712 continue;
6713 gimple *g = SSA_NAME_DEF_STMT (s);
6714 if (gimple_code (g) == GIMPLE_NOP)
6716 if (SSA_NAME_VAR (s))
6717 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6718 release_ssa_name (s);
6719 continue;
6721 if (gimple_bb (g) == NULL)
6723 release_ssa_name (s);
6724 continue;
6726 if (gimple_code (g) != GIMPLE_ASM)
6728 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6729 bool save_vta = flag_var_tracking_assignments;
6730 flag_var_tracking_assignments = false;
6731 gsi_remove (&gsi, true);
6732 flag_var_tracking_assignments = save_vta;
6734 if (nop == NULL)
6735 nop = gimple_build_nop ();
6736 SSA_NAME_DEF_STMT (s) = nop;
6737 release_ssa_name (s);
6740 if (optimize)
6741 disable_ranger (cfun);
6744 if (edge_insertions)
6745 gsi_commit_edge_inserts ();
6747 return ret;
6750 namespace {
6752 const pass_data pass_data_lower_bitint =
6754 GIMPLE_PASS, /* type */
6755 "bitintlower", /* name */
6756 OPTGROUP_NONE, /* optinfo_flags */
6757 TV_NONE, /* tv_id */
6758 PROP_ssa, /* properties_required */
6759 PROP_gimple_lbitint, /* properties_provided */
6760 0, /* properties_destroyed */
6761 0, /* todo_flags_start */
6762 0, /* todo_flags_finish */
6765 class pass_lower_bitint : public gimple_opt_pass
6767 public:
6768 pass_lower_bitint (gcc::context *ctxt)
6769 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6772 /* opt_pass methods: */
6773 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6774 unsigned int execute (function *) final override
6776 return gimple_lower_bitint ();
6779 }; // class pass_lower_bitint
6781 } // anon namespace
6783 gimple_opt_pass *
6784 make_pass_lower_bitint (gcc::context *ctxt)
6786 return new pass_lower_bitint (ctxt);
6790 namespace {
6792 const pass_data pass_data_lower_bitint_O0 =
6794 GIMPLE_PASS, /* type */
6795 "bitintlower0", /* name */
6796 OPTGROUP_NONE, /* optinfo_flags */
6797 TV_NONE, /* tv_id */
6798 PROP_cfg, /* properties_required */
6799 PROP_gimple_lbitint, /* properties_provided */
6800 0, /* properties_destroyed */
6801 0, /* todo_flags_start */
6802 0, /* todo_flags_finish */
6805 class pass_lower_bitint_O0 : public gimple_opt_pass
6807 public:
6808 pass_lower_bitint_O0 (gcc::context *ctxt)
6809 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6812 /* opt_pass methods: */
6813 bool gate (function *fun) final override
6815 /* With errors, normal optimization passes are not run. If we don't
6816 lower bitint operations at all, rtl expansion will abort. */
6817 return !(fun->curr_properties & PROP_gimple_lbitint);
6820 unsigned int execute (function *) final override
6822 return gimple_lower_bitint ();
6825 }; // class pass_lower_bitint_O0
6827 } // anon namespace
6829 gimple_opt_pass *
6830 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6832 return new pass_lower_bitint_O0 (ctxt);