/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "stor-layout.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions into 4 categories.  Small _BitInt, where
   the target hook says it is a single limb, middle _BitInt which per ABI
   is not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 the limb is DImode, but
   the target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code and
   finally huge _BitInt which should be handled by loops over the limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
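
/* Illustrative example (not from this file): on an x86_64-like target
   where the limb is 64-bit and MAX_FIXED_MODE_SIZE is 128, the above
   works out to
     bitint_precision_kind (32)  -> bitint_prec_small   (single limb)
     bitint_precision_kind (100) -> bitint_prec_middle  (TImode arithmetic)
     bitint_precision_kind (192) -> bitint_prec_large   (straight line code)
     bitint_precision_kind (512) -> bitint_prec_huge    (loops over limbs)
   The exact boundaries depend on targetm.c.bitint_type_info.  */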
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}
/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  ext = -1;
	  return min_prec2;
	}
    }
  return min_prec;
}
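
/* Worked example (illustrative): for an unsigned _BitInt(256) constant
   with all bits set, wi::min_precision (w, UNSIGNED) would be 256, but
   wi::min_precision (w, SIGNED) - 1 is 0, because the value is -1 when
   interpreted as signed.  So 0 is returned with EXT set to -1 and every
   limb of the constant can be materialized as -1 without any .rodata.  */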
namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
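
/* E.g. (illustrative, assuming 64-bit limbs and 128-bit
   MAX_FIXED_MODE_SIZE): a middle _BitInt(100) operand is rewritten here
   into a cast to the cached 100-bit INTEGER_TYPE, so later lowering can
   perform the arithmetic in that type (TImode) and cast back.  */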
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
    case BIT_FIELD_REF:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
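
/* E.g. (illustrative GIMPLE) the whole chain
     _1 = a_2(D) & b_3(D);
     _4 = _1 + c_5(D);
     x_6 = ~_4;
   consists of mergeable operations only, so it can be lowered into a
   single loop over the limbs, with just the addition's carry flowing
   between iterations.  */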
/* Return non-zero if STMT is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

static int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	  realpart = g;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	  cast = use_stmt;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
	 if both appear, the lowering wants to emit all the code
	 at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
	    return 0;
	  if (gsi_stmt (gsi) == stmt)
	    return 2;
	  /* If realpart is too far from stmt, punt as well.
	     Usually it will appear right after it.  */
	  if (++cnt == 32)
	    return 0;
	}
      while (1);
    }
  return 1;
}
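
/* E.g. (illustrative GIMPLE) the shape recognized here is
     _1 = .ADD_OVERFLOW (a_2(D), b_3(D));
     sum_4 = REALPART_EXPR <_1>;
     _5 = IMAGPART_EXPR <_1>;
     ovf_6 = (int) _5;
   for which 2 is returned because a REALPART_EXPR use is present.  */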
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_bit_field_ref (tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true, for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
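
/* Illustrative sketch (not part of the pass) of the m_first/m_data
   protocol described above, for a hypothetical per-limb handler that
   needs one tree of state carried between the calls:

     if (m_first)
       m_data.safe_push (initial_state);	// compute state once
     tree state = m_data[m_data_cnt];		// reuse it on later limbs
     ...
     m_data_cnt += 1;				// bump by the same amount
						// on every invocation

   where initial_state stands for whatever tree the handler caches.  */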
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
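
/* E.g. (illustrative, with 64-bit limbs) for an unsigned _BitInt(200)
   limbs 0-2 are accessed in the full 64-bit m_limb_type, while the most
   significant limb 3 only holds 200 % 64 == 8 bits and so is accessed
   in a nonstandard unsigned 8-bit type.  */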
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ltype = m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  if (as != TYPE_ADDR_SPACE (ltype))
    ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				  | ENCODE_QUAL_ADDR_SPACE (as));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
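
/* E.g. (illustrative) for a VAR_DECL var, idx 2 and 8-byte limbs this
   builds the equivalent of MEM_REF <limb_type *>(&var + 16), while for
   non-constant idx it falls back to VIEW_CONVERT_EXPRing var to an
   array of limbs and emitting an ARRAY_REF.  */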
/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
  new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     |   / \
     |  /   \
     |new_bb1 new_bb2
     |   |    /
      \  |   /
       \ |  /
	\| /
	 \/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
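
/* Typical usage (illustrative): the lowering emits a runtime test and
   leaves m_gsi positioned inside the conditional block, e.g.
     g = gimple_build_cond (LT_EXPR, idx, size_int (low),
			    NULL_TREE, NULL_TREE);
     if_then (g, profile_probability::likely (), edge_true, edge_false);
     // statements added via insert_before now only run when idx < low
   as handle_cast and handle_operand below do.  */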
787 /* Emit code to access limb IDX from OP. */
789 tree
790 bitint_large_huge::handle_operand (tree op, tree idx)
792 switch (TREE_CODE (op))
794 case SSA_NAME:
795 if (m_names == NULL
796 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
798 if (SSA_NAME_IS_DEFAULT_DEF (op))
800 if (m_first)
802 tree v = create_tmp_reg (m_limb_type);
803 if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
805 DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
806 DECL_SOURCE_LOCATION (v)
807 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
809 v = get_or_create_ssa_default_def (cfun, v);
810 m_data.safe_push (v);
812 tree ret = m_data[m_data_cnt];
813 m_data_cnt++;
814 if (tree_fits_uhwi_p (idx))
816 tree type = limb_access_type (TREE_TYPE (op), idx);
817 ret = add_cast (type, ret);
819 return ret;
821 location_t loc_save = m_loc;
822 m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
823 tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
824 m_loc = loc_save;
825 return ret;
827 int p;
828 gimple *g;
829 tree t;
830 p = var_to_partition (m_map, op);
831 gcc_assert (m_vars[p] != NULL_TREE);
832 t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
833 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
834 insert_before (g);
835 t = gimple_assign_lhs (g);
836 if (m_first
837 && m_single_use_names
838 && m_vars[p] != m_lhs
839 && m_after_stmt
840 && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
842 tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
843 CLOBBER_STORAGE_END);
844 g = gimple_build_assign (m_vars[p], clobber);
845 gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
846 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
848 return t;
849 case INTEGER_CST:
850 if (tree_fits_uhwi_p (idx))
852 tree c, type = limb_access_type (TREE_TYPE (op), idx);
853 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
854 if (m_first)
856 m_data.safe_push (NULL_TREE);
857 m_data.safe_push (NULL_TREE);
859 if (limb_prec != HOST_BITS_PER_WIDE_INT)
861 wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
862 TYPE_SIGN (TREE_TYPE (op)));
863 c = wide_int_to_tree (type,
864 wide_int::from (w, TYPE_PRECISION (type),
865 UNSIGNED));
867 else if (i >= TREE_INT_CST_EXT_NUNITS (op))
868 c = build_int_cst (type,
869 tree_int_cst_sgn (op) < 0 ? -1 : 0);
870 else
871 c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
872 m_data_cnt += 2;
873 return c;
875 if (m_first
876 || (m_data[m_data_cnt] == NULL_TREE
877 && m_data[m_data_cnt + 1] == NULL_TREE))
879 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
880 unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
881 int ext;
882 unsigned min_prec = bitint_min_cst_precision (op, ext);
883 if (m_first)
885 m_data.safe_push (NULL_TREE);
886 m_data.safe_push (NULL_TREE);
888 if (integer_zerop (op))
890 tree c = build_zero_cst (m_limb_type);
891 m_data[m_data_cnt] = c;
892 m_data[m_data_cnt + 1] = c;
894 else if (integer_all_onesp (op))
896 tree c = build_all_ones_cst (m_limb_type);
897 m_data[m_data_cnt] = c;
898 m_data[m_data_cnt + 1] = c;
900 else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
902 /* Single limb constant. Use a phi with that limb from
903 the preheader edge and 0 or -1 constant from the other edge
904 and for the second limb in the loop. */
905 tree out;
906 gcc_assert (m_first);
907 m_data.pop ();
908 m_data.pop ();
909 prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
910 build_int_cst (m_limb_type, ext));
912 else if (min_prec > prec - rem - 2 * limb_prec)
914 /* Constant which has enough significant bits that it isn't
915 worth trying to save .rodata space by extending from smaller
916 number. */
917 tree type;
918 if (m_var_msb)
919 type = TREE_TYPE (op);
920 else
921 /* If we have a guarantee the most significant partial limb
922 (if any) will be only accessed through handle_operand
923 with INTEGER_CST idx, we don't need to include the partial
924 limb in .rodata. */
925 type = build_bitint_type (prec - rem, 1);
926 tree c = tree_output_constant_def (fold_convert (type, op));
927 m_data[m_data_cnt] = c;
928 m_data[m_data_cnt + 1] = NULL_TREE;
930 else if (m_upwards_2limb)
932 /* Constant with smaller number of bits. Trade conditional
933 code for .rodata space by extending from smaller number. */
934 min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
935 tree type = build_bitint_type (min_prec, 1);
936 tree c = tree_output_constant_def (fold_convert (type, op));
937 tree idx2 = make_ssa_name (sizetype);
938 g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
939 insert_before (g);
940 g = gimple_build_cond (LT_EXPR, idx,
941 size_int (min_prec / limb_prec),
942 NULL_TREE, NULL_TREE);
943 edge edge_true, edge_false;
944 if_then (g, (min_prec >= (prec - rem) / 2
945 ? profile_probability::likely ()
946 : profile_probability::unlikely ()),
947 edge_true, edge_false);
948 tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
949 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
950 insert_before (g);
951 c1 = gimple_assign_lhs (g);
952 tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
953 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
954 insert_before (g);
955 c2 = gimple_assign_lhs (g);
956 tree c3 = build_int_cst (m_limb_type, ext);
957 m_gsi = gsi_after_labels (edge_true->dest);
958 m_data[m_data_cnt] = make_ssa_name (m_limb_type);
959 m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
960 gphi *phi = create_phi_node (m_data[m_data_cnt],
961 edge_true->dest);
962 add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
963 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
964 phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
965 add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
966 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
968 else
970 /* Constant with smaller number of bits. Trade conditional
971 code for .rodata space by extending from smaller number.
972 Version for loops with random access to the limbs or
973 downwards loops. */
974 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
975 tree c;
976 if (min_prec <= (unsigned) limb_prec)
977 c = fold_convert (m_limb_type, op);
978 else
980 tree type = build_bitint_type (min_prec, 1);
981 c = tree_output_constant_def (fold_convert (type, op));
983 m_data[m_data_cnt] = c;
984 m_data[m_data_cnt + 1] = integer_type_node;
986 t = m_data[m_data_cnt];
987 if (m_data[m_data_cnt + 1] == NULL_TREE)
989 t = limb_access (TREE_TYPE (op), t, idx, false);
990 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
991 insert_before (g);
992 t = gimple_assign_lhs (g);
995 else if (m_data[m_data_cnt + 1] == NULL_TREE)
997 t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
998 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
999 insert_before (g);
1000 t = gimple_assign_lhs (g);
1002 else
1003 t = m_data[m_data_cnt + 1];
1004 if (m_data[m_data_cnt + 1] == integer_type_node)
1006 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
1007 unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
1008 int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
1009 tree c = m_data[m_data_cnt];
1010 unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
1011 g = gimple_build_cond (LT_EXPR, idx,
1012 size_int (min_prec / limb_prec),
1013 NULL_TREE, NULL_TREE);
1014 edge edge_true, edge_false;
1015 if_then (g, (min_prec >= (prec - rem) / 2
1016 ? profile_probability::likely ()
1017 : profile_probability::unlikely ()),
1018 edge_true, edge_false);
1019 if (min_prec > (unsigned) limb_prec)
1021 c = limb_access (TREE_TYPE (op), c, idx, false);
1022 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
1023 insert_before (g);
1024 c = gimple_assign_lhs (g);
1026 tree c2 = build_int_cst (m_limb_type, ext);
1027 m_gsi = gsi_after_labels (edge_true->dest);
1028 t = make_ssa_name (m_limb_type);
1029 gphi *phi = create_phi_node (t, edge_true->dest);
1030 add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
1031 add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
1033 m_data_cnt += 2;
1034 return t;
1035 default:
1036 gcc_unreachable ();
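
/* E.g. (illustrative, 64-bit limbs): handle_operand on the INTEGER_CST
   0x10000000000000002uwb with constant idx yields the constant limb
   directly (2 for idx 0, 1 for idx 1), while for a non-constant idx
   inside a loop it materializes the constant in .rodata (possibly
   shortened per bitint_min_cst_precision) and emits a per-limb load.  */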
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
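
/* E.g. (illustrative) for a carry flowing around a lowering loop,
   prepare_data_in_out (zero, idx, &out) creates
     carry_in = PHI <0 (preheader), carry_out (latch)>
   on the first call and hands back the cached pair on later calls.  */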
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
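
/* E.g. (illustrative GIMPLE) one iteration of a lowered _BitInt
   addition, when the target provides the uaddc5 optab, looks like
     _1 = .UADDC (limb1_2, limb2_3, carry_in_4);
     res_5 = REALPART_EXPR <_1>;
     carry_out_6 = IMAGPART_EXPR <_1>;
   with carry_out_6 feeding the next iteration's carry PHI.  */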
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
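
/* E.g. (illustrative) for cnt 3 with 64-bit limbs, each produced limb is
     lhs = (rhs1 << 3) | (previous_limb >> 61);
   where previous_limb is the data_in carried between the per-limb
   calls.  */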
1253 /* Helper function for handle_stmt method, handle an integral
1254 to integral conversion. */
1256 tree
1257 bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
1259 tree rhs_type = TREE_TYPE (rhs1);
1260 gimple *g;
1261 if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
1262 && TREE_CODE (lhs_type) == BITINT_TYPE
1263 && TREE_CODE (rhs_type) == BITINT_TYPE
1264 && bitint_precision_kind (lhs_type) >= bitint_prec_large
1265 && bitint_precision_kind (rhs_type) >= bitint_prec_large)
1267 if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
1268 /* If lhs has bigger precision than rhs, we can use
1269 the simple case only if there is a guarantee that
1270 the most significant limb is handled in straight
1271 line code. If m_var_msb (on left shifts) or
1272 if m_upwards_2limb * limb_prec is equal to
1273 lhs precision or if not m_upwards_2limb and lhs_type
1274 has precision which is multiple of limb_prec that is
1275 not the case. */
1276 || (!m_var_msb
1277 && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1278 == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
1279 && ((!m_upwards_2limb
1280 && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
1281 || (m_upwards_2limb
1282 && (m_upwards_2limb * limb_prec
1283 < TYPE_PRECISION (lhs_type))))))
1285 rhs1 = handle_operand (rhs1, idx);
1286 if (tree_fits_uhwi_p (idx))
1288 tree type = limb_access_type (lhs_type, idx);
1289 if (!types_compatible_p (type, TREE_TYPE (rhs1)))
1290 rhs1 = add_cast (type, rhs1);
1292 return rhs1;
1294 tree t;
1295 /* Indexes lower than this don't need any special processing. */
1296 unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
1297 - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= this always contain an extension.  */
1299 unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
1300 bool save_first = m_first;
1301 if (m_first)
1303 m_data.safe_push (NULL_TREE);
1304 m_data.safe_push (NULL_TREE);
1305 m_data.safe_push (NULL_TREE);
1306 if (TYPE_UNSIGNED (rhs_type))
1307 /* No need to keep state between iterations. */
1309 else if (m_upwards && !m_upwards_2limb)
1310 /* We need to keep state between iterations, but
1311 not within any loop, everything is straight line
1312 code with only increasing indexes. */
1314 else if (!m_upwards_2limb)
1316 unsigned save_data_cnt = m_data_cnt;
1317 gimple_stmt_iterator save_gsi = m_gsi;
1318 m_gsi = m_init_gsi;
1319 if (gsi_end_p (m_gsi))
1320 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1321 else
1322 gsi_next (&m_gsi);
1323 m_data_cnt = save_data_cnt + 3;
1324 t = handle_operand (rhs1, size_int (low));
1325 m_first = false;
1326 m_data[save_data_cnt + 2]
1327 = build_int_cst (NULL_TREE, m_data_cnt);
1328 m_data_cnt = save_data_cnt;
1329 t = add_cast (signed_type_for (m_limb_type), t);
1330 tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
1331 tree n = make_ssa_name (TREE_TYPE (t));
1332 g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
1333 insert_before (g);
1334 m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
1335 m_init_gsi = m_gsi;
1336 if (gsi_end_p (m_init_gsi))
1337 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1338 else
1339 gsi_prev (&m_init_gsi);
1340 m_gsi = save_gsi;
1342 else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
1343 /* We need to keep state between iterations, but
1344 fortunately not within the loop, only afterwards. */
1346 else
1348 tree out;
1349 m_data.truncate (m_data_cnt);
1350 prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
1351 m_data.safe_push (NULL_TREE);
1355 unsigned save_data_cnt = m_data_cnt;
1356 m_data_cnt += 3;
1357 if (!tree_fits_uhwi_p (idx))
1359 if (m_upwards_2limb
1360 && low >= m_upwards_2limb - m_first)
1362 rhs1 = handle_operand (rhs1, idx);
1363 if (m_first)
1364 m_data[save_data_cnt + 2]
1365 = build_int_cst (NULL_TREE, m_data_cnt);
1366 m_first = save_first;
1367 return rhs1;
1369 bool single_comparison
1370 = low == high || (m_upwards_2limb && (low & 1) == m_first);
1371 tree idxc = idx;
1372 if (!single_comparison
1373 && m_upwards_2limb
1374 && !m_first
1375 && low + 1 == m_upwards_2limb)
	/* In this case we know that idx <= low always,
	   so effectively we just need a single comparison,
	   idx < low or idx == low, but we'd need to emit different
	   code for the 2 branches than single_comparison normally
	   emits.  So, instead of special-casing that, emit a
	   low <= low comparison which cfg cleanup will clean up
	   at the end of the pass.  */
1383 idxc = size_int (low);
1384 g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
1385 idxc, size_int (low), NULL_TREE, NULL_TREE);
1386 edge edge_true_true, edge_true_false, edge_false;
1387 if_then_if_then_else (g, (single_comparison ? NULL
1388 : gimple_build_cond (EQ_EXPR, idx,
1389 size_int (low),
1390 NULL_TREE,
1391 NULL_TREE)),
1392 profile_probability::likely (),
1393 profile_probability::unlikely (),
1394 edge_true_true, edge_true_false, edge_false);
1395 bool save_cast_conditional = m_cast_conditional;
1396 m_cast_conditional = true;
1397 m_bitfld_load = 0;
1398 tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
1399 if (m_first)
1400 m_data[save_data_cnt + 2]
1401 = build_int_cst (NULL_TREE, m_data_cnt);
1402 tree ext = NULL_TREE;
1403 tree bitfld = NULL_TREE;
1404 if (!single_comparison)
1406 m_gsi = gsi_after_labels (edge_true_true->src);
1407 m_first = false;
1408 m_data_cnt = save_data_cnt + 3;
1409 if (m_bitfld_load)
1411 bitfld = m_data[m_bitfld_load];
1412 m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
1413 m_bitfld_load = 0;
1415 t2 = handle_operand (rhs1, size_int (low));
1416 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
1417 t2 = add_cast (m_limb_type, t2);
1418 if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
1420 ext = add_cast (signed_type_for (m_limb_type), t2);
1421 tree lpm1 = build_int_cst (unsigned_type_node,
1422 limb_prec - 1);
1423 tree n = make_ssa_name (TREE_TYPE (ext));
1424 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1425 insert_before (g);
1426 ext = add_cast (m_limb_type, n);
1429 tree t3;
1430 if (TYPE_UNSIGNED (rhs_type))
1431 t3 = build_zero_cst (m_limb_type);
1432 else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
1433 t3 = m_data[save_data_cnt];
1434 else
1435 t3 = m_data[save_data_cnt + 1];
1436 m_gsi = gsi_after_labels (edge_true_false->dest);
1437 t = make_ssa_name (m_limb_type);
1438 gphi *phi = create_phi_node (t, edge_true_false->dest);
1439 add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
1440 add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
1441 if (edge_true_true)
1442 add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
1443 if (ext)
1445 tree t4 = make_ssa_name (m_limb_type);
1446 phi = create_phi_node (t4, edge_true_false->dest);
1447 add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
1448 UNKNOWN_LOCATION);
1449 add_phi_arg (phi, m_data[save_data_cnt], edge_false,
1450 UNKNOWN_LOCATION);
1451 add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
1452 if (!save_cast_conditional)
1454 g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
1455 insert_before (g);
1457 else
1458 for (basic_block bb = gsi_bb (m_gsi);;)
1460 edge e1 = single_succ_edge (bb);
1461 edge e2 = find_edge (e1->dest, m_bb), e3;
1462 tree t5 = (e2 ? m_data[save_data_cnt + 1]
1463 : make_ssa_name (m_limb_type));
1464 phi = create_phi_node (t5, e1->dest);
1465 edge_iterator ei;
1466 FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1467 add_phi_arg (phi, (e3 == e1 ? t4
1468 : build_zero_cst (m_limb_type)),
1469 e3, UNKNOWN_LOCATION);
1470 if (e2)
1471 break;
1472 t4 = t5;
1473 bb = e1->dest;
1476 if (m_bitfld_load)
1478 tree t4;
1479 if (!m_first)
1480 t4 = m_data[m_bitfld_load + 1];
1481 else
1482 t4 = make_ssa_name (m_limb_type);
1483 phi = create_phi_node (t4, edge_true_false->dest);
1484 add_phi_arg (phi,
1485 edge_true_true ? bitfld : m_data[m_bitfld_load],
1486 edge_true_false, UNKNOWN_LOCATION);
1487 add_phi_arg (phi, m_data[m_bitfld_load + 2],
1488 edge_false, UNKNOWN_LOCATION);
1489 if (edge_true_true)
1490 add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
1491 UNKNOWN_LOCATION);
1492 m_data[m_bitfld_load] = t4;
1493 m_data[m_bitfld_load + 2] = t4;
1494 m_bitfld_load = 0;
1496 m_cast_conditional = save_cast_conditional;
1497 m_first = save_first;
1498 return t;
1500 else
1502 if (tree_to_uhwi (idx) < low)
1504 t = handle_operand (rhs1, idx);
1505 if (m_first)
1506 m_data[save_data_cnt + 2]
1507 = build_int_cst (NULL_TREE, m_data_cnt);
1509 else if (tree_to_uhwi (idx) < high)
1511 t = handle_operand (rhs1, size_int (low));
1512 if (m_first)
1513 m_data[save_data_cnt + 2]
1514 = build_int_cst (NULL_TREE, m_data_cnt);
1515 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
1516 t = add_cast (m_limb_type, t);
1517 tree ext = NULL_TREE;
1518 if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
1520 ext = add_cast (signed_type_for (m_limb_type), t);
1521 tree lpm1 = build_int_cst (unsigned_type_node,
1522 limb_prec - 1);
1523 tree n = make_ssa_name (TREE_TYPE (ext));
1524 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1525 insert_before (g);
1526 ext = add_cast (m_limb_type, n);
1527 m_data[save_data_cnt + 1] = ext;
1530 else
1532 if (TYPE_UNSIGNED (rhs_type) && m_first)
1534 handle_operand (rhs1, size_zero_node);
1535 m_data[save_data_cnt + 2]
1536 = build_int_cst (NULL_TREE, m_data_cnt);
1538 else
1539 m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
1540 if (TYPE_UNSIGNED (rhs_type))
1541 t = build_zero_cst (m_limb_type);
1542 else if (m_bb && m_data[save_data_cnt])
1543 t = m_data[save_data_cnt];
1544 else
1545 t = m_data[save_data_cnt + 1];
1547 tree type = limb_access_type (lhs_type, idx);
1548 if (!useless_type_conversion_p (type, m_limb_type))
1549 t = add_cast (type, t);
1550 m_first = save_first;
1551 return t;
1554 else if (TREE_CODE (lhs_type) == BITINT_TYPE
1555 && bitint_precision_kind (lhs_type) >= bitint_prec_large
1556 && INTEGRAL_TYPE_P (rhs_type))
1558 /* Add support for 3 or more limbs filled in from normal integral
1559 type if this assert fails. If no target chooses limb mode smaller
1560 than half of largest supported normal integral type, this will not
1561 be needed. */
1562 gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
1563 tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
1564 if (m_first)
1566 gimple_stmt_iterator save_gsi = m_gsi;
1567 m_gsi = m_init_gsi;
1568 if (gsi_end_p (m_gsi))
1569 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1570 else
1571 gsi_next (&m_gsi);
1572 if (TREE_CODE (rhs_type) == BITINT_TYPE
1573 && bitint_precision_kind (rhs_type) == bitint_prec_middle)
1575 tree type = NULL_TREE;
1576 rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
1577 rhs_type = TREE_TYPE (rhs1);
1579 r1 = rhs1;
1580 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
1581 r1 = add_cast (m_limb_type, rhs1);
1582 if (TYPE_PRECISION (rhs_type) > limb_prec)
1584 g = gimple_build_assign (make_ssa_name (rhs_type),
1585 RSHIFT_EXPR, rhs1,
1586 build_int_cst (unsigned_type_node,
1587 limb_prec));
1588 insert_before (g);
1589 r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
1591 if (TYPE_UNSIGNED (rhs_type))
1592 rext = build_zero_cst (m_limb_type);
1593 else
1595 rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
1596 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
1597 RSHIFT_EXPR, rext,
1598 build_int_cst (unsigned_type_node,
1599 limb_prec - 1));
1600 insert_before (g);
1601 rext = add_cast (m_limb_type, gimple_assign_lhs (g));
1603 m_init_gsi = m_gsi;
1604 if (gsi_end_p (m_init_gsi))
1605 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1606 else
1607 gsi_prev (&m_init_gsi);
1608 m_gsi = save_gsi;
1610 tree t;
1611 if (m_upwards_2limb)
1613 if (m_first)
1615 tree out1, out2;
1616 prepare_data_in_out (r1, idx, &out1, rext);
1617 if (TYPE_PRECISION (rhs_type) > limb_prec)
1619 prepare_data_in_out (r2, idx, &out2, rext);
1620 m_data.pop ();
1621 t = m_data.pop ();
1622 m_data[m_data_cnt + 1] = t;
1624 else
1625 m_data[m_data_cnt + 1] = rext;
1626 m_data.safe_push (rext);
1627 t = m_data[m_data_cnt];
1629 else if (!tree_fits_uhwi_p (idx))
1630 t = m_data[m_data_cnt + 1];
1631 else
1633 tree type = limb_access_type (lhs_type, idx);
1634 t = m_data[m_data_cnt + 2];
1635 if (!useless_type_conversion_p (type, m_limb_type))
1636 t = add_cast (type, t);
1638 m_data_cnt += 3;
1639 return t;
1641 else if (m_first)
1643 m_data.safe_push (r1);
1644 m_data.safe_push (r2);
1645 m_data.safe_push (rext);
1647 if (tree_fits_uhwi_p (idx))
1649 tree type = limb_access_type (lhs_type, idx);
1650 if (integer_zerop (idx))
1651 t = m_data[m_data_cnt];
1652 else if (TYPE_PRECISION (rhs_type) > limb_prec
1653 && integer_onep (idx))
1654 t = m_data[m_data_cnt + 1];
1655 else
1656 t = m_data[m_data_cnt + 2];
1657 if (!useless_type_conversion_p (type, m_limb_type))
1658 t = add_cast (type, t);
1659 m_data_cnt += 3;
1660 return t;
1662 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
1663 NULL_TREE, NULL_TREE);
1664 edge e2, e3, e4 = NULL;
1665 if_then (g, profile_probability::likely (), e2, e3);
1666 if (m_data[m_data_cnt + 1])
1668 g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
1669 NULL_TREE, NULL_TREE);
1670 insert_before (g);
1671 edge e5 = split_block (gsi_bb (m_gsi), g);
1672 e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
1673 e2 = find_edge (e5->dest, e2->dest);
1674 e4->probability = profile_probability::unlikely ();
1675 e5->flags = EDGE_FALSE_VALUE;
1676 e5->probability = e4->probability.invert ();
1678 m_gsi = gsi_after_labels (e2->dest);
1679 t = make_ssa_name (m_limb_type);
1680 gphi *phi = create_phi_node (t, e2->dest);
1681 add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
1682 add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
1683 if (e4)
1684 add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
1685 m_data_cnt += 3;
1686 return t;
1688 return NULL_TREE;
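
/* E.g. (illustrative, 64-bit limbs): for a widening cast from signed
   _BitInt(128) to _BitInt(256), limbs 0-1 come from the operand and
   limbs 2-3 are filled with the extension computed roughly as
     ext = (limb_type) (((signed limb_type) limb1) >> 63);
   i.e. 0 or all-ones depending on the operand's sign bit, matching the
   m_data[save_data_cnt + 1] handling above.  */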
1691 /* Helper function for handle_stmt method, handle a BIT_FIELD_REF. */
1693 tree
1694 bitint_large_huge::handle_bit_field_ref (tree op, tree idx)
1696 if (tree_fits_uhwi_p (idx))
1698 if (m_first)
1699 m_data.safe_push (NULL);
1700 ++m_data_cnt;
1701 unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (m_limb_type));
1702 tree bfr = build3 (BIT_FIELD_REF, m_limb_type,
1703 TREE_OPERAND (op, 0),
1704 TYPE_SIZE (m_limb_type),
1705 size_binop (PLUS_EXPR, TREE_OPERAND (op, 2),
1706 bitsize_int (tree_to_uhwi (idx) * sz)));
1707 tree r = make_ssa_name (m_limb_type);
1708 gimple *g = gimple_build_assign (r, bfr);
1709 insert_before (g);
1710 tree type = limb_access_type (TREE_TYPE (op), idx);
1711 if (!useless_type_conversion_p (type, m_limb_type))
1712 r = add_cast (type, r);
1713 return r;
1715 tree var;
1716 if (m_first)
1718 unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op)));
1719 machine_mode mode;
1720 tree type, bfr;
1721 if (bitwise_mode_for_size (sz).exists (&mode)
1722 && known_eq (GET_MODE_BITSIZE (mode), sz))
1723 type = bitwise_type_for_mode (mode);
1724 else
1726 mode = VOIDmode;
1727 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op, 0)));
1729 if (TYPE_ALIGN (type) < TYPE_ALIGN (TREE_TYPE (op)))
1730 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op)));
1731 var = create_tmp_var (type);
1732 TREE_ADDRESSABLE (var) = 1;
1733 gimple *g;
1734 if (mode != VOIDmode)
1736 bfr = build3 (BIT_FIELD_REF, type, TREE_OPERAND (op, 0),
1737 TYPE_SIZE (type), TREE_OPERAND (op, 2));
1738 g = gimple_build_assign (make_ssa_name (type),
1739 BIT_FIELD_REF, bfr);
1740 gimple_set_location (g, m_loc);
1741 gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
1742 bfr = gimple_assign_lhs (g);
1744 else
1745 bfr = TREE_OPERAND (op, 0);
1746 g = gimple_build_assign (var, bfr);
1747 gimple_set_location (g, m_loc);
1748 gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
1749 if (mode == VOIDmode)
1751 unsigned HOST_WIDE_INT nelts
1752 = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op))), limb_prec);
1753 tree atype = build_array_type_nelts (m_limb_type, nelts);
1754 var = build2 (MEM_REF, atype, build_fold_addr_expr (var),
1755 build_int_cst (build_pointer_type (type),
1756 tree_to_uhwi (TREE_OPERAND (op, 2))
1757 / BITS_PER_UNIT));
1759 m_data.safe_push (var);
1761 else
1762 var = unshare_expr (m_data[m_data_cnt]);
1763 ++m_data_cnt;
1764 var = limb_access (TREE_TYPE (op), var, idx, false);
1765 tree r = make_ssa_name (m_limb_type);
1766 gimple *g = gimple_build_assign (r, var);
1767 insert_before (g);
1768 return r;
/* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
   is an older EH edge, and except for virtual PHIs duplicate the
   PHI argument from the EH_EDGE to the new EH edge.  */

static void
add_eh_edge (basic_block src, edge eh_edge)
{
  edge e = make_edge (src, eh_edge->dest, EDGE_EH);
  e->probability = profile_probability::very_unlikely ();
  for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree lhs = gimple_phi_result (phi);
      if (virtual_operand_p (lhs))
	continue;
      const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
      add_phi_arg (phi, arg->def, e, arg->locus);
    }
}
1792 /* Helper function for handle_stmt method, handle a load from memory. */
1794 tree
1795 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1797 tree rhs1 = gimple_assign_rhs1 (stmt);
1798 tree rhs_type = TREE_TYPE (rhs1);
1799 bool eh = stmt_ends_bb_p (stmt);
1800 edge eh_edge = NULL;
1801 gimple *g;
1803 if (eh)
1805 edge_iterator ei;
1806 basic_block bb = gimple_bb (stmt);
1808 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1809 if (eh_edge->flags & EDGE_EH)
1810 break;
1813 if (TREE_CODE (rhs1) == COMPONENT_REF
1814 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1816 tree fld = TREE_OPERAND (rhs1, 1);
1817 /* For little-endian, we can allow as inputs bit-fields
1818 which start at a limb boundary. */
1819 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1820 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1821 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1822 goto normal_load;
1823 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
1824 handle it normally for now. */
1825 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1826 goto normal_load;
1827 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1828 poly_int64 bitoffset;
1829 poly_uint64 field_offset, repr_offset;
1830 bool var_field_off = false;
1831 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1832 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1833 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1834 else
1836 bitoffset = 0;
1837 var_field_off = true;
1839 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1840 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1841 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1842 TREE_OPERAND (rhs1, 0), repr,
1843 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1844 HOST_WIDE_INT bo = bitoffset.to_constant ();
1845 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1846 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1847 if (m_first)
1849 if (m_upwards)
1851 gimple_stmt_iterator save_gsi = m_gsi;
1852 m_gsi = m_init_gsi;
1853 if (gsi_end_p (m_gsi))
1854 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1855 else
1856 gsi_next (&m_gsi);
1857 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1858 tree iv = make_ssa_name (m_limb_type);
1859 g = gimple_build_assign (iv, t);
1860 insert_before (g);
1861 if (eh)
1863 maybe_duplicate_eh_stmt (g, stmt);
1864 if (eh_edge)
1866 edge e = split_block (gsi_bb (m_gsi), g);
1867 add_eh_edge (e->src, eh_edge);
1868 m_gsi = gsi_after_labels (e->dest);
1869 if (gsi_bb (save_gsi) == e->src)
1871 if (gsi_end_p (save_gsi))
1872 save_gsi = gsi_end_bb (e->dest);
1873 else
1874 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1876 if (m_preheader_bb == e->src)
1877 m_preheader_bb = e->dest;
1880 m_init_gsi = m_gsi;
1881 if (gsi_end_p (m_init_gsi))
1882 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1883 else
1884 gsi_prev (&m_init_gsi);
1885 m_gsi = save_gsi;
1886 tree out;
1887 prepare_data_in_out (iv, idx, &out);
1888 out = m_data[m_data_cnt];
1889 m_data.safe_push (out);
1891 else
1893 m_data.safe_push (NULL_TREE);
1894 m_data.safe_push (NULL_TREE);
1895 m_data.safe_push (NULL_TREE);
1899 tree nidx0 = NULL_TREE, nidx1;
1900 tree iv = m_data[m_data_cnt];
1901 if (m_cast_conditional && iv)
1903 gcc_assert (!m_bitfld_load);
1904 m_bitfld_load = m_data_cnt;
1906 if (tree_fits_uhwi_p (idx))
1908 unsigned prec = TYPE_PRECISION (rhs_type);
1909 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1910 gcc_assert (i * limb_prec < prec);
1911 nidx1 = size_int (i + bo_idx + 1);
1912 if ((i + 1) * limb_prec > prec)
1914 prec %= limb_prec;
1915 if (prec + bo_bit <= (unsigned) limb_prec)
1916 nidx1 = NULL_TREE;
1918 if (!iv)
1919 nidx0 = size_int (i + bo_idx);
1921 else
1923 if (!iv)
1925 if (bo_idx == 0)
1926 nidx0 = idx;
1927 else
1929 nidx0 = make_ssa_name (sizetype);
1930 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1931 size_int (bo_idx));
1932 insert_before (g);
1935 nidx1 = make_ssa_name (sizetype);
1936 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1937 size_int (bo_idx + 1));
1938 insert_before (g);
1941 tree iv2 = NULL_TREE;
1942 if (nidx0)
1944 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1945 iv = make_ssa_name (m_limb_type);
1946 g = gimple_build_assign (iv, t);
1947 insert_before (g);
1948 gcc_assert (!eh);
1950 if (nidx1)
1952 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1953 unsigned prec = TYPE_PRECISION (rhs_type);
1954 if (conditional)
1956 if ((prec % limb_prec) == 0
1957 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1958 conditional = false;
1960 edge edge_true = NULL, edge_false = NULL;
1961 if (conditional)
1963 g = gimple_build_cond (NE_EXPR, idx,
1964 size_int (prec / limb_prec),
1965 NULL_TREE, NULL_TREE);
1966 if_then (g, profile_probability::likely (),
1967 edge_true, edge_false);
1969 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1970 if (m_upwards_2limb
1971 && !m_first
1972 && !m_bitfld_load
1973 && !tree_fits_uhwi_p (idx))
1974 iv2 = m_data[m_data_cnt + 1];
1975 else
1976 iv2 = make_ssa_name (m_limb_type);
1977 g = gimple_build_assign (iv2, t);
1978 insert_before (g);
1979 if (eh)
1981 maybe_duplicate_eh_stmt (g, stmt);
1982 if (eh_edge)
1984 edge e = split_block (gsi_bb (m_gsi), g);
1985 m_gsi = gsi_after_labels (e->dest);
1986 add_eh_edge (e->src, eh_edge);
1989 if (conditional)
1991 tree iv3 = make_ssa_name (m_limb_type);
1992 if (eh)
1993 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1994 gphi *phi = create_phi_node (iv3, edge_true->dest);
1995 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1996 add_phi_arg (phi, build_zero_cst (m_limb_type),
1997 edge_false, UNKNOWN_LOCATION);
1998 m_gsi = gsi_after_labels (edge_true->dest);
2001 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
2002 iv, build_int_cst (unsigned_type_node, bo_bit));
2003 insert_before (g);
2004 iv = gimple_assign_lhs (g);
2005 if (iv2)
2007 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
2008 iv2, build_int_cst (unsigned_type_node,
2009 limb_prec - bo_bit));
2010 insert_before (g);
2011 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
2012 gimple_assign_lhs (g), iv);
2013 insert_before (g);
2014 iv = gimple_assign_lhs (g);
2015 if (m_data[m_data_cnt])
2016 m_data[m_data_cnt] = iv2;
2018 if (tree_fits_uhwi_p (idx))
2020 tree atype = limb_access_type (rhs_type, idx);
2021 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
2022 iv = add_cast (atype, iv);
2024 m_data_cnt += 3;
2025 return iv;
2028 normal_load:
2029 /* Use write_p = true for loads with EH edges to make
2030 sure limb_access doesn't add a cast as a separate
2031 statement after it. */
2032 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
2033 tree ret = make_ssa_name (TREE_TYPE (rhs1));
2034 g = gimple_build_assign (ret, rhs1);
2035 insert_before (g);
2036 if (eh)
2038 maybe_duplicate_eh_stmt (g, stmt);
2039 if (eh_edge)
2041 edge e = split_block (gsi_bb (m_gsi), g);
2042 m_gsi = gsi_after_labels (e->dest);
2043 add_eh_edge (e->src, eh_edge);
2045 if (tree_fits_uhwi_p (idx))
2047 tree atype = limb_access_type (rhs_type, idx);
2048 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
2049 ret = add_cast (atype, ret);
2052 return ret;
2055 /* Return the limb at index IDX of the result of the mergeable statement STMT. */
2057 tree
2058 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
2060 tree lhs, rhs1, rhs2 = NULL_TREE;
2061 gimple *g;
2062 switch (gimple_code (stmt))
2064 case GIMPLE_ASSIGN:
2065 if (gimple_assign_load_p (stmt))
2066 return handle_load (stmt, idx);
2067 switch (gimple_assign_rhs_code (stmt))
2069 case BIT_AND_EXPR:
2070 case BIT_IOR_EXPR:
2071 case BIT_XOR_EXPR:
2072 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2073 /* FALLTHRU */
2074 case BIT_NOT_EXPR:
2075 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2076 lhs = make_ssa_name (TREE_TYPE (rhs1));
2077 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
2078 rhs1, rhs2);
2079 insert_before (g);
2080 return lhs;
2081 case PLUS_EXPR:
2082 case MINUS_EXPR:
2083 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2084 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2085 return handle_plus_minus (gimple_assign_rhs_code (stmt),
2086 rhs1, rhs2, idx);
2087 case NEGATE_EXPR:
2088 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2089 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
2090 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
2091 case LSHIFT_EXPR:
2092 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
2093 idx),
2094 gimple_assign_rhs2 (stmt), idx);
2095 case SSA_NAME:
2096 case INTEGER_CST:
2097 return handle_operand (gimple_assign_rhs1 (stmt), idx);
2098 CASE_CONVERT:
2099 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2100 gimple_assign_rhs1 (stmt), idx);
2101 case VIEW_CONVERT_EXPR:
2102 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2103 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
2104 idx);
2105 case BIT_FIELD_REF:
2106 return handle_bit_field_ref (gimple_assign_rhs1 (stmt), idx);
2107 default:
2108 break;
2110 break;
2111 default:
2112 break;
2114 gcc_unreachable ();
2117 /* Return the minimum precision of OP at STMT.
2118 A positive value is the minimum precision above which all bits
2119 are zero, a negative value means all bits above the negation of
2120 the value are copies of the sign bit. */
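/* E.g. if the range query proves OP is in [0, 1000], the result is
   10 (all bits above the low 10 bits are zero), while an OP known
   to be in [-5, 5] yields -4 (all bits above the low 4 bits are
   copies of the sign bit).  */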
2122 static int
2123 range_to_prec (tree op, gimple *stmt)
2125 int_range_max r;
2126 wide_int w;
2127 tree type = TREE_TYPE (op);
2128 unsigned int prec = TYPE_PRECISION (type);
2130 if (!optimize
2131 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2132 || r.undefined_p ())
2134 if (TYPE_UNSIGNED (type))
2135 return prec;
2136 else
2137 return MIN ((int) -prec, -2);
2140 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2142 w = r.lower_bound ();
2143 if (wi::neg_p (w))
2145 int min_prec1 = wi::min_precision (w, SIGNED);
2146 w = r.upper_bound ();
2147 int min_prec2 = wi::min_precision (w, SIGNED);
2148 int min_prec = MAX (min_prec1, min_prec2);
2149 return MIN (-min_prec, -2);
2153 w = r.upper_bound ();
2154 int min_prec = wi::min_precision (w, UNSIGNED);
2155 return MAX (min_prec, 1);
2158 /* Return the address of the first limb of OP and write into *PREC
2159 its precision. If positive, the operand is zero-extended
2160 from that precision; if it is negative, the operand is sign-extended
2161 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2162 otherwise *PREC_STORED is the prec from the innermost call without
2163 range optimizations. */
2165 tree
2166 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2167 int *prec_stored, int *prec)
2169 wide_int w;
2170 location_t loc_save = m_loc;
2171 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2172 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2173 && TREE_CODE (op) != INTEGER_CST)
2175 do_int:
2176 *prec = range_to_prec (op, stmt);
2177 bitint_prec_kind kind = bitint_prec_small;
2178 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2179 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2180 kind = bitint_precision_kind (TREE_TYPE (op));
2181 if (kind == bitint_prec_middle)
2183 tree type = NULL_TREE;
2184 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2186 tree op_type = TREE_TYPE (op);
2187 unsigned HOST_WIDE_INT nelts
2188 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2189 /* Add support for 3 or more limbs filled in from a normal
2190 integral type if this assert fails. If no target chooses
2191 a limb mode smaller than half of the largest supported normal
2192 integral type, this will not be needed. */
2193 gcc_assert (nelts <= 2);
2194 if (prec_stored)
2195 *prec_stored = (TYPE_UNSIGNED (op_type)
2196 ? TYPE_PRECISION (op_type)
2197 : -TYPE_PRECISION (op_type));
2198 if (*prec <= limb_prec && *prec >= -limb_prec)
2200 nelts = 1;
2201 if (prec_stored)
2203 if (TYPE_UNSIGNED (op_type))
2205 if (*prec_stored > limb_prec)
2206 *prec_stored = limb_prec;
2208 else if (*prec_stored < -limb_prec)
2209 *prec_stored = -limb_prec;
2212 tree atype = build_array_type_nelts (m_limb_type, nelts);
2213 tree var = create_tmp_var (atype);
2214 tree t1 = op;
2215 if (!useless_type_conversion_p (m_limb_type, op_type))
2216 t1 = add_cast (m_limb_type, t1);
2217 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2218 NULL_TREE, NULL_TREE);
2219 gimple *g = gimple_build_assign (v, t1);
2220 insert_before (g);
2221 if (nelts > 1)
2223 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2224 g = gimple_build_assign (make_ssa_name (op_type),
2225 RSHIFT_EXPR, op, lp);
2226 insert_before (g);
2227 tree t2 = gimple_assign_lhs (g);
2228 t2 = add_cast (m_limb_type, t2);
2229 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2230 NULL_TREE, NULL_TREE);
2231 g = gimple_build_assign (v, t2);
2232 insert_before (g);
2234 tree ret = build_fold_addr_expr (var);
2235 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2237 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2238 g = gimple_build_assign (var, clobber);
2239 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2241 m_loc = loc_save;
2242 return ret;
2244 switch (TREE_CODE (op))
2246 case SSA_NAME:
2247 if (m_names == NULL
2248 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2250 gimple *g = SSA_NAME_DEF_STMT (op);
2251 tree ret;
2252 m_loc = gimple_location (g);
2253 if (gimple_assign_load_p (g))
2255 *prec = range_to_prec (op, NULL);
2256 if (prec_stored)
2257 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2258 ? TYPE_PRECISION (TREE_TYPE (op))
2259 : -TYPE_PRECISION (TREE_TYPE (op)));
2260 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2261 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2262 NULL_TREE, true, GSI_SAME_STMT);
2264 else if (gimple_code (g) == GIMPLE_NOP)
2266 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2267 if (prec_stored)
2268 *prec_stored = *prec;
2269 tree var = create_tmp_var (m_limb_type);
2270 TREE_ADDRESSABLE (var) = 1;
2271 ret = build_fold_addr_expr (var);
2272 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2274 tree clobber = build_clobber (m_limb_type,
2275 CLOBBER_STORAGE_END);
2276 g = gimple_build_assign (var, clobber);
2277 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2280 else
2282 gcc_assert (gimple_assign_cast_p (g));
2283 tree rhs1 = gimple_assign_rhs1 (g);
2284 bitint_prec_kind kind = bitint_prec_small;
2285 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2286 rhs1 = TREE_OPERAND (rhs1, 0);
2287 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2288 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2289 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2290 if (kind >= bitint_prec_large)
2292 tree lhs_type = TREE_TYPE (op);
2293 tree rhs_type = TREE_TYPE (rhs1);
2294 int prec_stored_val = 0;
2295 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2296 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2298 if (TYPE_UNSIGNED (lhs_type)
2299 && !TYPE_UNSIGNED (rhs_type))
2300 gcc_assert (*prec >= 0 || prec_stored == NULL);
2302 else
2304 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2306 else if (TYPE_UNSIGNED (lhs_type))
2308 gcc_assert (*prec > 0
2309 || prec_stored_val > 0
2310 || (-prec_stored_val
2311 >= TYPE_PRECISION (lhs_type)));
2312 *prec = TYPE_PRECISION (lhs_type);
2314 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2316 else
2317 *prec = -TYPE_PRECISION (lhs_type);
2320 else
2322 op = rhs1;
2323 stmt = g;
2324 goto do_int;
2327 m_loc = loc_save;
2328 return ret;
2330 else
2332 int p = var_to_partition (m_map, op);
2333 gcc_assert (m_vars[p] != NULL_TREE);
2334 *prec = range_to_prec (op, stmt);
2335 if (prec_stored)
2336 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2337 ? TYPE_PRECISION (TREE_TYPE (op))
2338 : -TYPE_PRECISION (TREE_TYPE (op)));
2339 return build_fold_addr_expr (m_vars[p]);
2341 case INTEGER_CST:
2342 unsigned int min_prec, mp;
2343 tree type;
2344 w = wi::to_wide (op);
2345 if (tree_int_cst_sgn (op) >= 0)
2347 min_prec = wi::min_precision (w, UNSIGNED);
2348 *prec = MAX (min_prec, 1);
2350 else
2352 min_prec = wi::min_precision (w, SIGNED);
2353 *prec = MIN ((int) -min_prec, -2);
2355 mp = CEIL (min_prec, limb_prec) * limb_prec;
2356 if (mp == 0)
2357 mp = 1;
2358 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2359 && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2360 || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2361 type = TREE_TYPE (op);
2362 else
2363 type = build_bitint_type (mp, 1);
2364 if (TREE_CODE (type) != BITINT_TYPE
2365 || bitint_precision_kind (type) == bitint_prec_small)
2367 if (TYPE_PRECISION (type) <= limb_prec)
2368 type = m_limb_type;
2369 else
2371 while (bitint_precision_kind (mp) == bitint_prec_small)
2372 mp += limb_prec;
2373 /* This case is for targets which e.g. have a 64-bit
2374 limb but categorize up to 128-bit _BitInts as
2375 small. We could use a type like m_limb_type[2] or
2376 similar instead to save space. */
2377 type = build_bitint_type (mp, 1);
2380 if (prec_stored)
2382 if (tree_int_cst_sgn (op) >= 0)
2383 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2384 else
2385 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2387 op = tree_output_constant_def (fold_convert (type, op));
2388 return build_fold_addr_expr (op);
2389 default:
2390 gcc_unreachable ();
2394 /* Helper function to create a loop before the current location,
2395 starting with sizetype INIT value from the preheader edge. Return
2396 the PHI result and set *IDX_NEXT to the SSA_NAME it creates and
2397 uses from the latch edge. */
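/* A typical use is (sketch, with a caller supplied END bound):
     tree idx_next;
     tree idx = create_loop (size_zero_node, &idx_next);
     ... emit the loop body using IDX ...
     g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
     insert_before (g);
     g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
                            NULL_TREE, NULL_TREE);
     insert_before (g);
   i.e. the caller is responsible for emitting the increment of
   *IDX_NEXT and the loop exit condition.  */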
2399 tree
2400 bitint_large_huge::create_loop (tree init, tree *idx_next)
2402 if (!gsi_end_p (m_gsi))
2403 gsi_prev (&m_gsi);
2404 else
2405 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2406 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2407 edge e2 = split_block (e1->dest, (gimple *) NULL);
2408 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2409 e3->probability = profile_probability::very_unlikely ();
2410 e2->flags = EDGE_FALSE_VALUE;
2411 e2->probability = e3->probability.invert ();
2412 tree idx = make_ssa_name (sizetype);
2413 gphi *phi = create_phi_node (idx, e1->dest);
2414 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2415 *idx_next = make_ssa_name (sizetype);
2416 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2417 m_gsi = gsi_after_labels (e1->dest);
2418 m_bb = e1->dest;
2419 m_preheader_bb = e1->src;
2420 class loop *loop = alloc_loop ();
2421 loop->header = e1->dest;
2422 add_loop (loop, e1->src->loop_father);
2423 return idx;
2426 /* Lower a mergeable or similar large/huge _BitInt statement STMT which can be
2427 lowered using iteration from the least significant limb up to the most
2428 significant limb. For large _BitInt it is emitted as straight line code
2429 before the current location, for huge _BitInt as a loop handling two limbs
2430 at once, followed by handling up to 2 limbs in straight line code (at most
2431 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2432 comparisons, in that case CMP_CODE should be the comparison code and
2433 CMP_OP1/CMP_OP2 the comparison operands. */
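/* E.g. with a 64-bit limb, for d = a ^ b of type _BitInt(639) this
   emits a loop handling limbs 0 to 7 two at a time, followed by
   straight line code for limb 8 and the partial limb 9.  */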
2435 tree
2436 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2437 tree cmp_op1, tree cmp_op2)
2439 bool eq_p = cmp_code != ERROR_MARK;
2440 tree type;
2441 if (eq_p)
2442 type = TREE_TYPE (cmp_op1);
2443 else
2444 type = TREE_TYPE (gimple_assign_lhs (stmt));
2445 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2446 bitint_prec_kind kind = bitint_precision_kind (type);
2447 gcc_assert (kind >= bitint_prec_large);
2448 gimple *g;
2449 tree lhs = gimple_get_lhs (stmt);
2450 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2451 if (lhs
2452 && TREE_CODE (lhs) == SSA_NAME
2453 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2454 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2456 int p = var_to_partition (m_map, lhs);
2457 gcc_assert (m_vars[p] != NULL_TREE);
2458 m_lhs = lhs = m_vars[p];
2460 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2461 bool sext = false;
2462 tree ext = NULL_TREE, store_operand = NULL_TREE;
2463 bool eh = false;
2464 basic_block eh_pad = NULL;
2465 tree nlhs = NULL_TREE;
2466 unsigned HOST_WIDE_INT bo_idx = 0;
2467 unsigned HOST_WIDE_INT bo_bit = 0;
2468 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2469 if (gimple_store_p (stmt))
2471 store_operand = gimple_assign_rhs1 (stmt);
2472 eh = stmt_ends_bb_p (stmt);
2473 if (eh)
2475 edge e;
2476 edge_iterator ei;
2477 basic_block bb = gimple_bb (stmt);
2479 FOR_EACH_EDGE (e, ei, bb->succs)
2480 if (e->flags & EDGE_EH)
2482 eh_pad = e->dest;
2483 break;
2486 if (TREE_CODE (lhs) == COMPONENT_REF
2487 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2489 tree fld = TREE_OPERAND (lhs, 1);
2490 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2491 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2492 poly_int64 bitoffset;
2493 poly_uint64 field_offset, repr_offset;
2494 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2495 nlhs = lhs;
2496 else
2498 bool var_field_off = false;
2499 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2500 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2501 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2502 else
2504 bitoffset = 0;
2505 var_field_off = true;
2507 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2508 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2509 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2510 TREE_OPERAND (lhs, 0), repr,
2511 var_field_off
2512 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2513 HOST_WIDE_INT bo = bitoffset.to_constant ();
2514 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2515 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2519 if ((store_operand
2520 && TREE_CODE (store_operand) == SSA_NAME
2521 && (m_names == NULL
2522 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2523 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2524 || gimple_assign_cast_p (stmt))
2526 rhs1 = gimple_assign_rhs1 (store_operand
2527 ? SSA_NAME_DEF_STMT (store_operand)
2528 : stmt);
2529 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2530 rhs1 = TREE_OPERAND (rhs1, 0);
2531 /* Optimize mergeable ops ending with widening cast to _BitInt
2532 (or followed by store). We can lower just the limbs of the
2533 cast operand and widen afterwards. */
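/* E.g. with a 64-bit limb, for _BitInt(128) a, b and
   c = (_BitInt(256)) (a + b); only the 2 limbs of the addition
   need to be computed and the result can then be sign-extended
   into the upper limbs of c.  */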
2534 if (TREE_CODE (rhs1) == SSA_NAME
2535 && (m_names == NULL
2536 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2537 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2538 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2539 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2540 limb_prec) < CEIL (prec, limb_prec)
2541 || (kind == bitint_prec_huge
2542 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2544 store_operand = rhs1;
2545 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2546 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2547 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2548 sext = true;
2551 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2552 if (kind == bitint_prec_large)
2553 cnt = CEIL (prec, limb_prec);
2554 else
2556 rem = (prec % (2 * limb_prec));
2557 end = (prec - rem) / limb_prec;
2558 cnt = 2 + CEIL (rem, limb_prec);
2559 idx = idx_first = create_loop (size_zero_node, &idx_next);
2562 basic_block edge_bb = NULL;
2563 if (eq_p)
2565 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2566 gsi_prev (&gsi);
2567 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2568 edge_bb = e->src;
2569 if (kind == bitint_prec_large)
2570 m_gsi = gsi_end_bb (edge_bb);
2572 else
2573 m_after_stmt = stmt;
2574 if (kind != bitint_prec_large)
2575 m_upwards_2limb = end;
2576 m_upwards = true;
2578 bool separate_ext
2579 = (prec != (unsigned) TYPE_PRECISION (type)
2580 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2581 > CEIL (prec, limb_prec)));
2583 for (unsigned i = 0; i < cnt; i++)
2585 m_data_cnt = 0;
2586 if (kind == bitint_prec_large)
2587 idx = size_int (i);
2588 else if (i >= 2)
2589 idx = size_int (end + (i > 2));
2590 if (eq_p)
2592 rhs1 = handle_operand (cmp_op1, idx);
2593 tree rhs2 = handle_operand (cmp_op2, idx);
2594 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2595 insert_before (g);
2596 edge e1 = split_block (gsi_bb (m_gsi), g);
2597 e1->flags = EDGE_FALSE_VALUE;
2598 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2599 e1->probability = profile_probability::unlikely ();
2600 e2->probability = e1->probability.invert ();
2601 if (i == 0)
2602 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2603 m_gsi = gsi_after_labels (e1->dest);
2605 else
2607 if (store_operand)
2608 rhs1 = handle_operand (store_operand, idx);
2609 else
2610 rhs1 = handle_stmt (stmt, idx);
2611 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2612 rhs1 = add_cast (m_limb_type, rhs1);
2613 if (sext && i == cnt - 1)
2614 ext = rhs1;
2615 tree nidx = idx;
2616 if (bo_idx)
2618 if (tree_fits_uhwi_p (idx))
2619 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2620 else
2622 nidx = make_ssa_name (sizetype);
2623 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2624 size_int (bo_idx));
2625 insert_before (g);
2628 bool done = false;
2629 basic_block new_bb = NULL;
2630 /* Handle stores into bit-fields. */
2631 if (bo_bit)
2633 if (i == 0)
2635 edge e2 = NULL;
2636 if (kind != bitint_prec_large)
2638 prepare_data_in_out (build_zero_cst (m_limb_type),
2639 idx, &bf_next);
2640 bf_next = m_data.pop ();
2641 bf_cur = m_data.pop ();
2642 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2643 NULL_TREE, NULL_TREE);
2644 edge edge_true;
2645 if_then_else (g, profile_probability::unlikely (),
2646 edge_true, e2);
2647 new_bb = e2->dest;
2649 tree ftype
2650 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2651 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2652 bitsize_int (limb_prec - bo_bit),
2653 bitsize_int (bo_idx * limb_prec + bo_bit));
2654 tree t = add_cast (ftype, rhs1);
2655 g = gimple_build_assign (bfr, t);
2656 insert_before (g);
2657 if (eh)
2659 maybe_duplicate_eh_stmt (g, stmt);
2660 if (eh_pad)
2662 edge e = split_block (gsi_bb (m_gsi), g);
2663 m_gsi = gsi_after_labels (e->dest);
2664 add_eh_edge (e->src,
2665 find_edge (gimple_bb (stmt), eh_pad));
2668 if (kind == bitint_prec_large)
2670 bf_cur = rhs1;
2671 done = true;
2673 else if (e2)
2674 m_gsi = gsi_after_labels (e2->src);
2676 if (!done)
2678 tree t1 = make_ssa_name (m_limb_type);
2679 tree t2 = make_ssa_name (m_limb_type);
2680 tree t3 = make_ssa_name (m_limb_type);
2681 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2682 build_int_cst (unsigned_type_node,
2683 limb_prec - bo_bit));
2684 insert_before (g);
2685 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2686 build_int_cst (unsigned_type_node,
2687 bo_bit));
2688 insert_before (g);
2689 bf_cur = rhs1;
2690 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2691 insert_before (g);
2692 rhs1 = t3;
2693 if (bf_next && i == 1)
2695 g = gimple_build_assign (bf_next, bf_cur);
2696 insert_before (g);
2700 if (!done)
2702 /* Handle bit-field access to partial last limb if needed. */
2703 if (nlhs
2704 && i == cnt - 1
2705 && !separate_ext
2706 && tree_fits_uhwi_p (idx))
2708 unsigned int tprec = TYPE_PRECISION (type);
2709 unsigned int rprec = tprec % limb_prec;
2710 if (rprec + bo_bit < (unsigned) limb_prec)
2712 tree ftype
2713 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2714 tree bfr = build3 (BIT_FIELD_REF, ftype,
2715 unshare_expr (nlhs),
2716 bitsize_int (rprec + bo_bit),
2717 bitsize_int ((bo_idx
2718 + tprec / limb_prec)
2719 * limb_prec));
2720 tree t = add_cast (ftype, rhs1);
2721 g = gimple_build_assign (bfr, t);
2722 done = true;
2723 bf_cur = NULL_TREE;
2725 else if (rprec + bo_bit == (unsigned) limb_prec)
2726 bf_cur = NULL_TREE;
2728 /* Otherwise, stores to any other lhs. */
2729 if (!done)
2731 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2732 nidx, true);
2733 g = gimple_build_assign (l, rhs1);
2735 insert_before (g);
2736 if (eh)
2738 maybe_duplicate_eh_stmt (g, stmt);
2739 if (eh_pad)
2741 edge e = split_block (gsi_bb (m_gsi), g);
2742 m_gsi = gsi_after_labels (e->dest);
2743 add_eh_edge (e->src,
2744 find_edge (gimple_bb (stmt), eh_pad));
2747 if (new_bb)
2748 m_gsi = gsi_after_labels (new_bb);
2751 m_first = false;
2752 if (kind == bitint_prec_huge && i <= 1)
2754 if (i == 0)
2756 idx = make_ssa_name (sizetype);
2757 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2758 size_one_node);
2759 insert_before (g);
2761 else
2763 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2764 size_int (2));
2765 insert_before (g);
2766 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2767 NULL_TREE, NULL_TREE);
2768 insert_before (g);
2769 if (eq_p)
2770 m_gsi = gsi_after_labels (edge_bb);
2771 else
2772 m_gsi = gsi_for_stmt (stmt);
2773 m_bb = NULL;
2778 if (separate_ext)
2780 if (sext)
2782 ext = add_cast (signed_type_for (m_limb_type), ext);
2783 tree lpm1 = build_int_cst (unsigned_type_node,
2784 limb_prec - 1);
2785 tree n = make_ssa_name (TREE_TYPE (ext));
2786 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2787 insert_before (g);
2788 ext = add_cast (m_limb_type, n);
2790 else
2791 ext = build_zero_cst (m_limb_type);
2792 kind = bitint_precision_kind (type);
2793 unsigned start = CEIL (prec, limb_prec);
2794 prec = TYPE_PRECISION (type);
2795 idx = idx_first = idx_next = NULL_TREE;
2796 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2797 kind = bitint_prec_large;
2798 if (kind == bitint_prec_large)
2799 cnt = CEIL (prec, limb_prec) - start;
2800 else
2802 rem = prec % limb_prec;
2803 end = (prec - rem) / limb_prec;
2804 cnt = (bo_bit != 0) + 1 + (rem != 0);
2806 for (unsigned i = 0; i < cnt; i++)
2808 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2809 idx = size_int (start + i);
2810 else if (i == cnt - 1 && (rem != 0))
2811 idx = size_int (end);
2812 else if (i == (bo_bit != 0))
2813 idx = create_loop (size_int (start + i), &idx_next);
2814 rhs1 = ext;
2815 if (bf_cur != NULL_TREE && bf_cur != ext)
2817 tree t1 = make_ssa_name (m_limb_type);
2818 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2819 build_int_cst (unsigned_type_node,
2820 limb_prec - bo_bit));
2821 insert_before (g);
2822 if (integer_zerop (ext))
2823 rhs1 = t1;
2824 else
2826 tree t2 = make_ssa_name (m_limb_type);
2827 rhs1 = make_ssa_name (m_limb_type);
2828 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2829 build_int_cst (unsigned_type_node,
2830 bo_bit));
2831 insert_before (g);
2832 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2833 insert_before (g);
2835 bf_cur = ext;
2837 tree nidx = idx;
2838 if (bo_idx)
2840 if (tree_fits_uhwi_p (idx))
2841 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2842 else
2844 nidx = make_ssa_name (sizetype);
2845 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2846 size_int (bo_idx));
2847 insert_before (g);
2850 bool done = false;
2851 /* Handle bit-field access to partial last limb if needed. */
2852 if (nlhs && i == cnt - 1)
2854 unsigned int tprec = TYPE_PRECISION (type);
2855 unsigned int rprec = tprec % limb_prec;
2856 if (rprec + bo_bit < (unsigned) limb_prec)
2858 tree ftype
2859 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2860 tree bfr = build3 (BIT_FIELD_REF, ftype,
2861 unshare_expr (nlhs),
2862 bitsize_int (rprec + bo_bit),
2863 bitsize_int ((bo_idx + tprec / limb_prec)
2864 * limb_prec));
2865 tree t = add_cast (ftype, rhs1);
2866 g = gimple_build_assign (bfr, t);
2867 done = true;
2868 bf_cur = NULL_TREE;
2870 else if (rprec + bo_bit == (unsigned) limb_prec)
2871 bf_cur = NULL_TREE;
2873 /* Otherwise, stores to any other lhs. */
2874 if (!done)
2876 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2877 g = gimple_build_assign (l, rhs1);
2879 insert_before (g);
2880 if (eh)
2882 maybe_duplicate_eh_stmt (g, stmt);
2883 if (eh_pad)
2885 edge e = split_block (gsi_bb (m_gsi), g);
2886 m_gsi = gsi_after_labels (e->dest);
2887 add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
2890 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2892 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2893 size_one_node);
2894 insert_before (g);
2895 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2896 NULL_TREE, NULL_TREE);
2897 insert_before (g);
2898 m_gsi = gsi_for_stmt (stmt);
2899 m_bb = NULL;
2903 if (bf_cur != NULL_TREE)
2905 unsigned int tprec = TYPE_PRECISION (type);
2906 unsigned int rprec = tprec % limb_prec;
2907 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2908 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2909 bitsize_int (rprec + bo_bit),
2910 bitsize_int ((bo_idx + tprec / limb_prec)
2911 * limb_prec));
2912 rhs1 = bf_cur;
2913 if (bf_cur != ext)
2915 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2916 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2917 build_int_cst (unsigned_type_node,
2918 limb_prec - bo_bit));
2919 insert_before (g);
2921 rhs1 = add_cast (ftype, rhs1);
2922 g = gimple_build_assign (bfr, rhs1);
2923 insert_before (g);
2924 if (eh)
2926 maybe_duplicate_eh_stmt (g, stmt);
2927 if (eh_pad)
2929 edge e = split_block (gsi_bb (m_gsi), g);
2930 m_gsi = gsi_after_labels (e->dest);
2931 add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
2936 if (gimple_store_p (stmt))
2938 unlink_stmt_vdef (stmt);
2939 release_ssa_name (gimple_vdef (stmt));
2940 gsi_remove (&m_gsi, true);
2942 if (eq_p)
2944 lhs = make_ssa_name (boolean_type_node);
2945 basic_block bb = gimple_bb (stmt);
2946 gphi *phi = create_phi_node (lhs, bb);
2947 edge e = find_edge (gsi_bb (m_gsi), bb);
2948 unsigned int n = EDGE_COUNT (bb->preds);
2949 for (unsigned int i = 0; i < n; i++)
2951 edge e2 = EDGE_PRED (bb, i);
2952 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2953 e2, UNKNOWN_LOCATION);
2955 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2956 return lhs;
2958 else
2959 return NULL_TREE;
2962 /* Handle a large/huge _BitInt comparison statement STMT other than
2963 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 have the same meaning as in
2964 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2965 lowered by iteration from the most significant limb downwards to
2966 the least significant one, for large _BitInt in straight line code,
2967 otherwise with most significant limb handled in
2968 straight line code followed by a loop handling one limb at a time.
2969 Comparisons with unsigned huge _BitInt with precisions which are
2970 multiples of limb precision can use just the loop and don't need to
2971 handle most significant limb before the loop. The loop or straight
2972 line code jumps to the final basic block if a particular pair of limbs
2973 is not equal. */
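/* I.e. per pair of limbs, most significant pair first (sketch):
     if (op1[idx] > op2[idx]) goto done_above;
     if (op1[idx] < op2[idx]) goto done_below;
   and only when all pairs compare equal is the result decided by
   whether the comparison includes equality (GE_EXPR/LE_EXPR).  */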
2975 tree
2976 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2977 tree cmp_op1, tree cmp_op2)
2979 tree type = TREE_TYPE (cmp_op1);
2980 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2981 bitint_prec_kind kind = bitint_precision_kind (type);
2982 gcc_assert (kind >= bitint_prec_large);
2983 gimple *g;
2984 if (!TYPE_UNSIGNED (type)
2985 && integer_zerop (cmp_op2)
2986 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2988 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2989 tree idx = size_int (end);
2990 m_data_cnt = 0;
2991 tree rhs1 = handle_operand (cmp_op1, idx);
2992 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2994 tree stype = signed_type_for (TREE_TYPE (rhs1));
2995 rhs1 = add_cast (stype, rhs1);
2997 tree lhs = make_ssa_name (boolean_type_node);
2998 g = gimple_build_assign (lhs, cmp_code, rhs1,
2999 build_zero_cst (TREE_TYPE (rhs1)));
3000 insert_before (g);
3001 cmp_code = NE_EXPR;
3002 return lhs;
3005 unsigned cnt, rem = 0, end = 0;
3006 tree idx = NULL_TREE, idx_next = NULL_TREE;
3007 if (kind == bitint_prec_large)
3008 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
3009 else
3011 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
3012 if (rem == 0 && !TYPE_UNSIGNED (type))
3013 rem = limb_prec;
3014 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
3015 cnt = 1 + (rem != 0);
3018 basic_block edge_bb = NULL;
3019 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3020 gsi_prev (&gsi);
3021 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
3022 edge_bb = e->src;
3023 m_gsi = gsi_end_bb (edge_bb);
3025 edge *edges = XALLOCAVEC (edge, cnt * 2);
3026 for (unsigned i = 0; i < cnt; i++)
3028 m_data_cnt = 0;
3029 if (kind == bitint_prec_large)
3030 idx = size_int (cnt - i - 1);
3031 else if (i == cnt - 1)
3032 idx = create_loop (size_int (end - 1), &idx_next);
3033 else
3034 idx = size_int (end);
3035 tree rhs1 = handle_operand (cmp_op1, idx);
3036 tree rhs2 = handle_operand (cmp_op2, idx);
3037 if (i == 0
3038 && !TYPE_UNSIGNED (type)
3039 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
3041 tree stype = signed_type_for (TREE_TYPE (rhs1));
3042 rhs1 = add_cast (stype, rhs1);
3043 rhs2 = add_cast (stype, rhs2);
3045 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
3046 insert_before (g);
3047 edge e1 = split_block (gsi_bb (m_gsi), g);
3048 e1->flags = EDGE_FALSE_VALUE;
3049 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
3050 e1->probability = profile_probability::likely ();
3051 e2->probability = e1->probability.invert ();
3052 if (i == 0)
3053 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
3054 m_gsi = gsi_after_labels (e1->dest);
3055 edges[2 * i] = e2;
3056 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
3057 insert_before (g);
3058 e1 = split_block (gsi_bb (m_gsi), g);
3059 e1->flags = EDGE_FALSE_VALUE;
3060 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
3061 e1->probability = profile_probability::unlikely ();
3062 e2->probability = e1->probability.invert ();
3063 m_gsi = gsi_after_labels (e1->dest);
3064 edges[2 * i + 1] = e2;
3065 m_first = false;
3066 if (kind == bitint_prec_huge && i == cnt - 1)
3068 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3069 insert_before (g);
3070 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
3071 NULL_TREE, NULL_TREE);
3072 insert_before (g);
3073 edge true_edge, false_edge;
3074 extract_true_false_edges_from_block (gsi_bb (m_gsi),
3075 &true_edge, &false_edge);
3076 m_gsi = gsi_after_labels (false_edge->dest);
3077 m_bb = NULL;
3081 tree lhs = make_ssa_name (boolean_type_node);
3082 basic_block bb = gimple_bb (stmt);
3083 gphi *phi = create_phi_node (lhs, bb);
3084 for (unsigned int i = 0; i < cnt * 2; i++)
3086 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
3087 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
3088 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
3090 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
3091 ? boolean_true_node : boolean_false_node,
3092 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
3093 cmp_code = NE_EXPR;
3094 return lhs;
3097 /* Lower large/huge _BitInt left and right shifts except for a left
3098 shift by a constant smaller than limb_prec. */
3100 void
3101 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
3103 tree rhs1 = gimple_assign_rhs1 (stmt);
3104 tree lhs = gimple_assign_lhs (stmt);
3105 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3106 tree type = TREE_TYPE (rhs1);
3107 gimple *final_stmt = gsi_stmt (m_gsi);
3108 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3109 && bitint_precision_kind (type) >= bitint_prec_large);
3110 int prec = TYPE_PRECISION (type);
3111 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
3112 gimple *g;
3113 if (obj == NULL_TREE)
3115 int part = var_to_partition (m_map, lhs);
3116 gcc_assert (m_vars[part] != NULL_TREE);
3117 obj = m_vars[part];
3119 /* Preparation code common for both left and right shifts.
3120 unsigned n1 = n % limb_prec;
3121 size_t n2 = n / limb_prec;
3122 size_t n3 = n1 != 0;
3123 unsigned n4 = (limb_prec - n1) % limb_prec;
3124 (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)). */
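/* E.g. for limb_prec 64 and n 100 this computes n1 36, n2 1,
   n3 1 and n4 28.  */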
3125 if (TREE_CODE (n) == INTEGER_CST)
3127 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3128 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3129 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3130 n3 = size_int (!integer_zerop (n1));
3131 n4 = int_const_binop (TRUNC_MOD_EXPR,
3132 int_const_binop (MINUS_EXPR, lp, n1), lp);
3134 else
3136 n1 = make_ssa_name (TREE_TYPE (n));
3137 n2 = make_ssa_name (sizetype);
3138 n3 = make_ssa_name (sizetype);
3139 n4 = make_ssa_name (TREE_TYPE (n));
3140 if (pow2p_hwi (limb_prec))
3142 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3143 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3144 insert_before (g);
3145 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3146 TREE_TYPE (n))
3147 ? n2 : make_ssa_name (TREE_TYPE (n)),
3148 RSHIFT_EXPR, n,
3149 build_int_cst (TREE_TYPE (n),
3150 exact_log2 (limb_prec)));
3151 insert_before (g);
3152 if (gimple_assign_lhs (g) != n2)
3154 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3155 insert_before (g);
3157 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3158 NEGATE_EXPR, n1);
3159 insert_before (g);
3160 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3161 lpm1);
3162 insert_before (g);
3164 else
3166 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3167 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3168 insert_before (g);
3169 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3170 TREE_TYPE (n))
3171 ? n2 : make_ssa_name (TREE_TYPE (n)),
3172 TRUNC_DIV_EXPR, n, lp);
3173 insert_before (g);
3174 if (gimple_assign_lhs (g) != n2)
3176 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3177 insert_before (g);
3179 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3180 MINUS_EXPR, lp, n1);
3181 insert_before (g);
3182 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3183 lp);
3184 insert_before (g);
3186 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3187 build_zero_cst (TREE_TYPE (n)));
3188 insert_before (g);
3189 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3190 insert_before (g);
3192 tree p = build_int_cst (sizetype,
3193 prec / limb_prec - (prec % limb_prec == 0));
3194 if (rhs_code == RSHIFT_EXPR)
3196 /* Lower
3197 dst = src >> n;
3199 unsigned n1 = n % limb_prec;
3200 size_t n2 = n / limb_prec;
3201 size_t n3 = n1 != 0;
3202 unsigned n4 = (limb_prec - n1) % limb_prec;
3203 size_t idx;
3204 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3205 int signed_p = (typeof (src) -1) < 0;
3206 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3207 ? p : p - n3); ++idx)
3208 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3209 limb_type ext;
3210 if (prec % limb_prec == 0)
3211 ext = src[p];
3212 else if (signed_p)
3213 ext = ((signed limb_type) (src[p] << (limb_prec
3214 - (prec % limb_prec))))
3215 >> (limb_prec - (prec % limb_prec));
3216 else
3217 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3218 if (!signed_p && (prec % limb_prec == 0))
3220 else if (idx < prec / limb_prec)
3222 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3223 ++idx;
3225 idx -= n2;
3226 if (signed_p)
3228 dst[idx] = ((signed limb_type) ext) >> n1;
3229 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3231 else
3233 dst[idx] = ext >> n1;
3234 ext = 0;
3236 for (++idx; idx <= p; ++idx)
3237 dst[idx] = ext; */
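/* E.g. for unsigned _BitInt(256) with a 64-bit limb and n 70 the
   above computes n1 6, n2 1, n3 1, n4 58 and p 3, i.e.
   dst[0] = (src[1] >> 6) | (src[2] << 58);
   dst[1] = (src[2] >> 6) | (src[3] << 58);
   dst[2] = src[3] >> 6;
   dst[3] = 0;  */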
3238 tree pmn3;
3239 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3240 pmn3 = p;
3241 else if (TREE_CODE (n3) == INTEGER_CST)
3242 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3243 else
3245 pmn3 = make_ssa_name (sizetype);
3246 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3247 insert_before (g);
3249 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3250 edge edge_true, edge_false;
3251 if_then (g, profile_probability::likely (), edge_true, edge_false);
3252 tree idx_next;
3253 tree idx = create_loop (n2, &idx_next);
3254 tree idxmn2 = make_ssa_name (sizetype);
3255 tree idxpn3 = make_ssa_name (sizetype);
3256 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3257 insert_before (g);
3258 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3259 insert_before (g);
3260 m_data_cnt = 0;
3261 tree t1 = handle_operand (rhs1, idx);
3262 m_first = false;
3263 g = gimple_build_assign (make_ssa_name (m_limb_type),
3264 RSHIFT_EXPR, t1, n1);
3265 insert_before (g);
3266 t1 = gimple_assign_lhs (g);
3267 if (!integer_zerop (n3))
3269 m_data_cnt = 0;
3270 tree t2 = handle_operand (rhs1, idxpn3);
3271 g = gimple_build_assign (make_ssa_name (m_limb_type),
3272 LSHIFT_EXPR, t2, n4);
3273 insert_before (g);
3274 t2 = gimple_assign_lhs (g);
3275 g = gimple_build_assign (make_ssa_name (m_limb_type),
3276 BIT_IOR_EXPR, t1, t2);
3277 insert_before (g);
3278 t1 = gimple_assign_lhs (g);
3280 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3281 g = gimple_build_assign (l, t1);
3282 insert_before (g);
3283 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3284 insert_before (g);
3285 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3286 insert_before (g);
3287 idx = make_ssa_name (sizetype);
3288 m_gsi = gsi_for_stmt (final_stmt);
3289 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3290 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3291 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3292 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3293 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3294 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3295 m_data_cnt = 0;
3296 tree ms = handle_operand (rhs1, p);
3297 tree ext = ms;
3298 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3299 ext = add_cast (m_limb_type, ms);
3300 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3301 && !integer_zerop (n3))
3303 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3304 if_then (g, profile_probability::likely (), edge_true, edge_false);
3305 m_data_cnt = 0;
3306 t1 = handle_operand (rhs1, idx);
3307 g = gimple_build_assign (make_ssa_name (m_limb_type),
3308 RSHIFT_EXPR, t1, n1);
3309 insert_before (g);
3310 t1 = gimple_assign_lhs (g);
3311 g = gimple_build_assign (make_ssa_name (m_limb_type),
3312 LSHIFT_EXPR, ext, n4);
3313 insert_before (g);
3314 tree t2 = gimple_assign_lhs (g);
3315 g = gimple_build_assign (make_ssa_name (m_limb_type),
3316 BIT_IOR_EXPR, t1, t2);
3317 insert_before (g);
3318 t1 = gimple_assign_lhs (g);
3319 idxmn2 = make_ssa_name (sizetype);
3320 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3321 insert_before (g);
3322 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3323 g = gimple_build_assign (l, t1);
3324 insert_before (g);
3325 idx_next = make_ssa_name (sizetype);
3326 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3327 insert_before (g);
3328 m_gsi = gsi_for_stmt (final_stmt);
3329 tree nidx = make_ssa_name (sizetype);
3330 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3331 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3332 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3333 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3334 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3335 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3336 idx = nidx;
3338 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3339 insert_before (g);
3340 idx = gimple_assign_lhs (g);
3341 tree sext = ext;
3342 if (!TYPE_UNSIGNED (type))
3343 sext = add_cast (signed_type_for (m_limb_type), ext);
3344 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3345 RSHIFT_EXPR, sext, n1);
3346 insert_before (g);
3347 t1 = gimple_assign_lhs (g);
3348 if (!TYPE_UNSIGNED (type))
3350 t1 = add_cast (m_limb_type, t1);
3351 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3352 RSHIFT_EXPR, sext,
3353 build_int_cst (TREE_TYPE (n),
3354 limb_prec - 1));
3355 insert_before (g);
3356 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3358 else
3359 ext = build_zero_cst (m_limb_type);
3360 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3361 g = gimple_build_assign (l, t1);
3362 insert_before (g);
3363 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3364 size_one_node);
3365 insert_before (g);
3366 idx = gimple_assign_lhs (g);
3367 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3368 if_then (g, profile_probability::likely (), edge_true, edge_false);
3369 idx = create_loop (idx, &idx_next);
3370 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3371 g = gimple_build_assign (l, ext);
3372 insert_before (g);
3373 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3374 insert_before (g);
3375 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3376 insert_before (g);
3378 else
3380 /* Lower
3381 dst = src << n;
3383 unsigned n1 = n % limb_prec;
3384 size_t n2 = n / limb_prec;
3385 size_t n3 = n1 != 0;
3386 unsigned n4 = (limb_prec - n1) % limb_prec;
3387 size_t idx;
3388 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3389 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3390 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3391 if (n1)
3393 dst[idx] = src[idx - n2] << n1;
3394 --idx;
3396 for (; (ssize_t) idx >= 0; --idx)
3397 dst[idx] = 0; */
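/* E.g. for _BitInt(256) with a 64-bit limb and n 70 the above
   computes n1 6, n2 1, n3 1, n4 58 and p 3, i.e.
   dst[3] = (src[2] << 6) | (src[1] >> 58);
   dst[2] = (src[1] << 6) | (src[0] >> 58);
   dst[1] = src[0] << 6;
   dst[0] = 0;  */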
3398 tree n2pn3;
3399 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3400 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3401 else
3403 n2pn3 = make_ssa_name (sizetype);
3404 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3405 insert_before (g);
3407 /* For LSHIFT_EXPR, we can use handle_operand with a non-INTEGER_CST
3408 idx even to access the most significant partial limb. */
3409 m_var_msb = true;
3410 if (integer_zerop (n3))
3411 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3412 counts. Emit an if (true) condition that can be optimized away later. */
3413 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3414 NULL_TREE, NULL_TREE);
3415 else
3416 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3417 edge edge_true, edge_false;
3418 if_then (g, profile_probability::likely (), edge_true, edge_false);
3419 tree idx_next;
3420 tree idx = create_loop (p, &idx_next);
3421 tree idxmn2 = make_ssa_name (sizetype);
3422 tree idxmn2mn3 = make_ssa_name (sizetype);
3423 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3424 insert_before (g);
3425 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3426 insert_before (g);
3427 m_data_cnt = 0;
3428 tree t1 = handle_operand (rhs1, idxmn2);
3429 m_first = false;
3430 g = gimple_build_assign (make_ssa_name (m_limb_type),
3431 LSHIFT_EXPR, t1, n1);
3432 insert_before (g);
3433 t1 = gimple_assign_lhs (g);
3434 if (!integer_zerop (n3))
3436 m_data_cnt = 0;
3437 tree t2 = handle_operand (rhs1, idxmn2mn3);
3438 g = gimple_build_assign (make_ssa_name (m_limb_type),
3439 RSHIFT_EXPR, t2, n4);
3440 insert_before (g);
3441 t2 = gimple_assign_lhs (g);
3442 g = gimple_build_assign (make_ssa_name (m_limb_type),
3443 BIT_IOR_EXPR, t1, t2);
3444 insert_before (g);
3445 t1 = gimple_assign_lhs (g);
3447 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3448 g = gimple_build_assign (l, t1);
3449 insert_before (g);
3450 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3451 insert_before (g);
3452 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3453 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3454 NULL_TREE, NULL_TREE);
3455 insert_before (g);
3456 idx = make_ssa_name (sizetype);
3457 m_gsi = gsi_for_stmt (final_stmt);
3458 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3459 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3460 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3461 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3462 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3463 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3464 m_data_cnt = 0;
3465 if (!integer_zerop (n3))
3467 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3468 NULL_TREE, NULL_TREE);
3469 if_then (g, profile_probability::likely (), edge_true, edge_false);
3470 idxmn2 = make_ssa_name (sizetype);
3471 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3472 insert_before (g);
3473 m_data_cnt = 0;
3474 t1 = handle_operand (rhs1, idxmn2);
3475 g = gimple_build_assign (make_ssa_name (m_limb_type),
3476 LSHIFT_EXPR, t1, n1);
3477 insert_before (g);
3478 t1 = gimple_assign_lhs (g);
3479 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3480 g = gimple_build_assign (l, t1);
3481 insert_before (g);
3482 idx_next = make_ssa_name (sizetype);
3483 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3484 insert_before (g);
3485 m_gsi = gsi_for_stmt (final_stmt);
3486 tree nidx = make_ssa_name (sizetype);
3487 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3488 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3489 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3490 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3491 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3492 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3493 idx = nidx;
3495 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3496 ssize_int (0), NULL_TREE, NULL_TREE);
3497 if_then (g, profile_probability::likely (), edge_true, edge_false);
3498 idx = create_loop (idx, &idx_next);
3499 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3500 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3501 insert_before (g);
3502 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3503 insert_before (g);
3504 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3505 ssize_int (0), NULL_TREE, NULL_TREE);
3506 insert_before (g);
3510 /* Lower large/huge _BitInt multiplication or division. */
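/* I.e. these are lowered (sketch) to calls like
     .MULBITINT (&obj, prec, &op1, prec1, &op2, prec2);
   for multiplication and
     .DIVMODBITINT (&q, qprec, &r, rprec, &op1, prec1, &op2, prec2);
   for division/modulo, with a null quotient or remainder pointer
   (and 0 precision) when only the other result is needed.  */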
3512 void
3513 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3515 tree rhs1 = gimple_assign_rhs1 (stmt);
3516 tree rhs2 = gimple_assign_rhs2 (stmt);
3517 tree lhs = gimple_assign_lhs (stmt);
3518 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3519 tree type = TREE_TYPE (rhs1);
3520 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3521 && bitint_precision_kind (type) >= bitint_prec_large);
3522 int prec = TYPE_PRECISION (type), prec1, prec2;
3523 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3524 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3525 if (obj == NULL_TREE)
3527 int part = var_to_partition (m_map, lhs);
3528 gcc_assert (m_vars[part] != NULL_TREE);
3529 obj = m_vars[part];
3530 lhs = build_fold_addr_expr (obj);
3532 else
3534 lhs = build_fold_addr_expr (obj);
3535 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3536 NULL_TREE, true, GSI_SAME_STMT);
3538 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3539 gimple *g;
3540 switch (rhs_code)
3542 case MULT_EXPR:
3543 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3544 lhs, build_int_cst (sitype, prec),
3545 rhs1, build_int_cst (sitype, prec1),
3546 rhs2, build_int_cst (sitype, prec2));
3547 insert_before (g);
3548 break;
3549 case TRUNC_DIV_EXPR:
3550 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3551 lhs, build_int_cst (sitype, prec),
3552 null_pointer_node,
3553 build_int_cst (sitype, 0),
3554 rhs1, build_int_cst (sitype, prec1),
3555 rhs2, build_int_cst (sitype, prec2));
3556 if (!stmt_ends_bb_p (stmt))
3557 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3558 insert_before (g);
3559 break;
3560 case TRUNC_MOD_EXPR:
3561 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3562 build_int_cst (sitype, 0),
3563 lhs, build_int_cst (sitype, prec),
3564 rhs1, build_int_cst (sitype, prec1),
3565 rhs2, build_int_cst (sitype, prec2));
3566 if (!stmt_ends_bb_p (stmt))
3567 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3568 insert_before (g);
3569 break;
3570 default:
3571 gcc_unreachable ();
3573 if (stmt_ends_bb_p (stmt))
3575 maybe_duplicate_eh_stmt (g, stmt);
3576 edge e1;
3577 edge_iterator ei;
3578 basic_block bb = gimple_bb (stmt);
3580 FOR_EACH_EDGE (e1, ei, bb->succs)
3581 if (e1->flags & EDGE_EH)
3582 break;
3583 if (e1)
3585 edge e2 = split_block (gsi_bb (m_gsi), g);
3586 m_gsi = gsi_after_labels (e2->dest);
3587 add_eh_edge (e2->src, e1);
3592 /* Lower large/huge _BitInt conversion to/from floating point. */
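/* I.e. a conversion from floating point is lowered (sketch) to
     .FLOATTOBITINT (&obj, prec, rhs1);
   and a conversion to floating point to
     lhs = .BITINTTOFLOAT (&rhs1, prec);
   where a negative prec denotes a signed _BitInt.  */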
3594 void
3595 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3597 tree rhs1 = gimple_assign_rhs1 (stmt);
3598 tree lhs = gimple_assign_lhs (stmt);
3599 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3600 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3601 gimple *g;
3602 if (rhs_code == FIX_TRUNC_EXPR)
3604 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3605 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3606 prec = -prec;
3607 if (obj == NULL_TREE)
3609 int part = var_to_partition (m_map, lhs);
3610 gcc_assert (m_vars[part] != NULL_TREE);
3611 obj = m_vars[part];
3612 lhs = build_fold_addr_expr (obj);
3614 else
3616 lhs = build_fold_addr_expr (obj);
3617 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3618 NULL_TREE, true, GSI_SAME_STMT);
3620 scalar_mode from_mode
3621 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3622 #ifdef HAVE_SFmode
3623 /* IEEE single is a full superset of both IEEE half and
3624 bfloat formats; convert to float first and then to _BitInt
3625 to avoid the need for another 2 library routines. */
3626 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3627 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3628 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3630 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3631 if (type)
3632 rhs1 = add_cast (type, rhs1);
3634 #endif
3635 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3636 lhs, build_int_cst (sitype, prec),
3637 rhs1);
3638 insert_before (g);
3640 else
3642 int prec;
3643 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3644 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3645 rhs1, build_int_cst (sitype, prec));
3646 gimple_call_set_lhs (g, lhs);
3647 if (!stmt_ends_bb_p (stmt))
3648 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3649 gsi_replace (&m_gsi, g, true);
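/* As an illustrative sketch, a conversion such as

     double
     f (unsigned _BitInt(512) x)
     {
       return x;
     }

   becomes roughly a .BITINTTOFLOAT (&x, 512) internal call producing
   the double result, while the opposite FIX_TRUNC_EXPR direction
   emits .FLOATTOBITINT storing through the address of the result
   object.  As in the code above, a negative precision argument
   denotes a signed _BitInt operand or result.  */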
3653 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3654 If CHECK_ZERO is true, the caller wants to check whether all bits
3655 in [START, END) are zero; otherwise, whether those bits are either
3656 all zero or all ones. L is the limb with index LIMB; START and END
3657 are measured in bits. */
3659 tree
3660 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3661 unsigned int end, tree l,
3662 unsigned int limb,
3663 bool check_zero)
3665 unsigned startlimb = start / limb_prec;
3666 unsigned endlimb = (end - 1) / limb_prec;
3667 gimple *g;
3669 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3670 return l;
3671 if (startlimb == endlimb && limb == startlimb)
3673 if (check_zero)
3675 wide_int w = wi::shifted_mask (start % limb_prec,
3676 end - start, false, limb_prec);
3677 g = gimple_build_assign (make_ssa_name (m_limb_type),
3678 BIT_AND_EXPR, l,
3679 wide_int_to_tree (m_limb_type, w));
3680 insert_before (g);
3681 return gimple_assign_lhs (g);
3683 unsigned int shift = start % limb_prec;
3684 if ((end % limb_prec) != 0)
3686 unsigned int lshift = (-end) % limb_prec;
3687 shift += lshift;
3688 g = gimple_build_assign (make_ssa_name (m_limb_type),
3689 LSHIFT_EXPR, l,
3690 build_int_cst (unsigned_type_node,
3691 lshift));
3692 insert_before (g);
3693 l = gimple_assign_lhs (g);
3695 l = add_cast (signed_type_for (m_limb_type), l);
3696 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3697 RSHIFT_EXPR, l,
3698 build_int_cst (unsigned_type_node, shift));
3699 insert_before (g);
3700 return add_cast (m_limb_type, gimple_assign_lhs (g));
3702 else if (limb == startlimb)
3704 if ((start % limb_prec) == 0)
3705 return l;
3706 if (!check_zero)
3707 l = add_cast (signed_type_for (m_limb_type), l);
3708 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3709 RSHIFT_EXPR, l,
3710 build_int_cst (unsigned_type_node,
3711 start % limb_prec));
3712 insert_before (g);
3713 l = gimple_assign_lhs (g);
3714 if (!check_zero)
3715 l = add_cast (m_limb_type, l);
3716 return l;
3718 else if (limb == endlimb)
3720 if ((end % limb_prec) == 0)
3721 return l;
3722 if (check_zero)
3724 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3725 g = gimple_build_assign (make_ssa_name (m_limb_type),
3726 BIT_AND_EXPR, l,
3727 wide_int_to_tree (m_limb_type, w));
3728 insert_before (g);
3729 return gimple_assign_lhs (g);
3731 unsigned int shift = (-end) % limb_prec;
3732 g = gimple_build_assign (make_ssa_name (m_limb_type),
3733 LSHIFT_EXPR, l,
3734 build_int_cst (unsigned_type_node, shift));
3735 insert_before (g);
3736 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3737 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3738 RSHIFT_EXPR, l,
3739 build_int_cst (unsigned_type_node, shift));
3740 insert_before (g);
3741 return add_cast (m_limb_type, gimple_assign_lhs (g));
3743 return l;
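/* A worked example, assuming 64-bit limbs (limb_prec == 64): for
   START 70 and END 100 both bounds fall into limb 1, so for LIMB 1
   with CHECK_ZERO the bits are selected as
     l & (((1ULL << 30) - 1) << 6)
   while for !CHECK_ZERO the limb is shifted left by 28 and then
   arithmetically shifted right by 28 + 6 = 34, so that the extracted
   field arrives sign-extended and can be compared against all zeros
   or all ones by the callers.  */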
3746 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3747 result including overflow flag into the right locations. */
3749 void
3750 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3751 tree ovf, tree lhs, tree orig_obj,
3752 gimple *stmt, tree_code code)
3754 gimple *g;
3756 if (obj == NULL_TREE
3757 && (TREE_CODE (type) != BITINT_TYPE
3758 || bitint_precision_kind (type) < bitint_prec_large))
3760 /* Add support for 3 or more limbs filled in from a normal
3761 integral type if this assert fails. If no target chooses a limb
3762 mode smaller than half of the largest supported normal integral
3763 type, this will not be needed. */
3764 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3765 tree lhs_type = type;
3766 if (TREE_CODE (type) == BITINT_TYPE
3767 && bitint_precision_kind (type) == bitint_prec_middle)
3768 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3769 TYPE_UNSIGNED (type));
3770 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3771 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3772 insert_before (g);
3773 r1 = gimple_assign_lhs (g);
3774 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3775 r1 = add_cast (lhs_type, r1);
3776 if (TYPE_PRECISION (lhs_type) > limb_prec)
3778 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3779 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3780 insert_before (g);
3781 r2 = gimple_assign_lhs (g);
3782 r2 = add_cast (lhs_type, r2);
3783 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3784 build_int_cst (unsigned_type_node,
3785 limb_prec));
3786 insert_before (g);
3787 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3788 gimple_assign_lhs (g));
3789 insert_before (g);
3790 r1 = gimple_assign_lhs (g);
3792 if (lhs_type != type)
3793 r1 = add_cast (type, r1);
3794 ovf = add_cast (lhs_type, ovf);
3795 if (lhs_type != type)
3796 ovf = add_cast (type, ovf);
3797 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3798 m_gsi = gsi_for_stmt (stmt);
3799 gsi_replace (&m_gsi, g, true);
3801 else
3803 unsigned HOST_WIDE_INT nelts = 0;
3804 tree atype = NULL_TREE;
3805 if (obj)
3807 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3808 if (orig_obj == NULL_TREE)
3809 nelts >>= 1;
3810 atype = build_array_type_nelts (m_limb_type, nelts);
3812 if (var && obj)
3814 tree v1, v2;
3815 tree zero;
3816 if (orig_obj == NULL_TREE)
3818 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3819 v1 = build2 (MEM_REF, atype,
3820 build_fold_addr_expr (unshare_expr (obj)), zero);
3822 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3823 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3824 else
3825 v1 = unshare_expr (obj);
3826 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3827 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3828 g = gimple_build_assign (v1, v2);
3829 insert_before (g);
3831 if (orig_obj == NULL_TREE && obj)
3833 ovf = add_cast (m_limb_type, ovf);
3834 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3835 g = gimple_build_assign (l, ovf);
3836 insert_before (g);
3837 if (nelts > 1)
3839 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3840 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3841 (nelts + 1) * m_limb_size);
3842 tree v1 = build2 (MEM_REF, atype,
3843 build_fold_addr_expr (unshare_expr (obj)),
3844 off);
3845 g = gimple_build_assign (v1, build_zero_cst (atype));
3846 insert_before (g);
3849 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3851 imm_use_iterator ui;
3852 use_operand_p use_p;
3853 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3855 g = USE_STMT (use_p);
3856 if (!is_gimple_assign (g)
3857 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3858 continue;
3859 tree lhs2 = gimple_assign_lhs (g);
3860 gimple *use_stmt;
3861 single_imm_use (lhs2, &use_p, &use_stmt);
3862 lhs2 = gimple_assign_lhs (use_stmt);
3863 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3864 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3865 g = gimple_build_assign (lhs2, ovf);
3866 else
3867 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3868 gsi_replace (&gsi, g, true);
3869 if (gsi_stmt (m_gsi) == use_stmt)
3870 m_gsi = gsi_for_stmt (g);
3871 break;
3874 else if (ovf != boolean_false_node)
3876 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3877 NULL_TREE, NULL_TREE);
3878 edge edge_true, edge_false;
3879 if_then (g, profile_probability::very_unlikely (),
3880 edge_true, edge_false);
3881 tree zero = build_zero_cst (TREE_TYPE (lhs));
3882 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3883 TREE_TYPE (lhs),
3884 zero, zero, NULL);
3885 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3886 true, GSI_SAME_STMT);
3887 m_gsi = gsi_after_labels (edge_true->dest);
3890 if (var)
3892 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3893 g = gimple_build_assign (var, clobber);
3894 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
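/* For the ubsan variants, where LHS is not a _Complex pair, the OVF
   flag computed above turns into a guarded runtime check, roughly
   (a sketch):

     if (ovf != 0)  /* predicted very unlikely  */
       __ubsan_handle_add_overflow (&data, 0, 0);

   (or the corresponding handler for CODE), with zeros passed in
   place of the operand values, as the real _BitInt operands are not
   handed to the handler.  */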
3898 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3899 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3900 argument 1 precision PREC1 and minimum precision for the result
3901 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3903 static tree
3904 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3905 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3907 *start = 0;
3908 *end = 0;
3909 *check_zero = true;
3910 /* Ignore this special rule for subtraction: even if both
3911 prec0 >= 0 and prec1 >= 0, their difference can be negative
3912 in infinite precision. */
3913 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3915 /* Result in [0, prec2) is unsigned; if prec > prec2,
3916 all bits above it will be zero. */
3917 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3918 return boolean_false_node;
3919 else
3921 /* ovf if any of bits in [start, end) is non-zero. */
3922 *start = prec - !TYPE_UNSIGNED (type);
3923 *end = prec2;
3926 else if (TYPE_UNSIGNED (type))
3928 /* If the result in [0, prec2) is signed and prec > prec2,
3929 all bits above it will be sign-bit copies. */
3930 if (prec >= prec2)
3932 /* ovf if bit prec - 1 is non-zero. */
3933 *start = prec - 1;
3934 *end = prec;
3936 else
3938 /* ovf if any of bits in [start, end) is non-zero. */
3939 *start = prec;
3940 *end = prec2;
3943 else if (prec >= prec2)
3944 return boolean_false_node;
3945 else
3947 /* ovf if [start, end) bits aren't all zeros or all ones. */
3948 *start = prec - 1;
3949 *end = prec2;
3950 *check_zero = false;
3952 return NULL_TREE;
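/* Two worked examples of the above: for a PLUS_EXPR with signed
   32-bit result type and both arguments known to be in [0, 0xff]
   (prec0 == prec1 == 8), prec2 is 9 and prec - 1 == 31 >= 9, so
   overflow is impossible and boolean_false_node is returned.  For a
   signed 8-bit result of -8 + -8 arguments, prec2 is again 9 but
   prec < prec2, so bits [7, 9) have to be checked to be all zeros or
   all ones (*check_zero is false).  */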
3955 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3956 argument or with a _Complex large/huge _BitInt return type. */
3958 void
3959 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3961 tree arg0 = gimple_call_arg (stmt, 0);
3962 tree arg1 = gimple_call_arg (stmt, 1);
3963 tree lhs = gimple_call_lhs (stmt);
3964 gimple *g;
3966 if (!lhs)
3968 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3969 gsi_remove (&gsi, true);
3970 return;
3972 gimple *final_stmt = gsi_stmt (m_gsi);
3973 tree type = TREE_TYPE (lhs);
3974 if (TREE_CODE (type) == COMPLEX_TYPE)
3975 type = TREE_TYPE (type);
3976 int prec = TYPE_PRECISION (type);
3977 int prec0 = range_to_prec (arg0, stmt);
3978 int prec1 = range_to_prec (arg1, stmt);
3979 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3980 the minimum unsigned precision of any possible operation's
3981 result, otherwise it is the minimum signed precision.
3982 Some examples:
3983 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3984 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3985 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3986 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3987 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3988 8 + 8 [0, 0x1fe] 9 UNSIGNED
3989 8 + 10 [0, 0x4fe] 11 UNSIGNED
3990 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3991 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3992 8 + -8 [-0x80, 0x17e] 10 SIGNED
3993 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3994 10 + -8 [-0x80, 0x47e] 12 SIGNED
3995 8 - 8 [-0xff, 0xff] 9 SIGNED
3996 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3997 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3998 -8 - -8 [-0xff, 0xff] 9 SIGNED
3999 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
4000 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
4001 8 - -8 [-0x7f, 0x17f] 10 SIGNED
4002 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
4003 10 - -8 [-0x7f, 0x47f] 12 SIGNED
4004 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
4005 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
4006 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
4007 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
4008 prec1 < 0 ? -prec1 : prec1);
4009 /* If operands are either both signed or both unsigned,
4010 we need just one additional bit. */
4011 prec2 = (((prec0 < 0) == (prec1 < 0)
4012 /* If one operand is signed and one unsigned and
4013 the signed one has larger precision, we need
4014 just one extra bit, otherwise two. */
4015 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
4016 : (prec2 == -prec1 && prec2 != prec0)))
4017 ? prec2 + 1 : prec2 + 2);
4018 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
4019 prec1 < 0 ? -prec1 : prec1);
4020 prec3 = MAX (prec3, prec);
4021 tree var = NULL_TREE;
4022 tree orig_obj = obj;
4023 if (obj == NULL_TREE
4024 && TREE_CODE (type) == BITINT_TYPE
4025 && bitint_precision_kind (type) >= bitint_prec_large
4026 && m_names
4027 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4029 int part = var_to_partition (m_map, lhs);
4030 gcc_assert (m_vars[part] != NULL_TREE);
4031 obj = m_vars[part];
4032 if (TREE_TYPE (lhs) == type)
4033 orig_obj = obj;
4035 if (TREE_CODE (type) != BITINT_TYPE
4036 || bitint_precision_kind (type) < bitint_prec_large)
4038 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
4039 tree atype = build_array_type_nelts (m_limb_type, nelts);
4040 var = create_tmp_var (atype);
4043 enum tree_code code;
4044 switch (gimple_call_internal_fn (stmt))
4046 case IFN_ADD_OVERFLOW:
4047 case IFN_UBSAN_CHECK_ADD:
4048 code = PLUS_EXPR;
4049 break;
4050 case IFN_SUB_OVERFLOW:
4051 case IFN_UBSAN_CHECK_SUB:
4052 code = MINUS_EXPR;
4053 break;
4054 default:
4055 gcc_unreachable ();
4057 unsigned start, end;
4058 bool check_zero;
4059 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
4060 &start, &end, &check_zero);
4062 unsigned startlimb, endlimb;
4063 if (ovf)
4065 startlimb = ~0U;
4066 endlimb = ~0U;
4068 else
4070 startlimb = start / limb_prec;
4071 endlimb = (end - 1) / limb_prec;
4074 int prec4 = ovf != NULL_TREE ? prec : prec3;
4075 bitint_prec_kind kind = bitint_precision_kind (prec4);
4076 unsigned cnt, rem = 0, fin = 0;
4077 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4078 bool last_ovf = (ovf == NULL_TREE
4079 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
4080 if (kind != bitint_prec_huge)
4081 cnt = CEIL (prec4, limb_prec) + last_ovf;
4082 else
4084 rem = (prec4 % (2 * limb_prec));
4085 fin = (prec4 - rem) / limb_prec;
4086 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
4087 idx = idx_first = create_loop (size_zero_node, &idx_next);
4090 if (kind == bitint_prec_huge)
4091 m_upwards_2limb = fin;
4092 m_upwards = true;
4094 tree type0 = TREE_TYPE (arg0);
4095 tree type1 = TREE_TYPE (arg1);
4096 int prec5 = prec3;
4097 if (bitint_precision_kind (prec5) < bitint_prec_large)
4098 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
4099 if (TYPE_PRECISION (type0) < prec5)
4101 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
4102 if (TREE_CODE (arg0) == INTEGER_CST)
4103 arg0 = fold_convert (type0, arg0);
4105 if (TYPE_PRECISION (type1) < prec5)
4107 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
4108 if (TREE_CODE (arg1) == INTEGER_CST)
4109 arg1 = fold_convert (type1, arg1);
4111 unsigned int data_cnt = 0;
4112 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
4113 tree cmp = build_zero_cst (m_limb_type);
4114 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
4115 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
4116 for (unsigned i = 0; i < cnt; i++)
4118 m_data_cnt = 0;
4119 tree rhs1, rhs2;
4120 if (kind != bitint_prec_huge)
4121 idx = size_int (i);
4122 else if (i >= 2)
4123 idx = size_int (fin + i - 2);
4124 if (!last_ovf || i < cnt - 1)
4126 if (type0 != TREE_TYPE (arg0))
4127 rhs1 = handle_cast (type0, arg0, idx);
4128 else
4129 rhs1 = handle_operand (arg0, idx);
4130 if (type1 != TREE_TYPE (arg1))
4131 rhs2 = handle_cast (type1, arg1, idx);
4132 else
4133 rhs2 = handle_operand (arg1, idx);
4134 if (i == 0)
4135 data_cnt = m_data_cnt;
4136 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4137 rhs1 = add_cast (m_limb_type, rhs1);
4138 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4139 rhs2 = add_cast (m_limb_type, rhs2);
4140 last_rhs1 = rhs1;
4141 last_rhs2 = rhs2;
4143 else
4145 m_data_cnt = data_cnt;
4146 if (TYPE_UNSIGNED (type0))
4147 rhs1 = build_zero_cst (m_limb_type);
4148 else
4150 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4151 if (TREE_CODE (rhs1) == INTEGER_CST)
4152 rhs1 = build_int_cst (m_limb_type,
4153 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4154 else
4156 tree lpm1 = build_int_cst (unsigned_type_node,
4157 limb_prec - 1);
4158 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4159 RSHIFT_EXPR, rhs1, lpm1);
4160 insert_before (g);
4161 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4164 if (TYPE_UNSIGNED (type1))
4165 rhs2 = build_zero_cst (m_limb_type);
4166 else
4168 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4169 if (TREE_CODE (rhs2) == INTEGER_CST)
4170 rhs2 = build_int_cst (m_limb_type,
4171 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4172 else
4174 tree lpm1 = build_int_cst (unsigned_type_node,
4175 limb_prec - 1);
4176 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4177 RSHIFT_EXPR, rhs2, lpm1);
4178 insert_before (g);
4179 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4183 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4184 if (ovf != boolean_false_node)
4186 if (tree_fits_uhwi_p (idx))
4188 unsigned limb = tree_to_uhwi (idx);
4189 if (limb >= startlimb && limb <= endlimb)
4191 tree l = arith_overflow_extract_bits (start, end, rhs,
4192 limb, check_zero);
4193 tree this_ovf = make_ssa_name (boolean_type_node);
4194 if (ovf == NULL_TREE && !check_zero)
4196 cmp = l;
4197 g = gimple_build_assign (make_ssa_name (m_limb_type),
4198 PLUS_EXPR, l,
4199 build_int_cst (m_limb_type, 1));
4200 insert_before (g);
4201 g = gimple_build_assign (this_ovf, GT_EXPR,
4202 gimple_assign_lhs (g),
4203 build_int_cst (m_limb_type, 1));
4205 else
4206 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4207 insert_before (g);
4208 if (ovf == NULL_TREE)
4209 ovf = this_ovf;
4210 else
4212 tree b = make_ssa_name (boolean_type_node);
4213 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4214 insert_before (g);
4215 ovf = b;
4219 else if (startlimb < fin)
4221 if (m_first && startlimb + 2 < fin)
4223 tree data_out;
4224 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4225 ovf_out = m_data.pop ();
4226 m_data.pop ();
4227 if (!check_zero)
4229 cmp = prepare_data_in_out (cmp, idx, &data_out);
4230 cmp_out = m_data.pop ();
4231 m_data.pop ();
4234 if (i != 0 || startlimb != fin - 1)
4236 tree_code cmp_code;
4237 bool single_comparison
4238 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4239 if (!single_comparison)
4241 cmp_code = GE_EXPR;
4242 if (!check_zero && (start % limb_prec) == 0)
4243 single_comparison = true;
4245 else if ((startlimb & 1) == (i & 1))
4246 cmp_code = EQ_EXPR;
4247 else
4248 cmp_code = GT_EXPR;
4249 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4250 NULL_TREE, NULL_TREE);
4251 edge edge_true_true, edge_true_false, edge_false;
4252 gimple *g2 = NULL;
4253 if (!single_comparison)
4254 g2 = gimple_build_cond (NE_EXPR, idx,
4255 size_int (startlimb), NULL_TREE,
4256 NULL_TREE);
4257 if_then_if_then_else (g, g2, profile_probability::likely (),
4258 profile_probability::likely (),
4259 edge_true_true, edge_true_false,
4260 edge_false);
4261 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4262 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4263 check_zero);
4264 tree this_ovf = make_ssa_name (boolean_type_node);
4265 if (cmp_code != GT_EXPR && !check_zero)
4267 g = gimple_build_assign (make_ssa_name (m_limb_type),
4268 PLUS_EXPR, l,
4269 build_int_cst (m_limb_type, 1));
4270 insert_before (g);
4271 g = gimple_build_assign (this_ovf, GT_EXPR,
4272 gimple_assign_lhs (g),
4273 build_int_cst (m_limb_type, 1));
4275 else
4276 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4277 insert_before (g);
4278 if (cmp_code == GT_EXPR)
4280 tree t = make_ssa_name (boolean_type_node);
4281 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4282 insert_before (g);
4283 this_ovf = t;
4285 tree this_ovf2 = NULL_TREE;
4286 if (!single_comparison)
4288 m_gsi = gsi_after_labels (edge_true_true->src);
4289 tree t = make_ssa_name (boolean_type_node);
4290 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4291 insert_before (g);
4292 this_ovf2 = make_ssa_name (boolean_type_node);
4293 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4294 ovf, t);
4295 insert_before (g);
4297 m_gsi = gsi_after_labels (edge_true_false->dest);
4298 tree t;
4299 if (i == 1 && ovf_out)
4300 t = ovf_out;
4301 else
4302 t = make_ssa_name (boolean_type_node);
4303 gphi *phi = create_phi_node (t, edge_true_false->dest);
4304 add_phi_arg (phi, this_ovf, edge_true_false,
4305 UNKNOWN_LOCATION);
4306 add_phi_arg (phi, ovf ? ovf
4307 : boolean_false_node, edge_false,
4308 UNKNOWN_LOCATION);
4309 if (edge_true_true)
4310 add_phi_arg (phi, this_ovf2, edge_true_true,
4311 UNKNOWN_LOCATION);
4312 ovf = t;
4313 if (!check_zero && cmp_code != GT_EXPR)
4315 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4316 phi = create_phi_node (t, edge_true_false->dest);
4317 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4318 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4319 if (edge_true_true)
4320 add_phi_arg (phi, cmp, edge_true_true,
4321 UNKNOWN_LOCATION);
4322 cmp = t;
4328 if (var || obj)
4330 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4332 else if (!tree_fits_uhwi_p (idx)
4333 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4335 bool single_comparison
4336 = (((unsigned) prec % limb_prec) == 0
4337 || prec_limbs + 1 >= fin
4338 || (prec_limbs & 1) == (i & 1));
4339 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4340 NULL_TREE, NULL_TREE);
4341 gimple *g2 = NULL;
4342 if (!single_comparison)
4343 g2 = gimple_build_cond (EQ_EXPR, idx,
4344 size_int (prec_limbs - 1),
4345 NULL_TREE, NULL_TREE);
4346 edge edge_true_true, edge_true_false, edge_false;
4347 if_then_if_then_else (g, g2, profile_probability::likely (),
4348 profile_probability::unlikely (),
4349 edge_true_true, edge_true_false,
4350 edge_false);
4351 tree l = limb_access (type, var ? var : obj, idx, true);
4352 g = gimple_build_assign (l, rhs);
4353 insert_before (g);
4354 if (!single_comparison)
4356 m_gsi = gsi_after_labels (edge_true_true->src);
4357 tree plm1idx = size_int (prec_limbs - 1);
4358 tree plm1type = limb_access_type (type, plm1idx);
4359 l = limb_access (type, var ? var : obj, plm1idx, true);
4360 if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
4361 rhs = add_cast (plm1type, rhs);
4362 if (!useless_type_conversion_p (TREE_TYPE (l),
4363 TREE_TYPE (rhs)))
4364 rhs = add_cast (TREE_TYPE (l), rhs);
4365 g = gimple_build_assign (l, rhs);
4366 insert_before (g);
4368 m_gsi = gsi_after_labels (edge_true_false->dest);
4370 else
4372 tree l = limb_access (type, var ? var : obj, idx, true);
4373 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4374 rhs = add_cast (TREE_TYPE (l), rhs);
4375 g = gimple_build_assign (l, rhs);
4376 insert_before (g);
4379 m_first = false;
4380 if (kind == bitint_prec_huge && i <= 1)
4382 if (i == 0)
4384 idx = make_ssa_name (sizetype);
4385 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4386 size_one_node);
4387 insert_before (g);
4389 else
4391 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4392 size_int (2));
4393 insert_before (g);
4394 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4395 NULL_TREE, NULL_TREE);
4396 insert_before (g);
4397 m_gsi = gsi_for_stmt (final_stmt);
4398 m_bb = NULL;
4403 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
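/* A typical source form feeding the lowering above (a sketch):

     _Bool
     addo (_BitInt(256) a, _BitInt(256) b, _BitInt(256) *r)
     {
       return __builtin_add_overflow (a, b, r);
     }

   The addition itself proceeds limb by limb like a mergeable
   PLUS_EXPR, and only the limbs containing bits [start, end) are
   additionally inspected to compute the overflow flag.  */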
4406 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4407 argument or with a _Complex large/huge _BitInt return type. */
4409 void
4410 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4412 tree arg0 = gimple_call_arg (stmt, 0);
4413 tree arg1 = gimple_call_arg (stmt, 1);
4414 tree lhs = gimple_call_lhs (stmt);
4415 if (!lhs)
4417 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4418 gsi_remove (&gsi, true);
4419 return;
4421 gimple *final_stmt = gsi_stmt (m_gsi);
4422 tree type = TREE_TYPE (lhs);
4423 if (TREE_CODE (type) == COMPLEX_TYPE)
4424 type = TREE_TYPE (type);
4425 int prec = TYPE_PRECISION (type), prec0, prec1;
4426 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4427 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4428 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4429 + (prec1 < 0 ? -prec1 : prec1));
4430 if (prec0 == 1 || prec1 == 1)
4431 --prec2;
4432 tree var = NULL_TREE;
4433 tree orig_obj = obj;
4434 bool force_var = false;
4435 if (obj == NULL_TREE
4436 && TREE_CODE (type) == BITINT_TYPE
4437 && bitint_precision_kind (type) >= bitint_prec_large
4438 && m_names
4439 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4441 int part = var_to_partition (m_map, lhs);
4442 gcc_assert (m_vars[part] != NULL_TREE);
4443 obj = m_vars[part];
4444 if (TREE_TYPE (lhs) == type)
4445 orig_obj = obj;
4447 else if (obj != NULL_TREE && DECL_P (obj))
4449 for (int i = 0; i < 2; ++i)
4451 tree arg = i ? arg1 : arg0;
4452 if (TREE_CODE (arg) == ADDR_EXPR)
4453 arg = TREE_OPERAND (arg, 0);
4454 if (get_base_address (arg) == obj)
4456 force_var = true;
4457 break;
4461 if (obj == NULL_TREE
4462 || force_var
4463 || TREE_CODE (type) != BITINT_TYPE
4464 || bitint_precision_kind (type) < bitint_prec_large
4465 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4467 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4468 tree atype = build_array_type_nelts (m_limb_type, nelts);
4469 var = create_tmp_var (atype);
4471 tree addr = build_fold_addr_expr (var ? var : obj);
4472 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4473 NULL_TREE, true, GSI_SAME_STMT);
4474 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4475 gimple *g
4476 = gimple_build_call_internal (IFN_MULBITINT, 6,
4477 addr, build_int_cst (sitype,
4478 MAX (prec2, prec)),
4479 arg0, build_int_cst (sitype, prec0),
4480 arg1, build_int_cst (sitype, prec1));
4481 insert_before (g);
4483 unsigned start, end;
4484 bool check_zero;
4485 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4486 &start, &end, &check_zero);
4487 if (ovf == NULL_TREE)
4489 unsigned startlimb = start / limb_prec;
4490 unsigned endlimb = (end - 1) / limb_prec;
4491 unsigned cnt;
4492 bool use_loop = false;
4493 if (startlimb == endlimb)
4494 cnt = 1;
4495 else if (startlimb + 1 == endlimb)
4496 cnt = 2;
4497 else if ((end % limb_prec) == 0)
4499 cnt = 2;
4500 use_loop = true;
4502 else
4504 cnt = 3;
4505 use_loop = startlimb + 2 < endlimb;
4507 if (cnt == 1)
4509 tree l = limb_access (NULL_TREE, var ? var : obj,
4510 size_int (startlimb), true);
4511 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4512 insert_before (g);
4513 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4514 startlimb, check_zero);
4515 ovf = make_ssa_name (boolean_type_node);
4516 if (check_zero)
4517 g = gimple_build_assign (ovf, NE_EXPR, l,
4518 build_zero_cst (m_limb_type));
4519 else
4521 g = gimple_build_assign (make_ssa_name (m_limb_type),
4522 PLUS_EXPR, l,
4523 build_int_cst (m_limb_type, 1));
4524 insert_before (g);
4525 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4526 build_int_cst (m_limb_type, 1));
4528 insert_before (g);
4530 else
4532 basic_block edge_bb = NULL;
4533 gimple_stmt_iterator gsi = m_gsi;
4534 gsi_prev (&gsi);
4535 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4536 edge_bb = e->src;
4537 m_gsi = gsi_end_bb (edge_bb);
4539 tree cmp = build_zero_cst (m_limb_type);
4540 for (unsigned i = 0; i < cnt; i++)
4542 tree idx, idx_next = NULL_TREE;
4543 if (i == 0)
4544 idx = size_int (startlimb);
4545 else if (i == 2)
4546 idx = size_int (endlimb);
4547 else if (use_loop)
4548 idx = create_loop (size_int (startlimb + 1), &idx_next);
4549 else
4550 idx = size_int (startlimb + 1);
4551 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4552 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4553 insert_before (g);
4554 l = gimple_assign_lhs (g);
4555 if (i == 0 || i == 2)
4556 l = arith_overflow_extract_bits (start, end, l,
4557 tree_to_uhwi (idx),
4558 check_zero);
4559 if (i == 0 && !check_zero)
4561 cmp = l;
4562 g = gimple_build_assign (make_ssa_name (m_limb_type),
4563 PLUS_EXPR, l,
4564 build_int_cst (m_limb_type, 1));
4565 insert_before (g);
4566 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4567 build_int_cst (m_limb_type, 1),
4568 NULL_TREE, NULL_TREE);
4570 else
4571 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4572 insert_before (g);
4573 edge e1 = split_block (gsi_bb (m_gsi), g);
4574 e1->flags = EDGE_FALSE_VALUE;
4575 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4576 EDGE_TRUE_VALUE);
4577 e1->probability = profile_probability::likely ();
4578 e2->probability = e1->probability.invert ();
4579 if (i == 0)
4580 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4581 m_gsi = gsi_after_labels (e1->dest);
4582 if (i == 1 && use_loop)
4584 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4585 size_one_node);
4586 insert_before (g);
4587 g = gimple_build_cond (NE_EXPR, idx_next,
4588 size_int (endlimb + (cnt == 2)),
4589 NULL_TREE, NULL_TREE);
4590 insert_before (g);
4591 edge true_edge, false_edge;
4592 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4593 &true_edge,
4594 &false_edge);
4595 m_gsi = gsi_after_labels (false_edge->dest);
4596 m_bb = NULL;
4600 ovf = make_ssa_name (boolean_type_node);
4601 basic_block bb = gimple_bb (final_stmt);
4602 gphi *phi = create_phi_node (ovf, bb);
4603 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4604 edge_iterator ei;
4605 FOR_EACH_EDGE (e, ei, bb->preds)
4607 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4608 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4610 m_gsi = gsi_for_stmt (final_stmt);
4614 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
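/* Unlike addition, the multiplication above is always carried out in
   full into MAX (prec2, prec) bits by the .MULBITINT call, and the
   overflow flag is only derived afterwards by scanning the limbs
   holding bits [start, end) of the product.  E.g. for a sketch like
   __builtin_mul_overflow on two 256-bit operands, up to 512 bits of
   product are computed and the upper limbs compared against zero (or
   against copies of the sign bit for signed results).  */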
4617 /* Lower a REALPART_EXPR or IMAGPART_EXPR stmt extracting part of the
4618 result of a .{ADD,SUB,MUL}_OVERFLOW call. */
4620 void
4621 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4623 tree rhs1 = gimple_assign_rhs1 (stmt);
4624 rhs1 = TREE_OPERAND (rhs1, 0);
4625 if (obj == NULL_TREE)
4627 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4628 gcc_assert (m_vars[part] != NULL_TREE);
4629 obj = m_vars[part];
4631 if (TREE_CODE (rhs1) == SSA_NAME
4632 && (m_names == NULL
4633 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4635 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4636 return;
4638 int part = var_to_partition (m_map, rhs1);
4639 gcc_assert (m_vars[part] != NULL_TREE);
4640 tree var = m_vars[part];
4641 unsigned HOST_WIDE_INT nelts
4642 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4643 tree atype = build_array_type_nelts (m_limb_type, nelts);
4644 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4645 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4646 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4647 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4648 ? 0 : nelts * m_limb_size);
4649 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4650 gimple *g = gimple_build_assign (obj, v2);
4651 insert_before (g);
4654 /* Lower COMPLEX_EXPR stmt. */
4656 void
4657 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4659 tree lhs = gimple_assign_lhs (stmt);
4660 tree rhs1 = gimple_assign_rhs1 (stmt);
4661 tree rhs2 = gimple_assign_rhs2 (stmt);
4662 int part = var_to_partition (m_map, lhs);
4663 gcc_assert (m_vars[part] != NULL_TREE);
4664 lhs = m_vars[part];
4665 unsigned HOST_WIDE_INT nelts
4666 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4667 tree atype = build_array_type_nelts (m_limb_type, nelts);
4668 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4669 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4670 tree v2;
4671 if (TREE_CODE (rhs1) == SSA_NAME)
4673 part = var_to_partition (m_map, rhs1);
4674 gcc_assert (m_vars[part] != NULL_TREE);
4675 v2 = m_vars[part];
4677 else if (integer_zerop (rhs1))
4678 v2 = build_zero_cst (atype);
4679 else
4680 v2 = tree_output_constant_def (rhs1);
4681 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4682 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4683 gimple *g = gimple_build_assign (v1, v2);
4684 insert_before (g);
4685 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4686 TYPE_SIZE_UNIT (atype));
4687 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4688 if (TREE_CODE (rhs2) == SSA_NAME)
4690 part = var_to_partition (m_map, rhs2);
4691 gcc_assert (m_vars[part] != NULL_TREE);
4692 v2 = m_vars[part];
4694 else if (integer_zerop (rhs2))
4695 v2 = build_zero_cst (atype);
4696 else
4697 v2 = tree_output_constant_def (rhs2);
4698 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4699 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4700 g = gimple_build_assign (v1, v2);
4701 insert_before (g);
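/* The _Complex _BitInt value is simply two limb arrays laid out one
   after the other, so e.g. a COMPLEX_EXPR of two _BitInt(256)
   operands (a sketch, with 64-bit limbs) lowers to two 4-limb block
   copies, one at byte offset 0 and one at byte offset 32.  */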
4704 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4705 argument. */
4707 void
4708 bitint_large_huge::lower_bit_query (gimple *stmt)
4710 tree arg0 = gimple_call_arg (stmt, 0);
4711 tree arg1 = (gimple_call_num_args (stmt) == 2
4712 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4713 tree lhs = gimple_call_lhs (stmt);
4714 gimple *g;
4716 if (!lhs)
4718 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4719 gsi_remove (&gsi, true);
4720 return;
4722 tree type = TREE_TYPE (arg0);
4723 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4724 bitint_prec_kind kind = bitint_precision_kind (type);
4725 gcc_assert (kind >= bitint_prec_large);
4726 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4727 enum built_in_function fcode = END_BUILTINS;
4728 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4729 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4730 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4731 switch (ifn)
4733 case IFN_CLZ:
4734 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4735 fcode = BUILT_IN_CLZ;
4736 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4737 fcode = BUILT_IN_CLZL;
4738 else
4739 fcode = BUILT_IN_CLZLL;
4740 break;
4741 case IFN_FFS:
4742 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4743 we don't add the addend at the end. */
4744 arg1 = integer_zero_node;
4745 /* FALLTHRU */
4746 case IFN_CTZ:
4747 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4748 fcode = BUILT_IN_CTZ;
4749 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4750 fcode = BUILT_IN_CTZL;
4751 else
4752 fcode = BUILT_IN_CTZLL;
4753 m_upwards = true;
4754 break;
4755 case IFN_CLRSB:
4756 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4757 fcode = BUILT_IN_CLRSB;
4758 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4759 fcode = BUILT_IN_CLRSBL;
4760 else
4761 fcode = BUILT_IN_CLRSBLL;
4762 break;
4763 case IFN_PARITY:
4764 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4765 fcode = BUILT_IN_PARITY;
4766 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4767 fcode = BUILT_IN_PARITYL;
4768 else
4769 fcode = BUILT_IN_PARITYLL;
4770 m_upwards = true;
4771 break;
4772 case IFN_POPCOUNT:
4773 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4774 fcode = BUILT_IN_POPCOUNT;
4775 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4776 fcode = BUILT_IN_POPCOUNTL;
4777 else
4778 fcode = BUILT_IN_POPCOUNTLL;
4779 m_upwards = true;
4780 break;
4781 default:
4782 gcc_unreachable ();
4784 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4785 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4786 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4787 basic_block edge_bb = NULL;
4788 if (m_upwards)
4790 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4791 if (kind == bitint_prec_large)
4792 cnt = CEIL (prec, limb_prec);
4793 else
4795 rem = (prec % (2 * limb_prec));
4796 end = (prec - rem) / limb_prec;
4797 cnt = 2 + CEIL (rem, limb_prec);
4798 idx = idx_first = create_loop (size_zero_node, &idx_next);
4801 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4803 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4804 gsi_prev (&gsi);
4805 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4806 edge_bb = e->src;
4807 if (kind == bitint_prec_large)
4808 m_gsi = gsi_end_bb (edge_bb);
4809 bqp = XALLOCAVEC (struct bq_details, cnt);
4811 else
4812 m_after_stmt = stmt;
4813 if (kind != bitint_prec_large)
4814 m_upwards_2limb = end;
4816 for (unsigned i = 0; i < cnt; i++)
4818 m_data_cnt = 0;
4819 if (kind == bitint_prec_large)
4820 idx = size_int (i);
4821 else if (i >= 2)
4822 idx = size_int (end + (i > 2));
4824 tree rhs1 = handle_operand (arg0, idx);
4825 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4827 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4828 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4829 rhs1 = add_cast (m_limb_type, rhs1);
4832 tree in, out, tem;
4833 if (ifn == IFN_PARITY)
4834 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4835 else if (ifn == IFN_FFS)
4836 in = prepare_data_in_out (integer_one_node, idx, &out);
4837 else
4838 in = prepare_data_in_out (integer_zero_node, idx, &out);
4840 switch (ifn)
4842 case IFN_CTZ:
4843 case IFN_FFS:
4844 g = gimple_build_cond (NE_EXPR, rhs1,
4845 build_zero_cst (m_limb_type),
4846 NULL_TREE, NULL_TREE);
4847 insert_before (g);
4848 edge e1, e2;
4849 e1 = split_block (gsi_bb (m_gsi), g);
4850 e1->flags = EDGE_FALSE_VALUE;
4851 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4852 e1->probability = profile_probability::unlikely ();
4853 e2->probability = e1->probability.invert ();
4854 if (i == 0)
4855 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4856 m_gsi = gsi_after_labels (e1->dest);
4857 bqp[i].e = e2;
4858 bqp[i].val = rhs1;
4859 if (tree_fits_uhwi_p (idx))
4860 bqp[i].addend
4861 = build_int_cst (integer_type_node,
4862 tree_to_uhwi (idx) * limb_prec
4863 + (ifn == IFN_FFS));
4864 else
4866 bqp[i].addend = in;
4867 if (i == 1)
4868 res = out;
4869 else
4870 res = make_ssa_name (integer_type_node);
4871 g = gimple_build_assign (res, PLUS_EXPR, in,
4872 build_int_cst (integer_type_node,
4873 limb_prec));
4874 insert_before (g);
4875 m_data[m_data_cnt] = res;
4877 break;
4878 case IFN_PARITY:
4879 if (!integer_zerop (in))
4881 if (kind == bitint_prec_huge && i == 1)
4882 res = out;
4883 else
4884 res = make_ssa_name (m_limb_type);
4885 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4886 insert_before (g);
4888 else
4889 res = rhs1;
4890 m_data[m_data_cnt] = res;
4891 break;
4892 case IFN_POPCOUNT:
4893 g = gimple_build_call (fndecl, 1, rhs1);
4894 tem = make_ssa_name (integer_type_node);
4895 gimple_call_set_lhs (g, tem);
4896 insert_before (g);
4897 if (!integer_zerop (in))
4899 if (kind == bitint_prec_huge && i == 1)
4900 res = out;
4901 else
4902 res = make_ssa_name (integer_type_node);
4903 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4904 insert_before (g);
4906 else
4907 res = tem;
4908 m_data[m_data_cnt] = res;
4909 break;
4910 default:
4911 gcc_unreachable ();
4914 m_first = false;
4915 if (kind == bitint_prec_huge && i <= 1)
4917 if (i == 0)
4919 idx = make_ssa_name (sizetype);
4920 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4921 size_one_node);
4922 insert_before (g);
4924 else
4926 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4927 size_int (2));
4928 insert_before (g);
4929 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4930 NULL_TREE, NULL_TREE);
4931 insert_before (g);
4932 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4933 m_gsi = gsi_after_labels (edge_bb);
4934 else
4935 m_gsi = gsi_for_stmt (stmt);
4936 m_bb = NULL;
4941 else
4943 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4944 int sub_one = 0;
4945 if (kind == bitint_prec_large)
4946 cnt = CEIL (prec, limb_prec);
4947 else
4949 rem = prec % limb_prec;
4950 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4951 rem = limb_prec;
4952 end = (prec - rem) / limb_prec;
4953 cnt = 1 + (rem != 0);
4954 if (ifn == IFN_CLRSB)
4955 sub_one = 1;
4958 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4959 gsi_prev (&gsi);
4960 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4961 edge_bb = e->src;
4962 m_gsi = gsi_end_bb (edge_bb);
4964 if (ifn == IFN_CLZ)
4965 bqp = XALLOCAVEC (struct bq_details, cnt);
4966 else
4968 gsi = gsi_for_stmt (stmt);
4969 gsi_prev (&gsi);
4970 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4971 edge_bb = e->src;
4972 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4975 for (unsigned i = 0; i < cnt; i++)
4977 m_data_cnt = 0;
4978 if (kind == bitint_prec_large)
4979 idx = size_int (cnt - i - 1);
4980 else if (i == cnt - 1)
4981 idx = create_loop (size_int (end - 1), &idx_next);
4982 else
4983 idx = size_int (end);
4985 tree rhs1 = handle_operand (arg0, idx);
4986 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4988 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4989 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4990 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4991 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4992 rhs1 = add_cast (m_limb_type, rhs1);
4995 if (ifn == IFN_CLZ)
4997 g = gimple_build_cond (NE_EXPR, rhs1,
4998 build_zero_cst (m_limb_type),
4999 NULL_TREE, NULL_TREE);
5000 insert_before (g);
5001 edge e1 = split_block (gsi_bb (m_gsi), g);
5002 e1->flags = EDGE_FALSE_VALUE;
5003 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
5004 e1->probability = profile_probability::unlikely ();
5005 e2->probability = e1->probability.invert ();
5006 if (i == 0)
5007 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5008 m_gsi = gsi_after_labels (e1->dest);
5009 bqp[i].e = e2;
5010 bqp[i].val = rhs1;
5012 else
5014 if (i == 0)
5016 first = rhs1;
5017 g = gimple_build_assign (make_ssa_name (m_limb_type),
5018 PLUS_EXPR, rhs1,
5019 build_int_cst (m_limb_type, 1));
5020 insert_before (g);
5021 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
5022 build_int_cst (m_limb_type, 1),
5023 NULL_TREE, NULL_TREE);
5024 insert_before (g);
5026 else
5028 g = gimple_build_assign (make_ssa_name (m_limb_type),
5029 BIT_XOR_EXPR, rhs1, first);
5030 insert_before (g);
5031 tree stype = signed_type_for (m_limb_type);
5032 g = gimple_build_cond (LT_EXPR,
5033 add_cast (stype,
5034 gimple_assign_lhs (g)),
5035 build_zero_cst (stype),
5036 NULL_TREE, NULL_TREE);
5037 insert_before (g);
5038 edge e1 = split_block (gsi_bb (m_gsi), g);
5039 e1->flags = EDGE_FALSE_VALUE;
5040 edge e2 = make_edge (e1->src, gimple_bb (stmt),
5041 EDGE_TRUE_VALUE);
5042 e1->probability = profile_probability::unlikely ();
5043 e2->probability = e1->probability.invert ();
5044 if (i == 1)
5045 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
5046 e2->src);
5047 m_gsi = gsi_after_labels (e1->dest);
5048 bqp[2 * i].e = e2;
5049 g = gimple_build_cond (NE_EXPR, rhs1, first,
5050 NULL_TREE, NULL_TREE);
5051 insert_before (g);
5053 edge e1 = split_block (gsi_bb (m_gsi), g);
5054 e1->flags = EDGE_FALSE_VALUE;
5055 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
5056 e1->probability = profile_probability::unlikely ();
5057 e2->probability = e1->probability.invert ();
5058 if (i == 0)
5059 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5060 m_gsi = gsi_after_labels (e1->dest);
5061 bqp[2 * i + 1].e = e2;
5062 bqp[i].val = rhs1;
5064 if (tree_fits_uhwi_p (idx))
5065 bqp[i].addend
5066 = build_int_cst (integer_type_node,
5067 (int) prec
5068 - (((int) tree_to_uhwi (idx) + 1)
5069 * limb_prec) - sub_one);
5070 else
5072 tree in, out;
5073 in = build_int_cst (integer_type_node, rem - sub_one);
5074 m_first = true;
5075 in = prepare_data_in_out (in, idx, &out);
5076 out = m_data[m_data_cnt + 1];
5077 bqp[i].addend = in;
5078 g = gimple_build_assign (out, PLUS_EXPR, in,
5079 build_int_cst (integer_type_node,
5080 limb_prec));
5081 insert_before (g);
5082 m_data[m_data_cnt] = out;
5085 m_first = false;
5086 if (kind == bitint_prec_huge && i == cnt - 1)
5088 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
5089 size_int (-1));
5090 insert_before (g);
5091 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
5092 NULL_TREE, NULL_TREE);
5093 insert_before (g);
5094 edge true_edge, false_edge;
5095 extract_true_false_edges_from_block (gsi_bb (m_gsi),
5096 &true_edge, &false_edge);
5097 m_gsi = gsi_after_labels (false_edge->dest);
5098 m_bb = NULL;
5102 switch (ifn)
5104 case IFN_CLZ:
5105 case IFN_CTZ:
5106 case IFN_FFS:
5107 gphi *phi1, *phi2, *phi3;
5108 basic_block bb;
5109 bb = gsi_bb (m_gsi);
5110 remove_edge (find_edge (bb, gimple_bb (stmt)));
5111 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5112 gimple_bb (stmt));
5113 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5114 gimple_bb (stmt));
5115 for (unsigned i = 0; i < cnt; i++)
5117 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
5118 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
5120 if (arg1 == NULL_TREE)
5122 g = gimple_build_builtin_unreachable (m_loc);
5123 insert_before (g);
5125 m_gsi = gsi_for_stmt (stmt);
5126 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
5127 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5128 insert_before (g);
5129 if (arg1 == NULL_TREE)
5130 g = gimple_build_assign (lhs, PLUS_EXPR,
5131 gimple_phi_result (phi2),
5132 gimple_call_lhs (g));
5133 else
5135 g = gimple_build_assign (make_ssa_name (integer_type_node),
5136 PLUS_EXPR, gimple_phi_result (phi2),
5137 gimple_call_lhs (g));
5138 insert_before (g);
5139 edge e1 = split_block (gimple_bb (stmt), g);
5140 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5141 e2->probability = profile_probability::always ();
5142 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5143 get_immediate_dominator (CDI_DOMINATORS,
5144 e1->src));
5145 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5146 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5147 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5148 m_gsi = gsi_for_stmt (stmt);
5149 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5151 gsi_replace (&m_gsi, g, true);
5152 break;
5153 case IFN_CLRSB:
5154 bb = gsi_bb (m_gsi);
5155 remove_edge (find_edge (bb, edge_bb));
5156 edge e;
5157 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5158 e->probability = profile_probability::always ();
5159 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5160 get_immediate_dominator (CDI_DOMINATORS,
5161 edge_bb));
5162 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5163 edge_bb);
5164 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5165 edge_bb);
5166 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5167 gimple_bb (stmt));
5168 for (unsigned i = 0; i < cnt; i++)
5170 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5171 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5172 UNKNOWN_LOCATION);
5173 tree a = bqp[i].addend;
5174 if (i && kind == bitint_prec_large)
5175 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5176 if (i)
5177 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5179 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5180 UNKNOWN_LOCATION);
5181 m_gsi = gsi_after_labels (edge_bb);
5182 g = gimple_build_call (fndecl, 1,
5183 add_cast (signed_type_for (m_limb_type),
5184 gimple_phi_result (phi1)));
5185 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5186 insert_before (g);
5187 g = gimple_build_assign (make_ssa_name (integer_type_node),
5188 PLUS_EXPR, gimple_call_lhs (g),
5189 gimple_phi_result (phi2));
5190 insert_before (g);
5191 if (kind != bitint_prec_large)
5193 g = gimple_build_assign (make_ssa_name (integer_type_node),
5194 PLUS_EXPR, gimple_assign_lhs (g),
5195 integer_one_node);
5196 insert_before (g);
5198 add_phi_arg (phi3, gimple_assign_lhs (g),
5199 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5200 m_gsi = gsi_for_stmt (stmt);
5201 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5202 gsi_replace (&m_gsi, g, true);
5203 break;
5204 case IFN_PARITY:
5205 g = gimple_build_call (fndecl, 1, res);
5206 gimple_call_set_lhs (g, lhs);
5207 gsi_replace (&m_gsi, g, true);
5208 break;
5209 case IFN_POPCOUNT:
5210 g = gimple_build_assign (lhs, res);
5211 gsi_replace (&m_gsi, g, true);
5212 break;
5213 default:
5214 gcc_unreachable ();
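/* An illustrative sketch using the C23 type-generic built-ins:

     int
     pc (unsigned _BitInt(512) x)
     {
       return __builtin_popcountg (x);
     }

   lowers to a loop accumulating per-limb results of the word-sized
   __builtin_popcount{,l,ll} variant matching limb_prec, while
   e.g. __builtin_clzg (x, -1) scans from the most significant limb
   downwards and leaves the loop early at the first non-zero limb.  */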
5218 /* Lower a call statement with one or more large/huge _BitInt
5219 arguments or a large/huge _BitInt return value. */
5221 void
5222 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5224 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5225 unsigned int nargs = gimple_call_num_args (stmt);
5226 if (gimple_call_internal_p (stmt))
5227 switch (gimple_call_internal_fn (stmt))
5229 case IFN_ADD_OVERFLOW:
5230 case IFN_SUB_OVERFLOW:
5231 case IFN_UBSAN_CHECK_ADD:
5232 case IFN_UBSAN_CHECK_SUB:
5233 lower_addsub_overflow (obj, stmt);
5234 return;
5235 case IFN_MUL_OVERFLOW:
5236 case IFN_UBSAN_CHECK_MUL:
5237 lower_mul_overflow (obj, stmt);
5238 return;
5239 case IFN_CLZ:
5240 case IFN_CTZ:
5241 case IFN_CLRSB:
5242 case IFN_FFS:
5243 case IFN_PARITY:
5244 case IFN_POPCOUNT:
5245 lower_bit_query (stmt);
5246 return;
5247 default:
5248 break;
5250 for (unsigned int i = 0; i < nargs; ++i)
5252 tree arg = gimple_call_arg (stmt, i);
5253 if (TREE_CODE (arg) != SSA_NAME
5254 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5255 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5256 continue;
5257 if (SSA_NAME_IS_DEFAULT_DEF (arg)
5258 && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
5260 tree var = create_tmp_reg (TREE_TYPE (arg));
5261 arg = get_or_create_ssa_default_def (cfun, var);
5263 else
5265 int p = var_to_partition (m_map, arg);
5266 tree v = m_vars[p];
5267 gcc_assert (v != NULL_TREE);
5268 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5269 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5270 arg = make_ssa_name (TREE_TYPE (arg));
5271 gimple *g = gimple_build_assign (arg, v);
5272 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5274 gimple_call_set_arg (stmt, i, arg);
5275 if (m_preserved == NULL)
5276 m_preserved = BITMAP_ALLOC (NULL);
5277 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5279 tree lhs = gimple_call_lhs (stmt);
5280 if (lhs
5281 && TREE_CODE (lhs) == SSA_NAME
5282 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5283 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5285 int p = var_to_partition (m_map, lhs);
5286 tree v = m_vars[p];
5287 gcc_assert (v != NULL_TREE);
5288 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5289 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5290 gimple_call_set_lhs (stmt, v);
5291 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5293 update_stmt (stmt);
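/* E.g. for a call foo (x) with a large/huge _BitInt SSA_NAME x (a
   sketch), the loop above loads a fresh SSA_NAME from the underlying
   limb array variable of x's partition and passes that instead,
   recording the new name in the m_preserved bitmap.  */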
5296 /* Lower __asm STMT which involves large/huge _BitInt values. */
5298 void
5299 bitint_large_huge::lower_asm (gimple *stmt)
5301 gasm *g = as_a <gasm *> (stmt);
5302 unsigned noutputs = gimple_asm_noutputs (g);
5303 unsigned ninputs = gimple_asm_ninputs (g);
5305 for (unsigned i = 0; i < noutputs; ++i)
5307 tree t = gimple_asm_output_op (g, i);
5308 tree s = TREE_VALUE (t);
5309 if (TREE_CODE (s) == SSA_NAME
5310 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5311 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5313 int part = var_to_partition (m_map, s);
5314 gcc_assert (m_vars[part] != NULL_TREE);
5315 TREE_VALUE (t) = m_vars[part];
5318 for (unsigned i = 0; i < ninputs; ++i)
5320 tree t = gimple_asm_input_op (g, i);
5321 tree s = TREE_VALUE (t);
5322 if (TREE_CODE (s) == SSA_NAME
5323 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5324 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5326 if (SSA_NAME_IS_DEFAULT_DEF (s)
5327 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
5329 TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
5330 mark_addressable (TREE_VALUE (t));
5332 else
5334 int part = var_to_partition (m_map, s);
5335 gcc_assert (m_vars[part] != NULL_TREE);
5336 TREE_VALUE (t) = m_vars[part];
5340 update_stmt (stmt);
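/* I.e. for something like __asm ("" : "=m" (x) : "m" (y)) with
   large/huge _BitInt operands (a sketch), the SSA_NAME operands are
   replaced by the underlying limb array variables (or, for default
   defs, by fresh addressable temporaries), leaving the asm statement
   itself otherwise untouched.  */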
5343 /* Lower statement STMT which involves large/huge _BitInt values
5344 into code accessing individual limbs. */
5346 void
5347 bitint_large_huge::lower_stmt (gimple *stmt)
5349 m_first = true;
5350 m_lhs = NULL_TREE;
5351 m_data.truncate (0);
5352 m_data_cnt = 0;
5353 m_gsi = gsi_for_stmt (stmt);
5354 m_after_stmt = NULL;
5355 m_bb = NULL;
5356 m_init_gsi = m_gsi;
5357 gsi_prev (&m_init_gsi);
5358 m_preheader_bb = NULL;
5359 m_upwards_2limb = 0;
5360 m_upwards = false;
5361 m_var_msb = false;
5362 m_cast_conditional = false;
5363 m_bitfld_load = 0;
5364 m_loc = gimple_location (stmt);
5365 if (is_gimple_call (stmt))
5367 lower_call (NULL_TREE, stmt);
5368 return;
5370 if (gimple_code (stmt) == GIMPLE_ASM)
5372 lower_asm (stmt);
5373 return;
5375 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5376 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5377 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5378 bool mergeable_cast_p = false;
5379 bool final_cast_p = false;
5380 if (gimple_assign_cast_p (stmt))
5382 lhs = gimple_assign_lhs (stmt);
5383 tree rhs1 = gimple_assign_rhs1 (stmt);
5384 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
5385 rhs1 = TREE_OPERAND (rhs1, 0);
5386 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5387 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5388 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5389 mergeable_cast_p = true;
5390 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5391 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5392 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5393 || POINTER_TYPE_P (TREE_TYPE (lhs))
5394 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
5396 final_cast_p = true;
5397 if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
5398 && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
5399 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5400 && !POINTER_TYPE_P (TREE_TYPE (lhs))))
5401 && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
5403 /* Handle VIEW_CONVERT_EXPRs to huge INTEGER_TYPEs which aren't
5404 supported in general, like uint256_t or uint512_t. These are
5405 usually emitted from memcpy folding; backends support moves of
5406 them, but usually little else.
5407 Similarly handle VCEs to vector/complex types etc. */
5408 gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
5409 if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
5410 && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
5412 tree var = create_tmp_reg (TREE_TYPE (lhs));
5413 rhs1 = get_or_create_ssa_default_def (cfun, var);
5414 gimple_assign_set_rhs1 (stmt, rhs1);
5415 gimple_assign_set_rhs_code (stmt, SSA_NAME);
5417 else if (m_names == NULL
5418 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
5420 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5421 gcc_assert (gimple_assign_load_p (g));
5422 tree mem = gimple_assign_rhs1 (g);
5423 tree ltype = TREE_TYPE (lhs);
5424 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (mem));
5425 if (as != TYPE_ADDR_SPACE (ltype))
5426 ltype
5427 = build_qualified_type (ltype,
5428 TYPE_QUALS (ltype)
5429 | ENCODE_QUAL_ADDR_SPACE (as));
5430 rhs1 = build1 (VIEW_CONVERT_EXPR, ltype, unshare_expr (mem));
5431 gimple_assign_set_rhs1 (stmt, rhs1);
5433 else
5435 int part = var_to_partition (m_map, rhs1);
5436 gcc_assert (m_vars[part] != NULL_TREE);
5437 rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
5438 m_vars[part]);
5439 gimple_assign_set_rhs1 (stmt, rhs1);
5441 update_stmt (stmt);
5442 return;
5444 if (TREE_CODE (rhs1) == SSA_NAME
5445 && (m_names == NULL
5446 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5448 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5449 if (is_gimple_assign (g)
5450 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5452 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5453 if (TREE_CODE (rhs2) == SSA_NAME
5454 && (m_names == NULL
5455 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5457 g = SSA_NAME_DEF_STMT (rhs2);
5458 int ovf = optimizable_arith_overflow (g);
5459 if (ovf == 2)
5460 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5461 and IMAGPART_EXPR uses, where the latter is cast to
5462 non-_BitInt, it will be optimized when handling
5463 the REALPART_EXPR. */
5464 return;
5465 if (ovf == 1)
5467 lower_call (NULL_TREE, g);
5468 return;
5474 else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5475 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5476 && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5477 && !POINTER_TYPE_P (TREE_TYPE (rhs1))
5478 && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
5480 int part = var_to_partition (m_map, lhs);
5481 gcc_assert (m_vars[part] != NULL_TREE);
5482 lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
5483 insert_before (gimple_build_assign (lhs, rhs1));
5484 return;
5487 if (gimple_store_p (stmt))
5489 tree rhs1 = gimple_assign_rhs1 (stmt);
5490 if (TREE_CODE (rhs1) == SSA_NAME
5491 && (m_names == NULL
5492 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5494 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5495 m_loc = gimple_location (g);
5496 lhs = gimple_assign_lhs (stmt);
5497 if (is_gimple_assign (g) && !mergeable_op (g))
5498 switch (gimple_assign_rhs_code (g))
5500 case LSHIFT_EXPR:
5501 case RSHIFT_EXPR:
5502 lower_shift_stmt (lhs, g);
5503 handled:
5504 m_gsi = gsi_for_stmt (stmt);
5505 unlink_stmt_vdef (stmt);
5506 release_ssa_name (gimple_vdef (stmt));
5507 gsi_remove (&m_gsi, true);
5508 return;
5509 case MULT_EXPR:
5510 case TRUNC_DIV_EXPR:
5511 case TRUNC_MOD_EXPR:
5512 lower_muldiv_stmt (lhs, g);
5513 goto handled;
5514 case FIX_TRUNC_EXPR:
5515 lower_float_conv_stmt (lhs, g);
5516 goto handled;
5517 case REALPART_EXPR:
5518 case IMAGPART_EXPR:
5519 lower_cplxpart_stmt (lhs, g);
5520 goto handled;
5521 case VIEW_CONVERT_EXPR:
5523 tree rhs1 = gimple_assign_rhs1 (g);
5524 rhs1 = TREE_OPERAND (rhs1, 0);
5525 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5526 && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
5528 tree ltype = TREE_TYPE (rhs1);
5529 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
5530 ltype
5531 = build_qualified_type (ltype,
5532 TYPE_QUALS (TREE_TYPE (lhs))
5533 | ENCODE_QUAL_ADDR_SPACE (as));
5534 lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
5535 gimple_assign_set_lhs (stmt, lhs);
5536 gimple_assign_set_rhs1 (stmt, rhs1);
5537 gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
5538 update_stmt (stmt);
5539 return;
5542 break;
5543 default:
5544 break;
5546 else if (optimizable_arith_overflow (g) == 3)
5548 lower_call (lhs, g);
5549 goto handled;
5551 m_loc = gimple_location (stmt);
5554 if (mergeable_op (stmt)
5555 || gimple_store_p (stmt)
5556 || gimple_assign_load_p (stmt)
5557 || eq_p
5558 || mergeable_cast_p)
5560 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5561 if (!eq_p)
5562 return;
5564 else if (cmp_code != ERROR_MARK)
5565 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5566 if (cmp_code != ERROR_MARK)
5568 if (gimple_code (stmt) == GIMPLE_COND)
5570 gcond *cstmt = as_a <gcond *> (stmt);
5571 gimple_cond_set_lhs (cstmt, lhs);
5572 gimple_cond_set_rhs (cstmt, boolean_false_node);
5573 gimple_cond_set_code (cstmt, cmp_code);
5574 update_stmt (stmt);
5575 return;
5577 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5579 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5580 boolean_false_node);
5581 gimple_assign_set_rhs1 (stmt, cond);
5582 lhs = gimple_assign_lhs (stmt);
5583 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5584 || (bitint_precision_kind (TREE_TYPE (lhs))
5585 <= bitint_prec_middle));
5586 update_stmt (stmt);
5587 return;
5589 gimple_assign_set_rhs1 (stmt, lhs);
5590 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5591 gimple_assign_set_rhs_code (stmt, cmp_code);
5592 update_stmt (stmt);
5593 return;
5595 if (final_cast_p)
5597 tree lhs_type = TREE_TYPE (lhs);
5598 /* If this assert fails, add support for filling in 3 or more
5599 limbs from a normal integral type. As long as no target chooses
5600 a limb mode smaller than half of the largest supported normal
5601 integral type, this will not be needed. */
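/* For a two-limb lhs the code below composes the result as roughly
   (lhs_type) ((r2 << limb_prec) | r1) from the low limb r1 and the
   high limb r2 of the operand.  */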
5602 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5603 gimple *g;
5604 if ((TREE_CODE (lhs_type) == BITINT_TYPE
5605 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5606 || POINTER_TYPE_P (lhs_type))
5607 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5608 TYPE_UNSIGNED (lhs_type));
5609 m_data_cnt = 0;
5610 tree rhs1 = gimple_assign_rhs1 (stmt);
5611 tree r1 = handle_operand (rhs1, size_int (0));
5612 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5613 r1 = add_cast (lhs_type, r1);
5614 if (TYPE_PRECISION (lhs_type) > limb_prec)
5616 m_data_cnt = 0;
5617 m_first = false;
5618 tree r2 = handle_operand (rhs1, size_int (1));
5619 r2 = add_cast (lhs_type, r2);
5620 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5621 build_int_cst (unsigned_type_node,
5622 limb_prec));
5623 insert_before (g);
5624 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5625 gimple_assign_lhs (g));
5626 insert_before (g);
5627 r1 = gimple_assign_lhs (g);
5629 if (lhs_type != TREE_TYPE (lhs))
5630 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5631 else
5632 g = gimple_build_assign (lhs, r1);
5633 gsi_replace (&m_gsi, g, true);
5634 return;
5636 if (is_gimple_assign (stmt))
5637 switch (gimple_assign_rhs_code (stmt))
5639 case LSHIFT_EXPR:
5640 case RSHIFT_EXPR:
5641 lower_shift_stmt (NULL_TREE, stmt);
5642 return;
5643 case MULT_EXPR:
5644 case TRUNC_DIV_EXPR:
5645 case TRUNC_MOD_EXPR:
5646 lower_muldiv_stmt (NULL_TREE, stmt);
5647 return;
5648 case FIX_TRUNC_EXPR:
5649 case FLOAT_EXPR:
5650 lower_float_conv_stmt (NULL_TREE, stmt);
5651 return;
5652 case REALPART_EXPR:
5653 case IMAGPART_EXPR:
5654 lower_cplxpart_stmt (NULL_TREE, stmt);
5655 return;
5656 case COMPLEX_EXPR:
5657 lower_complexexpr_stmt (stmt);
5658 return;
5659 default:
5660 break;
5662 gcc_unreachable ();
5665 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5666 the desired memory state. */
5668 void *
5669 vuse_eq (ao_ref *, tree vuse1, void *data)
5671 tree vuse2 = (tree) data;
5672 if (vuse1 == vuse2)
5673 return data;
5675 return NULL;
5678 /* Return true if STMT uses a library function and needs to take
5679 the address of its inputs. We need to avoid bit-fields in those
5680 cases. Similarly, we need to avoid overlap between destination
5681 and source limb arrays. */
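/* E.g. large/huge multiplication, division and modulo are lowered to
   calls to libgcc helpers (__mulbitint3, __divmodbitint4) which take
   the operands and result as pointers to limb arrays, hence the
   addressability and no-overlap requirements here.  */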
5683 bool
5684 stmt_needs_operand_addr (gimple *stmt)
5686 if (is_gimple_assign (stmt))
5687 switch (gimple_assign_rhs_code (stmt))
5689 case MULT_EXPR:
5690 case TRUNC_DIV_EXPR:
5691 case TRUNC_MOD_EXPR:
5692 case FLOAT_EXPR:
5693 return true;
5694 default:
5695 break;
5697 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5698 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5699 return true;
5700 return false;
5703 /* Dominator walker used to discover which large/huge _BitInt
5704 loads could be sunk into all their uses. */
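/* E.g. for x = *p; ... use (x); the load can be replayed limb by
   limb at the use only if no intervening store may alias *p on the
   way there; the walk below tracks the virtual operand per basic
   block (in bb->aux) and checks that with walk_non_aliased_vuses.  */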
5706 class bitint_dom_walker : public dom_walker
5708 public:
5709 bitint_dom_walker (bitmap names, bitmap loads)
5710 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5712 edge before_dom_children (basic_block) final override;
5714 private:
5715 bitmap m_names, m_loads;
5718 edge
5719 bitint_dom_walker::before_dom_children (basic_block bb)
5721 gphi *phi = get_virtual_phi (bb);
5722 tree vop;
5723 if (phi)
5724 vop = gimple_phi_result (phi);
5725 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5726 vop = NULL_TREE;
5727 else
5728 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5730 auto_vec<tree, 16> worklist;
5731 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5732 !gsi_end_p (gsi); gsi_next (&gsi))
5734 gimple *stmt = gsi_stmt (gsi);
5735 if (is_gimple_debug (stmt))
5736 continue;
5738 if (!vop && gimple_vuse (stmt))
5739 vop = gimple_vuse (stmt);
5741 tree cvop = vop;
5742 if (gimple_vdef (stmt))
5743 vop = gimple_vdef (stmt);
5745 tree lhs = gimple_get_lhs (stmt);
5746 if (lhs
5747 && TREE_CODE (lhs) == SSA_NAME
5748 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5749 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5750 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5751 /* If the lhs of stmt is a large/huge _BitInt SSA_NAME not in
5752 m_names, it will be handled in a loop or straight line code
5753 at the location of its (ultimate) immediate use, so for
5754 vop checking purposes check these only at that ultimate
5755 immediate use. */
5756 continue;
5758 ssa_op_iter oi;
5759 use_operand_p use_p;
5760 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5762 tree s = USE_FROM_PTR (use_p);
5763 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5764 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5765 worklist.safe_push (s);
5768 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5769 while (worklist.length () > 0)
5771 tree s = worklist.pop ();
5773 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5775 gimple *g = SSA_NAME_DEF_STMT (s);
5776 needs_operand_addr |= stmt_needs_operand_addr (g);
5777 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5779 tree s2 = USE_FROM_PTR (use_p);
5780 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5781 && (bitint_precision_kind (TREE_TYPE (s2))
5782 >= bitint_prec_large))
5783 worklist.safe_push (s2);
5785 continue;
5787 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5788 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5790 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5791 if (TREE_CODE (rhs) == SSA_NAME
5792 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5793 s = rhs;
5794 else
5795 continue;
5797 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5798 continue;
5800 gimple *g = SSA_NAME_DEF_STMT (s);
5801 tree rhs1 = gimple_assign_rhs1 (g);
5802 if (needs_operand_addr
5803 && TREE_CODE (rhs1) == COMPONENT_REF
5804 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5806 tree fld = TREE_OPERAND (rhs1, 1);
5807 /* For little-endian, we can allow as inputs bit-fields
5808 which start at a limb boundary. */
5809 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5810 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5811 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5812 % limb_prec) == 0)
5814 else
5816 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5817 continue;
5821 ao_ref ref;
5822 ao_ref_init (&ref, rhs1);
5823 tree lvop = gimple_vuse (g);
5824 unsigned limit = 64;
5825 tree vuse = cvop;
5826 if (vop != cvop
5827 && is_gimple_assign (stmt)
5828 && gimple_store_p (stmt)
5829 && (needs_operand_addr
5830 || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
5831 vuse = vop;
5832 if (vuse != lvop
5833 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5834 NULL, NULL, limit, lvop) == NULL)
5835 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5839 bb->aux = (void *) vop;
5840 return NULL;
5845 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5846 build_ssa_conflict_graph.
5847 The differences are:
5848 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5849 2) for large/huge _BitInt multiplication/division/modulo process def
5850 only after processing uses rather than before to make uses conflict
5851 with the definition
5852 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5853 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5854 the final statement. */
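/* E.g. for d = a * b with large/huge _BitInt operands, processing
   the uses before the definition (point 2 above) makes a and b
   conflict with d, so coalescing cannot give an operand and the
   destination the same limb array, avoiding the overlap the library
   call cannot handle.  */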
5856 void
5857 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5858 ssa_conflicts *graph, bitmap names,
5859 void (*def) (live_track *, tree,
5860 ssa_conflicts *),
5861 void (*use) (live_track *, tree))
5863 bool muldiv_p = false;
5864 tree lhs = NULL_TREE;
5865 if (is_gimple_assign (stmt))
5867 lhs = gimple_assign_lhs (stmt);
5868 if (TREE_CODE (lhs) == SSA_NAME
5869 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5870 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5872 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5873 return;
5874 switch (gimple_assign_rhs_code (stmt))
5876 case MULT_EXPR:
5877 case TRUNC_DIV_EXPR:
5878 case TRUNC_MOD_EXPR:
5879 muldiv_p = true;
5880 default:
5881 break;
5886 ssa_op_iter iter;
5887 tree var;
5888 if (!muldiv_p)
5890 /* For stmts with more than one SSA_NAME definition pretend all the
5891 SSA_NAME outputs but the first one are live at this point, so
5892 that conflicts are added between all of those even when they
5893 are not actually live after the asm, because expansion might
5894 copy those into pseudos after the asm and if multiple outputs
5895 share the same partition, it might overwrite those that should
5896 be live. E.g.
5897 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5898 return a;
5899 See PR70593. */
5900 bool first = true;
5901 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5902 if (first)
5903 first = false;
5904 else
5905 use (live, var);
5907 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5908 def (live, var, graph);
5911 auto_vec<tree, 16> worklist;
5912 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5913 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5914 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5916 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5917 use (live, var);
5918 else
5919 worklist.safe_push (var);
5922 while (worklist.length () > 0)
5924 tree s = worklist.pop ();
5925 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5926 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5927 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5929 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5930 use (live, var);
5931 else
5932 worklist.safe_push (var);
5936 if (muldiv_p)
5937 def (live, lhs, graph);
5940 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5941 return the largest bitint_prec_kind of them, otherwise return
5942 bitint_prec_small. */
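/* E.g. .ADD_OVERFLOW (x_1, y) where y is a _BitInt(512) INTEGER_CST
   returns a large/huge kind even if neither x_1 nor the lhs has a
   large/huge _BitInt type, so a constant argument alone can force
   lowering.  */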
5944 static bitint_prec_kind
5945 arith_overflow_arg_kind (gimple *stmt)
5947 bitint_prec_kind ret = bitint_prec_small;
5948 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5949 switch (gimple_call_internal_fn (stmt))
5951 case IFN_ADD_OVERFLOW:
5952 case IFN_SUB_OVERFLOW:
5953 case IFN_MUL_OVERFLOW:
5954 for (int i = 0; i < 2; ++i)
5956 tree a = gimple_call_arg (stmt, i);
5957 if (TREE_CODE (a) == INTEGER_CST
5958 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5960 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5961 ret = MAX (ret, kind);
5964 break;
5965 default:
5966 break;
5968 return ret;
5971 /* Entry point for _BitInt(N) operation lowering during optimization. */
5973 static unsigned int
5974 gimple_lower_bitint (void)
5976 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5977 limb_prec = 0;
5979 unsigned int i;
5980 for (i = 0; i < num_ssa_names; ++i)
5982 tree s = ssa_name (i);
5983 if (s == NULL)
5984 continue;
5985 tree type = TREE_TYPE (s);
5986 if (TREE_CODE (type) == COMPLEX_TYPE)
5988 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5989 != bitint_prec_small)
5990 break;
5991 type = TREE_TYPE (type);
5993 if (TREE_CODE (type) == BITINT_TYPE
5994 && bitint_precision_kind (type) != bitint_prec_small)
5995 break;
5996 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5997 into memory. Such functions could have no large/huge SSA_NAMEs. */
5998 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6000 gimple *g = SSA_NAME_DEF_STMT (s);
6001 if (is_gimple_assign (g) && gimple_store_p (g))
6003 tree t = gimple_assign_rhs1 (g);
6004 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6005 && (bitint_precision_kind (TREE_TYPE (t))
6006 >= bitint_prec_large))
6007 break;
6010 /* Similarly, e.g. with -frounding-math, casts from _BitInt
6011 INTEGER_CSTs to floating point types need to be rewritten. */
6012 else if (SCALAR_FLOAT_TYPE_P (type))
6014 gimple *g = SSA_NAME_DEF_STMT (s);
6015 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6017 tree t = gimple_assign_rhs1 (g);
6018 if (TREE_CODE (t) == INTEGER_CST
6019 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6020 && (bitint_precision_kind (TREE_TYPE (t))
6021 != bitint_prec_small))
6022 break;
6026 if (i == num_ssa_names)
6027 return 0;
6029 basic_block bb;
6030 auto_vec<gimple *, 4> switch_statements;
6031 FOR_EACH_BB_FN (bb, cfun)
6033 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
6035 tree idx = gimple_switch_index (swtch);
6036 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
6037 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
6038 continue;
6040 if (optimize)
6041 group_case_labels_stmt (swtch);
6042 if (gimple_switch_num_labels (swtch) == 1)
6044 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
6045 gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
6046 gsi_remove (&gsi, true);
6048 else
6049 switch_statements.safe_push (swtch);
6053 if (!switch_statements.is_empty ())
6055 bool expanded = false;
6056 gimple *stmt;
6057 unsigned int j;
6058 i = 0;
6059 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
6061 gswitch *swtch = as_a<gswitch *> (stmt);
6062 tree_switch_conversion::switch_decision_tree dt (swtch);
6063 expanded |= dt.analyze_switch_statement ();
6066 if (expanded)
6068 free_dominance_info (CDI_DOMINATORS);
6069 free_dominance_info (CDI_POST_DOMINATORS);
6070 mark_virtual_operands_for_renaming (cfun);
6071 cleanup_tree_cfg (TODO_update_ssa);
6075 struct bitint_large_huge large_huge;
6076 bool has_large_huge_parm_result = false;
6077 bool has_large_huge = false;
6078 unsigned int ret = 0, first_large_huge = ~0U;
6079 bool edge_insertions = false;
6080 for (; i < num_ssa_names; ++i)
6082 tree s = ssa_name (i);
6083 if (s == NULL)
6084 continue;
6085 tree type = TREE_TYPE (s);
6086 if (TREE_CODE (type) == COMPLEX_TYPE)
6088 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
6089 >= bitint_prec_large)
6090 has_large_huge = true;
6091 type = TREE_TYPE (type);
6093 if (TREE_CODE (type) == BITINT_TYPE
6094 && bitint_precision_kind (type) >= bitint_prec_large)
6096 if (first_large_huge == ~0U)
6097 first_large_huge = i;
6098 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
6099 gimple_stmt_iterator gsi;
6100 tree_code rhs_code;
6101 /* Unoptimize certain constructs to simpler alternatives to
6102 avoid having to lower all of them. */
6103 if (is_gimple_assign (stmt) && gimple_bb (stmt))
6104 switch (rhs_code = gimple_assign_rhs_code (stmt))
6106 default:
6107 break;
6108 case MULT_EXPR:
6109 case TRUNC_DIV_EXPR:
6110 case TRUNC_MOD_EXPR:
6111 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
6113 location_t loc = gimple_location (stmt);
6114 gsi = gsi_for_stmt (stmt);
6115 tree rhs1 = gimple_assign_rhs1 (stmt);
6116 tree rhs2 = gimple_assign_rhs2 (stmt);
6117 /* For multiplication and division where the lhs and one or both
6118 operands occur in abnormal PHIs ((ab) SSA_NAMEs), force the
6119 operands into new SSA_NAMEs to avoid coalescing failures. */
6120 if (TREE_CODE (rhs1) == SSA_NAME
6121 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
6123 first_large_huge = 0;
6124 tree t = make_ssa_name (TREE_TYPE (rhs1));
6125 g = gimple_build_assign (t, SSA_NAME, rhs1);
6126 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6127 gimple_set_location (g, loc);
6128 gimple_assign_set_rhs1 (stmt, t);
6129 if (rhs1 == rhs2)
6131 gimple_assign_set_rhs2 (stmt, t);
6132 rhs2 = t;
6134 update_stmt (stmt);
6136 if (TREE_CODE (rhs2) == SSA_NAME
6137 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
6139 first_large_huge = 0;
6140 tree t = make_ssa_name (TREE_TYPE (rhs2));
6141 g = gimple_build_assign (t, SSA_NAME, rhs2);
6142 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6143 gimple_set_location (g, loc);
6144 gimple_assign_set_rhs2 (stmt, t);
6145 update_stmt (stmt);
6148 break;
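/* Rotates have no large/huge _BitInt lowering of their own, so they
   are unoptimized into a shift pair in the unsigned type, roughly
     x rol n  ->  ((utype) x << n) | ((utype) x >> (prec - n))
   (and the mirror image for ror), with the result cast back for
   signed types.  */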
6149 case LROTATE_EXPR:
6150 case RROTATE_EXPR:
6152 first_large_huge = 0;
6153 location_t loc = gimple_location (stmt);
6154 gsi = gsi_for_stmt (stmt);
6155 tree rhs1 = gimple_assign_rhs1 (stmt);
6156 tree type = TREE_TYPE (rhs1);
6157 tree n = gimple_assign_rhs2 (stmt), m;
6158 tree p = build_int_cst (TREE_TYPE (n),
6159 TYPE_PRECISION (type));
6160 if (TREE_CODE (n) == INTEGER_CST)
6161 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
6162 else
6164 m = make_ssa_name (TREE_TYPE (n));
6165 g = gimple_build_assign (m, MINUS_EXPR, p, n);
6166 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6167 gimple_set_location (g, loc);
6169 if (!TYPE_UNSIGNED (type))
6171 tree utype = build_bitint_type (TYPE_PRECISION (type),
6172 1);
6173 if (TREE_CODE (rhs1) == INTEGER_CST)
6174 rhs1 = fold_convert (utype, rhs1);
6175 else
6177 tree t = make_ssa_name (type);
6178 g = gimple_build_assign (t, NOP_EXPR, rhs1);
6179 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6180 gimple_set_location (g, loc);
6183 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6184 rhs_code == LROTATE_EXPR
6185 ? LSHIFT_EXPR : RSHIFT_EXPR,
6186 rhs1, n);
6187 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6188 gimple_set_location (g, loc);
6189 tree op1 = gimple_assign_lhs (g);
6190 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6191 rhs_code == LROTATE_EXPR
6192 ? RSHIFT_EXPR : LSHIFT_EXPR,
6193 rhs1, m);
6194 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6195 gimple_set_location (g, loc);
6196 tree op2 = gimple_assign_lhs (g);
6197 tree lhs = gimple_assign_lhs (stmt);
6198 if (!TYPE_UNSIGNED (type))
6200 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
6201 BIT_IOR_EXPR, op1, op2);
6202 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6203 gimple_set_location (g, loc);
6204 g = gimple_build_assign (lhs, NOP_EXPR,
6205 gimple_assign_lhs (g));
6207 else
6208 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
6209 gsi_replace (&gsi, g, true);
6210 gimple_set_location (g, loc);
6212 break;
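/* Likewise ABS/ABSU/MIN/MAX/COND_EXPR have no direct large/huge
   lowering, so they are unoptimized into explicit control flow with
   a PHI, e.g. lhs = ABS_EXPR <x> becomes roughly
     if (x < 0) tmp_ = -x;
     lhs = PHI <tmp_, x>;  */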
6213 case ABS_EXPR:
6214 case ABSU_EXPR:
6215 case MIN_EXPR:
6216 case MAX_EXPR:
6217 case COND_EXPR:
6218 first_large_huge = 0;
6219 gsi = gsi_for_stmt (stmt);
6220 tree lhs = gimple_assign_lhs (stmt);
6221 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
6222 location_t loc = gimple_location (stmt);
6223 if (rhs_code == ABS_EXPR)
6224 g = gimple_build_cond (LT_EXPR, rhs1,
6225 build_zero_cst (TREE_TYPE (rhs1)),
6226 NULL_TREE, NULL_TREE);
6227 else if (rhs_code == ABSU_EXPR)
6229 rhs2 = make_ssa_name (TREE_TYPE (lhs));
6230 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
6231 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6232 gimple_set_location (g, loc);
6233 g = gimple_build_cond (LT_EXPR, rhs1,
6234 build_zero_cst (TREE_TYPE (rhs1)),
6235 NULL_TREE, NULL_TREE);
6236 rhs1 = rhs2;
6238 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
6240 rhs2 = gimple_assign_rhs2 (stmt);
6241 if (TREE_CODE (rhs1) == INTEGER_CST)
6242 std::swap (rhs1, rhs2);
6243 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
6244 NULL_TREE, NULL_TREE);
6245 if (rhs_code == MAX_EXPR)
6246 std::swap (rhs1, rhs2);
6248 else
6250 g = gimple_build_cond (NE_EXPR, rhs1,
6251 build_zero_cst (TREE_TYPE (rhs1)),
6252 NULL_TREE, NULL_TREE);
6253 rhs1 = gimple_assign_rhs2 (stmt);
6254 rhs2 = gimple_assign_rhs3 (stmt);
6256 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6257 gimple_set_location (g, loc);
6258 edge e1 = split_block (gsi_bb (gsi), g);
6259 edge e2 = split_block (e1->dest, (gimple *) NULL);
6260 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
6261 e3->probability = profile_probability::even ();
6262 e1->flags = EDGE_TRUE_VALUE;
6263 e1->probability = e3->probability.invert ();
6264 if (dom_info_available_p (CDI_DOMINATORS))
6265 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
6266 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
6268 gsi = gsi_after_labels (e1->dest);
6269 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6270 NEGATE_EXPR, rhs1);
6271 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6272 gimple_set_location (g, loc);
6273 rhs2 = gimple_assign_lhs (g);
6274 std::swap (rhs1, rhs2);
6276 gsi = gsi_for_stmt (stmt);
6277 gsi_remove (&gsi, true);
6278 gphi *phi = create_phi_node (lhs, e2->dest);
6279 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
6280 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
6281 break;
6284 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6285 into memory. Such functions could have no large/huge SSA_NAMEs. */
6286 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6288 gimple *g = SSA_NAME_DEF_STMT (s);
6289 if (is_gimple_assign (g) && gimple_store_p (g))
6291 tree t = gimple_assign_rhs1 (g);
6292 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6293 && (bitint_precision_kind (TREE_TYPE (t))
6294 >= bitint_prec_large))
6295 has_large_huge = true;
6298 /* Similarly, e.g. with -frounding-math, casts from _BitInt
6299 INTEGER_CSTs to floating point types need to be rewritten. */
6300 else if (SCALAR_FLOAT_TYPE_P (type))
6302 gimple *g = SSA_NAME_DEF_STMT (s);
6303 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6305 tree t = gimple_assign_rhs1 (g);
6306 if (TREE_CODE (t) == INTEGER_CST
6307 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6308 && (bitint_precision_kind (TREE_TYPE (t))
6309 >= bitint_prec_large))
6310 has_large_huge = true;
6314 for (i = first_large_huge; i < num_ssa_names; ++i)
6316 tree s = ssa_name (i);
6317 if (s == NULL)
6318 continue;
6319 tree type = TREE_TYPE (s);
6320 if (TREE_CODE (type) == COMPLEX_TYPE)
6321 type = TREE_TYPE (type);
6322 if (TREE_CODE (type) == BITINT_TYPE
6323 && bitint_precision_kind (type) >= bitint_prec_large)
6325 use_operand_p use_p;
6326 gimple *use_stmt;
6327 has_large_huge = true;
6328 if (optimize
6329 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
6330 continue;
6331 /* Ignore large/huge _BitInt SSA_NAMEs which have a single use in
6332 the same bb and could be handled in the same loop with the
6333 immediate use. */
6334 if (optimize
6335 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6336 && single_imm_use (s, &use_p, &use_stmt)
6337 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
6339 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
6341 if (mergeable_op (use_stmt))
6342 continue;
6343 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6344 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6345 continue;
6346 if (gimple_assign_cast_p (use_stmt))
6348 tree lhs = gimple_assign_lhs (use_stmt);
6349 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6350 /* Don't merge with VIEW_CONVERT_EXPRs to
6351 huge INTEGER_TYPEs sometimes used in memcpy
6352 expansion. */
6353 && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
6354 || (TYPE_PRECISION (TREE_TYPE (lhs))
6355 <= MAX_FIXED_MODE_SIZE)))
6356 continue;
6358 else if (gimple_store_p (use_stmt)
6359 && is_gimple_assign (use_stmt)
6360 && !gimple_has_volatile_ops (use_stmt)
6361 && !stmt_ends_bb_p (use_stmt))
6362 continue;
6364 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6366 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6367 if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
6369 rhs1 = TREE_OPERAND (rhs1, 0);
6370 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6371 && !POINTER_TYPE_P (TREE_TYPE (rhs1))
6372 && gimple_store_p (use_stmt))
6373 continue;
6375 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6376 && ((is_gimple_assign (use_stmt)
6377 && (gimple_assign_rhs_code (use_stmt)
6378 != COMPLEX_EXPR))
6379 || gimple_code (use_stmt) == GIMPLE_COND)
6380 && (!gimple_store_p (use_stmt)
6381 || (is_gimple_assign (use_stmt)
6382 && !gimple_has_volatile_ops (use_stmt)
6383 && !stmt_ends_bb_p (use_stmt)))
6384 && (TREE_CODE (rhs1) != SSA_NAME
6385 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6387 if (is_gimple_assign (use_stmt))
6388 switch (gimple_assign_rhs_code (use_stmt))
6390 case TRUNC_DIV_EXPR:
6391 case TRUNC_MOD_EXPR:
6392 case FLOAT_EXPR:
6393 /* For division, modulo and casts to floating
6394 point, avoid representing unsigned operands
6395 using negative prec if they were sign-extended
6396 from a narrower precision. */
6397 if (TYPE_UNSIGNED (TREE_TYPE (s))
6398 && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
6399 && (TYPE_PRECISION (TREE_TYPE (s))
6400 > TYPE_PRECISION (TREE_TYPE (rhs1))))
6401 goto force_name;
6402 /* FALLTHRU */
6403 case MULT_EXPR:
6404 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6405 || (bitint_precision_kind (TREE_TYPE (rhs1))
6406 < bitint_prec_large))
6407 continue;
6408 /* Uses which go through handle_operand_addr can't
6409 deal with nested casts. */
6410 if (TREE_CODE (rhs1) == SSA_NAME
6411 && gimple_assign_cast_p
6412 (SSA_NAME_DEF_STMT (rhs1))
6413 && has_single_use (rhs1)
6414 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6415 == gimple_bb (SSA_NAME_DEF_STMT (s))))
6416 goto force_name;
6417 break;
6418 case VIEW_CONVERT_EXPR:
6420 tree lhs = gimple_assign_lhs (use_stmt);
6421 /* Don't merge with VIEW_CONVERT_EXPRs to
6422 non-integral types. */
6423 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6424 goto force_name;
6425 /* Don't merge with VIEW_CONVERT_EXPRs to
6426 huge INTEGER_TYPEs sometimes used in memcpy
6427 expansion. */
6428 if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
6429 && (TYPE_PRECISION (TREE_TYPE (lhs))
6430 > MAX_FIXED_MODE_SIZE))
6431 goto force_name;
6433 break;
6434 default:
6435 break;
6437 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6438 || (bitint_precision_kind (TREE_TYPE (rhs1))
6439 < bitint_prec_large))
6440 continue;
6441 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6442 >= TYPE_PRECISION (TREE_TYPE (s)))
6443 && mergeable_op (use_stmt))
6444 continue;
6445 /* Prevent merging a widening non-mergeable cast
6446 on the result of some narrower mergeable op
6447 together with later mergeable operations. E.g.
6448 the result of a _BitInt(223) addition shouldn't be
6449 sign-extended to _BitInt(513) and have another
6450 _BitInt(513) added to it, as handle_plus_minus
6451 with its PHI node handling inside of handle_cast
6452 will not work correctly. An exception is if
6453 use_stmt is a store; that is handled directly
6454 in lower_mergeable_stmt. */
6455 if (TREE_CODE (rhs1) != SSA_NAME
6456 || !has_single_use (rhs1)
6457 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6458 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6459 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6460 || gimple_store_p (use_stmt))
6461 continue;
6462 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6463 < TYPE_PRECISION (TREE_TYPE (s)))
6464 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6466 /* Another exception is if the widening cast is
6467 from a mergeable same-precision cast of something
6468 not mergeable. */
6469 tree rhs2
6470 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6471 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6472 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6473 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6475 if (TREE_CODE (rhs2) != SSA_NAME
6476 || !has_single_use (rhs2)
6477 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6478 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6479 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6480 continue;
6485 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6486 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6488 case IMAGPART_EXPR:
6490 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6491 rhs1 = TREE_OPERAND (rhs1, 0);
6492 if (TREE_CODE (rhs1) == SSA_NAME)
6494 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6495 if (optimizable_arith_overflow (g))
6496 continue;
6499 /* FALLTHRU */
6500 case LSHIFT_EXPR:
6501 case RSHIFT_EXPR:
6502 case MULT_EXPR:
6503 case TRUNC_DIV_EXPR:
6504 case TRUNC_MOD_EXPR:
6505 case FIX_TRUNC_EXPR:
6506 case REALPART_EXPR:
6507 if (gimple_store_p (use_stmt)
6508 && is_gimple_assign (use_stmt)
6509 && !gimple_has_volatile_ops (use_stmt)
6510 && !stmt_ends_bb_p (use_stmt))
6512 tree lhs = gimple_assign_lhs (use_stmt);
6513 /* As multiplication/division passes the address of the lhs
6514 to a library function which assumes it can extend it to a
6515 whole number of limbs, avoid merging those with bit-field
6516 stores. Don't allow it for shifts etc. either, so that
6517 the bit-field store handling doesn't have to be done
6518 everywhere. */
6519 if (TREE_CODE (lhs) == COMPONENT_REF
6520 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6521 break;
6522 continue;
6524 break;
6525 default:
6526 break;
6530 /* Also ignore uninitialized uses. */
6531 if (SSA_NAME_IS_DEFAULT_DEF (s)
6532 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6533 continue;
6535 force_name:
6536 if (!large_huge.m_names)
6537 large_huge.m_names = BITMAP_ALLOC (NULL);
6538 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6539 if (has_single_use (s))
6541 if (!large_huge.m_single_use_names)
6542 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6543 bitmap_set_bit (large_huge.m_single_use_names,
6544 SSA_NAME_VERSION (s));
6546 if (SSA_NAME_VAR (s)
6547 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6548 && SSA_NAME_IS_DEFAULT_DEF (s))
6549 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6550 has_large_huge_parm_result = true;
6551 if (optimize
6552 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6553 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6554 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6555 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6557 use_operand_p use_p;
6558 imm_use_iterator iter;
6559 bool optimizable_load = true;
6560 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6562 gimple *use_stmt = USE_STMT (use_p);
6563 if (is_gimple_debug (use_stmt))
6564 continue;
6565 if (gimple_code (use_stmt) == GIMPLE_PHI
6566 || is_gimple_call (use_stmt)
6567 || gimple_code (use_stmt) == GIMPLE_ASM)
6569 optimizable_load = false;
6570 break;
6574 ssa_op_iter oi;
6575 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6576 oi, SSA_OP_USE)
6578 tree s2 = USE_FROM_PTR (use_p);
6579 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6581 optimizable_load = false;
6582 break;
6586 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6588 if (!large_huge.m_loads)
6589 large_huge.m_loads = BITMAP_ALLOC (NULL);
6590 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6594 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6595 into memory. Such functions could have no large/huge SSA_NAMEs. */
6596 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6598 gimple *g = SSA_NAME_DEF_STMT (s);
6599 if (is_gimple_assign (g) && gimple_store_p (g))
6601 tree t = gimple_assign_rhs1 (g);
6602 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6603 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6604 has_large_huge = true;
6609 if (large_huge.m_names || has_large_huge)
6611 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6612 calculate_dominance_info (CDI_DOMINATORS);
6613 if (optimize)
6614 enable_ranger (cfun);
6615 if (large_huge.m_loads)
6617 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6618 entry->aux = NULL;
6619 bitint_dom_walker (large_huge.m_names,
6620 large_huge.m_loads).walk (entry);
6621 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6622 clear_aux_for_blocks ();
6623 BITMAP_FREE (large_huge.m_loads);
6625 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6626 large_huge.m_limb_size
6627 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
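/* Each coalesced partition of large/huge _BitInt SSA_NAMEs is backed
   by one variable; unless an existing PARM_DECL or RESULT_DECL can be
   reused, that is an addressable array of limbs, e.g. an array of
   8 64-bit limbs for _BitInt(512) on x86_64.  */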
6629 if (large_huge.m_names)
6631 large_huge.m_map
6632 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6633 coalesce_ssa_name (large_huge.m_map);
6634 partition_view_normal (large_huge.m_map);
6635 if (dump_file && (dump_flags & TDF_DETAILS))
6637 fprintf (dump_file, "After Coalescing:\n");
6638 dump_var_map (dump_file, large_huge.m_map);
6640 large_huge.m_vars
6641 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6642 bitmap_iterator bi;
6643 if (has_large_huge_parm_result)
6644 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6646 tree s = ssa_name (i);
6647 if (SSA_NAME_VAR (s)
6648 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6649 && SSA_NAME_IS_DEFAULT_DEF (s))
6650 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6652 int p = var_to_partition (large_huge.m_map, s);
6653 if (large_huge.m_vars[p] == NULL_TREE)
6655 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6656 mark_addressable (SSA_NAME_VAR (s));
6660 tree atype = NULL_TREE;
6661 if (dump_file && (dump_flags & TDF_DETAILS))
6662 fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
6663 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6665 tree s = ssa_name (i);
6666 int p = var_to_partition (large_huge.m_map, s);
6667 if (large_huge.m_vars[p] == NULL_TREE)
6669 if (atype == NULL_TREE
6670 || !tree_int_cst_equal (TYPE_SIZE (atype),
6671 TYPE_SIZE (TREE_TYPE (s))))
6673 unsigned HOST_WIDE_INT nelts
6674 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6675 atype = build_array_type_nelts (large_huge.m_limb_type,
6676 nelts);
6678 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6679 mark_addressable (large_huge.m_vars[p]);
6681 if (dump_file && (dump_flags & TDF_DETAILS))
6683 print_generic_expr (dump_file, s, TDF_SLIM);
6684 fprintf (dump_file, " -> ");
6685 print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
6686 fprintf (dump_file, "\n");
6691 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6693 gimple_stmt_iterator prev;
6694 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6695 gsi = prev)
6697 prev = gsi;
6698 gsi_prev (&prev);
6699 ssa_op_iter iter;
6700 gimple *stmt = gsi_stmt (gsi);
6701 if (is_gimple_debug (stmt))
6702 continue;
6703 bitint_prec_kind kind = bitint_prec_small;
6704 tree t;
6705 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6706 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6708 bitint_prec_kind this_kind
6709 = bitint_precision_kind (TREE_TYPE (t));
6710 kind = MAX (kind, this_kind);
6712 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6714 t = gimple_assign_rhs1 (stmt);
6715 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6717 bitint_prec_kind this_kind
6718 = bitint_precision_kind (TREE_TYPE (t));
6719 kind = MAX (kind, this_kind);
6722 if (is_gimple_assign (stmt)
6723 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6725 t = gimple_assign_rhs1 (stmt);
6726 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6727 && TREE_CODE (t) == INTEGER_CST)
6729 bitint_prec_kind this_kind
6730 = bitint_precision_kind (TREE_TYPE (t));
6731 kind = MAX (kind, this_kind);
6734 if (is_gimple_call (stmt))
6736 t = gimple_call_lhs (stmt);
6737 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6739 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6740 kind = MAX (kind, this_kind);
6741 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6743 this_kind
6744 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6745 kind = MAX (kind, this_kind);
6749 if (kind == bitint_prec_small)
6750 continue;
6751 switch (gimple_code (stmt))
6753 case GIMPLE_CALL:
6754 /* For now. We'll need to handle some internal functions and
6755 perhaps some builtins. */
6756 if (kind == bitint_prec_middle)
6757 continue;
6758 break;
6759 case GIMPLE_ASM:
6760 if (kind == bitint_prec_middle)
6761 continue;
6762 break;
6763 case GIMPLE_RETURN:
6764 continue;
6765 case GIMPLE_ASSIGN:
6766 if (gimple_clobber_p (stmt))
6767 continue;
6768 if (kind >= bitint_prec_large)
6769 break;
6770 if (gimple_assign_single_p (stmt))
6771 /* No need to lower copies, loads or stores. */
6772 continue;
6773 if (gimple_assign_cast_p (stmt))
6775 tree lhs = gimple_assign_lhs (stmt);
6776 tree rhs = gimple_assign_rhs1 (stmt);
6777 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6778 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6779 && (TYPE_PRECISION (TREE_TYPE (lhs))
6780 == TYPE_PRECISION (TREE_TYPE (rhs))))
6781 /* No need to lower casts to same precision. */
6782 continue;
6784 break;
6785 default:
6786 break;
6789 if (kind == bitint_prec_middle)
6791 tree type = NULL_TREE;
6792 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6793 with the same precision and back. */
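/* E.g. on x86_64, where _BitInt(65) to _BitInt(128) are middle,
   c = a + b on _BitInt(100) operands becomes roughly
     a.0 = (100-bit INTEGER_TYPE) a;
     b.0 = (100-bit INTEGER_TYPE) b;
     c.0 = a.0 + b.0;
     c = (_BitInt(100)) c.0;
   with the signedness of each type preserved.  */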
6794 unsigned int nops = gimple_num_ops (stmt);
6795 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6796 i < nops; ++i)
6797 if (tree op = gimple_op (stmt, i))
6799 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6800 if (nop != op)
6801 gimple_set_op (stmt, i, nop);
6802 else if (COMPARISON_CLASS_P (op))
6804 TREE_OPERAND (op, 0)
6805 = maybe_cast_middle_bitint (&gsi,
6806 TREE_OPERAND (op, 0),
6807 type);
6808 TREE_OPERAND (op, 1)
6809 = maybe_cast_middle_bitint (&gsi,
6810 TREE_OPERAND (op, 1),
6811 type);
6813 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6815 CASE_LOW (op)
6816 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6817 type);
6818 CASE_HIGH (op)
6819 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6820 type);
6823 if (tree lhs = gimple_get_lhs (stmt))
6824 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6825 && (bitint_precision_kind (TREE_TYPE (lhs))
6826 == bitint_prec_middle))
6828 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6829 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6830 type = build_nonstandard_integer_type (prec, uns);
6831 tree lhs2 = make_ssa_name (type);
6832 gimple_set_lhs (stmt, lhs2);
6833 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6834 if (stmt_ends_bb_p (stmt))
6836 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6837 gsi_insert_on_edge_immediate (e, g);
6839 else
6840 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6842 update_stmt (stmt);
6843 continue;
6846 if (tree lhs = gimple_get_lhs (stmt))
6847 if (TREE_CODE (lhs) == SSA_NAME)
6849 tree type = TREE_TYPE (lhs);
6850 if (TREE_CODE (type) == COMPLEX_TYPE)
6851 type = TREE_TYPE (type);
6852 if (TREE_CODE (type) == BITINT_TYPE
6853 && bitint_precision_kind (type) >= bitint_prec_large
6854 && (large_huge.m_names == NULL
6855 || !bitmap_bit_p (large_huge.m_names,
6856 SSA_NAME_VERSION (lhs))))
6857 continue;
6860 large_huge.lower_stmt (stmt);
6863 tree atype = NULL_TREE;
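/* PHI nodes with a large/huge _BitInt result are lowered by emitting
   an assignment to the partition's backing variable on each incoming
   edge.  INTEGER_CST arguments are shrunk to the minimum number of
   limbs worth putting into .rodata; the remaining limbs are then
   filled in with an explicit zero store or a memset of -1 bytes,
   depending on the sign extension.  */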
6864 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6865 gsi_next (&gsi))
6867 gphi *phi = gsi.phi ();
6868 tree lhs = gimple_phi_result (phi);
6869 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6870 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6871 continue;
6872 int p1 = var_to_partition (large_huge.m_map, lhs);
6873 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6874 tree v1 = large_huge.m_vars[p1];
6875 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6877 tree arg = gimple_phi_arg_def (phi, i);
6878 edge e = gimple_phi_arg_edge (phi, i);
6879 gimple *g;
6880 switch (TREE_CODE (arg))
6882 case INTEGER_CST:
6883 if (integer_zerop (arg) && VAR_P (v1))
6885 tree zero = build_zero_cst (TREE_TYPE (v1));
6886 g = gimple_build_assign (v1, zero);
6887 gsi_insert_on_edge (e, g);
6888 edge_insertions = true;
6889 break;
6891 int ext;
6892 unsigned int min_prec, prec, rem;
6893 tree c;
6894 prec = TYPE_PRECISION (TREE_TYPE (arg));
6895 rem = prec % (2 * limb_prec);
6896 min_prec = bitint_min_cst_precision (arg, ext);
6897 if (min_prec > prec - rem - 2 * limb_prec
6898 && min_prec > (unsigned) limb_prec)
6899 /* The constant has enough significant bits that it isn't
6900 worth trying to save .rodata space by extending it from a
6901 smaller number. */
6902 min_prec = prec;
6903 else
6904 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6905 if (min_prec == 0)
6906 c = NULL_TREE;
6907 else if (min_prec == prec)
6908 c = tree_output_constant_def (arg);
6909 else if (min_prec == (unsigned) limb_prec)
6910 c = fold_convert (large_huge.m_limb_type, arg);
6911 else
6913 tree ctype = build_bitint_type (min_prec, 1);
6914 c = tree_output_constant_def (fold_convert (ctype, arg));
6916 if (c)
6918 if (VAR_P (v1) && min_prec == prec)
6920 tree v2 = build1 (VIEW_CONVERT_EXPR,
6921 TREE_TYPE (v1), c);
6922 g = gimple_build_assign (v1, v2);
6923 gsi_insert_on_edge (e, g);
6924 edge_insertions = true;
6925 break;
6927 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6928 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6929 TREE_TYPE (c), v1),
6930 c);
6931 else
6933 unsigned HOST_WIDE_INT nelts
6934 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6935 / limb_prec;
6936 tree vtype
6937 = build_array_type_nelts (large_huge.m_limb_type,
6938 nelts);
6939 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6940 vtype, v1),
6941 build1 (VIEW_CONVERT_EXPR,
6942 vtype, c));
6944 gsi_insert_on_edge (e, g);
6946 if (ext == 0)
6948 unsigned HOST_WIDE_INT nelts
6949 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6950 - min_prec) / limb_prec;
6951 tree vtype
6952 = build_array_type_nelts (large_huge.m_limb_type,
6953 nelts);
6954 tree ptype = build_pointer_type (TREE_TYPE (v1));
6955 tree off;
6956 if (c)
6957 off = fold_convert (ptype,
6958 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6959 else
6960 off = build_zero_cst (ptype);
6961 tree vd = build2 (MEM_REF, vtype,
6962 build_fold_addr_expr (v1), off);
6963 g = gimple_build_assign (vd, build_zero_cst (vtype));
6965 else
6967 tree vd = v1;
6968 if (c)
6970 tree ptype = build_pointer_type (TREE_TYPE (v1));
6971 tree off
6972 = fold_convert (ptype,
6973 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6974 vd = build2 (MEM_REF, large_huge.m_limb_type,
6975 build_fold_addr_expr (v1), off);
6977 vd = build_fold_addr_expr (vd);
6978 unsigned HOST_WIDE_INT nbytes
6979 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6980 if (c)
6981 nbytes
6982 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6983 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6984 g = gimple_build_call (fn, 3, vd,
6985 integer_minus_one_node,
6986 build_int_cst (sizetype,
6987 nbytes));
6989 gsi_insert_on_edge (e, g);
6990 edge_insertions = true;
6991 break;
6992 default:
6993 gcc_unreachable ();
6994 case SSA_NAME:
6995 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6997 if (large_huge.m_names == NULL
6998 || !bitmap_bit_p (large_huge.m_names,
6999 SSA_NAME_VERSION (arg)))
7000 continue;
7002 int p2 = var_to_partition (large_huge.m_map, arg);
7003 if (p1 == p2)
7004 continue;
7005 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
7006 tree v2 = large_huge.m_vars[p2];
7007 if (VAR_P (v1) && VAR_P (v2))
7008 g = gimple_build_assign (v1, v2);
7009 else if (VAR_P (v1))
7010 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
7011 TREE_TYPE (v1), v2));
7012 else if (VAR_P (v2))
7013 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
7014 TREE_TYPE (v2), v1), v2);
7015 else
7017 if (atype == NULL_TREE
7018 || !tree_int_cst_equal (TYPE_SIZE (atype),
7019 TYPE_SIZE (TREE_TYPE (lhs))))
7021 unsigned HOST_WIDE_INT nelts
7022 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
7023 / limb_prec;
7024 atype
7025 = build_array_type_nelts (large_huge.m_limb_type,
7026 nelts);
7028 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
7029 atype, v1),
7030 build1 (VIEW_CONVERT_EXPR,
7031 atype, v2));
7033 gsi_insert_on_edge (e, g);
7034 edge_insertions = true;
7035 break;
7041 if (large_huge.m_names || has_large_huge)
7043 gimple *nop = NULL;
7044 for (i = 0; i < num_ssa_names; ++i)
7046 tree s = ssa_name (i);
7047 if (s == NULL_TREE)
7048 continue;
7049 tree type = TREE_TYPE (s);
7050 if (TREE_CODE (type) == COMPLEX_TYPE)
7051 type = TREE_TYPE (type);
7052 if (TREE_CODE (type) == BITINT_TYPE
7053 && bitint_precision_kind (type) >= bitint_prec_large)
7055 if (large_huge.m_preserved
7056 && bitmap_bit_p (large_huge.m_preserved,
7057 SSA_NAME_VERSION (s)))
7058 continue;
7059 gimple *g = SSA_NAME_DEF_STMT (s);
7060 if (gimple_code (g) == GIMPLE_NOP)
7062 if (SSA_NAME_VAR (s))
7063 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
7064 release_ssa_name (s);
7065 continue;
7067 if (gimple_bb (g) == NULL)
7069 release_ssa_name (s);
7070 continue;
7072 if (gimple_code (g) != GIMPLE_ASM)
7074 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7075 bool save_vta = flag_var_tracking_assignments;
7076 flag_var_tracking_assignments = false;
7077 gsi_remove (&gsi, true);
7078 flag_var_tracking_assignments = save_vta;
7080 if (nop == NULL)
7081 nop = gimple_build_nop ();
7082 SSA_NAME_DEF_STMT (s) = nop;
7083 release_ssa_name (s);
7086 if (optimize)
7087 disable_ranger (cfun);
7090 if (edge_insertions)
7091 gsi_commit_edge_inserts ();
7093 return ret;
7096 namespace {
7098 const pass_data pass_data_lower_bitint =
7100 GIMPLE_PASS, /* type */
7101 "bitintlower", /* name */
7102 OPTGROUP_NONE, /* optinfo_flags */
7103 TV_NONE, /* tv_id */
7104 PROP_ssa, /* properties_required */
7105 PROP_gimple_lbitint, /* properties_provided */
7106 0, /* properties_destroyed */
7107 0, /* todo_flags_start */
7108 0, /* todo_flags_finish */
7111 class pass_lower_bitint : public gimple_opt_pass
7113 public:
7114 pass_lower_bitint (gcc::context *ctxt)
7115 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
7118 /* opt_pass methods: */
7119 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
7120 unsigned int execute (function *) final override
7122 return gimple_lower_bitint ();
7125 }; // class pass_lower_bitint
7127 } // anon namespace
7129 gimple_opt_pass *
7130 make_pass_lower_bitint (gcc::context *ctxt)
7132 return new pass_lower_bitint (ctxt);
7136 namespace {
7138 const pass_data pass_data_lower_bitint_O0 =
7140 GIMPLE_PASS, /* type */
7141 "bitintlower0", /* name */
7142 OPTGROUP_NONE, /* optinfo_flags */
7143 TV_NONE, /* tv_id */
7144 PROP_cfg, /* properties_required */
7145 PROP_gimple_lbitint, /* properties_provided */
7146 0, /* properties_destroyed */
7147 0, /* todo_flags_start */
7148 0, /* todo_flags_finish */
7151 class pass_lower_bitint_O0 : public gimple_opt_pass
7153 public:
7154 pass_lower_bitint_O0 (gcc::context *ctxt)
7155 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
7158 /* opt_pass methods: */
7159 bool gate (function *fun) final override
7161 /* With errors, normal optimization passes are not run. If we don't
7162 lower bitint operations at all, rtl expansion will abort. */
7163 return !(fun->curr_properties & PROP_gimple_lbitint);
7166 unsigned int execute (function *) final override
7168 return gimple_lower_bitint ();
7171 }; // class pass_lower_bitint_O0
7173 } // anon namespace
7175 gimple_opt_pass *
7176 make_pass_lower_bitint_O0 (gcc::context *ctxt)
7178 return new pass_lower_bitint_O0 (ctxt);