/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code and
   finally huge _BitInt which should be handled by loops over the limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
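
/* As a concrete illustration of the categories (this just mirrors the
   x86_64 example from the comment above, it is not used by the code):
   with a 64-bit limb and MAX_FIXED_MODE_SIZE of 128, _BitInt(64) and
   narrower are small, _BitInt(65) to _BitInt(128) are middle,
   _BitInt(129) to _BitInt(255) are large (huge_min_prec
   = 4 * limb_prec = 256) and _BitInt(256) and wider are huge.  */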
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}
/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  ext = -1;
	  return min_prec2;
	}
    }
  return min_prec;
}
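
/* Worked example (illustrative only): for a signed _BitInt(512)
   constant -5, the wide_int needs 4 bits as a signed value (0b1011),
   the sign bit is not counted, so 3 is returned with EXT = -1,
   i.e. all bits from bit 3 up to bit 511 are ones.  */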
namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
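
/* E.g. given _BitInt(512) a, b, c, d, the statement a = (b + c) & d
   consists solely of mergeable operations and so can be evaluated
   limb by limb in one loop, while a multiplication or a shift by
   limb_prec or more bits is not mergeable and has to be lowered
   separately (illustrative note).  */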
/* Return non-zero if STMT is a .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  return seen == 3 ? 2 : 1;
}
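
/* The shape being matched above is e.g. (the return value 2 case):
     _1 = .ADD_OVERFLOW (a_2(D), b_3(D));
     _4 = REALPART_EXPR <_1>;
     _5 = IMAGPART_EXPR <_1>;
     _6 = (int) _5;
   with all statements in the same basic block; without the
   REALPART_EXPR use 1 is returned instead (illustrative GIMPLE).  */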
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
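
/* E.g. for a VAR_DECL v of type _BitInt(256) on a target with 64-bit
   limbs, limb_access with constant idx 2 builds roughly
     MEM[(unsigned long *) &v + 16B]
   whereas a non-constant idx takes the VIEW_CONVERT_EXPR/ARRAY_REF
   path above (illustrative; the exact limb type is target
   dependent).  */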
/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
   new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     |  /  \
     | /    \
     |new_bb1 new_bb2
     |   |    /
     \   |   /
      \  |  /
       \ | /
        \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]), CLOBBER_EOL);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % (2 * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out);
	      g = gimple_build_assign (m_data[m_data_cnt + 1],
				       build_int_cst (m_limb_type, ext));
	      insert_before (g);
	      m_data[m_data_cnt + 1] = gimple_assign_rhs1 (g);
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % (2 * limb_prec);
	  int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  g = gimple_build_cond (LT_EXPR, idx,
				 size_int (min_prec / limb_prec),
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      c = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
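
/* Illustrative description: handle_plus_minus below calls
   prepare_data_in_out (0, idx, &data_out); with non-constant IDX
   inside of a loop this creates in the loop header something like
     carry_in = PHI <0 (preheader), carry_out (back edge)>
   and pushes the (in, out) pair to m_data, while with constant IDX
   the value is just used directly.  */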
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
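
/* On targets providing the uaddc5/usubc5 optabs the code above emits
   per limb roughly
     _1 = .UADDC (limb1, limb2, carry_in);
     res_limb = REALPART_EXPR <_1>;
     carry_out = IMAGPART_EXPR <_1>;
   otherwise it falls back to a pair of .ADD_OVERFLOW/.SUB_OVERFLOW
   calls whose IMAGPART_EXPR results are summed into the carry
   (illustrative GIMPLE).  */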
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   a count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
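
/* I.e. for a shift count CNT in [1, limb_prec - 1] each limb is
   computed as
     lhs = (rhs1 << CNT) | (prev_limb >> (limb_prec - CNT));
   where prev_limb is the less significant limb remembered in
   data_out/data_in between the iterations.  */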
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision that is not the case.  */
	  || (!m_var_msb
	      && tree_int_cst_equal (TYPE_SIZE (rhs_type),
				     TYPE_SIZE (lhs_type))
	      && (!m_upwards_2limb
		  || (m_upwards_2limb * limb_prec
		      < TYPE_PRECISION (lhs_type)))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && (m_upwards_2limb * limb_prec
		  <= ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type))))
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idx, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
	      insert_before (g);
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1);
	      g = gimple_build_assign (m_data[m_data_cnt + 1], rext);
	      insert_before (g);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2);
		  g = gimple_build_assign (m_data[m_data_cnt + 3], rext);
		  insert_before (g);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
1623 /* Helper function for handle_stmt method, handle a load from memory. */
1625 tree
1626 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1628 tree rhs1 = gimple_assign_rhs1 (stmt);
1629 tree rhs_type = TREE_TYPE (rhs1);
1630 bool eh = stmt_ends_bb_p (stmt);
1631 edge eh_edge = NULL;
1632 gimple *g;
1634 if (eh)
1636 edge_iterator ei;
1637 basic_block bb = gimple_bb (stmt);
1639 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1640 if (eh_edge->flags & EDGE_EH)
1641 break;
1644 if (TREE_CODE (rhs1) == COMPONENT_REF
1645 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1647 tree fld = TREE_OPERAND (rhs1, 1);
1648 /* For little-endian, we can allow as inputs bit-fields
1649 which start at a limb boundary. */
1650 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1651 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1652 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1653 goto normal_load;
1654 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1655 handle it normally for now. */
1656 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1657 goto normal_load;
1658 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1659 poly_int64 bitoffset;
1660 poly_uint64 field_offset, repr_offset;
1661 bool var_field_off = false;
1662 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1663 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1664 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1665 else
1667 bitoffset = 0;
1668 var_field_off = true;
1670 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1671 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1672 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1673 TREE_OPERAND (rhs1, 0), repr,
1674 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1675 HOST_WIDE_INT bo = bitoffset.to_constant ();
1676 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1677 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1678 if (m_first)
1680 if (m_upwards)
1682 gimple_stmt_iterator save_gsi = m_gsi;
1683 m_gsi = m_init_gsi;
1684 if (gsi_end_p (m_gsi))
1685 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1686 else
1687 gsi_next (&m_gsi);
1688 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1689 tree iv = make_ssa_name (m_limb_type);
1690 g = gimple_build_assign (iv, t);
1691 insert_before (g);
1692 if (eh)
1694 maybe_duplicate_eh_stmt (g, stmt);
1695 if (eh_edge)
1697 edge e = split_block (gsi_bb (m_gsi), g);
1698 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1699 = profile_probability::very_unlikely ();
1700 m_gsi = gsi_after_labels (e->dest);
1701 if (gsi_bb (save_gsi) == e->src)
1703 if (gsi_end_p (save_gsi))
1704 save_gsi = gsi_end_bb (e->dest);
1705 else
1706 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1708 if (m_preheader_bb == e->src)
1709 m_preheader_bb = e->dest;
1712 m_init_gsi = m_gsi;
1713 if (gsi_end_p (m_init_gsi))
1714 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1715 else
1716 gsi_prev (&m_init_gsi);
1717 m_gsi = save_gsi;
1718 tree out;
1719 prepare_data_in_out (iv, idx, &out);
1720 out = m_data[m_data_cnt];
1721 m_data.safe_push (out);
1723 else
1725 m_data.safe_push (NULL_TREE);
1726 m_data.safe_push (NULL_TREE);
1727 m_data.safe_push (NULL_TREE);
1731 tree nidx0 = NULL_TREE, nidx1;
1732 tree iv = m_data[m_data_cnt];
1733 if (m_cast_conditional && iv)
1735 gcc_assert (!m_bitfld_load);
1736 m_bitfld_load = m_data_cnt;
1738 if (tree_fits_uhwi_p (idx))
1740 unsigned prec = TYPE_PRECISION (rhs_type);
1741 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1742 gcc_assert (i * limb_prec < prec);
1743 nidx1 = size_int (i + bo_idx + 1);
1744 if ((i + 1) * limb_prec > prec)
1746 prec %= limb_prec;
1747 if (prec + bo_bit <= (unsigned) limb_prec)
1748 nidx1 = NULL_TREE;
1750 if (!iv)
1751 nidx0 = size_int (i + bo_idx);
1753 else
1755 if (!iv)
1757 if (bo_idx == 0)
1758 nidx0 = idx;
1759 else
1761 nidx0 = make_ssa_name (sizetype);
1762 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1763 size_int (bo_idx));
1764 insert_before (g);
1767 nidx1 = make_ssa_name (sizetype);
1768 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1769 size_int (bo_idx + 1));
1770 insert_before (g);
1773 tree iv2 = NULL_TREE;
1774 if (nidx0)
1776 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1777 iv = make_ssa_name (m_limb_type);
1778 g = gimple_build_assign (iv, t);
1779 insert_before (g);
1780 gcc_assert (!eh);
1782 if (nidx1)
1784 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1785 unsigned prec = TYPE_PRECISION (rhs_type);
1786 if (conditional)
1788 if ((prec % limb_prec) == 0
1789 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1790 conditional = false;
1792 edge edge_true = NULL, edge_false = NULL;
1793 if (conditional)
1795 g = gimple_build_cond (NE_EXPR, idx,
1796 size_int (prec / limb_prec),
1797 NULL_TREE, NULL_TREE);
1798 if_then (g, profile_probability::likely (),
1799 edge_true, edge_false);
1801 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1802 if (m_upwards_2limb
1803 && !m_first
1804 && !m_bitfld_load
1805 && !tree_fits_uhwi_p (idx))
1806 iv2 = m_data[m_data_cnt + 1];
1807 else
1808 iv2 = make_ssa_name (m_limb_type);
1809 g = gimple_build_assign (iv2, t);
1810 insert_before (g);
1811 if (eh)
1812 {
1813 maybe_duplicate_eh_stmt (g, stmt);
1814 if (eh_edge)
1815 {
1816 edge e = split_block (gsi_bb (m_gsi), g);
1817 m_gsi = gsi_after_labels (e->dest);
1818 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1819 = profile_probability::very_unlikely ();
1820 }
1821 }
1822 if (conditional)
1823 {
1824 tree iv3 = make_ssa_name (m_limb_type);
1825 if (eh)
1826 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1827 gphi *phi = create_phi_node (iv3, edge_true->dest);
1828 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1829 add_phi_arg (phi, build_zero_cst (m_limb_type),
1830 edge_false, UNKNOWN_LOCATION);
1831 m_gsi = gsi_after_labels (edge_true->dest);
1832 }
1833 }
1834 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1835 iv, build_int_cst (unsigned_type_node, bo_bit));
1836 insert_before (g);
1837 iv = gimple_assign_lhs (g);
1838 if (iv2)
1839 {
1840 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1841 iv2, build_int_cst (unsigned_type_node,
1842 limb_prec - bo_bit));
1843 insert_before (g);
1844 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1845 gimple_assign_lhs (g), iv);
1846 insert_before (g);
1847 iv = gimple_assign_lhs (g);
1848 if (m_data[m_data_cnt])
1849 m_data[m_data_cnt] = iv2;
1850 }
1851 if (tree_fits_uhwi_p (idx))
1852 {
1853 tree atype = limb_access_type (rhs_type, idx);
1854 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1855 iv = add_cast (atype, iv);
1856 }
1857 m_data_cnt += 3;
1858 return iv;
1859 }
1860
1861 normal_load:
1862 /* Use write_p = true for loads with EH edges to make
1863 sure limb_access doesn't add a cast as a separate
1864 statement after it. */
1865 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1866 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1867 g = gimple_build_assign (ret, rhs1);
1868 insert_before (g);
1869 if (eh)
1870 {
1871 maybe_duplicate_eh_stmt (g, stmt);
1872 if (eh_edge)
1873 {
1874 edge e = split_block (gsi_bb (m_gsi), g);
1875 m_gsi = gsi_after_labels (e->dest);
1876 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1877 = profile_probability::very_unlikely ();
1878 }
1879 if (tree_fits_uhwi_p (idx))
1880 {
1881 tree atype = limb_access_type (rhs_type, idx);
1882 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1883 ret = add_cast (atype, ret);
1884 }
1885 }
1886 return ret;
1887 }
1888
1889 /* Return limb IDX of the result of the mergeable statement STMT. */
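/* Editorial illustration (not part of the original sources): for a
   statement like a_2 = b_3 ^ c_4 this recurses via handle_operand on
   both operands at limb IDX and emits a single limb-sized
   BIT_XOR_EXPR, while PLUS_EXPR/MINUS_EXPR go through
   handle_plus_minus so that the inter-limb carry can be threaded
   between limbs.  */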
1891 tree
1892 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1893 {
1894 tree lhs, rhs1, rhs2 = NULL_TREE;
1895 gimple *g;
1896 switch (gimple_code (stmt))
1897 {
1898 case GIMPLE_ASSIGN:
1899 if (gimple_assign_load_p (stmt))
1900 return handle_load (stmt, idx);
1901 switch (gimple_assign_rhs_code (stmt))
1902 {
1903 case BIT_AND_EXPR:
1904 case BIT_IOR_EXPR:
1905 case BIT_XOR_EXPR:
1906 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1907 /* FALLTHRU */
1908 case BIT_NOT_EXPR:
1909 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1910 lhs = make_ssa_name (TREE_TYPE (rhs1));
1911 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1912 rhs1, rhs2);
1913 insert_before (g);
1914 return lhs;
1915 case PLUS_EXPR:
1916 case MINUS_EXPR:
1917 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1918 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1919 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1920 rhs1, rhs2, idx);
1921 case NEGATE_EXPR:
1922 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1923 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1924 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1925 case LSHIFT_EXPR:
1926 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1927 idx),
1928 gimple_assign_rhs2 (stmt), idx);
1929 case SSA_NAME:
1930 case INTEGER_CST:
1931 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1932 CASE_CONVERT:
1933 case VIEW_CONVERT_EXPR:
1934 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1935 gimple_assign_rhs1 (stmt), idx);
1936 default:
1937 break;
1938 }
1939 break;
1940 default:
1941 break;
1942 }
1943 gcc_unreachable ();
1944 }
1945
1946 /* Return the minimum precision of OP at STMT.
1947 A positive value is the minimum precision above which all bits
1948 are zero; a negative value means all bits above the negation of
1949 the value are copies of the sign bit. */
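/* Editorial examples: an operand whose computed range is [0, 1000]
   yields 10, since all bits above bit 9 are known to be zero, while
   a range of [-5, 5] yields -4, since all bits above bit 3 are
   copies of the sign bit.  */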
1951 static int
1952 range_to_prec (tree op, gimple *stmt)
1953 {
1954 int_range_max r;
1955 wide_int w;
1956 tree type = TREE_TYPE (op);
1957 unsigned int prec = TYPE_PRECISION (type);
1959 if (!optimize
1960 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1961 || r.undefined_p ())
1962 {
1963 if (TYPE_UNSIGNED (type))
1964 return prec;
1965 else
1966 return -prec;
1967 }
1968
1969 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1970 {
1971 w = r.lower_bound ();
1972 if (wi::neg_p (w))
1973 {
1974 int min_prec1 = wi::min_precision (w, SIGNED);
1975 w = r.upper_bound ();
1976 int min_prec2 = wi::min_precision (w, SIGNED);
1977 int min_prec = MAX (min_prec1, min_prec2);
1978 return MIN (-min_prec, -2);
1979 }
1980 }
1981
1982 w = r.upper_bound ();
1983 int min_prec = wi::min_precision (w, UNSIGNED);
1984 return MAX (min_prec, 1);
1985 }
1986
1987 /* Return the address of the first limb of OP and write its
1988 precision into *PREC. If *PREC is positive, the operand is
1989 zero-extended from that precision; if it is negative, the operand
1990 is sign-extended from -*PREC. If PREC_STORED is NULL, this is the
1991 toplevel call; otherwise *PREC_STORED is the precision from the
1992 innermost call without range optimizations. */
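/* Editorial illustration: for the INTEGER_CST 255 this returns the
   address of a constant pool entry and sets *PREC to 8, while for a
   signed _BitInt(135) SSA_NAME it returns the address of the
   underlying variable and *PREC defaults to -135 unless range
   information proves a narrower precision.  */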
1994 tree
1995 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
1996 int *prec_stored, int *prec)
1998 wide_int w;
1999 location_t loc_save = m_loc;
2000 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2001 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2002 && TREE_CODE (op) != INTEGER_CST)
2004 do_int:
2005 *prec = range_to_prec (op, stmt);
2006 bitint_prec_kind kind = bitint_prec_small;
2007 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2008 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2009 kind = bitint_precision_kind (TREE_TYPE (op));
2010 if (kind == bitint_prec_middle)
2012 tree type = NULL_TREE;
2013 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2015 tree op_type = TREE_TYPE (op);
2016 unsigned HOST_WIDE_INT nelts
2017 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2018 /* Add support for 3 or more limbs filled in from normal
2019 integral type if this assert fails. If no target chooses
2020 limb mode smaller than half of largest supported normal
2021 integral type, this will not be needed. */
2022 gcc_assert (nelts <= 2);
2023 if (prec_stored)
2024 *prec_stored = (TYPE_UNSIGNED (op_type)
2025 ? TYPE_PRECISION (op_type)
2026 : -TYPE_PRECISION (op_type));
2027 if (*prec <= limb_prec && *prec >= -limb_prec)
2029 nelts = 1;
2030 if (prec_stored)
2032 if (TYPE_UNSIGNED (op_type))
2034 if (*prec_stored > limb_prec)
2035 *prec_stored = limb_prec;
2037 else if (*prec_stored < -limb_prec)
2038 *prec_stored = -limb_prec;
2041 tree atype = build_array_type_nelts (m_limb_type, nelts);
2042 tree var = create_tmp_var (atype);
2043 tree t1 = op;
2044 if (!useless_type_conversion_p (m_limb_type, op_type))
2045 t1 = add_cast (m_limb_type, t1);
2046 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2047 NULL_TREE, NULL_TREE);
2048 gimple *g = gimple_build_assign (v, t1);
2049 insert_before (g);
2050 if (nelts > 1)
2052 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2053 g = gimple_build_assign (make_ssa_name (op_type),
2054 RSHIFT_EXPR, op, lp);
2055 insert_before (g);
2056 tree t2 = gimple_assign_lhs (g);
2057 t2 = add_cast (m_limb_type, t2);
2058 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2059 NULL_TREE, NULL_TREE);
2060 g = gimple_build_assign (v, t2);
2061 insert_before (g);
2063 tree ret = build_fold_addr_expr (var);
2064 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2066 tree clobber = build_clobber (atype, CLOBBER_EOL);
2067 g = gimple_build_assign (var, clobber);
2068 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2070 m_loc = loc_save;
2071 return ret;
2073 switch (TREE_CODE (op))
2075 case SSA_NAME:
2076 if (m_names == NULL
2077 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2079 gimple *g = SSA_NAME_DEF_STMT (op);
2080 tree ret;
2081 m_loc = gimple_location (g);
2082 if (gimple_assign_load_p (g))
2084 *prec = range_to_prec (op, NULL);
2085 if (prec_stored)
2086 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2087 ? TYPE_PRECISION (TREE_TYPE (op))
2088 : -TYPE_PRECISION (TREE_TYPE (op)));
2089 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2090 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2091 NULL_TREE, true, GSI_SAME_STMT);
2093 else if (gimple_code (g) == GIMPLE_NOP)
2095 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2096 if (prec_stored)
2097 *prec_stored = *prec;
2098 tree var = create_tmp_var (m_limb_type);
2099 TREE_ADDRESSABLE (var) = 1;
2100 ret = build_fold_addr_expr (var);
2101 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2103 tree clobber = build_clobber (m_limb_type, CLOBBER_EOL);
2104 g = gimple_build_assign (var, clobber);
2105 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2108 else
2110 gcc_assert (gimple_assign_cast_p (g));
2111 tree rhs1 = gimple_assign_rhs1 (g);
2112 bitint_prec_kind kind = bitint_prec_small;
2113 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2114 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2115 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2116 if (kind >= bitint_prec_large)
2118 tree lhs_type = TREE_TYPE (op);
2119 tree rhs_type = TREE_TYPE (rhs1);
2120 int prec_stored_val = 0;
2121 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2122 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2124 if (TYPE_UNSIGNED (lhs_type)
2125 && !TYPE_UNSIGNED (rhs_type))
2126 gcc_assert (*prec >= 0 || prec_stored == NULL);
2128 else
2130 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2132 else if (TYPE_UNSIGNED (lhs_type))
2134 gcc_assert (*prec > 0
2135 || prec_stored_val > 0
2136 || (-prec_stored_val
2137 >= TYPE_PRECISION (lhs_type)));
2138 *prec = TYPE_PRECISION (lhs_type);
2140 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2142 else
2143 *prec = -TYPE_PRECISION (lhs_type);
2146 else
2148 op = rhs1;
2149 stmt = g;
2150 goto do_int;
2153 m_loc = loc_save;
2154 return ret;
2156 else
2158 int p = var_to_partition (m_map, op);
2159 gcc_assert (m_vars[p] != NULL_TREE);
2160 *prec = range_to_prec (op, stmt);
2161 if (prec_stored)
2162 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2163 ? TYPE_PRECISION (TREE_TYPE (op))
2164 : -TYPE_PRECISION (TREE_TYPE (op)));
2165 return build_fold_addr_expr (m_vars[p]);
2167 case INTEGER_CST:
2168 unsigned int min_prec, mp;
2169 tree type;
2170 w = wi::to_wide (op);
2171 if (tree_int_cst_sgn (op) >= 0)
2173 min_prec = wi::min_precision (w, UNSIGNED);
2174 *prec = MAX (min_prec, 1);
2176 else
2178 min_prec = wi::min_precision (w, SIGNED);
2179 *prec = MIN ((int) -min_prec, -2);
2181 mp = CEIL (min_prec, limb_prec) * limb_prec;
2182 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2183 type = TREE_TYPE (op);
2184 else
2185 type = build_bitint_type (mp, 1);
2186 if (TREE_CODE (type) != BITINT_TYPE
2187 || bitint_precision_kind (type) == bitint_prec_small)
2189 if (TYPE_PRECISION (type) <= limb_prec)
2190 type = m_limb_type;
2191 else
2192 /* This case is for targets which e.g. have a 64-bit
2193 limb but categorize up to 128-bit _BitInts as
2194 small. We could use the type of m_limb_type[2] and
2195 similar instead to save space. */
2196 type = build_bitint_type (mid_min_prec, 1);
2198 if (prec_stored)
2200 if (tree_int_cst_sgn (op) >= 0)
2201 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2202 else
2203 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2205 op = tree_output_constant_def (fold_convert (type, op));
2206 return build_fold_addr_expr (op);
2207 default:
2208 gcc_unreachable ();
2212 /* Helper function: create a loop before the current location,
2213 starting with the sizetype INIT value on the preheader edge.
2214 Return the PHI result and set *IDX_NEXT to the SSA_NAME the PHI
2215 uses on the latch edge. */
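/* Editorial sketch of the emitted CFG: a single-block loop whose
   header carries idx = PHI <INIT (preheader), *IDX_NEXT (self back
   edge)>; the caller is expected to emit the loop body, the
   *IDX_NEXT increment and the exit condition whose true edge is the
   back edge.  */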
2217 tree
2218 bitint_large_huge::create_loop (tree init, tree *idx_next)
2219 {
2220 if (!gsi_end_p (m_gsi))
2221 gsi_prev (&m_gsi);
2222 else
2223 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2224 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2225 edge e2 = split_block (e1->dest, (gimple *) NULL);
2226 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2227 e3->probability = profile_probability::very_unlikely ();
2228 e2->flags = EDGE_FALSE_VALUE;
2229 e2->probability = e3->probability.invert ();
2230 tree idx = make_ssa_name (sizetype);
2231 gphi *phi = create_phi_node (idx, e1->dest);
2232 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2233 *idx_next = make_ssa_name (sizetype);
2234 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2235 m_gsi = gsi_after_labels (e1->dest);
2236 m_bb = e1->dest;
2237 m_preheader_bb = e1->src;
2238 class loop *loop = alloc_loop ();
2239 loop->header = e1->dest;
2240 add_loop (loop, e1->src->loop_father);
2241 return idx;
2242 }
2243
2244 /* Lower a large/huge _BitInt mergeable or similar statement STMT which can
2245 be lowered using iteration from the least significant limb up to the most
2246 significant limb. For large _BitInt it is emitted as straight line code
2247 before the current location, for huge _BitInt as a loop handling two limbs
2248 at once, followed by handling the remaining limbs in straight line code
2249 (at most one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2250 comparisons, in that case CMP_CODE should be the comparison code and
2251 CMP_OP1/CMP_OP2 the comparison operands. */
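/* Editorial example: assuming 64-bit limbs and a _BitInt(259)
   categorized as huge, the loop below processes limbs 0-3 two at a
   time and the remaining partial 3-bit limb 4 is then handled in
   straight line code after the loop.  */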
2253 tree
2254 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2255 tree cmp_op1, tree cmp_op2)
2257 bool eq_p = cmp_code != ERROR_MARK;
2258 tree type;
2259 if (eq_p)
2260 type = TREE_TYPE (cmp_op1);
2261 else
2262 type = TREE_TYPE (gimple_assign_lhs (stmt));
2263 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2264 bitint_prec_kind kind = bitint_precision_kind (type);
2265 gcc_assert (kind >= bitint_prec_large);
2266 gimple *g;
2267 tree lhs = gimple_get_lhs (stmt);
2268 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2269 if (lhs
2270 && TREE_CODE (lhs) == SSA_NAME
2271 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2272 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2274 int p = var_to_partition (m_map, lhs);
2275 gcc_assert (m_vars[p] != NULL_TREE);
2276 m_lhs = lhs = m_vars[p];
2278 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2279 bool sext = false;
2280 tree ext = NULL_TREE, store_operand = NULL_TREE;
2281 bool eh = false;
2282 basic_block eh_pad = NULL;
2283 tree nlhs = NULL_TREE;
2284 unsigned HOST_WIDE_INT bo_idx = 0;
2285 unsigned HOST_WIDE_INT bo_bit = 0;
2286 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2287 if (gimple_store_p (stmt))
2289 store_operand = gimple_assign_rhs1 (stmt);
2290 eh = stmt_ends_bb_p (stmt);
2291 if (eh)
2293 edge e;
2294 edge_iterator ei;
2295 basic_block bb = gimple_bb (stmt);
2297 FOR_EACH_EDGE (e, ei, bb->succs)
2298 if (e->flags & EDGE_EH)
2300 eh_pad = e->dest;
2301 break;
2304 if (TREE_CODE (lhs) == COMPONENT_REF
2305 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2307 tree fld = TREE_OPERAND (lhs, 1);
2308 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2309 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2310 poly_int64 bitoffset;
2311 poly_uint64 field_offset, repr_offset;
2312 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2313 nlhs = lhs;
2314 else
2316 bool var_field_off = false;
2317 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2318 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2319 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2320 else
2322 bitoffset = 0;
2323 var_field_off = true;
2325 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2326 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2327 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2328 TREE_OPERAND (lhs, 0), repr,
2329 var_field_off
2330 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2331 HOST_WIDE_INT bo = bitoffset.to_constant ();
2332 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2333 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2337 if ((store_operand
2338 && TREE_CODE (store_operand) == SSA_NAME
2339 && (m_names == NULL
2340 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2341 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2342 || gimple_assign_cast_p (stmt))
2344 rhs1 = gimple_assign_rhs1 (store_operand
2345 ? SSA_NAME_DEF_STMT (store_operand)
2346 : stmt);
2347 /* Optimize mergeable ops ending with widening cast to _BitInt
2348 (or followed by store). We can lower just the limbs of the
2349 cast operand and widen afterwards. */
2350 if (TREE_CODE (rhs1) == SSA_NAME
2351 && (m_names == NULL
2352 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2353 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2354 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2355 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2356 limb_prec) < CEIL (prec, limb_prec)
2357 || (kind == bitint_prec_huge
2358 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2360 store_operand = rhs1;
2361 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2362 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2363 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2364 sext = true;
2367 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2368 if (kind == bitint_prec_large)
2369 cnt = CEIL (prec, limb_prec);
2370 else
2372 rem = (prec % (2 * limb_prec));
2373 end = (prec - rem) / limb_prec;
2374 cnt = 2 + CEIL (rem, limb_prec);
2375 idx = idx_first = create_loop (size_zero_node, &idx_next);
2378 basic_block edge_bb = NULL;
2379 if (eq_p)
2381 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2382 gsi_prev (&gsi);
2383 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2384 edge_bb = e->src;
2385 if (kind == bitint_prec_large)
2386 m_gsi = gsi_end_bb (edge_bb);
2388 else
2389 m_after_stmt = stmt;
2390 if (kind != bitint_prec_large)
2391 m_upwards_2limb = end;
2392 m_upwards = true;
2394 bool separate_ext
2395 = (prec != (unsigned) TYPE_PRECISION (type)
2396 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2397 > CEIL (prec, limb_prec)));
2399 for (unsigned i = 0; i < cnt; i++)
2401 m_data_cnt = 0;
2402 if (kind == bitint_prec_large)
2403 idx = size_int (i);
2404 else if (i >= 2)
2405 idx = size_int (end + (i > 2));
2406 if (eq_p)
2408 rhs1 = handle_operand (cmp_op1, idx);
2409 tree rhs2 = handle_operand (cmp_op2, idx);
2410 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2411 insert_before (g);
2412 edge e1 = split_block (gsi_bb (m_gsi), g);
2413 e1->flags = EDGE_FALSE_VALUE;
2414 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2415 e1->probability = profile_probability::unlikely ();
2416 e2->probability = e1->probability.invert ();
2417 if (i == 0)
2418 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2419 m_gsi = gsi_after_labels (e1->dest);
2421 else
2423 if (store_operand)
2424 rhs1 = handle_operand (store_operand, idx);
2425 else
2426 rhs1 = handle_stmt (stmt, idx);
2427 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2428 rhs1 = add_cast (m_limb_type, rhs1);
2429 if (sext && i == cnt - 1)
2430 ext = rhs1;
2431 tree nidx = idx;
2432 if (bo_idx)
2434 if (tree_fits_uhwi_p (idx))
2435 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2436 else
2438 nidx = make_ssa_name (sizetype);
2439 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2440 size_int (bo_idx));
2441 insert_before (g);
2444 bool done = false;
2445 basic_block new_bb = NULL;
2446 /* Handle stores into bit-fields. */
2447 if (bo_bit)
2449 if (i == 0)
2451 edge e2 = NULL;
2452 if (kind != bitint_prec_large)
2454 prepare_data_in_out (build_zero_cst (m_limb_type),
2455 idx, &bf_next);
2456 bf_next = m_data.pop ();
2457 bf_cur = m_data.pop ();
2458 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2459 NULL_TREE, NULL_TREE);
2460 edge edge_true;
2461 if_then_else (g, profile_probability::unlikely (),
2462 edge_true, e2);
2463 new_bb = e2->dest;
2465 tree ftype
2466 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2467 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2468 bitsize_int (limb_prec - bo_bit),
2469 bitsize_int (bo_idx * limb_prec + bo_bit));
2470 tree t = add_cast (ftype, rhs1);
2471 g = gimple_build_assign (bfr, t);
2472 insert_before (g);
2473 if (eh)
2475 maybe_duplicate_eh_stmt (g, stmt);
2476 if (eh_pad)
2478 edge e = split_block (gsi_bb (m_gsi), g);
2479 m_gsi = gsi_after_labels (e->dest);
2480 make_edge (e->src, eh_pad, EDGE_EH)->probability
2481 = profile_probability::very_unlikely ();
2484 if (kind == bitint_prec_large)
2486 bf_cur = rhs1;
2487 done = true;
2489 else if (e2)
2490 m_gsi = gsi_after_labels (e2->src);
2492 if (!done)
2494 tree t1 = make_ssa_name (m_limb_type);
2495 tree t2 = make_ssa_name (m_limb_type);
2496 tree t3 = make_ssa_name (m_limb_type);
2497 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2498 build_int_cst (unsigned_type_node,
2499 limb_prec - bo_bit));
2500 insert_before (g);
2501 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2502 build_int_cst (unsigned_type_node,
2503 bo_bit));
2504 insert_before (g);
2505 bf_cur = rhs1;
2506 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2507 insert_before (g);
2508 rhs1 = t3;
2509 if (bf_next && i == 1)
2511 g = gimple_build_assign (bf_next, bf_cur);
2512 insert_before (g);
2516 if (!done)
2518 /* Handle bit-field access to partial last limb if needed. */
2519 if (nlhs
2520 && i == cnt - 1
2521 && !separate_ext
2522 && tree_fits_uhwi_p (idx))
2524 unsigned int tprec = TYPE_PRECISION (type);
2525 unsigned int rprec = tprec % limb_prec;
2526 if (rprec + bo_bit < (unsigned) limb_prec)
2528 tree ftype
2529 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2530 tree bfr = build3 (BIT_FIELD_REF, ftype,
2531 unshare_expr (nlhs),
2532 bitsize_int (rprec + bo_bit),
2533 bitsize_int ((bo_idx
2534 + tprec / limb_prec)
2535 * limb_prec));
2536 tree t = add_cast (ftype, rhs1);
2537 g = gimple_build_assign (bfr, t);
2538 done = true;
2539 bf_cur = NULL_TREE;
2541 else if (rprec + bo_bit == (unsigned) limb_prec)
2542 bf_cur = NULL_TREE;
2544 /* Otherwise, stores to any other lhs. */
2545 if (!done)
2546 {
2547 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2548 nidx, true);
2549 g = gimple_build_assign (l, rhs1);
2550 }
2551 insert_before (g);
2552 if (eh)
2554 maybe_duplicate_eh_stmt (g, stmt);
2555 if (eh_pad)
2557 edge e = split_block (gsi_bb (m_gsi), g);
2558 m_gsi = gsi_after_labels (e->dest);
2559 make_edge (e->src, eh_pad, EDGE_EH)->probability
2560 = profile_probability::very_unlikely ();
2563 if (new_bb)
2564 m_gsi = gsi_after_labels (new_bb);
2567 m_first = false;
2568 if (kind == bitint_prec_huge && i <= 1)
2570 if (i == 0)
2572 idx = make_ssa_name (sizetype);
2573 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2574 size_one_node);
2575 insert_before (g);
2577 else
2579 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2580 size_int (2));
2581 insert_before (g);
2582 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2583 NULL_TREE, NULL_TREE);
2584 insert_before (g);
2585 if (eq_p)
2586 m_gsi = gsi_after_labels (edge_bb);
2587 else
2588 m_gsi = gsi_for_stmt (stmt);
2593 if (separate_ext)
2595 if (sext)
2597 ext = add_cast (signed_type_for (m_limb_type), ext);
2598 tree lpm1 = build_int_cst (unsigned_type_node,
2599 limb_prec - 1);
2600 tree n = make_ssa_name (TREE_TYPE (ext));
2601 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2602 insert_before (g);
2603 ext = add_cast (m_limb_type, n);
2605 else
2606 ext = build_zero_cst (m_limb_type);
2607 kind = bitint_precision_kind (type);
2608 unsigned start = CEIL (prec, limb_prec);
2609 prec = TYPE_PRECISION (type);
2610 idx = idx_first = idx_next = NULL_TREE;
2611 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2612 kind = bitint_prec_large;
2613 if (kind == bitint_prec_large)
2614 cnt = CEIL (prec, limb_prec) - start;
2615 else
2617 rem = prec % limb_prec;
2618 end = (prec - rem) / limb_prec;
2619 cnt = (bo_bit != 0) + 1 + (rem != 0);
2621 for (unsigned i = 0; i < cnt; i++)
2623 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2624 idx = size_int (start + i);
2625 else if (i == cnt - 1)
2626 idx = size_int (end);
2627 else if (i == (bo_bit != 0))
2628 idx = create_loop (size_int (start + i), &idx_next);
2629 rhs1 = ext;
2630 if (bf_cur != NULL_TREE && bf_cur != ext)
2632 tree t1 = make_ssa_name (m_limb_type);
2633 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2634 build_int_cst (unsigned_type_node,
2635 limb_prec - bo_bit));
2636 insert_before (g);
2637 if (integer_zerop (ext))
2638 rhs1 = t1;
2639 else
2641 tree t2 = make_ssa_name (m_limb_type);
2642 rhs1 = make_ssa_name (m_limb_type);
2643 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2644 build_int_cst (unsigned_type_node,
2645 bo_bit));
2646 insert_before (g);
2647 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2648 insert_before (g);
2650 bf_cur = ext;
2652 tree nidx = idx;
2653 if (bo_idx)
2655 if (tree_fits_uhwi_p (idx))
2656 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2657 else
2659 nidx = make_ssa_name (sizetype);
2660 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2661 size_int (bo_idx));
2662 insert_before (g);
2665 bool done = false;
2666 /* Handle bit-field access to partial last limb if needed. */
2667 if (nlhs && i == cnt - 1)
2669 unsigned int tprec = TYPE_PRECISION (type);
2670 unsigned int rprec = tprec % limb_prec;
2671 if (rprec + bo_bit < (unsigned) limb_prec)
2673 tree ftype
2674 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2675 tree bfr = build3 (BIT_FIELD_REF, ftype,
2676 unshare_expr (nlhs),
2677 bitsize_int (rprec + bo_bit),
2678 bitsize_int ((bo_idx + tprec / limb_prec)
2679 * limb_prec));
2680 tree t = add_cast (ftype, rhs1);
2681 g = gimple_build_assign (bfr, t);
2682 done = true;
2683 bf_cur = NULL_TREE;
2685 else if (rprec + bo_bit == (unsigned) limb_prec)
2686 bf_cur = NULL_TREE;
2688 /* Otherwise, stores to any other lhs. */
2689 if (!done)
2690 {
2691 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2692 g = gimple_build_assign (l, rhs1);
2693 }
2694 insert_before (g);
2695 if (eh)
2697 maybe_duplicate_eh_stmt (g, stmt);
2698 if (eh_pad)
2700 edge e = split_block (gsi_bb (m_gsi), g);
2701 m_gsi = gsi_after_labels (e->dest);
2702 make_edge (e->src, eh_pad, EDGE_EH)->probability
2703 = profile_probability::very_unlikely ();
2706 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2708 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2709 size_one_node);
2710 insert_before (g);
2711 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2712 NULL_TREE, NULL_TREE);
2713 insert_before (g);
2714 m_gsi = gsi_for_stmt (stmt);
2718 if (bf_cur != NULL_TREE)
2720 unsigned int tprec = TYPE_PRECISION (type);
2721 unsigned int rprec = tprec % limb_prec;
2722 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2723 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2724 bitsize_int (rprec + bo_bit),
2725 bitsize_int ((bo_idx + tprec / limb_prec)
2726 * limb_prec));
2727 rhs1 = bf_cur;
2728 if (bf_cur != ext)
2730 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2731 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2732 build_int_cst (unsigned_type_node,
2733 limb_prec - bo_bit));
2734 insert_before (g);
2736 rhs1 = add_cast (ftype, rhs1);
2737 g = gimple_build_assign (bfr, rhs1);
2738 insert_before (g);
2739 if (eh)
2741 maybe_duplicate_eh_stmt (g, stmt);
2742 if (eh_pad)
2744 edge e = split_block (gsi_bb (m_gsi), g);
2745 m_gsi = gsi_after_labels (e->dest);
2746 make_edge (e->src, eh_pad, EDGE_EH)->probability
2747 = profile_probability::very_unlikely ();
2752 if (gimple_store_p (stmt))
2754 unlink_stmt_vdef (stmt);
2755 release_ssa_name (gimple_vdef (stmt));
2756 gsi_remove (&m_gsi, true);
2758 if (eq_p)
2760 lhs = make_ssa_name (boolean_type_node);
2761 basic_block bb = gimple_bb (stmt);
2762 gphi *phi = create_phi_node (lhs, bb);
2763 edge e = find_edge (gsi_bb (m_gsi), bb);
2764 unsigned int n = EDGE_COUNT (bb->preds);
2765 for (unsigned int i = 0; i < n; i++)
2767 edge e2 = EDGE_PRED (bb, i);
2768 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2769 e2, UNKNOWN_LOCATION);
2771 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2772 return lhs;
2774 else
2775 return NULL_TREE;
2778 /* Handle a large/huge _BitInt comparison statement STMT other than
2779 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 have the same
2780 meaning as in lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR
2781 comparisons are lowered by iteration from the most significant limb
2782 downwards to the least significant one, for large _BitInt in
2783 straight line code, otherwise with the most significant limb
2784 handled in straight line code followed by a loop handling one limb
2785 at a time. Comparisons with unsigned huge _BitInt with precisions
2786 which are multiples of limb precision can use just the loop and
2787 don't need to handle the most significant limb before the loop.
2788 The loop or straight line code jumps to the final basic block if a
2789 particular pair of limbs is not equal. */
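/* Editorial sketch of the emitted control flow per limb pair:
     if (rhs1[idx] > rhs2[idx]) goto final_bb;
     if (rhs1[idx] < rhs2[idx]) goto final_bb;
     ... next less significant pair, or loop back edge ...
   where the PHI in the final basic block maps each incoming edge to
   the appropriate boolean result for CMP_CODE.  */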
2791 tree
2792 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2793 tree cmp_op1, tree cmp_op2)
2795 tree type = TREE_TYPE (cmp_op1);
2796 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2797 bitint_prec_kind kind = bitint_precision_kind (type);
2798 gcc_assert (kind >= bitint_prec_large);
2799 gimple *g;
2800 if (!TYPE_UNSIGNED (type)
2801 && integer_zerop (cmp_op2)
2802 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2804 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2805 tree idx = size_int (end);
2806 m_data_cnt = 0;
2807 tree rhs1 = handle_operand (cmp_op1, idx);
2808 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2810 tree stype = signed_type_for (TREE_TYPE (rhs1));
2811 rhs1 = add_cast (stype, rhs1);
2813 tree lhs = make_ssa_name (boolean_type_node);
2814 g = gimple_build_assign (lhs, cmp_code, rhs1,
2815 build_zero_cst (TREE_TYPE (rhs1)));
2816 insert_before (g);
2817 cmp_code = NE_EXPR;
2818 return lhs;
2821 unsigned cnt, rem = 0, end = 0;
2822 tree idx = NULL_TREE, idx_next = NULL_TREE;
2823 if (kind == bitint_prec_large)
2824 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2825 else
2827 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2828 if (rem == 0 && !TYPE_UNSIGNED (type))
2829 rem = limb_prec;
2830 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2831 cnt = 1 + (rem != 0);
2834 basic_block edge_bb = NULL;
2835 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2836 gsi_prev (&gsi);
2837 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2838 edge_bb = e->src;
2839 m_gsi = gsi_end_bb (edge_bb);
2841 edge *edges = XALLOCAVEC (edge, cnt * 2);
2842 for (unsigned i = 0; i < cnt; i++)
2844 m_data_cnt = 0;
2845 if (kind == bitint_prec_large)
2846 idx = size_int (cnt - i - 1);
2847 else if (i == cnt - 1)
2848 idx = create_loop (size_int (end - 1), &idx_next);
2849 else
2850 idx = size_int (end);
2851 tree rhs1 = handle_operand (cmp_op1, idx);
2852 tree rhs2 = handle_operand (cmp_op2, idx);
2853 if (i == 0
2854 && !TYPE_UNSIGNED (type)
2855 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2857 tree stype = signed_type_for (TREE_TYPE (rhs1));
2858 rhs1 = add_cast (stype, rhs1);
2859 rhs2 = add_cast (stype, rhs2);
2861 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2862 insert_before (g);
2863 edge e1 = split_block (gsi_bb (m_gsi), g);
2864 e1->flags = EDGE_FALSE_VALUE;
2865 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2866 e1->probability = profile_probability::likely ();
2867 e2->probability = e1->probability.invert ();
2868 if (i == 0)
2869 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2870 m_gsi = gsi_after_labels (e1->dest);
2871 edges[2 * i] = e2;
2872 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2873 insert_before (g);
2874 e1 = split_block (gsi_bb (m_gsi), g);
2875 e1->flags = EDGE_FALSE_VALUE;
2876 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2877 e1->probability = profile_probability::unlikely ();
2878 e2->probability = e1->probability.invert ();
2879 m_gsi = gsi_after_labels (e1->dest);
2880 edges[2 * i + 1] = e2;
2881 m_first = false;
2882 if (kind == bitint_prec_huge && i == cnt - 1)
2884 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2885 insert_before (g);
2886 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2887 NULL_TREE, NULL_TREE);
2888 insert_before (g);
2889 edge true_edge, false_edge;
2890 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2891 &true_edge, &false_edge);
2892 m_gsi = gsi_after_labels (false_edge->dest);
2896 tree lhs = make_ssa_name (boolean_type_node);
2897 basic_block bb = gimple_bb (stmt);
2898 gphi *phi = create_phi_node (lhs, bb);
2899 for (unsigned int i = 0; i < cnt * 2; i++)
2901 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2902 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2903 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2905 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2906 ? boolean_true_node : boolean_false_node,
2907 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2908 cmp_code = NE_EXPR;
2909 return lhs;
2912 /* Lower large/huge _BitInt left and right shifts, except left
2913 shifts by a constant smaller than limb_prec. */
2915 void
2916 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2918 tree rhs1 = gimple_assign_rhs1 (stmt);
2919 tree lhs = gimple_assign_lhs (stmt);
2920 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2921 tree type = TREE_TYPE (rhs1);
2922 gimple *final_stmt = gsi_stmt (m_gsi);
2923 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2924 && bitint_precision_kind (type) >= bitint_prec_large);
2925 int prec = TYPE_PRECISION (type);
2926 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2927 gimple *g;
2928 if (obj == NULL_TREE)
2930 int part = var_to_partition (m_map, lhs);
2931 gcc_assert (m_vars[part] != NULL_TREE);
2932 obj = m_vars[part];
2934 /* Preparation code common for both left and right shifts.
2935 unsigned n1 = n % limb_prec;
2936 size_t n2 = n / limb_prec;
2937 size_t n3 = n1 != 0;
2938 unsigned n4 = (limb_prec - n1) % limb_prec;
2939 (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)). */
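/* Editorial example with limb_prec == 64 and n == 70:
   n1 == 6, n2 == 1, n3 == 1 and n4 == 58, i.e. each destination limb
   combines bits shifted by 6 within one source limb with bits
   carried over from the neighbouring limb shifted by 58.  */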
2940 if (TREE_CODE (n) == INTEGER_CST)
2942 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2943 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2944 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2945 n3 = size_int (!integer_zerop (n1));
2946 n4 = int_const_binop (TRUNC_MOD_EXPR,
2947 int_const_binop (MINUS_EXPR, lp, n1), lp);
2949 else
2951 n1 = make_ssa_name (TREE_TYPE (n));
2952 n2 = make_ssa_name (sizetype);
2953 n3 = make_ssa_name (sizetype);
2954 n4 = make_ssa_name (TREE_TYPE (n));
2955 if (pow2p_hwi (limb_prec))
2957 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2958 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2959 insert_before (g);
2960 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2961 TREE_TYPE (n))
2962 ? n2 : make_ssa_name (TREE_TYPE (n)),
2963 RSHIFT_EXPR, n,
2964 build_int_cst (TREE_TYPE (n),
2965 exact_log2 (limb_prec)));
2966 insert_before (g);
2967 if (gimple_assign_lhs (g) != n2)
2969 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2970 insert_before (g);
2972 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2973 NEGATE_EXPR, n1);
2974 insert_before (g);
2975 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
2976 lpm1);
2977 insert_before (g);
2979 else
2981 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2982 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
2983 insert_before (g);
2984 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2985 TREE_TYPE (n))
2986 ? n2 : make_ssa_name (TREE_TYPE (n)),
2987 TRUNC_DIV_EXPR, n, lp);
2988 insert_before (g);
2989 if (gimple_assign_lhs (g) != n2)
2991 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2992 insert_before (g);
2994 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2995 MINUS_EXPR, lp, n1);
2996 insert_before (g);
2997 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
2998 lp);
2999 insert_before (g);
3001 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3002 build_zero_cst (TREE_TYPE (n)));
3003 insert_before (g);
3004 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3005 insert_before (g);
3007 tree p = build_int_cst (sizetype,
3008 prec / limb_prec - (prec % limb_prec == 0));
3009 if (rhs_code == RSHIFT_EXPR)
3011 /* Lower
3012 dst = src >> n;
3014 unsigned n1 = n % limb_prec;
3015 size_t n2 = n / limb_prec;
3016 size_t n3 = n1 != 0;
3017 unsigned n4 = (limb_prec - n1) % limb_prec;
3018 size_t idx;
3019 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3020 int signed_p = (typeof (src) -1) < 0;
3021 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3022 ? p : p - n3); ++idx)
3023 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3024 limb_type ext;
3025 if (prec % limb_prec == 0)
3026 ext = src[p];
3027 else if (signed_p)
3028 ext = ((signed limb_type) (src[p] << (limb_prec
3029 - (prec % limb_prec))))
3030 >> (limb_prec - (prec % limb_prec));
3031 else
3032 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3033 if (!signed_p && (prec % limb_prec == 0))
3034 ;
3035 else if (idx < prec / limb_prec)
3036 {
3037 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3038 ++idx;
3039 }
3040 idx -= n2;
3041 if (signed_p)
3042 {
3043 dst[idx] = ((signed limb_type) ext) >> n1;
3044 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3045 }
3046 else
3047 {
3048 dst[idx] = ext >> n1;
3049 ext = 0;
3050 }
3051 for (++idx; idx <= p; ++idx)
3052 dst[idx] = ext; */
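/* Editorial example: for an unsigned _BitInt(128) >> 70 with 64-bit
   limbs the above reduces to dst[0] = src[1] >> 6 and dst[1] = 0.  */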
3053 tree pmn3;
3054 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3055 pmn3 = p;
3056 else if (TREE_CODE (n3) == INTEGER_CST)
3057 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3058 else
3060 pmn3 = make_ssa_name (sizetype);
3061 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3062 insert_before (g);
3064 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3065 edge edge_true, edge_false;
3066 if_then (g, profile_probability::likely (), edge_true, edge_false);
3067 tree idx_next;
3068 tree idx = create_loop (n2, &idx_next);
3069 tree idxmn2 = make_ssa_name (sizetype);
3070 tree idxpn3 = make_ssa_name (sizetype);
3071 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3072 insert_before (g);
3073 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3074 insert_before (g);
3075 m_data_cnt = 0;
3076 tree t1 = handle_operand (rhs1, idx);
3077 m_first = false;
3078 g = gimple_build_assign (make_ssa_name (m_limb_type),
3079 RSHIFT_EXPR, t1, n1);
3080 insert_before (g);
3081 t1 = gimple_assign_lhs (g);
3082 if (!integer_zerop (n3))
3084 m_data_cnt = 0;
3085 tree t2 = handle_operand (rhs1, idxpn3);
3086 g = gimple_build_assign (make_ssa_name (m_limb_type),
3087 LSHIFT_EXPR, t2, n4);
3088 insert_before (g);
3089 t2 = gimple_assign_lhs (g);
3090 g = gimple_build_assign (make_ssa_name (m_limb_type),
3091 BIT_IOR_EXPR, t1, t2);
3092 insert_before (g);
3093 t1 = gimple_assign_lhs (g);
3095 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3096 g = gimple_build_assign (l, t1);
3097 insert_before (g);
3098 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3099 insert_before (g);
3100 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3101 insert_before (g);
3102 idx = make_ssa_name (sizetype);
3103 m_gsi = gsi_for_stmt (final_stmt);
3104 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3105 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3106 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3107 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3108 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3109 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3110 m_data_cnt = 0;
3111 tree ms = handle_operand (rhs1, p);
3112 tree ext = ms;
3113 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3114 ext = add_cast (m_limb_type, ms);
3115 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3116 && !integer_zerop (n3))
3118 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3119 if_then (g, profile_probability::likely (), edge_true, edge_false);
3120 m_data_cnt = 0;
3121 t1 = handle_operand (rhs1, idx);
3122 g = gimple_build_assign (make_ssa_name (m_limb_type),
3123 RSHIFT_EXPR, t1, n1);
3124 insert_before (g);
3125 t1 = gimple_assign_lhs (g);
3126 g = gimple_build_assign (make_ssa_name (m_limb_type),
3127 LSHIFT_EXPR, ext, n4);
3128 insert_before (g);
3129 tree t2 = gimple_assign_lhs (g);
3130 g = gimple_build_assign (make_ssa_name (m_limb_type),
3131 BIT_IOR_EXPR, t1, t2);
3132 insert_before (g);
3133 t1 = gimple_assign_lhs (g);
3134 idxmn2 = make_ssa_name (sizetype);
3135 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3136 insert_before (g);
3137 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3138 g = gimple_build_assign (l, t1);
3139 insert_before (g);
3140 idx_next = make_ssa_name (sizetype);
3141 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3142 insert_before (g);
3143 m_gsi = gsi_for_stmt (final_stmt);
3144 tree nidx = make_ssa_name (sizetype);
3145 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3146 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3147 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3148 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3149 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3150 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3151 idx = nidx;
3153 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3154 insert_before (g);
3155 idx = gimple_assign_lhs (g);
3156 tree sext = ext;
3157 if (!TYPE_UNSIGNED (type))
3158 sext = add_cast (signed_type_for (m_limb_type), ext);
3159 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3160 RSHIFT_EXPR, sext, n1);
3161 insert_before (g);
3162 t1 = gimple_assign_lhs (g);
3163 if (!TYPE_UNSIGNED (type))
3165 t1 = add_cast (m_limb_type, t1);
3166 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3167 RSHIFT_EXPR, sext,
3168 build_int_cst (TREE_TYPE (n),
3169 limb_prec - 1));
3170 insert_before (g);
3171 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3173 else
3174 ext = build_zero_cst (m_limb_type);
3175 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3176 g = gimple_build_assign (l, t1);
3177 insert_before (g);
3178 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3179 size_one_node);
3180 insert_before (g);
3181 idx = gimple_assign_lhs (g);
3182 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3183 if_then (g, profile_probability::likely (), edge_true, edge_false);
3184 idx = create_loop (idx, &idx_next);
3185 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3186 g = gimple_build_assign (l, ext);
3187 insert_before (g);
3188 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3189 insert_before (g);
3190 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3191 insert_before (g);
3193 else
3195 /* Lower
3196 dst = src << n;
3198 unsigned n1 = n % limb_prec;
3199 size_t n2 = n / limb_prec;
3200 size_t n3 = n1 != 0;
3201 unsigned n4 = (limb_prec - n1) % limb_prec;
3202 size_t idx;
3203 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3204 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3205 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3206 if (n1)
3207 {
3208 dst[idx] = src[idx - n2] << n1;
3209 --idx;
3210 }
3211 for (; (ssize_t) idx >= 0; --idx)
3212 dst[idx] = 0; */
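/* Editorial example: for a _BitInt(128) << 70 with 64-bit limbs the
   first loop is never entered (p < n2 + n3), the n1 step stores
   dst[1] = src[0] << 6 and the final loop clears dst[0].  */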
3213 tree n2pn3;
3214 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3215 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3216 else
3218 n2pn3 = make_ssa_name (sizetype);
3219 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3220 insert_before (g);
3222 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3223 idx even to access the most significant partial limb. */
3224 m_var_msb = true;
3225 if (integer_zerop (n3))
3226 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3227 counts. Emit an if (true) condition that can be optimized later. */
3228 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3229 NULL_TREE, NULL_TREE);
3230 else
3231 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3232 edge edge_true, edge_false;
3233 if_then (g, profile_probability::likely (), edge_true, edge_false);
3234 tree idx_next;
3235 tree idx = create_loop (p, &idx_next);
3236 tree idxmn2 = make_ssa_name (sizetype);
3237 tree idxmn2mn3 = make_ssa_name (sizetype);
3238 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3239 insert_before (g);
3240 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3241 insert_before (g);
3242 m_data_cnt = 0;
3243 tree t1 = handle_operand (rhs1, idxmn2);
3244 m_first = false;
3245 g = gimple_build_assign (make_ssa_name (m_limb_type),
3246 LSHIFT_EXPR, t1, n1);
3247 insert_before (g);
3248 t1 = gimple_assign_lhs (g);
3249 if (!integer_zerop (n3))
3251 m_data_cnt = 0;
3252 tree t2 = handle_operand (rhs1, idxmn2mn3);
3253 g = gimple_build_assign (make_ssa_name (m_limb_type),
3254 RSHIFT_EXPR, t2, n4);
3255 insert_before (g);
3256 t2 = gimple_assign_lhs (g);
3257 g = gimple_build_assign (make_ssa_name (m_limb_type),
3258 BIT_IOR_EXPR, t1, t2);
3259 insert_before (g);
3260 t1 = gimple_assign_lhs (g);
3262 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3263 g = gimple_build_assign (l, t1);
3264 insert_before (g);
3265 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3266 insert_before (g);
3267 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3268 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3269 NULL_TREE, NULL_TREE);
3270 insert_before (g);
3271 idx = make_ssa_name (sizetype);
3272 m_gsi = gsi_for_stmt (final_stmt);
3273 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3274 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3275 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3276 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3277 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3278 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3279 m_data_cnt = 0;
3280 if (!integer_zerop (n3))
3282 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3283 NULL_TREE, NULL_TREE);
3284 if_then (g, profile_probability::likely (), edge_true, edge_false);
3285 idxmn2 = make_ssa_name (sizetype);
3286 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3287 insert_before (g);
3288 m_data_cnt = 0;
3289 t1 = handle_operand (rhs1, idxmn2);
3290 g = gimple_build_assign (make_ssa_name (m_limb_type),
3291 LSHIFT_EXPR, t1, n1);
3292 insert_before (g);
3293 t1 = gimple_assign_lhs (g);
3294 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3295 g = gimple_build_assign (l, t1);
3296 insert_before (g);
3297 idx_next = make_ssa_name (sizetype);
3298 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3299 insert_before (g);
3300 m_gsi = gsi_for_stmt (final_stmt);
3301 tree nidx = make_ssa_name (sizetype);
3302 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3303 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3304 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3305 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3306 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3307 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3308 idx = nidx;
3310 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3311 ssize_int (0), NULL_TREE, NULL_TREE);
3312 if_then (g, profile_probability::likely (), edge_true, edge_false);
3313 idx = create_loop (idx, &idx_next);
3314 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3315 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3316 insert_before (g);
3317 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3318 insert_before (g);
3319 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3320 ssize_int (0), NULL_TREE, NULL_TREE);
3321 insert_before (g);
3325 /* Lower large/huge _BitInt multiplication or division. */
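/* Editorial sketch: both operations become calls to libgcc routines,
   e.g. q = a / b on _BitInt(256) operands is emitted as
     .DIVMODBITINT (&q, 256, (void *) 0, 0, &a, prec1, &b, prec2);
   where a null quotient or remainder pointer (with precision 0)
   requests only the other result.  */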
3327 void
3328 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3330 tree rhs1 = gimple_assign_rhs1 (stmt);
3331 tree rhs2 = gimple_assign_rhs2 (stmt);
3332 tree lhs = gimple_assign_lhs (stmt);
3333 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3334 tree type = TREE_TYPE (rhs1);
3335 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3336 && bitint_precision_kind (type) >= bitint_prec_large);
3337 int prec = TYPE_PRECISION (type), prec1, prec2;
3338 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3339 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3340 if (obj == NULL_TREE)
3342 int part = var_to_partition (m_map, lhs);
3343 gcc_assert (m_vars[part] != NULL_TREE);
3344 obj = m_vars[part];
3345 lhs = build_fold_addr_expr (obj);
3347 else
3349 lhs = build_fold_addr_expr (obj);
3350 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3351 NULL_TREE, true, GSI_SAME_STMT);
3353 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3354 gimple *g;
3355 switch (rhs_code)
3357 case MULT_EXPR:
3358 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3359 lhs, build_int_cst (sitype, prec),
3360 rhs1, build_int_cst (sitype, prec1),
3361 rhs2, build_int_cst (sitype, prec2));
3362 insert_before (g);
3363 break;
3364 case TRUNC_DIV_EXPR:
3365 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3366 lhs, build_int_cst (sitype, prec),
3367 null_pointer_node,
3368 build_int_cst (sitype, 0),
3369 rhs1, build_int_cst (sitype, prec1),
3370 rhs2, build_int_cst (sitype, prec2));
3371 if (!stmt_ends_bb_p (stmt))
3372 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3373 insert_before (g);
3374 break;
3375 case TRUNC_MOD_EXPR:
3376 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3377 build_int_cst (sitype, 0),
3378 lhs, build_int_cst (sitype, prec),
3379 rhs1, build_int_cst (sitype, prec1),
3380 rhs2, build_int_cst (sitype, prec2));
3381 if (!stmt_ends_bb_p (stmt))
3382 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3383 insert_before (g);
3384 break;
3385 default:
3386 gcc_unreachable ();
3388 if (stmt_ends_bb_p (stmt))
3390 maybe_duplicate_eh_stmt (g, stmt);
3391 edge e1;
3392 edge_iterator ei;
3393 basic_block bb = gimple_bb (stmt);
3395 FOR_EACH_EDGE (e1, ei, bb->succs)
3396 if (e1->flags & EDGE_EH)
3397 break;
3398 if (e1)
3400 edge e2 = split_block (gsi_bb (m_gsi), g);
3401 m_gsi = gsi_after_labels (e2->dest);
3402 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3403 = profile_probability::very_unlikely ();
3408 /* Lower large/huge _BitInt conversion to/from floating point. */
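/* Editorial note: FIX_TRUNC_EXPR is lowered to a .FLOATTOBITINT call
   with a signed precision convention (e.g. 192 for unsigned
   _BitInt(192), -192 for the signed variant), while the reverse
   direction becomes a .BITINTTOFLOAT call whose scalar result
   replaces the original statement.  */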
3410 void
3411 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3413 tree rhs1 = gimple_assign_rhs1 (stmt);
3414 tree lhs = gimple_assign_lhs (stmt);
3415 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3416 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3417 gimple *g;
3418 if (rhs_code == FIX_TRUNC_EXPR)
3420 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3421 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3422 prec = -prec;
3423 if (obj == NULL_TREE)
3425 int part = var_to_partition (m_map, lhs);
3426 gcc_assert (m_vars[part] != NULL_TREE);
3427 obj = m_vars[part];
3428 lhs = build_fold_addr_expr (obj);
3430 else
3432 lhs = build_fold_addr_expr (obj);
3433 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3434 NULL_TREE, true, GSI_SAME_STMT);
3436 scalar_mode from_mode
3437 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3438 #ifdef HAVE_SFmode
3439 /* IEEE single is a full superset of both IEEE half and
3440 bfloat formats, so convert to float first and then to _BitInt
3441 to avoid the need for another two library routines. */
3442 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3443 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3444 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3446 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3447 if (type)
3448 rhs1 = add_cast (type, rhs1);
3450 #endif
3451 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3452 lhs, build_int_cst (sitype, prec),
3453 rhs1);
3454 insert_before (g);
3456 else
3458 int prec;
3459 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3460 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3461 rhs1, build_int_cst (sitype, prec));
3462 gimple_call_set_lhs (g, lhs);
3463 if (!stmt_ends_bb_p (stmt))
3464 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3465 gsi_replace (&m_gsi, g, true);
3469 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3470 If CHECK_ZERO is true, the caller wants to check whether all bits
3471 in [START, END) are zero; otherwise, whether those bits are either
3472 all zero or all ones. L is the limb with index LIMB; START and END
3473 are measured in bits. */
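/* Editorial example: for START 3, END 139 and 64-bit limbs, limb 0
   is shifted right by 3 (logically when CHECK_ZERO, arithmetically
   otherwise), limb 1 is returned unchanged and limb 2 is reduced to
   its low 11 bits, by masking for CHECK_ZERO and by a shift pair
   otherwise.  */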
3475 tree
3476 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3477 unsigned int end, tree l,
3478 unsigned int limb,
3479 bool check_zero)
3481 unsigned startlimb = start / limb_prec;
3482 unsigned endlimb = (end - 1) / limb_prec;
3483 gimple *g;
3485 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3486 return l;
3487 if (startlimb == endlimb && limb == startlimb)
3489 if (check_zero)
3491 wide_int w = wi::shifted_mask (start % limb_prec,
3492 end - start, false, limb_prec);
3493 g = gimple_build_assign (make_ssa_name (m_limb_type),
3494 BIT_AND_EXPR, l,
3495 wide_int_to_tree (m_limb_type, w));
3496 insert_before (g);
3497 return gimple_assign_lhs (g);
3499 unsigned int shift = start % limb_prec;
3500 if ((end % limb_prec) != 0)
3502 unsigned int lshift = (-end) % limb_prec;
3503 shift += lshift;
3504 g = gimple_build_assign (make_ssa_name (m_limb_type),
3505 LSHIFT_EXPR, l,
3506 build_int_cst (unsigned_type_node,
3507 lshift));
3508 insert_before (g);
3509 l = gimple_assign_lhs (g);
3511 l = add_cast (signed_type_for (m_limb_type), l);
3512 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3513 RSHIFT_EXPR, l,
3514 build_int_cst (unsigned_type_node, shift));
3515 insert_before (g);
3516 return add_cast (m_limb_type, gimple_assign_lhs (g));
3518 else if (limb == startlimb)
3520 if ((start % limb_prec) == 0)
3521 return l;
3522 if (!check_zero)
3523 l = add_cast (signed_type_for (m_limb_type), l);
3524 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3525 RSHIFT_EXPR, l,
3526 build_int_cst (unsigned_type_node,
3527 start % limb_prec));
3528 insert_before (g);
3529 l = gimple_assign_lhs (g);
3530 if (!check_zero)
3531 l = add_cast (m_limb_type, l);
3532 return l;
3534 else if (limb == endlimb)
3536 if ((end % limb_prec) == 0)
3537 return l;
3538 if (check_zero)
3540 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3541 g = gimple_build_assign (make_ssa_name (m_limb_type),
3542 BIT_AND_EXPR, l,
3543 wide_int_to_tree (m_limb_type, w));
3544 insert_before (g);
3545 return gimple_assign_lhs (g);
3547 unsigned int shift = (-end) % limb_prec;
3548 g = gimple_build_assign (make_ssa_name (m_limb_type),
3549 LSHIFT_EXPR, l,
3550 build_int_cst (unsigned_type_node, shift));
3551 insert_before (g);
3552 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3553 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3554 RSHIFT_EXPR, l,
3555 build_int_cst (unsigned_type_node, shift));
3556 insert_before (g);
3557 return add_cast (m_limb_type, gimple_assign_lhs (g));
3559 return l;
3562 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3563 Store the result, including the overflow flag, into the right locations. */
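/* Editorial note: when the overflow flag is consumed through the
   usual REALPART_EXPR/IMAGPART_EXPR pairs of the __builtin_*_overflow
   builtins, the IMAGPART_EXPR use is rewritten directly to OVF below;
   otherwise a guarded ubsan overflow builtin call is emitted on an
   unlikely path.  */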
3565 void
3566 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3567 tree ovf, tree lhs, tree orig_obj,
3568 gimple *stmt, tree_code code)
3570 gimple *g;
3572 if (obj == NULL_TREE
3573 && (TREE_CODE (type) != BITINT_TYPE
3574 || bitint_precision_kind (type) < bitint_prec_large))
3576 /* Add support for 3 or more limbs filled in from normal integral
3577 type if this assert fails. If no target chooses limb mode smaller
3578 than half of largest supported normal integral type, this will not
3579 be needed. */
3580 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3581 tree lhs_type = type;
3582 if (TREE_CODE (type) == BITINT_TYPE
3583 && bitint_precision_kind (type) == bitint_prec_middle)
3584 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3585 TYPE_UNSIGNED (type));
3586 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3587 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3588 insert_before (g);
3589 r1 = gimple_assign_lhs (g);
3590 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3591 r1 = add_cast (lhs_type, r1);
3592 if (TYPE_PRECISION (lhs_type) > limb_prec)
3594 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3595 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3596 insert_before (g);
3597 r2 = gimple_assign_lhs (g);
3598 r2 = add_cast (lhs_type, r2);
3599 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3600 build_int_cst (unsigned_type_node,
3601 limb_prec));
3602 insert_before (g);
3603 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3604 gimple_assign_lhs (g));
3605 insert_before (g);
3606 r1 = gimple_assign_lhs (g);
3608 if (lhs_type != type)
3609 r1 = add_cast (type, r1);
3610 ovf = add_cast (lhs_type, ovf);
3611 if (lhs_type != type)
3612 ovf = add_cast (type, ovf);
3613 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3614 m_gsi = gsi_for_stmt (stmt);
3615 gsi_replace (&m_gsi, g, true);
3617 else
3619 unsigned HOST_WIDE_INT nelts = 0;
3620 tree atype = NULL_TREE;
3621 if (obj)
3623 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3624 if (orig_obj == NULL_TREE)
3625 nelts >>= 1;
3626 atype = build_array_type_nelts (m_limb_type, nelts);
3628 if (var && obj)
3630 tree v1, v2;
3631 tree zero;
3632 if (orig_obj == NULL_TREE)
3634 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3635 v1 = build2 (MEM_REF, atype,
3636 build_fold_addr_expr (unshare_expr (obj)), zero);
3638 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3639 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3640 else
3641 v1 = unshare_expr (obj);
3642 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3643 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3644 g = gimple_build_assign (v1, v2);
3645 insert_before (g);
3647 if (orig_obj == NULL_TREE && obj)
3649 ovf = add_cast (m_limb_type, ovf);
3650 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3651 g = gimple_build_assign (l, ovf);
3652 insert_before (g);
3653 if (nelts > 1)
3655 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3656 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3657 (nelts + 1) * m_limb_size);
3658 tree v1 = build2 (MEM_REF, atype,
3659 build_fold_addr_expr (unshare_expr (obj)),
3660 off);
3661 g = gimple_build_assign (v1, build_zero_cst (atype));
3662 insert_before (g);
3665 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3667 imm_use_iterator ui;
3668 use_operand_p use_p;
3669 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3671 g = USE_STMT (use_p);
3672 if (!is_gimple_assign (g)
3673 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3674 continue;
3675 tree lhs2 = gimple_assign_lhs (g);
3676 gimple *use_stmt;
3677 single_imm_use (lhs2, &use_p, &use_stmt);
3678 lhs2 = gimple_assign_lhs (use_stmt);
3679 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3680 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3681 g = gimple_build_assign (lhs2, ovf);
3682 else
3683 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3684 gsi_replace (&gsi, g, true);
3685 break;
3688 else if (ovf != boolean_false_node)
3690 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3691 NULL_TREE, NULL_TREE);
3692 edge edge_true, edge_false;
3693 if_then (g, profile_probability::very_unlikely (),
3694 edge_true, edge_false);
3695 tree zero = build_zero_cst (TREE_TYPE (lhs));
3696 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3697 TREE_TYPE (lhs),
3698 zero, zero, NULL);
3699 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3700 true, GSI_SAME_STMT);
3701 m_gsi = gsi_after_labels (edge_true->dest);
3704 if (var)
3706 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_EOL);
3707 g = gimple_build_assign (var, clobber);
3708 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3712 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3713 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3714 argument 1 precision PREC1 and minimum precision for the result
3715 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
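/* Worked example (editor's illustration, assuming full-range
   operands): storing the sum of two unsigned _BitInt(192) values into
   an unsigned _BitInt(128) result gives PREC = 128, PREC0 = PREC1
   = 192, and the caller computes PREC2 = 193.  Both operand precisions
   are non-negative, so the first branch below applies and, as
   128 < 193, sets *START = 128, *END = 193, *CHECK_ZERO = true:
   overflow iff any bit in [128, 193) of the infinite precision result
   is non-zero.  */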
3717 static tree
3718 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3719 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3721 *start = 0;
3722 *end = 0;
3723 *check_zero = true;
3724 /* This special rule doesn't apply to subtraction: even when both
3725 prec0 >= 0 and prec1 >= 0, their difference can be negative in
3726 infinite precision.  */
3727 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3729 /* Result in [0, prec2) is unsigned; if prec > prec2,
3730 all bits above it will be zero.  */
3731 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3732 return boolean_false_node;
3733 else
3735 /* ovf if any of the bits in [start, end) is non-zero.  */
3736 *start = prec - !TYPE_UNSIGNED (type);
3737 *end = prec2;
3740 else if (TYPE_UNSIGNED (type))
3742 /* If result in [0, prec2) is signed, then if prec >= prec2,
3743 all bits above it will be sign bit copies.  */
3744 if (prec >= prec2)
3746 /* ovf if bit prec - 1 is non-zero. */
3747 *start = prec - 1;
3748 *end = prec;
3750 else
3752 /* ovf if any of the bits in [start, end) is non-zero.  */
3753 *start = prec;
3754 *end = prec2;
3757 else if (prec >= prec2)
3758 return boolean_false_node;
3759 else
3761 /* ovf if [start, end) bits aren't all zeros or all ones. */
3762 *start = prec - 1;
3763 *end = prec2;
3764 *check_zero = false;
3766 return NULL_TREE;
3769 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3770 argument or return type _Complex large/huge _BitInt. */
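/* Editor's sketch of the strategy (illustrative only): the addition
   or subtraction is emitted limb by limb from least to most
   significant through handle_plus_minus, for huge _BitInt inside a
   loop handling two limbs per iteration plus straight-line code for
   the remaining limbs, roughly
     for (i = 0; i < limbs; i++)
       { res[i] = a[i] + b[i] + carry;  update carry; }
   and the bits selected by arith_overflow are tested while the limbs
   are being produced, so no separate pass over the result is
   needed.  */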
3772 void
3773 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3775 tree arg0 = gimple_call_arg (stmt, 0);
3776 tree arg1 = gimple_call_arg (stmt, 1);
3777 tree lhs = gimple_call_lhs (stmt);
3778 gimple *g;
3780 if (!lhs)
3782 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3783 gsi_remove (&gsi, true);
3784 return;
3786 gimple *final_stmt = gsi_stmt (m_gsi);
3787 tree type = TREE_TYPE (lhs);
3788 if (TREE_CODE (type) == COMPLEX_TYPE)
3789 type = TREE_TYPE (type);
3790 int prec = TYPE_PRECISION (type);
3791 int prec0 = range_to_prec (arg0, stmt);
3792 int prec1 = range_to_prec (arg1, stmt);
3793 int prec2 = ((prec0 < 0) == (prec1 < 0)
3794 ? MAX (prec0 < 0 ? -prec0 : prec0,
3795 prec1 < 0 ? -prec1 : prec1) + 1
3796 : MAX (prec0 < 0 ? -prec0 : prec0 + 1,
3797 prec1 < 0 ? -prec1 : prec1 + 1) + 1);
3798 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3799 prec1 < 0 ? -prec1 : prec1);
3800 prec3 = MAX (prec3, prec);
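/* Editor's note: PREC2 is the minimal precision which can represent
   the infinite precision result for any operand values, PREC3 (at
   least PREC) the precision the limb-wise computation has to cover.
   E.g. two signed _BitInt(128) operands have PREC0 = PREC1 = -128 and
   PREC2 = 129, while an unsigned PREC0 = 128 mixed with a signed
   PREC1 = -128 gives PREC2 = MAX (129, 128) + 1 = 130.  */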
3801 tree var = NULL_TREE;
3802 tree orig_obj = obj;
3803 if (obj == NULL_TREE
3804 && TREE_CODE (type) == BITINT_TYPE
3805 && bitint_precision_kind (type) >= bitint_prec_large
3806 && m_names
3807 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3809 int part = var_to_partition (m_map, lhs);
3810 gcc_assert (m_vars[part] != NULL_TREE);
3811 obj = m_vars[part];
3812 if (TREE_TYPE (lhs) == type)
3813 orig_obj = obj;
3815 if (TREE_CODE (type) != BITINT_TYPE
3816 || bitint_precision_kind (type) < bitint_prec_large)
3818 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3819 tree atype = build_array_type_nelts (m_limb_type, nelts);
3820 var = create_tmp_var (atype);
3823 enum tree_code code;
3824 switch (gimple_call_internal_fn (stmt))
3826 case IFN_ADD_OVERFLOW:
3827 case IFN_UBSAN_CHECK_ADD:
3828 code = PLUS_EXPR;
3829 break;
3830 case IFN_SUB_OVERFLOW:
3831 case IFN_UBSAN_CHECK_SUB:
3832 code = MINUS_EXPR;
3833 break;
3834 default:
3835 gcc_unreachable ();
3837 unsigned start, end;
3838 bool check_zero;
3839 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3840 &start, &end, &check_zero);
3842 unsigned startlimb, endlimb;
3843 if (ovf)
3845 startlimb = ~0U;
3846 endlimb = ~0U;
3848 else
3850 startlimb = start / limb_prec;
3851 endlimb = (end - 1) / limb_prec;
3854 int prec4 = ovf != NULL_TREE ? prec : prec3;
3855 bitint_prec_kind kind = bitint_precision_kind (prec4);
3856 unsigned cnt, rem = 0, fin = 0;
3857 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3858 bool last_ovf = (ovf == NULL_TREE
3859 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3860 if (kind != bitint_prec_huge)
3861 cnt = CEIL (prec4, limb_prec) + last_ovf;
3862 else
3864 rem = (prec4 % (2 * limb_prec));
3865 fin = (prec4 - rem) / limb_prec;
3866 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3867 idx = idx_first = create_loop (size_zero_node, &idx_next);
3870 if (kind == bitint_prec_huge)
3871 m_upwards_2limb = fin;
3872 m_upwards = true;
3874 tree type0 = TREE_TYPE (arg0);
3875 tree type1 = TREE_TYPE (arg1);
3876 if (TYPE_PRECISION (type0) < prec3)
3878 type0 = build_bitint_type (prec3, TYPE_UNSIGNED (type0));
3879 if (TREE_CODE (arg0) == INTEGER_CST)
3880 arg0 = fold_convert (type0, arg0);
3882 if (TYPE_PRECISION (type1) < prec3)
3884 type1 = build_bitint_type (prec3, TYPE_UNSIGNED (type1));
3885 if (TREE_CODE (arg1) == INTEGER_CST)
3886 arg1 = fold_convert (type1, arg1);
3888 unsigned int data_cnt = 0;
3889 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3890 tree cmp = build_zero_cst (m_limb_type);
3891 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3892 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3893 for (unsigned i = 0; i < cnt; i++)
3895 m_data_cnt = 0;
3896 tree rhs1, rhs2;
3897 if (kind != bitint_prec_huge)
3898 idx = size_int (i);
3899 else if (i >= 2)
3900 idx = size_int (fin + (i > 2));
3901 if (!last_ovf || i < cnt - 1)
3903 if (type0 != TREE_TYPE (arg0))
3904 rhs1 = handle_cast (type0, arg0, idx);
3905 else
3906 rhs1 = handle_operand (arg0, idx);
3907 if (type1 != TREE_TYPE (arg1))
3908 rhs2 = handle_cast (type1, arg1, idx);
3909 else
3910 rhs2 = handle_operand (arg1, idx);
3911 if (i == 0)
3912 data_cnt = m_data_cnt;
3913 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3914 rhs1 = add_cast (m_limb_type, rhs1);
3915 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3916 rhs2 = add_cast (m_limb_type, rhs2);
3917 last_rhs1 = rhs1;
3918 last_rhs2 = rhs2;
3920 else
3922 m_data_cnt = data_cnt;
3923 if (TYPE_UNSIGNED (type0))
3924 rhs1 = build_zero_cst (m_limb_type);
3925 else
3927 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3928 if (TREE_CODE (rhs1) == INTEGER_CST)
3929 rhs1 = build_int_cst (m_limb_type,
3930 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3931 else
3933 tree lpm1 = build_int_cst (unsigned_type_node,
3934 limb_prec - 1);
3935 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
3936 RSHIFT_EXPR, rhs1, lpm1);
3937 insert_before (g);
3938 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
3941 if (TYPE_UNSIGNED (type1))
3942 rhs2 = build_zero_cst (m_limb_type);
3943 else
3945 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
3946 if (TREE_CODE (rhs2) == INTEGER_CST)
3947 rhs2 = build_int_cst (m_limb_type,
3948 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
3949 else
3951 tree lpm1 = build_int_cst (unsigned_type_node,
3952 limb_prec - 1);
3953 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
3954 RSHIFT_EXPR, rhs2, lpm1);
3955 insert_before (g);
3956 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
3960 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
3961 if (ovf != boolean_false_node)
3963 if (tree_fits_uhwi_p (idx))
3965 unsigned limb = tree_to_uhwi (idx);
3966 if (limb >= startlimb && limb <= endlimb)
3968 tree l = arith_overflow_extract_bits (start, end, rhs,
3969 limb, check_zero);
3970 tree this_ovf = make_ssa_name (boolean_type_node);
3971 if (ovf == NULL_TREE && !check_zero)
3973 cmp = l;
3974 g = gimple_build_assign (make_ssa_name (m_limb_type),
3975 PLUS_EXPR, l,
3976 build_int_cst (m_limb_type, 1));
3977 insert_before (g);
3978 g = gimple_build_assign (this_ovf, GT_EXPR,
3979 gimple_assign_lhs (g),
3980 build_int_cst (m_limb_type, 1));
3982 else
3983 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
3984 insert_before (g);
3985 if (ovf == NULL_TREE)
3986 ovf = this_ovf;
3987 else
3989 tree b = make_ssa_name (boolean_type_node);
3990 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
3991 insert_before (g);
3992 ovf = b;
3996 else if (startlimb < fin)
3998 if (m_first && startlimb + 2 < fin)
4000 tree data_out;
4001 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4002 ovf_out = m_data.pop ();
4003 m_data.pop ();
4004 if (!check_zero)
4006 cmp = prepare_data_in_out (cmp, idx, &data_out);
4007 cmp_out = m_data.pop ();
4008 m_data.pop ();
4011 if (i != 0 || startlimb != fin - 1)
4013 tree_code cmp_code;
4014 bool single_comparison
4015 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4016 if (!single_comparison)
4018 cmp_code = GE_EXPR;
4019 if (!check_zero && (start % limb_prec) == 0)
4020 single_comparison = true;
4022 else if ((startlimb & 1) == (i & 1))
4023 cmp_code = EQ_EXPR;
4024 else
4025 cmp_code = GT_EXPR;
4026 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4027 NULL_TREE, NULL_TREE);
4028 edge edge_true_true, edge_true_false, edge_false;
4029 gimple *g2 = NULL;
4030 if (!single_comparison)
4031 g2 = gimple_build_cond (EQ_EXPR, idx,
4032 size_int (startlimb), NULL_TREE,
4033 NULL_TREE);
4034 if_then_if_then_else (g, g2, profile_probability::likely (),
4035 profile_probability::unlikely (),
4036 edge_true_true, edge_true_false,
4037 edge_false);
4038 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4039 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4040 check_zero);
4041 tree this_ovf = make_ssa_name (boolean_type_node);
4042 if (cmp_code != GT_EXPR && !check_zero)
4044 g = gimple_build_assign (make_ssa_name (m_limb_type),
4045 PLUS_EXPR, l,
4046 build_int_cst (m_limb_type, 1));
4047 insert_before (g);
4048 g = gimple_build_assign (this_ovf, GT_EXPR,
4049 gimple_assign_lhs (g),
4050 build_int_cst (m_limb_type, 1));
4052 else
4053 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4054 insert_before (g);
4055 if (cmp_code == GT_EXPR)
4057 tree t = make_ssa_name (boolean_type_node);
4058 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4059 insert_before (g);
4060 this_ovf = t;
4062 tree this_ovf2 = NULL_TREE;
4063 if (!single_comparison)
4065 m_gsi = gsi_after_labels (edge_true_true->src);
4066 tree t = make_ssa_name (boolean_type_node);
4067 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4068 insert_before (g);
4069 this_ovf2 = make_ssa_name (boolean_type_node);
4070 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4071 ovf, t);
4072 insert_before (g);
4074 m_gsi = gsi_after_labels (edge_true_false->dest);
4075 tree t;
4076 if (i == 1 && ovf_out)
4077 t = ovf_out;
4078 else
4079 t = make_ssa_name (boolean_type_node);
4080 gphi *phi = create_phi_node (t, edge_true_false->dest);
4081 add_phi_arg (phi, this_ovf, edge_true_false,
4082 UNKNOWN_LOCATION);
4083 add_phi_arg (phi, ovf ? ovf
4084 : boolean_false_node, edge_false,
4085 UNKNOWN_LOCATION);
4086 if (edge_true_true)
4087 add_phi_arg (phi, this_ovf2, edge_true_true,
4088 UNKNOWN_LOCATION);
4089 ovf = t;
4090 if (!check_zero && cmp_code != GT_EXPR)
4092 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4093 phi = create_phi_node (t, edge_true_false->dest);
4094 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4095 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4096 if (edge_true_true)
4097 add_phi_arg (phi, cmp, edge_true_true,
4098 UNKNOWN_LOCATION);
4099 cmp = t;
4105 if (var || obj)
4107 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4109 else if (!tree_fits_uhwi_p (idx)
4110 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4112 bool single_comparison
4113 = (((unsigned) prec % limb_prec) == 0
4114 || prec_limbs + 1 >= fin
4115 || (prec_limbs & 1) == (i & 1));
4116 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4117 NULL_TREE, NULL_TREE);
4118 gimple *g2 = NULL;
4119 if (!single_comparison)
4120 g2 = gimple_build_cond (LT_EXPR, idx,
4121 size_int (prec_limbs - 1),
4122 NULL_TREE, NULL_TREE);
4123 edge edge_true_true, edge_true_false, edge_false;
4124 if_then_if_then_else (g, g2, profile_probability::likely (),
4125 profile_probability::likely (),
4126 edge_true_true, edge_true_false,
4127 edge_false);
4128 tree l = limb_access (type, var ? var : obj, idx, true);
4129 g = gimple_build_assign (l, rhs);
4130 insert_before (g);
4131 if (!single_comparison)
4133 m_gsi = gsi_after_labels (edge_true_true->src);
4134 l = limb_access (type, var ? var : obj,
4135 size_int (prec_limbs - 1), true);
4136 if (!useless_type_conversion_p (TREE_TYPE (l),
4137 TREE_TYPE (rhs)))
4138 rhs = add_cast (TREE_TYPE (l), rhs);
4139 g = gimple_build_assign (l, rhs);
4140 insert_before (g);
4142 m_gsi = gsi_after_labels (edge_true_false->dest);
4144 else
4146 tree l = limb_access (type, var ? var : obj, idx, true);
4147 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4148 rhs = add_cast (TREE_TYPE (l), rhs);
4149 g = gimple_build_assign (l, rhs);
4150 insert_before (g);
4153 m_first = false;
4154 if (kind == bitint_prec_huge && i <= 1)
4156 if (i == 0)
4158 idx = make_ssa_name (sizetype);
4159 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4160 size_one_node);
4161 insert_before (g);
4163 else
4165 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4166 size_int (2));
4167 insert_before (g);
4168 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4169 NULL_TREE, NULL_TREE);
4170 insert_before (g);
4171 m_gsi = gsi_for_stmt (final_stmt);
4176 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4179 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4180 argument or return type _Complex large/huge _BitInt. */
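/* Editor's sketch (illustrative): unlike addition, the product is not
   computed inline; it is delegated to the IFN_MULBITINT call built
   below, an out-of-line multiplication routine receiving the address
   and bit precision of the destination and of both operands.  Only
   the overflow test is emitted here: it inspects the bits
   arith_overflow selects in the stored result, as straight-line
   checks of up to three limbs or as a small loop over the middle
   limbs.  */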
4182 void
4183 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4185 tree arg0 = gimple_call_arg (stmt, 0);
4186 tree arg1 = gimple_call_arg (stmt, 1);
4187 tree lhs = gimple_call_lhs (stmt);
4188 if (!lhs)
4190 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4191 gsi_remove (&gsi, true);
4192 return;
4194 gimple *final_stmt = gsi_stmt (m_gsi);
4195 tree type = TREE_TYPE (lhs);
4196 if (TREE_CODE (type) == COMPLEX_TYPE)
4197 type = TREE_TYPE (type);
4198 int prec = TYPE_PRECISION (type), prec0, prec1;
4199 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4200 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4201 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4202 + (prec1 < 0 ? -prec1 : prec1)
4203 + ((prec0 < 0) != (prec1 < 0)));
4204 tree var = NULL_TREE;
4205 tree orig_obj = obj;
4206 bool force_var = false;
4207 if (obj == NULL_TREE
4208 && TREE_CODE (type) == BITINT_TYPE
4209 && bitint_precision_kind (type) >= bitint_prec_large
4210 && m_names
4211 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4213 int part = var_to_partition (m_map, lhs);
4214 gcc_assert (m_vars[part] != NULL_TREE);
4215 obj = m_vars[part];
4216 if (TREE_TYPE (lhs) == type)
4217 orig_obj = obj;
4219 else if (obj != NULL_TREE && DECL_P (obj))
4221 for (int i = 0; i < 2; ++i)
4223 tree arg = i ? arg1 : arg0;
4224 if (TREE_CODE (arg) == ADDR_EXPR)
4225 arg = TREE_OPERAND (arg, 0);
4226 if (get_base_address (arg) == obj)
4228 force_var = true;
4229 break;
4233 if (obj == NULL_TREE
4234 || force_var
4235 || TREE_CODE (type) != BITINT_TYPE
4236 || bitint_precision_kind (type) < bitint_prec_large
4237 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4239 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4240 tree atype = build_array_type_nelts (m_limb_type, nelts);
4241 var = create_tmp_var (atype);
4243 tree addr = build_fold_addr_expr (var ? var : obj);
4244 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4245 NULL_TREE, true, GSI_SAME_STMT);
4246 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4247 gimple *g
4248 = gimple_build_call_internal (IFN_MULBITINT, 6,
4249 addr, build_int_cst (sitype,
4250 MAX (prec2, prec)),
4251 arg0, build_int_cst (sitype, prec0),
4252 arg1, build_int_cst (sitype, prec1));
4253 insert_before (g);
4255 unsigned start, end;
4256 bool check_zero;
4257 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4258 &start, &end, &check_zero);
4259 if (ovf == NULL_TREE)
4261 unsigned startlimb = start / limb_prec;
4262 unsigned endlimb = (end - 1) / limb_prec;
4263 unsigned cnt;
4264 bool use_loop = false;
4265 if (startlimb == endlimb)
4266 cnt = 1;
4267 else if (startlimb + 1 == endlimb)
4268 cnt = 2;
4269 else if ((end % limb_prec) == 0)
4271 cnt = 2;
4272 use_loop = true;
4274 else
4276 cnt = 3;
4277 use_loop = startlimb + 2 < endlimb;
4279 if (cnt == 1)
4281 tree l = limb_access (NULL_TREE, var ? var : obj,
4282 size_int (startlimb), true);
4283 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4284 insert_before (g);
4285 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4286 startlimb, check_zero);
4287 ovf = make_ssa_name (boolean_type_node);
4288 if (check_zero)
4289 g = gimple_build_assign (ovf, NE_EXPR, l,
4290 build_zero_cst (m_limb_type));
4291 else
4293 g = gimple_build_assign (make_ssa_name (m_limb_type),
4294 PLUS_EXPR, l,
4295 build_int_cst (m_limb_type, 1));
4296 insert_before (g);
4297 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4298 build_int_cst (m_limb_type, 1));
4300 insert_before (g);
4302 else
4304 basic_block edge_bb = NULL;
4305 gimple_stmt_iterator gsi = m_gsi;
4306 gsi_prev (&gsi);
4307 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4308 edge_bb = e->src;
4309 m_gsi = gsi_end_bb (edge_bb);
4311 tree cmp = build_zero_cst (m_limb_type);
4312 for (unsigned i = 0; i < cnt; i++)
4314 tree idx, idx_next = NULL_TREE;
4315 if (i == 0)
4316 idx = size_int (startlimb);
4317 else if (i == 2)
4318 idx = size_int (endlimb);
4319 else if (use_loop)
4320 idx = create_loop (size_int (startlimb + 1), &idx_next);
4321 else
4322 idx = size_int (startlimb + 1);
4323 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4324 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4325 insert_before (g);
4326 l = gimple_assign_lhs (g);
4327 if (i == 0 || i == 2)
4328 l = arith_overflow_extract_bits (start, end, l,
4329 tree_to_uhwi (idx),
4330 check_zero);
4331 if (i == 0 && !check_zero)
4333 cmp = l;
4334 g = gimple_build_assign (make_ssa_name (m_limb_type),
4335 PLUS_EXPR, l,
4336 build_int_cst (m_limb_type, 1));
4337 insert_before (g);
4338 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4339 build_int_cst (m_limb_type, 1),
4340 NULL_TREE, NULL_TREE);
4342 else
4343 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4344 insert_before (g);
4345 edge e1 = split_block (gsi_bb (m_gsi), g);
4346 e1->flags = EDGE_FALSE_VALUE;
4347 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4348 EDGE_TRUE_VALUE);
4349 e1->probability = profile_probability::likely ();
4350 e2->probability = e1->probability.invert ();
4351 if (i == 0)
4352 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4353 m_gsi = gsi_after_labels (e1->dest);
4354 if (i == 1 && use_loop)
4356 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4357 size_one_node);
4358 insert_before (g);
4359 g = gimple_build_cond (NE_EXPR, idx_next,
4360 size_int (endlimb + (cnt == 2)),
4361 NULL_TREE, NULL_TREE);
4362 insert_before (g);
4363 edge true_edge, false_edge;
4364 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4365 &true_edge,
4366 &false_edge);
4367 m_gsi = gsi_after_labels (false_edge->dest);
4371 ovf = make_ssa_name (boolean_type_node);
4372 basic_block bb = gimple_bb (final_stmt);
4373 gphi *phi = create_phi_node (ovf, bb);
4374 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4375 edge_iterator ei;
4376 FOR_EACH_EDGE (e, ei, bb->preds)
4378 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4379 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4381 m_gsi = gsi_for_stmt (final_stmt);
4385 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4388 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4389 .{ADD,SUB,MUL}_OVERFLOW call. */
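/* Editor's note (illustrative): for  r = REALPART_EXPR <_1>;  where
   _1 is a lowered .ADD_OVERFLOW result, this block-copies NELTS limbs
   from the variable backing _1 into the destination through a MEM_REF
   of the matching limb array type, at offset 0 for REALPART_EXPR and
   NELTS * limb size for IMAGPART_EXPR.  */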
4391 void
4392 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4394 tree rhs1 = gimple_assign_rhs1 (stmt);
4395 rhs1 = TREE_OPERAND (rhs1, 0);
4396 if (obj == NULL_TREE)
4398 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4399 gcc_assert (m_vars[part] != NULL_TREE);
4400 obj = m_vars[part];
4402 if (TREE_CODE (rhs1) == SSA_NAME
4403 && (m_names == NULL
4404 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4406 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4407 return;
4409 int part = var_to_partition (m_map, rhs1);
4410 gcc_assert (m_vars[part] != NULL_TREE);
4411 tree var = m_vars[part];
4412 unsigned HOST_WIDE_INT nelts
4413 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4414 tree atype = build_array_type_nelts (m_limb_type, nelts);
4415 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4416 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4417 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4418 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4419 ? 0 : nelts * m_limb_size);
4420 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4421 gimple *g = gimple_build_assign (obj, v2);
4422 insert_before (g);
4425 /* Lower COMPLEX_EXPR stmt. */
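/* Editor's note (illustrative): for  _3 = COMPLEX_EXPR <_1, _2>;  the
   two operands are block-copied into the first and second halves of
   the variable backing the complex lhs; non-zero constant operands
   are first spilled into the constant pool via
   tree_output_constant_def.  */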
4427 void
4428 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4430 tree lhs = gimple_assign_lhs (stmt);
4431 tree rhs1 = gimple_assign_rhs1 (stmt);
4432 tree rhs2 = gimple_assign_rhs2 (stmt);
4433 int part = var_to_partition (m_map, lhs);
4434 gcc_assert (m_vars[part] != NULL_TREE);
4435 lhs = m_vars[part];
4436 unsigned HOST_WIDE_INT nelts
4437 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4438 tree atype = build_array_type_nelts (m_limb_type, nelts);
4439 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4440 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4441 tree v2;
4442 if (TREE_CODE (rhs1) == SSA_NAME)
4444 part = var_to_partition (m_map, rhs1);
4445 gcc_assert (m_vars[part] != NULL_TREE);
4446 v2 = m_vars[part];
4448 else if (integer_zerop (rhs1))
4449 v2 = build_zero_cst (atype);
4450 else
4451 v2 = tree_output_constant_def (rhs1);
4452 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4453 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4454 gimple *g = gimple_build_assign (v1, v2);
4455 insert_before (g);
4456 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4457 TYPE_SIZE_UNIT (atype));
4458 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4459 if (TREE_CODE (rhs2) == SSA_NAME)
4461 part = var_to_partition (m_map, rhs2);
4462 gcc_assert (m_vars[part] != NULL_TREE);
4463 v2 = m_vars[part];
4465 else if (integer_zerop (rhs2))
4466 v2 = build_zero_cst (atype);
4467 else
4468 v2 = tree_output_constant_def (rhs2);
4469 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4470 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4471 g = gimple_build_assign (v1, v2);
4472 insert_before (g);
4475 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4476 argument. */
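/* Editor's sketch (illustrative, assuming a 64-bit limb so that the
   per-limb builtin is __builtin_popcountll): a huge _BitInt popcount
   becomes roughly
     res = 0;
     for (i = 0; i < limbs; i++)
       res += __builtin_popcountll (arg[i]);
   while CLZ/CTZ/FFS/CLRSB instead scan for the first interesting limb
   and apply the corresponding per-limb builtin to it, adjusted by the
   number of limbs skipped times the limb precision.  */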
4478 void
4479 bitint_large_huge::lower_bit_query (gimple *stmt)
4481 tree arg0 = gimple_call_arg (stmt, 0);
4482 tree arg1 = (gimple_call_num_args (stmt) == 2
4483 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4484 tree lhs = gimple_call_lhs (stmt);
4485 gimple *g;
4487 if (!lhs)
4489 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4490 gsi_remove (&gsi, true);
4491 return;
4493 tree type = TREE_TYPE (arg0);
4494 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4495 bitint_prec_kind kind = bitint_precision_kind (type);
4496 gcc_assert (kind >= bitint_prec_large);
4497 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4498 enum built_in_function fcode = END_BUILTINS;
4499 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4500 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4501 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4502 switch (ifn)
4504 case IFN_CLZ:
4505 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4506 fcode = BUILT_IN_CLZ;
4507 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4508 fcode = BUILT_IN_CLZL;
4509 else
4510 fcode = BUILT_IN_CLZLL;
4511 break;
4512 case IFN_FFS:
4513 /* .FFS (X) is handled like .CTZ (X, -1) + 1, except the + 1 is
4514 folded into the per-limb addend instead of added at the end.  */
4515 arg1 = integer_zero_node;
4516 /* FALLTHRU */
4517 case IFN_CTZ:
4518 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4519 fcode = BUILT_IN_CTZ;
4520 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4521 fcode = BUILT_IN_CTZL;
4522 else
4523 fcode = BUILT_IN_CTZLL;
4524 m_upwards = true;
4525 break;
4526 case IFN_CLRSB:
4527 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4528 fcode = BUILT_IN_CLRSB;
4529 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4530 fcode = BUILT_IN_CLRSBL;
4531 else
4532 fcode = BUILT_IN_CLRSBLL;
4533 break;
4534 case IFN_PARITY:
4535 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4536 fcode = BUILT_IN_PARITY;
4537 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4538 fcode = BUILT_IN_PARITYL;
4539 else
4540 fcode = BUILT_IN_PARITYLL;
4541 m_upwards = true;
4542 break;
4543 case IFN_POPCOUNT:
4544 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4545 fcode = BUILT_IN_POPCOUNT;
4546 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4547 fcode = BUILT_IN_POPCOUNTL;
4548 else
4549 fcode = BUILT_IN_POPCOUNTLL;
4550 m_upwards = true;
4551 break;
4552 default:
4553 gcc_unreachable ();
4555 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4556 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4557 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4558 basic_block edge_bb = NULL;
4559 if (m_upwards)
4561 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4562 if (kind == bitint_prec_large)
4563 cnt = CEIL (prec, limb_prec);
4564 else
4566 rem = (prec % (2 * limb_prec));
4567 end = (prec - rem) / limb_prec;
4568 cnt = 2 + CEIL (rem, limb_prec);
4569 idx = idx_first = create_loop (size_zero_node, &idx_next);
4572 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4574 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4575 gsi_prev (&gsi);
4576 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4577 edge_bb = e->src;
4578 if (kind == bitint_prec_large)
4579 m_gsi = gsi_end_bb (edge_bb);
4580 bqp = XALLOCAVEC (struct bq_details, cnt);
4582 else
4583 m_after_stmt = stmt;
4584 if (kind != bitint_prec_large)
4585 m_upwards_2limb = end;
4587 for (unsigned i = 0; i < cnt; i++)
4589 m_data_cnt = 0;
4590 if (kind == bitint_prec_large)
4591 idx = size_int (i);
4592 else if (i >= 2)
4593 idx = size_int (end + (i > 2));
4595 tree rhs1 = handle_operand (arg0, idx);
4596 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4598 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4599 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4600 rhs1 = add_cast (m_limb_type, rhs1);
4603 tree in, out, tem;
4604 if (ifn == IFN_PARITY)
4605 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4606 else if (ifn == IFN_FFS)
4607 in = prepare_data_in_out (integer_one_node, idx, &out);
4608 else
4609 in = prepare_data_in_out (integer_zero_node, idx, &out);
4611 switch (ifn)
4613 case IFN_CTZ:
4614 case IFN_FFS:
4615 g = gimple_build_cond (NE_EXPR, rhs1,
4616 build_zero_cst (m_limb_type),
4617 NULL_TREE, NULL_TREE);
4618 insert_before (g);
4619 edge e1, e2;
4620 e1 = split_block (gsi_bb (m_gsi), g);
4621 e1->flags = EDGE_FALSE_VALUE;
4622 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4623 e1->probability = profile_probability::unlikely ();
4624 e2->probability = e1->probability.invert ();
4625 if (i == 0)
4626 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4627 m_gsi = gsi_after_labels (e1->dest);
4628 bqp[i].e = e2;
4629 bqp[i].val = rhs1;
4630 if (tree_fits_uhwi_p (idx))
4631 bqp[i].addend
4632 = build_int_cst (integer_type_node,
4633 tree_to_uhwi (idx) * limb_prec
4634 + (ifn == IFN_FFS));
4635 else
4637 bqp[i].addend = in;
4638 if (i == 1)
4639 res = out;
4640 else
4641 res = make_ssa_name (integer_type_node);
4642 g = gimple_build_assign (res, PLUS_EXPR, in,
4643 build_int_cst (integer_type_node,
4644 limb_prec));
4645 insert_before (g);
4646 m_data[m_data_cnt] = res;
4648 break;
4649 case IFN_PARITY:
4650 if (!integer_zerop (in))
4652 if (kind == bitint_prec_huge && i == 1)
4653 res = out;
4654 else
4655 res = make_ssa_name (m_limb_type);
4656 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4657 insert_before (g);
4659 else
4660 res = rhs1;
4661 m_data[m_data_cnt] = res;
4662 break;
4663 case IFN_POPCOUNT:
4664 g = gimple_build_call (fndecl, 1, rhs1);
4665 tem = make_ssa_name (integer_type_node);
4666 gimple_call_set_lhs (g, tem);
4667 insert_before (g);
4668 if (!integer_zerop (in))
4670 if (kind == bitint_prec_huge && i == 1)
4671 res = out;
4672 else
4673 res = make_ssa_name (integer_type_node);
4674 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4675 insert_before (g);
4677 else
4678 res = tem;
4679 m_data[m_data_cnt] = res;
4680 break;
4681 default:
4682 gcc_unreachable ();
4685 m_first = false;
4686 if (kind == bitint_prec_huge && i <= 1)
4688 if (i == 0)
4690 idx = make_ssa_name (sizetype);
4691 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4692 size_one_node);
4693 insert_before (g);
4695 else
4697 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4698 size_int (2));
4699 insert_before (g);
4700 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4701 NULL_TREE, NULL_TREE);
4702 insert_before (g);
4703 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4704 m_gsi = gsi_after_labels (edge_bb);
4705 else
4706 m_gsi = gsi_for_stmt (stmt);
4711 else
4713 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4714 int sub_one = 0;
4715 if (kind == bitint_prec_large)
4716 cnt = CEIL (prec, limb_prec);
4717 else
4719 rem = prec % limb_prec;
4720 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4721 rem = limb_prec;
4722 end = (prec - rem) / limb_prec;
4723 cnt = 1 + (rem != 0);
4724 if (ifn == IFN_CLRSB)
4725 sub_one = 1;
4728 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4729 gsi_prev (&gsi);
4730 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4731 edge_bb = e->src;
4732 m_gsi = gsi_end_bb (edge_bb);
4734 if (ifn == IFN_CLZ)
4735 bqp = XALLOCAVEC (struct bq_details, cnt);
4736 else
4738 gsi = gsi_for_stmt (stmt);
4739 gsi_prev (&gsi);
4740 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4741 edge_bb = e->src;
4742 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4745 for (unsigned i = 0; i < cnt; i++)
4747 m_data_cnt = 0;
4748 if (kind == bitint_prec_large)
4749 idx = size_int (cnt - i - 1);
4750 else if (i == cnt - 1)
4751 idx = create_loop (size_int (end - 1), &idx_next);
4752 else
4753 idx = size_int (end);
4755 tree rhs1 = handle_operand (arg0, idx);
4756 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4758 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4759 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4760 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4761 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4762 rhs1 = add_cast (m_limb_type, rhs1);
4765 if (ifn == IFN_CLZ)
4767 g = gimple_build_cond (NE_EXPR, rhs1,
4768 build_zero_cst (m_limb_type),
4769 NULL_TREE, NULL_TREE);
4770 insert_before (g);
4771 edge e1 = split_block (gsi_bb (m_gsi), g);
4772 e1->flags = EDGE_FALSE_VALUE;
4773 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4774 e1->probability = profile_probability::unlikely ();
4775 e2->probability = e1->probability.invert ();
4776 if (i == 0)
4777 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4778 m_gsi = gsi_after_labels (e1->dest);
4779 bqp[i].e = e2;
4780 bqp[i].val = rhs1;
4782 else
4784 if (i == 0)
4786 first = rhs1;
4787 g = gimple_build_assign (make_ssa_name (m_limb_type),
4788 PLUS_EXPR, rhs1,
4789 build_int_cst (m_limb_type, 1));
4790 insert_before (g);
4791 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4792 build_int_cst (m_limb_type, 1),
4793 NULL_TREE, NULL_TREE);
4794 insert_before (g);
4796 else
4798 g = gimple_build_assign (make_ssa_name (m_limb_type),
4799 BIT_XOR_EXPR, rhs1, first);
4800 insert_before (g);
4801 tree stype = signed_type_for (m_limb_type);
4802 g = gimple_build_cond (LT_EXPR,
4803 add_cast (stype,
4804 gimple_assign_lhs (g)),
4805 build_zero_cst (stype),
4806 NULL_TREE, NULL_TREE);
4807 insert_before (g);
4808 edge e1 = split_block (gsi_bb (m_gsi), g);
4809 e1->flags = EDGE_FALSE_VALUE;
4810 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4811 EDGE_TRUE_VALUE);
4812 e1->probability = profile_probability::unlikely ();
4813 e2->probability = e1->probability.invert ();
4814 if (i == 1)
4815 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4816 e2->src);
4817 m_gsi = gsi_after_labels (e1->dest);
4818 bqp[2 * i].e = e2;
4819 g = gimple_build_cond (NE_EXPR, rhs1, first,
4820 NULL_TREE, NULL_TREE);
4821 insert_before (g);
4823 edge e1 = split_block (gsi_bb (m_gsi), g);
4824 e1->flags = EDGE_FALSE_VALUE;
4825 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4826 e1->probability = profile_probability::unlikely ();
4827 e2->probability = e1->probability.invert ();
4828 if (i == 0)
4829 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4830 m_gsi = gsi_after_labels (e1->dest);
4831 bqp[2 * i + 1].e = e2;
4832 bqp[i].val = rhs1;
4834 if (tree_fits_uhwi_p (idx))
4835 bqp[i].addend
4836 = build_int_cst (integer_type_node,
4837 (int) prec
4838 - (((int) tree_to_uhwi (idx) + 1)
4839 * limb_prec) - sub_one);
4840 else
4842 tree in, out;
4843 in = build_int_cst (integer_type_node, rem - sub_one);
4844 m_first = true;
4845 in = prepare_data_in_out (in, idx, &out);
4846 out = m_data[m_data_cnt + 1];
4847 bqp[i].addend = in;
4848 g = gimple_build_assign (out, PLUS_EXPR, in,
4849 build_int_cst (integer_type_node,
4850 limb_prec));
4851 insert_before (g);
4852 m_data[m_data_cnt] = out;
4855 m_first = false;
4856 if (kind == bitint_prec_huge && i == cnt - 1)
4858 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4859 size_int (-1));
4860 insert_before (g);
4861 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4862 NULL_TREE, NULL_TREE);
4863 insert_before (g);
4864 edge true_edge, false_edge;
4865 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4866 &true_edge, &false_edge);
4867 m_gsi = gsi_after_labels (false_edge->dest);
4871 switch (ifn)
4873 case IFN_CLZ:
4874 case IFN_CTZ:
4875 case IFN_FFS:
4876 gphi *phi1, *phi2, *phi3;
4877 basic_block bb;
4878 bb = gsi_bb (m_gsi);
4879 remove_edge (find_edge (bb, gimple_bb (stmt)));
4880 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4881 gimple_bb (stmt));
4882 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4883 gimple_bb (stmt));
4884 for (unsigned i = 0; i < cnt; i++)
4886 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4887 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4889 if (arg1 == NULL_TREE)
4891 g = gimple_build_builtin_unreachable (m_loc);
4892 insert_before (g);
4894 m_gsi = gsi_for_stmt (stmt);
4895 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4896 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4897 insert_before (g);
4898 if (arg1 == NULL_TREE)
4899 g = gimple_build_assign (lhs, PLUS_EXPR,
4900 gimple_phi_result (phi2),
4901 gimple_call_lhs (g));
4902 else
4904 g = gimple_build_assign (make_ssa_name (integer_type_node),
4905 PLUS_EXPR, gimple_phi_result (phi2),
4906 gimple_call_lhs (g));
4907 insert_before (g);
4908 edge e1 = split_block (gimple_bb (stmt), g);
4909 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
4910 e2->probability = profile_probability::always ();
4911 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
4912 get_immediate_dominator (CDI_DOMINATORS,
4913 e1->src));
4914 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
4915 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
4916 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
4917 m_gsi = gsi_for_stmt (stmt);
4918 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4920 gsi_replace (&m_gsi, g, true);
4921 break;
4922 case IFN_CLRSB:
4923 bb = gsi_bb (m_gsi);
4924 remove_edge (find_edge (bb, edge_bb));
4925 edge e;
4926 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
4927 e->probability = profile_probability::always ();
4928 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
4929 get_immediate_dominator (CDI_DOMINATORS,
4930 edge_bb));
4931 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4932 edge_bb);
4933 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4934 edge_bb);
4935 phi3 = create_phi_node (make_ssa_name (integer_type_node),
4936 gimple_bb (stmt));
4937 for (unsigned i = 0; i < cnt; i++)
4939 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
4940 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
4941 UNKNOWN_LOCATION);
4942 tree a = bqp[i].addend;
4943 if (i && kind == bitint_prec_large)
4944 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
4945 if (i)
4946 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
4948 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
4949 UNKNOWN_LOCATION);
4950 m_gsi = gsi_after_labels (edge_bb);
4951 g = gimple_build_call (fndecl, 1,
4952 add_cast (signed_type_for (m_limb_type),
4953 gimple_phi_result (phi1)));
4954 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4955 insert_before (g);
4956 g = gimple_build_assign (make_ssa_name (integer_type_node),
4957 PLUS_EXPR, gimple_call_lhs (g),
4958 gimple_phi_result (phi2));
4959 insert_before (g);
4960 if (kind != bitint_prec_large)
4962 g = gimple_build_assign (make_ssa_name (integer_type_node),
4963 PLUS_EXPR, gimple_assign_lhs (g),
4964 integer_one_node);
4965 insert_before (g);
4967 add_phi_arg (phi3, gimple_assign_lhs (g),
4968 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
4969 m_gsi = gsi_for_stmt (stmt);
4970 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4971 gsi_replace (&m_gsi, g, true);
4972 break;
4973 case IFN_PARITY:
4974 g = gimple_build_call (fndecl, 1, res);
4975 gimple_call_set_lhs (g, lhs);
4976 gsi_replace (&m_gsi, g, true);
4977 break;
4978 case IFN_POPCOUNT:
4979 g = gimple_build_assign (lhs, res);
4980 gsi_replace (&m_gsi, g, true);
4981 break;
4982 default:
4983 gcc_unreachable ();
4987 /* Lower a call statement with one or more large/huge _BitInt
4988 arguments or large/huge _BitInt return value. */
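/* Editor's illustration (hypothetical SSA names): for a call
   foo (x_1)  with large/huge _BitInt x_1, the loop below reloads the
   limb array variable backing x_1 into a fresh SSA_NAME, roughly
     _2 = VIEW_CONVERT_EXPR <_BitInt(256)> (x.3);
     foo (_2);
   recording _2 in m_preserved, while a large/huge _BitInt lhs is
   redirected so the callee's return value is stored directly into the
   variable backing its partition.  */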
4990 void
4991 bitint_large_huge::lower_call (tree obj, gimple *stmt)
4993 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4994 unsigned int nargs = gimple_call_num_args (stmt);
4995 if (gimple_call_internal_p (stmt))
4996 switch (gimple_call_internal_fn (stmt))
4998 case IFN_ADD_OVERFLOW:
4999 case IFN_SUB_OVERFLOW:
5000 case IFN_UBSAN_CHECK_ADD:
5001 case IFN_UBSAN_CHECK_SUB:
5002 lower_addsub_overflow (obj, stmt);
5003 return;
5004 case IFN_MUL_OVERFLOW:
5005 case IFN_UBSAN_CHECK_MUL:
5006 lower_mul_overflow (obj, stmt);
5007 return;
5008 case IFN_CLZ:
5009 case IFN_CTZ:
5010 case IFN_CLRSB:
5011 case IFN_FFS:
5012 case IFN_PARITY:
5013 case IFN_POPCOUNT:
5014 lower_bit_query (stmt);
5015 return;
5016 default:
5017 break;
5019 for (unsigned int i = 0; i < nargs; ++i)
5021 tree arg = gimple_call_arg (stmt, i);
5022 if (TREE_CODE (arg) != SSA_NAME
5023 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5024 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5025 continue;
5026 int p = var_to_partition (m_map, arg);
5027 tree v = m_vars[p];
5028 gcc_assert (v != NULL_TREE);
5029 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5030 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5031 arg = make_ssa_name (TREE_TYPE (arg));
5032 gimple *g = gimple_build_assign (arg, v);
5033 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5034 gimple_call_set_arg (stmt, i, arg);
5035 if (m_preserved == NULL)
5036 m_preserved = BITMAP_ALLOC (NULL);
5037 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5039 tree lhs = gimple_call_lhs (stmt);
5040 if (lhs
5041 && TREE_CODE (lhs) == SSA_NAME
5042 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5043 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5045 int p = var_to_partition (m_map, lhs);
5046 tree v = m_vars[p];
5047 gcc_assert (v != NULL_TREE);
5048 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5049 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5050 gimple_call_set_lhs (stmt, v);
5051 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5053 update_stmt (stmt);
5056 /* Lower __asm STMT which involves large/huge _BitInt values. */
5058 void
5059 bitint_large_huge::lower_asm (gimple *stmt)
5061 gasm *g = as_a <gasm *> (stmt);
5062 unsigned noutputs = gimple_asm_noutputs (g);
5063 unsigned ninputs = gimple_asm_ninputs (g);
5065 for (unsigned i = 0; i < noutputs; ++i)
5067 tree t = gimple_asm_output_op (g, i);
5068 tree s = TREE_VALUE (t);
5069 if (TREE_CODE (s) == SSA_NAME
5070 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5071 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5073 int part = var_to_partition (m_map, s);
5074 gcc_assert (m_vars[part] != NULL_TREE);
5075 TREE_VALUE (t) = m_vars[part];
5078 for (unsigned i = 0; i < ninputs; ++i)
5080 tree t = gimple_asm_input_op (g, i);
5081 tree s = TREE_VALUE (t);
5082 if (TREE_CODE (s) == SSA_NAME
5083 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5084 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5086 int part = var_to_partition (m_map, s);
5087 gcc_assert (m_vars[part] != NULL_TREE);
5088 TREE_VALUE (t) = m_vars[part];
5091 update_stmt (stmt);
5094 /* Lower statement STMT which involves large/huge _BitInt values
5095 into code accessing individual limbs. */
5097 void
5098 bitint_large_huge::lower_stmt (gimple *stmt)
5100 m_first = true;
5101 m_lhs = NULL_TREE;
5102 m_data.truncate (0);
5103 m_data_cnt = 0;
5104 m_gsi = gsi_for_stmt (stmt);
5105 m_after_stmt = NULL;
5106 m_bb = NULL;
5107 m_init_gsi = m_gsi;
5108 gsi_prev (&m_init_gsi);
5109 m_preheader_bb = NULL;
5110 m_upwards_2limb = 0;
5111 m_upwards = false;
5112 m_var_msb = false;
5113 m_cast_conditional = false;
5114 m_bitfld_load = 0;
5115 m_loc = gimple_location (stmt);
5116 if (is_gimple_call (stmt))
5118 lower_call (NULL_TREE, stmt);
5119 return;
5121 if (gimple_code (stmt) == GIMPLE_ASM)
5123 lower_asm (stmt);
5124 return;
5126 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5127 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5128 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5129 bool mergeable_cast_p = false;
5130 bool final_cast_p = false;
5131 if (gimple_assign_cast_p (stmt))
5133 lhs = gimple_assign_lhs (stmt);
5134 tree rhs1 = gimple_assign_rhs1 (stmt);
5135 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5136 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5137 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5138 mergeable_cast_p = true;
5139 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5140 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5141 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5143 final_cast_p = true;
5144 if (TREE_CODE (rhs1) == SSA_NAME
5145 && (m_names == NULL
5146 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5148 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5149 if (is_gimple_assign (g)
5150 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5152 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5153 if (TREE_CODE (rhs2) == SSA_NAME
5154 && (m_names == NULL
5155 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5157 g = SSA_NAME_DEF_STMT (rhs2);
5158 int ovf = optimizable_arith_overflow (g);
5159 if (ovf == 2)
5160 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5161 and IMAGPART_EXPR uses, where the latter is cast to
5162 non-_BitInt, it will be optimized when handling
5163 the REALPART_EXPR. */
5164 return;
5165 if (ovf == 1)
5167 lower_call (NULL_TREE, g);
5168 return;
5175 if (gimple_store_p (stmt))
5177 tree rhs1 = gimple_assign_rhs1 (stmt);
5178 if (TREE_CODE (rhs1) == SSA_NAME
5179 && (m_names == NULL
5180 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5182 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5183 m_loc = gimple_location (g);
5184 lhs = gimple_assign_lhs (stmt);
5185 if (is_gimple_assign (g) && !mergeable_op (g))
5186 switch (gimple_assign_rhs_code (g))
5188 case LSHIFT_EXPR:
5189 case RSHIFT_EXPR:
5190 lower_shift_stmt (lhs, g);
5191 handled:
5192 m_gsi = gsi_for_stmt (stmt);
5193 unlink_stmt_vdef (stmt);
5194 release_ssa_name (gimple_vdef (stmt));
5195 gsi_remove (&m_gsi, true);
5196 return;
5197 case MULT_EXPR:
5198 case TRUNC_DIV_EXPR:
5199 case TRUNC_MOD_EXPR:
5200 lower_muldiv_stmt (lhs, g);
5201 goto handled;
5202 case FIX_TRUNC_EXPR:
5203 lower_float_conv_stmt (lhs, g);
5204 goto handled;
5205 case REALPART_EXPR:
5206 case IMAGPART_EXPR:
5207 lower_cplxpart_stmt (lhs, g);
5208 goto handled;
5209 default:
5210 break;
5212 else if (optimizable_arith_overflow (g) == 3)
5214 lower_call (lhs, g);
5215 goto handled;
5217 m_loc = gimple_location (stmt);
5220 if (mergeable_op (stmt)
5221 || gimple_store_p (stmt)
5222 || gimple_assign_load_p (stmt)
5223 || eq_p
5224 || mergeable_cast_p)
5226 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5227 if (!eq_p)
5228 return;
5230 else if (cmp_code != ERROR_MARK)
5231 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5232 if (cmp_code != ERROR_MARK)
5234 if (gimple_code (stmt) == GIMPLE_COND)
5236 gcond *cstmt = as_a <gcond *> (stmt);
5237 gimple_cond_set_lhs (cstmt, lhs);
5238 gimple_cond_set_rhs (cstmt, boolean_false_node);
5239 gimple_cond_set_code (cstmt, cmp_code);
5240 update_stmt (stmt);
5241 return;
5243 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5245 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5246 boolean_false_node);
5247 gimple_assign_set_rhs1 (stmt, cond);
5248 lhs = gimple_assign_lhs (stmt);
5249 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5250 || (bitint_precision_kind (TREE_TYPE (lhs))
5251 <= bitint_prec_middle));
5252 update_stmt (stmt);
5253 return;
5255 gimple_assign_set_rhs1 (stmt, lhs);
5256 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5257 gimple_assign_set_rhs_code (stmt, cmp_code);
5258 update_stmt (stmt);
5259 return;
5261 if (final_cast_p)
5263 tree lhs_type = TREE_TYPE (lhs);
5264 /* Add support for 3 or more limbs filled in from normal integral
5265 type if this assert fails. If no target chooses limb mode smaller
5266 than half of largest supported normal integral type, this will not
5267 be needed. */
5268 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5269 gimple *g;
5270 if (TREE_CODE (lhs_type) == BITINT_TYPE
5271 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5272 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5273 TYPE_UNSIGNED (lhs_type));
5274 m_data_cnt = 0;
5275 tree rhs1 = gimple_assign_rhs1 (stmt);
5276 tree r1 = handle_operand (rhs1, size_int (0));
5277 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5278 r1 = add_cast (lhs_type, r1);
5279 if (TYPE_PRECISION (lhs_type) > limb_prec)
5281 m_data_cnt = 0;
5282 m_first = false;
5283 tree r2 = handle_operand (rhs1, size_int (1));
5284 r2 = add_cast (lhs_type, r2);
5285 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5286 build_int_cst (unsigned_type_node,
5287 limb_prec));
5288 insert_before (g);
5289 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5290 gimple_assign_lhs (g));
5291 insert_before (g);
5292 r1 = gimple_assign_lhs (g);
5294 if (lhs_type != TREE_TYPE (lhs))
5295 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5296 else
5297 g = gimple_build_assign (lhs, r1);
5298 gsi_replace (&m_gsi, g, true);
5299 return;
5301 if (is_gimple_assign (stmt))
5302 switch (gimple_assign_rhs_code (stmt))
5304 case LSHIFT_EXPR:
5305 case RSHIFT_EXPR:
5306 lower_shift_stmt (NULL_TREE, stmt);
5307 return;
5308 case MULT_EXPR:
5309 case TRUNC_DIV_EXPR:
5310 case TRUNC_MOD_EXPR:
5311 lower_muldiv_stmt (NULL_TREE, stmt);
5312 return;
5313 case FIX_TRUNC_EXPR:
5314 case FLOAT_EXPR:
5315 lower_float_conv_stmt (NULL_TREE, stmt);
5316 return;
5317 case REALPART_EXPR:
5318 case IMAGPART_EXPR:
5319 lower_cplxpart_stmt (NULL_TREE, stmt);
5320 return;
5321 case COMPLEX_EXPR:
5322 lower_complexexpr_stmt (stmt);
5323 return;
5324 default:
5325 break;
5327 gcc_unreachable ();
5330 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5331 the desired memory state. */
5333 void *
5334 vuse_eq (ao_ref *, tree vuse1, void *data)
5336 tree vuse2 = (tree) data;
5337 if (vuse1 == vuse2)
5338 return data;
5340 return NULL;
5343 /* Return true if STMT uses a library function and needs to take
5344 the address of its inputs.  We need to avoid bit-fields in those
5345 cases.  */
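/* Editor's note (illustrative): multiplication, division, modulo and
   conversion to floating point are lowered to out-of-line library
   routines which receive the address of a limb array for each input,
   so such inputs must live in addressable limb-aligned storage, which
   a bit-field in general does not provide.  */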
5347 bool
5348 stmt_needs_operand_addr (gimple *stmt)
5350 if (is_gimple_assign (stmt))
5351 switch (gimple_assign_rhs_code (stmt))
5353 case MULT_EXPR:
5354 case TRUNC_DIV_EXPR:
5355 case TRUNC_MOD_EXPR:
5356 case FLOAT_EXPR:
5357 return true;
5358 default:
5359 break;
5361 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5362 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5363 return true;
5364 return false;
5367 /* Dominator walker used to discover which large/huge _BitInt
5368 loads could be sunk into all their uses. */
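/* Editor's note (illustrative): a load  x_1 = mem;  whose uses are
   all mergeable statements can be replayed limb by limb at each use
   instead of being copied into a limb array of its own, but only if
   no intervening store may alias MEM; before_dom_children tracks the
   virtual operand along the dominator tree and verifies this with
   walk_non_aliased_vuses.  */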
5370 class bitint_dom_walker : public dom_walker
5372 public:
5373 bitint_dom_walker (bitmap names, bitmap loads)
5374 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5376 edge before_dom_children (basic_block) final override;
5378 private:
5379 bitmap m_names, m_loads;
5382 edge
5383 bitint_dom_walker::before_dom_children (basic_block bb)
5385 gphi *phi = get_virtual_phi (bb);
5386 tree vop;
5387 if (phi)
5388 vop = gimple_phi_result (phi);
5389 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5390 vop = NULL_TREE;
5391 else
5392 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5394 auto_vec<tree, 16> worklist;
5395 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5396 !gsi_end_p (gsi); gsi_next (&gsi))
5398 gimple *stmt = gsi_stmt (gsi);
5399 if (is_gimple_debug (stmt))
5400 continue;
5402 if (!vop && gimple_vuse (stmt))
5403 vop = gimple_vuse (stmt);
5405 tree cvop = vop;
5406 if (gimple_vdef (stmt))
5407 vop = gimple_vdef (stmt);
5409 tree lhs = gimple_get_lhs (stmt);
5410 if (lhs
5411 && TREE_CODE (lhs) == SSA_NAME
5412 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5413 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5414 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5415 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5416 it means it will be handled in a loop or straight line code
5417 at the location of its (ultimate) immediate use, so for
5418 vop checking purposes check these only at the ultimate
5419 immediate use. */
5420 continue;
5422 ssa_op_iter oi;
5423 use_operand_p use_p;
5424 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5426 tree s = USE_FROM_PTR (use_p);
5427 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5428 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5429 worklist.safe_push (s);
5432 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5433 while (worklist.length () > 0)
5435 tree s = worklist.pop ();
5437 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5439 gimple *g = SSA_NAME_DEF_STMT (s);
5440 needs_operand_addr |= stmt_needs_operand_addr (g);
5441 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5443 tree s2 = USE_FROM_PTR (use_p);
5444 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5445 && (bitint_precision_kind (TREE_TYPE (s2))
5446 >= bitint_prec_large))
5447 worklist.safe_push (s2);
5449 continue;
5451 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5452 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5454 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5455 if (TREE_CODE (rhs) == SSA_NAME
5456 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5457 s = rhs;
5458 else
5459 continue;
5461 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5462 continue;
5464 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5465 if (needs_operand_addr
5466 && TREE_CODE (rhs1) == COMPONENT_REF
5467 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5469 tree fld = TREE_OPERAND (rhs1, 1);
5470 /* For little-endian, we can allow as inputs bit-fields
5471 which start at a limb boundary. */
5472 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5473 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5474 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5475 % limb_prec) == 0)
5477 else
5479 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5480 continue;
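/* E.g. with 64-bit limbs on a little-endian target, a bit-field
   starting at bit offset 128 can be used in place, while one
   starting at bit offset 67 can't, so such a load keeps its own
   backing variable.  */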
5484 ao_ref ref;
5485 ao_ref_init (&ref, rhs1);
5486 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5487 unsigned limit = 64;
5488 tree vuse = cvop;
5489 if (vop != cvop
5490 && is_gimple_assign (stmt)
5491 && gimple_store_p (stmt)
5492 && !operand_equal_p (lhs,
5493 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5494 0))
5495 vuse = vop;
5496 if (vuse != lvop
5497 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5498 NULL, NULL, limit, lvop) == NULL)
5499 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5503 bb->aux = (void *) vop;
5504 return NULL;
5509 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5510 build_ssa_conflict_graph.
5511 The differences are:
5512 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5513 2) for large/huge _BitInt multiplication/division/modulo process def
5514 only after processing uses rather than before to make uses conflict
5515 with the definition
5516 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5517 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5518 the final statement. */
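/* E.g. for a huge
     x = y * z;
   the lowered code passes addresses of all three operands to the
   libgcc multiplication routine, so x must not share a partition
   with y or z; processing the def after the uses (difference 2
   above) makes them conflict.  */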
5520 void
5521 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5522 ssa_conflicts *graph, bitmap names,
5523 void (*def) (live_track *, tree,
5524 ssa_conflicts *),
5525 void (*use) (live_track *, tree))
5527 bool muldiv_p = false;
5528 tree lhs = NULL_TREE;
5529 if (is_gimple_assign (stmt))
5531 lhs = gimple_assign_lhs (stmt);
5532 if (TREE_CODE (lhs) == SSA_NAME
5533 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5534 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5536 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5537 return;
5538 switch (gimple_assign_rhs_code (stmt))
5540 case MULT_EXPR:
5541 case TRUNC_DIV_EXPR:
5542 case TRUNC_MOD_EXPR:
5543 muldiv_p = true;
5544 default:
5545 break;
5550 ssa_op_iter iter;
5551 tree var;
5552 if (!muldiv_p)
5554 /* For stmts with more than one SSA_NAME definition pretend all the
5555 SSA_NAME outputs but the first one are live at this point, so
5556 that conflicts are added in between all those even when they are
5557 not actually live after the asm, because expansion might
5558 copy those into pseudos after the asm and if multiple outputs
5559 share the same partition, it might overwrite those that should
5560 be live. E.g.
5561 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
5562 return a;
5563 See PR70593. */
5564 bool first = true;
5565 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5566 if (first)
5567 first = false;
5568 else
5569 use (live, var);
5571 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5572 def (live, var, graph);
5575 auto_vec<tree, 16> worklist;
5576 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5577 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5578 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5580 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5581 use (live, var);
5582 else
5583 worklist.safe_push (var);
5586 while (worklist.length () > 0)
5588 tree s = worklist.pop ();
5589 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5590 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5591 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5593 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5594 use (live, var);
5595 else
5596 worklist.safe_push (var);
5600 if (muldiv_p)
5601 def (live, lhs, graph);
5604 /* Entry point for _BitInt(N) operation lowering during optimization. */
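/* The lowering proceeds in phases: first scan the SSA_NAMEs to check
   whether any large/huge _BitInt (or a store or float cast of a
   large/huge _BitInt INTEGER_CST) appears at all, then rewrite
   rotates, ABS{,U}_EXPR, MIN/MAX_EXPR and COND_EXPR into simpler
   operations, then decide which SSA_NAMEs need backing variables and
   coalesce those, and finally lower the individual statements and
   PHIs.  */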
5606 static unsigned int
5607 gimple_lower_bitint (void)
5609 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5610 limb_prec = 0;
5612 unsigned int i;
5613 for (i = 0; i < num_ssa_names; ++i)
5615 tree s = ssa_name (i);
5616 if (s == NULL)
5617 continue;
5618 tree type = TREE_TYPE (s);
5619 if (TREE_CODE (type) == COMPLEX_TYPE)
5620 type = TREE_TYPE (type);
5621 if (TREE_CODE (type) == BITINT_TYPE
5622 && bitint_precision_kind (type) != bitint_prec_small)
5623 break;
5624 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5625 into memory. Such functions could have no large/huge SSA_NAMEs. */
5626 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5628 gimple *g = SSA_NAME_DEF_STMT (s);
5629 if (is_gimple_assign (g) && gimple_store_p (g))
5631 tree t = gimple_assign_rhs1 (g);
5632 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5633 && (bitint_precision_kind (TREE_TYPE (t))
5634 >= bitint_prec_large))
5635 break;
5638 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5639 to floating point types need to be rewritten. */
5640 else if (SCALAR_FLOAT_TYPE_P (type))
5642 gimple *g = SSA_NAME_DEF_STMT (s);
5643 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5645 tree t = gimple_assign_rhs1 (g);
5646 if (TREE_CODE (t) == INTEGER_CST
5647 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5648 && (bitint_precision_kind (TREE_TYPE (t))
5649 != bitint_prec_small))
5650 break;
5654 if (i == num_ssa_names)
5655 return 0;
5657 basic_block bb;
5658 auto_vec<gimple *, 4> switch_statements;
5659 FOR_EACH_BB_FN (bb, cfun)
5661 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5663 tree idx = gimple_switch_index (swtch);
5664 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5665 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5666 continue;
5668 if (optimize)
5669 group_case_labels_stmt (swtch);
5670 switch_statements.safe_push (swtch);
5674 if (!switch_statements.is_empty ())
5676 bool expanded = false;
5677 gimple *stmt;
5678 unsigned int j;
5679 i = 0;
5680 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5682 gswitch *swtch = as_a<gswitch *> (stmt);
5683 tree_switch_conversion::switch_decision_tree dt (swtch);
5684 expanded |= dt.analyze_switch_statement ();
5687 if (expanded)
5689 free_dominance_info (CDI_DOMINATORS);
5690 free_dominance_info (CDI_POST_DOMINATORS);
5691 mark_virtual_operands_for_renaming (cfun);
5692 cleanup_tree_cfg (TODO_update_ssa);
5696 struct bitint_large_huge large_huge;
5697 bool has_large_huge_parm_result = false;
5698 bool has_large_huge = false;
5699 unsigned int ret = 0, first_large_huge = ~0U;
5700 bool edge_insertions = false;
5701 for (; i < num_ssa_names; ++i)
5703 tree s = ssa_name (i);
5704 if (s == NULL)
5705 continue;
5706 tree type = TREE_TYPE (s);
5707 if (TREE_CODE (type) == COMPLEX_TYPE)
5708 type = TREE_TYPE (type);
5709 if (TREE_CODE (type) == BITINT_TYPE
5710 && bitint_precision_kind (type) >= bitint_prec_large)
5712 if (first_large_huge == ~0U)
5713 first_large_huge = i;
5714 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5715 gimple_stmt_iterator gsi;
5716 tree_code rhs_code;
5717 /* Unoptimize certain constructs to simpler alternatives to
5718 avoid having to lower all of them. */
5719 if (is_gimple_assign (stmt))
5720 switch (rhs_code = gimple_assign_rhs_code (stmt))
5722 default:
5723 break;
5724 case LROTATE_EXPR:
5725 case RROTATE_EXPR:
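/* Rotates are rewritten into a pair of shifts, i.e. roughly
     x r<< n  ->  (x << n) | (x >> (prec - n))
   computed in the corresponding unsigned type, so that later only
   shifts and bitwise or need to be lowered.  */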
5727 first_large_huge = 0;
5728 location_t loc = gimple_location (stmt);
5729 gsi = gsi_for_stmt (stmt);
5730 tree rhs1 = gimple_assign_rhs1 (stmt);
5731 tree type = TREE_TYPE (rhs1);
5732 tree n = gimple_assign_rhs2 (stmt), m;
5733 tree p = build_int_cst (TREE_TYPE (n),
5734 TYPE_PRECISION (type));
5735 if (TREE_CODE (n) == INTEGER_CST)
5736 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5737 else
5739 m = make_ssa_name (TREE_TYPE (n));
5740 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5741 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5742 gimple_set_location (g, loc);
5744 if (!TYPE_UNSIGNED (type))
5746 tree utype = build_bitint_type (TYPE_PRECISION (type),
5747 1);
5748 if (TREE_CODE (rhs1) == INTEGER_CST)
5749 rhs1 = fold_convert (utype, rhs1);
5750 else
5752 tree t = make_ssa_name (utype);
5753 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5754 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5755 gimple_set_location (g, loc);
5756 rhs1 = t;
5758 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5759 rhs_code == LROTATE_EXPR
5760 ? LSHIFT_EXPR : RSHIFT_EXPR,
5761 rhs1, n);
5762 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5763 gimple_set_location (g, loc);
5764 tree op1 = gimple_assign_lhs (g);
5765 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5766 rhs_code == LROTATE_EXPR
5767 ? RSHIFT_EXPR : LSHIFT_EXPR,
5768 rhs1, m);
5769 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5770 gimple_set_location (g, loc);
5771 tree op2 = gimple_assign_lhs (g);
5772 tree lhs = gimple_assign_lhs (stmt);
5773 if (!TYPE_UNSIGNED (type))
5775 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5776 BIT_IOR_EXPR, op1, op2);
5777 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5778 gimple_set_location (g, loc);
5779 g = gimple_build_assign (lhs, NOP_EXPR,
5780 gimple_assign_lhs (g));
5782 else
5783 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5784 gsi_replace (&gsi, g, true);
5785 gimple_set_location (g, loc);
5787 break;
5788 case ABS_EXPR:
5789 case ABSU_EXPR:
5790 case MIN_EXPR:
5791 case MAX_EXPR:
5792 case COND_EXPR:
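/* These are rewritten using explicit control flow: a GIMPLE_COND,
   split blocks and a PHI merging the two values, so that e.g.
   lhs = ABS_EXPR <x> becomes roughly
     if (x < 0) _tmp = -x;
     lhs = PHI <_tmp, x>
   and only the comparison and negation need to be lowered.  */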
5793 first_large_huge = 0;
5794 gsi = gsi_for_stmt (stmt);
5795 tree lhs = gimple_assign_lhs (stmt);
5796 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5797 location_t loc = gimple_location (stmt);
5798 if (rhs_code == ABS_EXPR)
5799 g = gimple_build_cond (LT_EXPR, rhs1,
5800 build_zero_cst (TREE_TYPE (rhs1)),
5801 NULL_TREE, NULL_TREE);
5802 else if (rhs_code == ABSU_EXPR)
5804 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5805 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5806 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5807 gimple_set_location (g, loc);
5808 g = gimple_build_cond (LT_EXPR, rhs1,
5809 build_zero_cst (TREE_TYPE (rhs1)),
5810 NULL_TREE, NULL_TREE);
5811 rhs1 = rhs2;
5813 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5815 rhs2 = gimple_assign_rhs2 (stmt);
5816 if (TREE_CODE (rhs1) == INTEGER_CST)
5817 std::swap (rhs1, rhs2);
5818 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5819 NULL_TREE, NULL_TREE);
5820 if (rhs_code == MAX_EXPR)
5821 std::swap (rhs1, rhs2);
5823 else
5825 g = gimple_build_cond (NE_EXPR, rhs1,
5826 build_zero_cst (TREE_TYPE (rhs1)),
5827 NULL_TREE, NULL_TREE);
5828 rhs1 = gimple_assign_rhs2 (stmt);
5829 rhs2 = gimple_assign_rhs3 (stmt);
5831 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5832 gimple_set_location (g, loc);
5833 edge e1 = split_block (gsi_bb (gsi), g);
5834 edge e2 = split_block (e1->dest, (gimple *) NULL);
5835 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5836 e3->probability = profile_probability::even ();
5837 e1->flags = EDGE_TRUE_VALUE;
5838 e1->probability = e3->probability.invert ();
5839 if (dom_info_available_p (CDI_DOMINATORS))
5840 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5841 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5843 gsi = gsi_after_labels (e1->dest);
5844 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5845 NEGATE_EXPR, rhs1);
5846 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5847 gimple_set_location (g, loc);
5848 rhs2 = gimple_assign_lhs (g);
5849 std::swap (rhs1, rhs2);
5851 gsi = gsi_for_stmt (stmt);
5852 gsi_remove (&gsi, true);
5853 gphi *phi = create_phi_node (lhs, e2->dest);
5854 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5855 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5856 break;
5859 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5860 into memory. Such functions could have no large/huge SSA_NAMEs. */
5861 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5863 gimple *g = SSA_NAME_DEF_STMT (s);
5864 if (is_gimple_assign (g) && gimple_store_p (g))
5866 tree t = gimple_assign_rhs1 (g);
5867 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5868 && (bitint_precision_kind (TREE_TYPE (t))
5869 >= bitint_prec_large))
5870 has_large_huge = true;
5873 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5874 to floating point types need to be rewritten. */
5875 else if (SCALAR_FLOAT_TYPE_P (type))
5877 gimple *g = SSA_NAME_DEF_STMT (s);
5878 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5880 tree t = gimple_assign_rhs1 (g);
5881 if (TREE_CODE (t) == INTEGER_CST
5882 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5883 && (bitint_precision_kind (TREE_TYPE (t))
5884 >= bitint_prec_large))
5885 has_large_huge = true;
5889 for (i = first_large_huge; i < num_ssa_names; ++i)
5891 tree s = ssa_name (i);
5892 if (s == NULL)
5893 continue;
5894 tree type = TREE_TYPE (s);
5895 if (TREE_CODE (type) == COMPLEX_TYPE)
5896 type = TREE_TYPE (type);
5897 if (TREE_CODE (type) == BITINT_TYPE
5898 && bitint_precision_kind (type) >= bitint_prec_large)
5900 use_operand_p use_p;
5901 gimple *use_stmt;
5902 has_large_huge = true;
5903 if (optimize
5904 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
5905 continue;
5906 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5907 the same bb and could be handled in the same loop with the
5908 immediate use. */
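/* E.g. for
     _1 = x_2(D) + y_3(D);
     z_5 = _1 & w_4(D);
   both statements can be evaluated limb by limb in a single loop,
   so _1 doesn't need a backing variable of its own.  */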
5909 if (optimize
5910 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5911 && single_imm_use (s, &use_p, &use_stmt)
5912 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
5914 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
5916 if (mergeable_op (use_stmt))
5917 continue;
5918 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
5919 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
5920 continue;
5921 if (gimple_assign_cast_p (use_stmt))
5923 tree lhs = gimple_assign_lhs (use_stmt);
5924 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5925 continue;
5927 else if (gimple_store_p (use_stmt)
5928 && is_gimple_assign (use_stmt)
5929 && !gimple_has_volatile_ops (use_stmt)
5930 && !stmt_ends_bb_p (use_stmt))
5931 continue;
5933 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5935 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5936 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5937 && ((is_gimple_assign (use_stmt)
5938 && (gimple_assign_rhs_code (use_stmt)
5939 != COMPLEX_EXPR))
5940 || gimple_code (use_stmt) == GIMPLE_COND)
5941 && (!gimple_store_p (use_stmt)
5942 || (is_gimple_assign (use_stmt)
5943 && !gimple_has_volatile_ops (use_stmt)
5944 && !stmt_ends_bb_p (use_stmt)))
5945 && (TREE_CODE (rhs1) != SSA_NAME
5946 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5948 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
5949 || (bitint_precision_kind (TREE_TYPE (rhs1))
5950 < bitint_prec_large)
5951 || (TYPE_PRECISION (TREE_TYPE (rhs1))
5952 >= TYPE_PRECISION (TREE_TYPE (s)))
5953 || mergeable_op (SSA_NAME_DEF_STMT (s)))
5954 continue;
5955 /* Prevent merging a widening non-mergeable cast
5956 on result of some narrower mergeable op
5957 together with later mergeable operations. E.g.
5958 result of _BitInt(223) addition shouldn't be
5959 sign-extended to _BitInt(513) and have another
5960 _BitInt(513) added to it, as handle_plus_minus
5961 with its PHI node handling inside of handle_cast
5962 will not work correctly. An exception is if
5963 use_stmt is a store, this is handled directly
5964 in lower_mergeable_stmt. */
5965 if (TREE_CODE (rhs1) != SSA_NAME
5966 || !has_single_use (rhs1)
5967 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
5968 != gimple_bb (SSA_NAME_DEF_STMT (s)))
5969 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
5970 || gimple_store_p (use_stmt))
5971 continue;
5972 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
5974 /* Another exception is when the widening cast is
5975 applied to a mergeable same-precision cast of
5976 something not mergeable. */
5977 tree rhs2
5978 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
5979 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
5980 && (TYPE_PRECISION (TREE_TYPE (rhs1))
5981 == TYPE_PRECISION (TREE_TYPE (rhs2))))
5983 if (TREE_CODE (rhs2) != SSA_NAME
5984 || !has_single_use (rhs2)
5985 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
5986 != gimple_bb (SSA_NAME_DEF_STMT (s)))
5987 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
5988 continue;
5993 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
5994 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
5996 case IMAGPART_EXPR:
5998 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5999 rhs1 = TREE_OPERAND (rhs1, 0);
6000 if (TREE_CODE (rhs1) == SSA_NAME)
6002 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6003 if (optimizable_arith_overflow (g))
6004 continue;
6007 /* FALLTHRU */
6008 case LSHIFT_EXPR:
6009 case RSHIFT_EXPR:
6010 case MULT_EXPR:
6011 case TRUNC_DIV_EXPR:
6012 case TRUNC_MOD_EXPR:
6013 case FIX_TRUNC_EXPR:
6014 case REALPART_EXPR:
6015 if (gimple_store_p (use_stmt)
6016 && is_gimple_assign (use_stmt)
6017 && !gimple_has_volatile_ops (use_stmt)
6018 && !stmt_ends_bb_p (use_stmt))
6020 tree lhs = gimple_assign_lhs (use_stmt);
6021 /* As multiplication/division passes the address of the lhs
6022 to a library function which assumes it can extend it
6023 to a whole number of limbs, avoid merging those with
6024 bit-field stores. Don't allow it for shifts etc.
6025 either, so that the bit-field store handling doesn't
6026 have to be done everywhere. */
6027 if (TREE_CODE (lhs) == COMPONENT_REF
6028 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6029 break;
6030 continue;
6032 break;
6033 default:
6034 break;
6038 /* Also ignore uninitialized uses. */
6039 if (SSA_NAME_IS_DEFAULT_DEF (s)
6040 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6041 continue;
6043 if (!large_huge.m_names)
6044 large_huge.m_names = BITMAP_ALLOC (NULL);
6045 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6046 if (has_single_use (s))
6048 if (!large_huge.m_single_use_names)
6049 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6050 bitmap_set_bit (large_huge.m_single_use_names,
6051 SSA_NAME_VERSION (s));
6053 if (SSA_NAME_VAR (s)
6054 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6055 && SSA_NAME_IS_DEFAULT_DEF (s))
6056 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6057 has_large_huge_parm_result = true;
6058 if (optimize
6059 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6060 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6061 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6062 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6064 use_operand_p use_p;
6065 imm_use_iterator iter;
6066 bool optimizable_load = true;
6067 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6069 gimple *use_stmt = USE_STMT (use_p);
6070 if (is_gimple_debug (use_stmt))
6071 continue;
6072 if (gimple_code (use_stmt) == GIMPLE_PHI
6073 || is_gimple_call (use_stmt))
6075 optimizable_load = false;
6076 break;
6080 ssa_op_iter oi;
6081 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6082 oi, SSA_OP_USE)
6084 tree s2 = USE_FROM_PTR (use_p);
6085 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6087 optimizable_load = false;
6088 break;
6092 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6094 if (!large_huge.m_loads)
6095 large_huge.m_loads = BITMAP_ALLOC (NULL);
6096 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6100 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6101 into memory. Such functions could have no large/huge SSA_NAMEs. */
6102 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6104 gimple *g = SSA_NAME_DEF_STMT (s);
6105 if (is_gimple_assign (g) && gimple_store_p (g))
6107 tree t = gimple_assign_rhs1 (g);
6108 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6109 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6110 has_large_huge = true;
6115 if (large_huge.m_names || has_large_huge)
6117 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6118 calculate_dominance_info (CDI_DOMINATORS);
6119 if (optimize)
6120 enable_ranger (cfun);
6121 if (large_huge.m_loads)
6123 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6124 entry->aux = NULL;
6125 bitint_dom_walker (large_huge.m_names,
6126 large_huge.m_loads).walk (entry);
6127 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6128 clear_aux_for_blocks ();
6129 BITMAP_FREE (large_huge.m_loads);
6131 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6132 large_huge.m_limb_size
6133 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6135 if (large_huge.m_names)
6137 large_huge.m_map
6138 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6139 coalesce_ssa_name (large_huge.m_map);
6140 partition_view_normal (large_huge.m_map);
6141 if (dump_file && (dump_flags & TDF_DETAILS))
6143 fprintf (dump_file, "After Coalescing:\n");
6144 dump_var_map (dump_file, large_huge.m_map);
6146 large_huge.m_vars
6147 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6148 bitmap_iterator bi;
6149 if (has_large_huge_parm_result)
6150 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6152 tree s = ssa_name (i);
6153 if (SSA_NAME_VAR (s)
6154 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6155 && SSA_NAME_IS_DEFAULT_DEF (s))
6156 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6158 int p = var_to_partition (large_huge.m_map, s);
6159 if (large_huge.m_vars[p] == NULL_TREE)
6161 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6162 mark_addressable (SSA_NAME_VAR (s));
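/* All remaining partitions get an addressable temporary array of
   limbs covering the whole _BitInt as their backing storage.  */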
6166 tree atype = NULL_TREE;
6167 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6169 tree s = ssa_name (i);
6170 int p = var_to_partition (large_huge.m_map, s);
6171 if (large_huge.m_vars[p] != NULL_TREE)
6172 continue;
6173 if (atype == NULL_TREE
6174 || !tree_int_cst_equal (TYPE_SIZE (atype),
6175 TYPE_SIZE (TREE_TYPE (s))))
6177 unsigned HOST_WIDE_INT nelts
6178 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6179 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6181 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6182 mark_addressable (large_huge.m_vars[p]);
6186 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6188 gimple_stmt_iterator prev;
6189 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6190 gsi = prev)
6192 prev = gsi;
6193 gsi_prev (&prev);
6194 ssa_op_iter iter;
6195 gimple *stmt = gsi_stmt (gsi);
6196 if (is_gimple_debug (stmt))
6197 continue;
6198 bitint_prec_kind kind = bitint_prec_small;
6199 tree t;
6200 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6201 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6203 bitint_prec_kind this_kind
6204 = bitint_precision_kind (TREE_TYPE (t));
6205 if (this_kind > kind)
6206 kind = this_kind;
6208 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6210 t = gimple_assign_rhs1 (stmt);
6211 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6213 bitint_prec_kind this_kind
6214 = bitint_precision_kind (TREE_TYPE (t));
6215 if (this_kind > kind)
6216 kind = this_kind;
6219 if (is_gimple_assign (stmt)
6220 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6222 t = gimple_assign_rhs1 (stmt);
6223 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6224 && TREE_CODE (t) == INTEGER_CST)
6226 bitint_prec_kind this_kind
6227 = bitint_precision_kind (TREE_TYPE (t));
6228 if (this_kind > kind)
6229 kind = this_kind;
6232 if (is_gimple_call (stmt))
6234 t = gimple_call_lhs (stmt);
6235 if (t
6236 && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
6237 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6239 bitint_prec_kind this_kind
6240 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6241 if (this_kind > kind)
6242 kind = this_kind;
6245 if (kind == bitint_prec_small)
6246 continue;
6247 switch (gimple_code (stmt))
6249 case GIMPLE_CALL:
6250 /* For now. We'll need to handle some internal functions and
6251 perhaps some builtins. */
6252 if (kind == bitint_prec_middle)
6253 continue;
6254 break;
6255 case GIMPLE_ASM:
6256 if (kind == bitint_prec_middle)
6257 continue;
6258 break;
6259 case GIMPLE_RETURN:
6260 continue;
6261 case GIMPLE_ASSIGN:
6262 if (gimple_clobber_p (stmt))
6263 continue;
6264 if (kind >= bitint_prec_large)
6265 break;
6266 if (gimple_assign_single_p (stmt))
6267 /* No need to lower copies, loads or stores. */
6268 continue;
6269 if (gimple_assign_cast_p (stmt))
6271 tree lhs = gimple_assign_lhs (stmt);
6272 tree rhs = gimple_assign_rhs1 (stmt);
6273 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6274 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6275 && (TYPE_PRECISION (TREE_TYPE (lhs))
6276 == TYPE_PRECISION (TREE_TYPE (rhs))))
6277 /* No need to lower casts to same precision. */
6278 continue;
6280 break;
6281 default:
6282 break;
6285 if (kind == bitint_prec_middle)
6287 tree type = NULL_TREE;
6288 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6289 with the same precision and back. */
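/* E.g. a middle _BitInt(N) addition
     lhs = a + b;
   becomes roughly
     a2 = (itype) a; b2 = (itype) b; lhs2 = a2 + b2;
     lhs = (_BitInt(N)) lhs2;
   where itype is the INTEGER_TYPE with the same precision and
   signedness.  */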
6290 if (tree lhs = gimple_get_lhs (stmt))
6291 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6292 && (bitint_precision_kind (TREE_TYPE (lhs))
6293 == bitint_prec_middle))
6295 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6296 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6297 type = build_nonstandard_integer_type (prec, uns);
6298 tree lhs2 = make_ssa_name (type);
6299 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6300 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6301 gimple_set_lhs (stmt, lhs2);
6303 unsigned int nops = gimple_num_ops (stmt);
6304 for (unsigned int i = 0; i < nops; ++i)
6305 if (tree op = gimple_op (stmt, i))
6307 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6308 if (nop != op)
6309 gimple_set_op (stmt, i, nop);
6310 else if (COMPARISON_CLASS_P (op))
6312 TREE_OPERAND (op, 0)
6313 = maybe_cast_middle_bitint (&gsi,
6314 TREE_OPERAND (op, 0),
6315 type);
6316 TREE_OPERAND (op, 1)
6317 = maybe_cast_middle_bitint (&gsi,
6318 TREE_OPERAND (op, 1),
6319 type);
6321 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6323 CASE_LOW (op)
6324 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6325 type);
6326 CASE_HIGH (op)
6327 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6328 type);
6331 update_stmt (stmt);
6332 continue;
6335 if (tree lhs = gimple_get_lhs (stmt))
6336 if (TREE_CODE (lhs) == SSA_NAME)
6338 tree type = TREE_TYPE (lhs);
6339 if (TREE_CODE (type) == COMPLEX_TYPE)
6340 type = TREE_TYPE (type);
6341 if (TREE_CODE (type) == BITINT_TYPE
6342 && bitint_precision_kind (type) >= bitint_prec_large
6343 && (large_huge.m_names == NULL
6344 || !bitmap_bit_p (large_huge.m_names,
6345 SSA_NAME_VERSION (lhs))))
6346 continue;
6349 large_huge.lower_stmt (stmt);
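/* PHIs with large/huge _BitInt results are lowered into copies
   between the backing variables of the corresponding partitions,
   inserted on the incoming edges.  */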
6352 tree atype = NULL_TREE;
6353 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6354 gsi_next (&gsi))
6356 gphi *phi = gsi.phi ();
6357 tree lhs = gimple_phi_result (phi);
6358 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6359 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6360 continue;
6361 int p1 = var_to_partition (large_huge.m_map, lhs);
6362 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6363 tree v1 = large_huge.m_vars[p1];
6364 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6366 tree arg = gimple_phi_arg_def (phi, i);
6367 edge e = gimple_phi_arg_edge (phi, i);
6368 gimple *g;
6369 switch (TREE_CODE (arg))
6371 case INTEGER_CST:
6372 if (integer_zerop (arg) && VAR_P (v1))
6374 tree zero = build_zero_cst (TREE_TYPE (v1));
6375 g = gimple_build_assign (v1, zero);
6376 gsi_insert_on_edge (e, g);
6377 edge_insertions = true;
6378 break;
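/* Otherwise materialize the constant, trying to save .rodata space:
   if only min_prec bits are significant, emit a constant of just
   CEIL (min_prec, limb_prec) limbs and fill the remaining limbs
   with zeros, or with all-ones bytes via memset for sign-extended
   negative values.  */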
6380 int ext;
6381 unsigned int min_prec, prec, rem;
6382 tree c;
6383 prec = TYPE_PRECISION (TREE_TYPE (arg));
6384 rem = prec % (2 * limb_prec);
6385 min_prec = bitint_min_cst_precision (arg, ext);
6386 if (min_prec > prec - rem - 2 * limb_prec
6387 && min_prec > (unsigned) limb_prec)
6388 /* Constant which has enough significant bits that it
6389 isn't worth trying to save .rodata space by extending
6390 from a smaller number. */
6391 min_prec = prec;
6392 else
6393 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6394 if (min_prec == 0)
6395 c = NULL_TREE;
6396 else if (min_prec == prec)
6397 c = tree_output_constant_def (arg);
6398 else if (min_prec == (unsigned) limb_prec)
6399 c = fold_convert (large_huge.m_limb_type, arg);
6400 else
6402 tree ctype = build_bitint_type (min_prec, 1);
6403 c = tree_output_constant_def (fold_convert (ctype, arg));
6405 if (c)
6407 if (VAR_P (v1) && min_prec == prec)
6409 tree v2 = build1 (VIEW_CONVERT_EXPR,
6410 TREE_TYPE (v1), c);
6411 g = gimple_build_assign (v1, v2);
6412 gsi_insert_on_edge (e, g);
6413 edge_insertions = true;
6414 break;
6416 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6417 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6418 TREE_TYPE (c), v1),
6419 c);
6420 else
6422 unsigned HOST_WIDE_INT nelts
6423 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6424 / limb_prec;
6425 tree vtype
6426 = build_array_type_nelts (large_huge.m_limb_type,
6427 nelts);
6428 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6429 vtype, v1),
6430 build1 (VIEW_CONVERT_EXPR,
6431 vtype, c));
6433 gsi_insert_on_edge (e, g);
6435 if (ext == 0)
6437 unsigned HOST_WIDE_INT nelts
6438 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6439 - min_prec) / limb_prec;
6440 tree vtype
6441 = build_array_type_nelts (large_huge.m_limb_type,
6442 nelts);
6443 tree ptype = build_pointer_type (TREE_TYPE (v1));
6444 tree off = fold_convert (ptype,
6445 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6446 tree vd = build2 (MEM_REF, vtype,
6447 build_fold_addr_expr (v1), off);
6448 g = gimple_build_assign (vd, build_zero_cst (vtype));
6450 else
6452 tree vd = v1;
6453 if (c)
6455 tree ptype = build_pointer_type (TREE_TYPE (v1));
6456 tree off
6457 = fold_convert (ptype,
6458 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6459 vd = build2 (MEM_REF, large_huge.m_limb_type,
6460 build_fold_addr_expr (v1), off);
6462 vd = build_fold_addr_expr (vd);
6463 unsigned HOST_WIDE_INT nbytes
6464 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6465 if (c)
6466 nbytes
6467 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6468 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6469 g = gimple_build_call (fn, 3, vd,
6470 integer_minus_one_node,
6471 build_int_cst (sizetype,
6472 nbytes));
6474 gsi_insert_on_edge (e, g);
6475 edge_insertions = true;
6476 break;
6477 default:
6478 gcc_unreachable ();
6479 case SSA_NAME:
6480 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6482 if (large_huge.m_names == NULL
6483 || !bitmap_bit_p (large_huge.m_names,
6484 SSA_NAME_VERSION (arg)))
6485 continue;
6487 int p2 = var_to_partition (large_huge.m_map, arg);
6488 if (p1 == p2)
6489 continue;
6490 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6491 tree v2 = large_huge.m_vars[p2];
6492 if (VAR_P (v1) && VAR_P (v2))
6493 g = gimple_build_assign (v1, v2);
6494 else if (VAR_P (v1))
6495 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6496 TREE_TYPE (v1), v2));
6497 else if (VAR_P (v2))
6498 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6499 TREE_TYPE (v2), v1), v2);
6500 else
6502 if (atype == NULL_TREE
6503 || !tree_int_cst_equal (TYPE_SIZE (atype),
6504 TYPE_SIZE (TREE_TYPE (lhs))))
6506 unsigned HOST_WIDE_INT nelts
6507 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6508 / limb_prec;
6509 atype
6510 = build_array_type_nelts (large_huge.m_limb_type,
6511 nelts);
6513 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6514 atype, v1),
6515 build1 (VIEW_CONVERT_EXPR,
6516 atype, v2));
6518 gsi_insert_on_edge (e, g);
6519 edge_insertions = true;
6520 break;
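/* Finally release all large/huge _BitInt SSA_NAMEs (except any in
   m_preserved) and remove their definition statements, which have
   been replaced by operations on the backing variables.  */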
6526 if (large_huge.m_names || has_large_huge)
6528 gimple *nop = NULL;
6529 for (i = 0; i < num_ssa_names; ++i)
6531 tree s = ssa_name (i);
6532 if (s == NULL_TREE)
6533 continue;
6534 tree type = TREE_TYPE (s);
6535 if (TREE_CODE (type) == COMPLEX_TYPE)
6536 type = TREE_TYPE (type);
6537 if (TREE_CODE (type) == BITINT_TYPE
6538 && bitint_precision_kind (type) >= bitint_prec_large)
6540 if (large_huge.m_preserved
6541 && bitmap_bit_p (large_huge.m_preserved,
6542 SSA_NAME_VERSION (s)))
6543 continue;
6544 gimple *g = SSA_NAME_DEF_STMT (s);
6545 if (gimple_code (g) == GIMPLE_NOP)
6547 if (SSA_NAME_VAR (s))
6548 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6549 release_ssa_name (s);
6550 continue;
6552 if (gimple_code (g) != GIMPLE_ASM)
6554 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6555 bool save_vta = flag_var_tracking_assignments;
6556 flag_var_tracking_assignments = false;
6557 gsi_remove (&gsi, true);
6558 flag_var_tracking_assignments = save_vta;
6560 if (nop == NULL)
6561 nop = gimple_build_nop ();
6562 SSA_NAME_DEF_STMT (s) = nop;
6563 release_ssa_name (s);
6566 if (optimize)
6567 disable_ranger (cfun);
6570 if (edge_insertions)
6571 gsi_commit_edge_inserts ();
6573 return ret;
6576 namespace {
6578 const pass_data pass_data_lower_bitint =
6580 GIMPLE_PASS, /* type */
6581 "bitintlower", /* name */
6582 OPTGROUP_NONE, /* optinfo_flags */
6583 TV_NONE, /* tv_id */
6584 PROP_ssa, /* properties_required */
6585 PROP_gimple_lbitint, /* properties_provided */
6586 0, /* properties_destroyed */
6587 0, /* todo_flags_start */
6588 0, /* todo_flags_finish */
6591 class pass_lower_bitint : public gimple_opt_pass
6593 public:
6594 pass_lower_bitint (gcc::context *ctxt)
6595 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6598 /* opt_pass methods: */
6599 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6600 unsigned int execute (function *) final override
6602 return gimple_lower_bitint ();
6605 }; // class pass_lower_bitint
6607 } // anon namespace
6609 gimple_opt_pass *
6610 make_pass_lower_bitint (gcc::context *ctxt)
6612 return new pass_lower_bitint (ctxt);
6616 namespace {
6618 const pass_data pass_data_lower_bitint_O0 =
6620 GIMPLE_PASS, /* type */
6621 "bitintlower0", /* name */
6622 OPTGROUP_NONE, /* optinfo_flags */
6623 TV_NONE, /* tv_id */
6624 PROP_cfg, /* properties_required */
6625 PROP_gimple_lbitint, /* properties_provided */
6626 0, /* properties_destroyed */
6627 0, /* todo_flags_start */
6628 0, /* todo_flags_finish */
6631 class pass_lower_bitint_O0 : public gimple_opt_pass
6633 public:
6634 pass_lower_bitint_O0 (gcc::context *ctxt)
6635 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6638 /* opt_pass methods: */
6639 bool gate (function *fun) final override
6641 /* With errors, normal optimization passes are not run. If we don't
6642 lower bitint operations at all, rtl expansion will abort. */
6643 return !(fun->curr_properties & PROP_gimple_lbitint);
6646 unsigned int execute (function *) final override
6648 return gimple_lower_bitint ();
6651 }; // class pass_lower_bitint_O0
6653 } // anon namespace
6655 gimple_opt_pass *
6656 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6658 return new pass_lower_bitint_O0 (ctxt);