/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
        huge_min_prec = 4 * limb_prec;
      else
        huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
        mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
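
/* For illustration (a sketch, not exercised here): on a target with a
   64-bit limb and MAX_FIXED_MODE_SIZE of 128 such as x86_64, once the
   caches are warmed up this classifies e.g.

     bitint_precision_kind (32)  == bitint_prec_small   (single limb)
     bitint_precision_kind (128) == bitint_prec_middle  (TImode arith)
     bitint_precision_kind (129) == bitint_prec_large   (straight line)
     bitint_precision_kind (256) == bitint_prec_huge    (256 >= 4 * 64)

   The exact boundaries depend on the bitint_type_info target hook.  */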
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}
/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
         in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
        {
          ext = -1;
          return min_prec2;
        }
    }
  return min_prec;
}
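
/* Examples (a sketch, not exercised here): for a _BitInt constant 5
   this returns 3 with EXT = 0 (all bits above the lowest 3 are clear),
   while for a constant -1 of a signed type it returns 0 with EXT = -1,
   because every bit is just sign extension and all limbs can be
   represented by the single constant -1.  */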
namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
        return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
        tree cnt = gimple_assign_rhs2 (stmt);
        if (tree_fits_uhwi_p (cnt)
            && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
          return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
        tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
        tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
            && TREE_CODE (lhs_type) == BITINT_TYPE
            && TREE_CODE (rhs_type) == BITINT_TYPE
            && bitint_precision_kind (lhs_type) >= bitint_prec_large
            && bitint_precision_kind (rhs_type) >= bitint_prec_large
            && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
          {
            if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
              return true;
            if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
              return true;
            if (bitint_precision_kind (lhs_type) == bitint_prec_large)
              return true;
          }
      }
      break;
    default:
      break;
    }
  return false;
}
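
/* E.g. a GIMPLE sequence like (a sketch; SSA names are made up):

     _1 = x_2(D) + y_3(D);
     _4 = _1 & z_5(D);
     <retval> = _4;

   on huge _BitInt operands is mergeable: a single loop over the limbs
   can compute each limb of _4 directly from the corresponding limbs of
   x, y and z, keeping only the addition carry between iterations.  */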
/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return false;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
          || gimple_bb (use_stmt) != gimple_bb (stmt)
          || !gimple_store_p (use_stmt)
          || !is_gimple_assign (use_stmt)
          || gimple_has_volatile_ops (use_stmt)
          || stmt_ends_bb_p (use_stmt))
        return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
        continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
        return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
        {
          if ((seen & 1) != 0)
            return 0;
          seen |= 1;
        }
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
        {
          if ((seen & 2) != 0)
            return 0;
          seen |= 2;

          use_operand_p use2_p;
          gimple *use_stmt;
          tree lhs2 = gimple_assign_lhs (g);
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
            return 0;
          if (!single_imm_use (lhs2, &use2_p, &use_stmt)
              || gimple_bb (use_stmt) != gimple_bb (stmt)
              || !gimple_assign_cast_p (use_stmt))
            return 0;

          lhs2 = gimple_assign_lhs (use_stmt);
          if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
              || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
            return 0;
        }
      else
        return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  return seen == 3 ? 2 : 1;
}
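
/* The optimizable shape above typically comes from __builtin_add_overflow
   and friends on large/huge _BitInt, e.g. (a sketch with made-up SSA
   names):

     _1 = .ADD_OVERFLOW (x_2(D), y_3(D));
     _4 = REALPART_EXPR <_1>;
     _5 = IMAGPART_EXPR <_1>;
     _6 = (int) _5;

   where _1 has _Complex _BitInt type; the REALPART_EXPR use is what
   makes the return value 2 rather than 1.  */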
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
          || TREE_CODE_CLASS (code) == tcc_binary)
        op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
                             profile_probability, profile_probability,
                             edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
                                    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
                              tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true); for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false.

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
                                           TYPE_UNSIGNED (type));
}
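
/* For example, with 64-bit limbs a _BitInt(255) has limbs 0 to 2
   accessed as the full m_limb_type, while limb 3 is accessed as a
   63-bit (255 % 64) integer type with the signedness of TYPE,
   matching the partial most significant limb.  */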
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
                ? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
                    build_fold_addr_expr (var),
                    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
        = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
                  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
                              build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
                                             tree_to_uhwi (idx)
                                             * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
          || !useless_type_conversion_p (m_limb_type,
                                         TREE_TYPE (TREE_TYPE (var))))
        {
          unsigned HOST_WIDE_INT nelts
            = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
          tree atype = build_array_type_nelts (m_limb_type, nelts);
          var = build1 (VIEW_CONVERT_EXPR, atype, var);
        }
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
/* Emit a half diamond,
   if (COND)
   |\
   | \
   |  \
   | new_bb1
   |  /
   | /
   |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
                            edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
  new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
                                 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with full diamond in it
   if (COND1)
   |\
   | \
   |  \
   | if (COND2)
   |    /  \
   |   /    \
   |new_bb1 new_bb2
   |   |    /
    \  |   /
     \ |  /
      \| /
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
                                         profile_probability prob1,
                                         profile_probability prob2,
                                         edge &edge_true_true,
                                         edge &edge_true_false,
                                         edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
          || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
        {
          if (SSA_NAME_IS_DEFAULT_DEF (op))
            {
              if (m_first)
                {
                  tree v = create_tmp_reg (m_limb_type);
                  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
                    {
                      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
                      DECL_SOURCE_LOCATION (v)
                        = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
                    }
                  v = get_or_create_ssa_default_def (cfun, v);
                  m_data.safe_push (v);
                }
              tree ret = m_data[m_data_cnt];
              m_data_cnt++;
              if (tree_fits_uhwi_p (idx))
                {
                  tree type = limb_access_type (TREE_TYPE (op), idx);
                  ret = add_cast (type, ret);
                }
              return ret;
            }
          location_t loc_save = m_loc;
          m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
          tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
          m_loc = loc_save;
          return ret;
        }
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
          && m_single_use_names
          && m_vars[p] != m_lhs
          && m_after_stmt
          && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
        {
          tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
                                        CLOBBER_STORAGE_END);
          g = gimple_build_assign (m_vars[p], clobber);
          gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
          gsi_insert_after (&gsi, g, GSI_SAME_STMT);
        }
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
        {
          tree c, type = limb_access_type (TREE_TYPE (op), idx);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (limb_prec != HOST_BITS_PER_WIDE_INT)
            {
              wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
                                       TYPE_SIGN (TREE_TYPE (op)));
              c = wide_int_to_tree (type,
                                    wide_int::from (w, TYPE_PRECISION (type),
                                                    UNSIGNED));
            }
          else if (i >= TREE_INT_CST_EXT_NUNITS (op))
            c = build_int_cst (type,
                               tree_int_cst_sgn (op) < 0 ? -1 : 0);
          else
            c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
          m_data_cnt += 2;
          return c;
        }
      if (m_first
          || (m_data[m_data_cnt] == NULL_TREE
              && m_data[m_data_cnt + 1] == NULL_TREE))
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned int rem = prec % (2 * limb_prec);
          int ext;
          unsigned min_prec = bitint_min_cst_precision (op, ext);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (integer_zerop (op))
            {
              tree c = build_zero_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (integer_all_onesp (op))
            {
              tree c = build_all_ones_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
            {
              /* Single limb constant.  Use a phi with that limb from
                 the preheader edge and 0 or -1 constant from the other edge
                 and for the second limb in the loop.  */
              tree out;
              gcc_assert (m_first);
              m_data.pop ();
              m_data.pop ();
              prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
                                   build_int_cst (m_limb_type, ext));
            }
          else if (min_prec > prec - rem - 2 * limb_prec)
            {
              /* Constant which has enough significant bits that it isn't
                 worth trying to save .rodata space by extending from smaller
                 number.  */
              tree type;
              if (m_var_msb)
                type = TREE_TYPE (op);
              else
                /* If we have a guarantee the most significant partial limb
                   (if any) will be only accessed through handle_operand
                   with INTEGER_CST idx, we don't need to include the partial
                   limb in .rodata.  */
                type = build_bitint_type (prec - rem, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = NULL_TREE;
            }
          else if (m_upwards_2limb)
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.  */
              min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
              tree type = build_bitint_type (min_prec, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              tree idx2 = make_ssa_name (sizetype);
              g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
              insert_before (g);
              g = gimple_build_cond (LT_EXPR, idx,
                                     size_int (min_prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              edge edge_true, edge_false;
              if_then (g, (min_prec >= (prec - rem) / 2
                           ? profile_probability::likely ()
                           : profile_probability::unlikely ()),
                       edge_true, edge_false);
              tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
              insert_before (g);
              c1 = gimple_assign_lhs (g);
              tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
              insert_before (g);
              c2 = gimple_assign_lhs (g);
              tree c3 = build_int_cst (m_limb_type, ext);
              m_gsi = gsi_after_labels (edge_true->dest);
              m_data[m_data_cnt] = make_ssa_name (m_limb_type);
              m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
              gphi *phi = create_phi_node (m_data[m_data_cnt],
                                           edge_true->dest);
              add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
              phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
              add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
            }
          else
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.
                 Version for loops with random access to the limbs or
                 downwards loops.  */
              min_prec = CEIL (min_prec, limb_prec) * limb_prec;
              tree c;
              if (min_prec <= (unsigned) limb_prec)
                c = fold_convert (m_limb_type, op);
              else
                {
                  tree type = build_bitint_type (min_prec, 1);
                  c = tree_output_constant_def (fold_convert (type, op));
                }
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = integer_type_node;
            }
          t = m_data[m_data_cnt];
          if (m_data[m_data_cnt + 1] == NULL_TREE)
            {
              t = limb_access (TREE_TYPE (op), t, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
              insert_before (g);
              t = gimple_assign_lhs (g);
            }
        }
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
        {
          t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
          g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
          insert_before (g);
          t = gimple_assign_lhs (g);
        }
      else
        t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned rem = prec % (2 * limb_prec);
          int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
          tree c = m_data[m_data_cnt];
          unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
          g = gimple_build_cond (LT_EXPR, idx,
                                 size_int (min_prec / limb_prec),
                                 NULL_TREE, NULL_TREE);
          edge edge_true, edge_false;
          if_then (g, (min_prec >= (prec - rem) / 2
                       ? profile_probability::likely ()
                       : profile_probability::unlikely ()),
                   edge_true, edge_false);
          if (min_prec > (unsigned) limb_prec)
            {
              c = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
              insert_before (g);
              c = gimple_assign_lhs (g);
            }
          tree c2 = build_int_cst (m_limb_type, ext);
          m_gsi = gsi_after_labels (edge_true->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true->dest);
          add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
          add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
        }
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
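
/* As an example of the INTEGER_CST handling above (a sketch, assuming
   64-bit limbs): adding the constant 1 to a huge _BitInt in an upwards
   2-limb loop hits the single limb constant case, which emits a PHI
   that is 1 on the preheader edge and 0 (the EXT value) on the latch
   edge, so the first iteration adds 1 and all later iterations add 0,
   without needing any .rodata array for the constant.  */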
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
                                        tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
                                      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
                     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        {
          if (!TYPE_UNSIGNED (rhs1_type))
            {
              tree type = unsigned_type_for (rhs1_type);
              rhs1 = add_cast (type, rhs1);
              rhs2 = add_cast (type, rhs2);
            }
          rhs1 = add_cast (m_limb_type, rhs1);
          rhs2 = add_cast (m_limb_type, rhs2);
        }
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_UADDC : IFN_USUBC,
                                      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
                                      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
        {
          rhs1 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs1, REALPART_EXPR,
                                   build1 (REALPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          rhs2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs2, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          lhs = make_ssa_name (ctype);
          g = gimple_build_call_internal (code == PLUS_EXPR
                                          ? IFN_ADD_OVERFLOW
                                          : IFN_SUB_OVERFLOW,
                                          2, rhs1, data_in);
          gimple_call_set_lhs (g, lhs);
          insert_before (g);
          data_in = make_ssa_name (m_limb_type);
          g = gimple_build_assign (data_in, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
          insert_before (g);
        }
      else
        {
          g = gimple_build_assign (data_out, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
        }
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
                           build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
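
/* E.g. with the uaddc5 optab available, the per-limb body of a huge
   _BitInt addition produced above looks like (a sketch with made-up
   SSA names):

     _10 = .UADDC (a_limb, b_limb, carry_in);
     carry_out = IMAGPART_EXPR <_10>;
     sum_limb = REALPART_EXPR <_10>;

   with carry_in/carry_out tied together across iterations through the
   PHI created by prepare_data_in_out, so the carry propagates from the
   least significant limb upwards.  */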
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
                               build_int_cst (unsigned_type_node,
                                              limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
        {
          rhs1 = lhs;
          lhs = make_ssa_name (rhs1_type);
          g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
          insert_before (g);
        }
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
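
/* In effect each limb of the result is computed here as (a sketch,
   for a shift count CNT in [1, limb_prec - 1]):

     lhs_limb = (rhs1_limb << CNT)
                | (previous_rhs1_limb >> (limb_prec - CNT));

   where the previous limb travels between calls in data_in/data_out,
   starting as 0 for the least significant limb.  */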
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
          /* If lhs has bigger precision than rhs, we can use
             the simple case only if there is a guarantee that
             the most significant limb is handled in straight
             line code.  If m_var_msb (on left shifts) or
             if m_upwards_2limb * limb_prec is equal to
             lhs precision that is not the case.  */
          || (!m_var_msb
              && tree_int_cst_equal (TYPE_SIZE (rhs_type),
                                     TYPE_SIZE (lhs_type))
              && (!m_upwards_2limb
                  || (m_upwards_2limb * limb_prec
                      < TYPE_PRECISION (lhs_type)))))
        {
          rhs1 = handle_operand (rhs1, idx);
          if (tree_fits_uhwi_p (idx))
            {
              tree type = limb_access_type (lhs_type, idx);
              if (!types_compatible_p (type, TREE_TYPE (rhs1)))
                rhs1 = add_cast (type, rhs1);
            }
          return rhs1;
        }
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
        {
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          if (TYPE_UNSIGNED (rhs_type))
            /* No need to keep state between iterations.  */
            ;
          else if (m_upwards && !m_upwards_2limb)
            /* We need to keep state between iterations, but
               not within any loop, everything is straight line
               code with only increasing indexes.  */
            ;
          else if (!m_upwards_2limb)
            {
              unsigned save_data_cnt = m_data_cnt;
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              m_data_cnt = save_data_cnt + 3;
              t = handle_operand (rhs1, size_int (low));
              m_first = false;
              m_data[save_data_cnt + 2]
                = build_int_cst (NULL_TREE, m_data_cnt);
              m_data_cnt = save_data_cnt;
              t = add_cast (signed_type_for (m_limb_type), t);
              tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
              tree n = make_ssa_name (TREE_TYPE (t));
              g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
              insert_before (g);
              m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
              m_init_gsi = m_gsi;
              if (gsi_end_p (m_init_gsi))
                m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
              else
                gsi_prev (&m_init_gsi);
              m_gsi = save_gsi;
            }
          else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
            /* We need to keep state between iterations, but
               fortunately not within the loop, only afterwards.  */
            ;
          else
            {
              tree out;
              m_data.truncate (m_data_cnt);
              prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
              m_data.safe_push (NULL_TREE);
            }
        }

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
        {
          if (m_upwards_2limb
              && (m_upwards_2limb * limb_prec
                  <= ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type))))
            {
              rhs1 = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              m_first = save_first;
              return rhs1;
            }
          bool single_comparison
            = low == high || (m_upwards_2limb && (low & 1) == m_first);
          g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
                                 idx, size_int (low), NULL_TREE, NULL_TREE);
          edge edge_true_true, edge_true_false, edge_false;
          if_then_if_then_else (g, (single_comparison ? NULL
                                    : gimple_build_cond (EQ_EXPR, idx,
                                                         size_int (low),
                                                         NULL_TREE,
                                                         NULL_TREE)),
                                profile_probability::likely (),
                                profile_probability::unlikely (),
                                edge_true_true, edge_true_false, edge_false);
          bool save_cast_conditional = m_cast_conditional;
          m_cast_conditional = true;
          m_bitfld_load = 0;
          tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
          if (m_first)
            m_data[save_data_cnt + 2]
              = build_int_cst (NULL_TREE, m_data_cnt);
          tree ext = NULL_TREE;
          tree bitfld = NULL_TREE;
          if (!single_comparison)
            {
              m_gsi = gsi_after_labels (edge_true_true->src);
              m_first = false;
              m_data_cnt = save_data_cnt + 3;
              if (m_bitfld_load)
                {
                  bitfld = m_data[m_bitfld_load];
                  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
                  m_bitfld_load = 0;
                }
              t2 = handle_operand (rhs1, size_int (low));
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
                t2 = add_cast (m_limb_type, t2);
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t2);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                }
            }
          tree t3;
          if (TYPE_UNSIGNED (rhs_type))
            t3 = build_zero_cst (m_limb_type);
          else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
            t3 = m_data[save_data_cnt];
          else
            t3 = m_data[save_data_cnt + 1];
          m_gsi = gsi_after_labels (edge_true_false->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true_false->dest);
          add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
          add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
          if (edge_true_true)
            add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
          if (ext)
            {
              tree t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[save_data_cnt], edge_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
              g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
              insert_before (g);
            }
          if (m_bitfld_load)
            {
              tree t4;
              if (!m_first)
                t4 = m_data[m_bitfld_load + 1];
              else
                t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi,
                           edge_true_true ? bitfld : m_data[m_bitfld_load],
                           edge_true_false, UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[m_bitfld_load + 2],
                           edge_false, UNKNOWN_LOCATION);
              if (edge_true_true)
                add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
                             UNKNOWN_LOCATION);
              m_data[m_bitfld_load] = t4;
              m_data[m_bitfld_load + 2] = t4;
              m_bitfld_load = 0;
            }
          m_cast_conditional = save_cast_conditional;
          m_first = save_first;
          return t;
        }
      else
        {
          if (tree_to_uhwi (idx) < low)
            {
              t = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
            }
          else if (tree_to_uhwi (idx) < high)
            {
              t = handle_operand (rhs1, size_int (low));
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
                t = add_cast (m_limb_type, t);
              tree ext = NULL_TREE;
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                  m_data[save_data_cnt + 1] = ext;
                }
            }
          else
            {
              if (TYPE_UNSIGNED (rhs_type) && m_first)
                {
                  handle_operand (rhs1, size_zero_node);
                  m_data[save_data_cnt + 2]
                    = build_int_cst (NULL_TREE, m_data_cnt);
                }
              else
                m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
              if (TYPE_UNSIGNED (rhs_type))
                t = build_zero_cst (m_limb_type);
              else
                t = m_data[save_data_cnt + 1];
            }
          tree type = limb_access_type (lhs_type, idx);
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_first = save_first;
          return t;
        }
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
           && bitint_precision_kind (lhs_type) >= bitint_prec_large
           && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
         type if this assert fails.  If no target chooses limb mode smaller
         than half of largest supported normal integral type, this will not
         be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
        {
          gimple_stmt_iterator save_gsi = m_gsi;
          m_gsi = m_init_gsi;
          if (gsi_end_p (m_gsi))
            m_gsi = gsi_after_labels (gsi_bb (m_gsi));
          else
            gsi_next (&m_gsi);
          if (TREE_CODE (rhs_type) == BITINT_TYPE
              && bitint_precision_kind (rhs_type) == bitint_prec_middle)
            {
              tree type = NULL_TREE;
              rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
              rhs_type = TREE_TYPE (rhs1);
            }
          r1 = rhs1;
          if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
            r1 = add_cast (m_limb_type, rhs1);
          if (TYPE_PRECISION (rhs_type) > limb_prec)
            {
              g = gimple_build_assign (make_ssa_name (rhs_type),
                                       RSHIFT_EXPR, rhs1,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec));
              insert_before (g);
              r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          if (TYPE_UNSIGNED (rhs_type))
            rext = build_zero_cst (m_limb_type);
          else
            {
              rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
                                       RSHIFT_EXPR, rext,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec - 1));
              insert_before (g);
              rext = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          m_init_gsi = m_gsi;
          if (gsi_end_p (m_init_gsi))
            m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
          else
            gsi_prev (&m_init_gsi);
          m_gsi = save_gsi;
        }
      tree t;
      if (m_upwards_2limb)
        {
          if (m_first)
            {
              tree out1, out2;
              prepare_data_in_out (r1, idx, &out1, rext);
              if (TYPE_PRECISION (rhs_type) > limb_prec)
                {
                  prepare_data_in_out (r2, idx, &out2, rext);
                  m_data.pop ();
                  t = m_data.pop ();
                  m_data[m_data_cnt + 1] = t;
                }
              else
                m_data[m_data_cnt + 1] = rext;
              m_data.safe_push (rext);
              t = m_data[m_data_cnt];
            }
          else if (!tree_fits_uhwi_p (idx))
            t = m_data[m_data_cnt + 1];
          else
            {
              tree type = limb_access_type (lhs_type, idx);
              t = m_data[m_data_cnt + 2];
              if (!useless_type_conversion_p (type, m_limb_type))
                t = add_cast (type, t);
            }
          m_data_cnt += 3;
          return t;
        }
      else if (m_first)
        {
          m_data.safe_push (r1);
          m_data.safe_push (r2);
          m_data.safe_push (rext);
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree type = limb_access_type (lhs_type, idx);
          if (integer_zerop (idx))
            t = m_data[m_data_cnt];
          else if (TYPE_PRECISION (rhs_type) > limb_prec
                   && integer_onep (idx))
            t = m_data[m_data_cnt + 1];
          else
            t = m_data[m_data_cnt + 2];
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_data_cnt += 3;
          return t;
        }
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
                             NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
        {
          g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
                                 NULL_TREE, NULL_TREE);
          insert_before (g);
          edge e5 = split_block (gsi_bb (m_gsi), g);
          e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
          e2 = find_edge (e5->dest, e2->dest);
          e4->probability = profile_probability::unlikely ();
          e5->flags = EDGE_FALSE_VALUE;
          e5->probability = e4->probability.invert ();
        }
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
        add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
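
/* For instance (a sketch, assuming 64-bit limbs), a sign extension from
   _BitInt(128) to _BitInt(256) handled above copies limbs 0 and 1 from
   the operand, computes the extension as the arithmetic shift right of
   limb 1 by limb_prec - 1 (yielding all zeros or all ones) and uses
   that value for limbs 2 and 3, emitting runtime comparisons against
   the LOW index when the limb index isn't constant.  */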
1620 /* Helper function for handle_stmt method, handle a load from memory. */
1622 tree
1623 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1625 tree rhs1 = gimple_assign_rhs1 (stmt);
1626 tree rhs_type = TREE_TYPE (rhs1);
1627 bool eh = stmt_ends_bb_p (stmt);
1628 edge eh_edge = NULL;
1629 gimple *g;
1631 if (eh)
1633 edge_iterator ei;
1634 basic_block bb = gimple_bb (stmt);
1636 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1637 if (eh_edge->flags & EDGE_EH)
1638 break;
1641 if (TREE_CODE (rhs1) == COMPONENT_REF
1642 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1644 tree fld = TREE_OPERAND (rhs1, 1);
1645 /* For little-endian, we can allow as inputs bit-fields
1646 which start at a limb boundary. */
1647 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1648 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1649 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1650 goto normal_load;
1651 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1652 handle it normally for now. */
1653 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1654 goto normal_load;
1655 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1656 poly_int64 bitoffset;
1657 poly_uint64 field_offset, repr_offset;
1658 bool var_field_off = false;
1659 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1660 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1661 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1662 else
1664 bitoffset = 0;
1665 var_field_off = true;
1667 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1668 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1669 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1670 TREE_OPERAND (rhs1, 0), repr,
1671 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1672 HOST_WIDE_INT bo = bitoffset.to_constant ();
1673 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1674 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1675 if (m_first)
1677 if (m_upwards)
1679 gimple_stmt_iterator save_gsi = m_gsi;
1680 m_gsi = m_init_gsi;
1681 if (gsi_end_p (m_gsi))
1682 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1683 else
1684 gsi_next (&m_gsi);
1685 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1686 tree iv = make_ssa_name (m_limb_type);
1687 g = gimple_build_assign (iv, t);
1688 insert_before (g);
1689 if (eh)
1691 maybe_duplicate_eh_stmt (g, stmt);
1692 if (eh_edge)
1694 edge e = split_block (gsi_bb (m_gsi), g);
1695 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1696 = profile_probability::very_unlikely ();
1697 m_gsi = gsi_after_labels (e->dest);
1698 if (gsi_bb (save_gsi) == e->src)
1700 if (gsi_end_p (save_gsi))
1701 save_gsi = gsi_end_bb (e->dest);
1702 else
1703 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1705 if (m_preheader_bb == e->src)
1706 m_preheader_bb = e->dest;
1709 m_init_gsi = m_gsi;
1710 if (gsi_end_p (m_init_gsi))
1711 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1712 else
1713 gsi_prev (&m_init_gsi);
1714 m_gsi = save_gsi;
1715 tree out;
1716 prepare_data_in_out (iv, idx, &out);
1717 out = m_data[m_data_cnt];
1718 m_data.safe_push (out);
1720 else
1722 m_data.safe_push (NULL_TREE);
1723 m_data.safe_push (NULL_TREE);
1724 m_data.safe_push (NULL_TREE);
1728 tree nidx0 = NULL_TREE, nidx1;
1729 tree iv = m_data[m_data_cnt];
1730 if (m_cast_conditional && iv)
1732 gcc_assert (!m_bitfld_load);
1733 m_bitfld_load = m_data_cnt;
1735 if (tree_fits_uhwi_p (idx))
1737 unsigned prec = TYPE_PRECISION (rhs_type);
1738 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1739 gcc_assert (i * limb_prec < prec);
1740 nidx1 = size_int (i + bo_idx + 1);
1741 if ((i + 1) * limb_prec > prec)
1743 prec %= limb_prec;
1744 if (prec + bo_bit <= (unsigned) limb_prec)
1745 nidx1 = NULL_TREE;
1747 if (!iv)
1748 nidx0 = size_int (i + bo_idx);
1750 else
1752 if (!iv)
1754 if (bo_idx == 0)
1755 nidx0 = idx;
1756 else
1758 nidx0 = make_ssa_name (sizetype);
1759 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1760 size_int (bo_idx));
1761 insert_before (g);
1764 nidx1 = make_ssa_name (sizetype);
1765 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1766 size_int (bo_idx + 1));
1767 insert_before (g);
1770 tree iv2 = NULL_TREE;
1771 if (nidx0)
1773 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1774 iv = make_ssa_name (m_limb_type);
1775 g = gimple_build_assign (iv, t);
1776 insert_before (g);
1777 gcc_assert (!eh);
1779 if (nidx1)
1781 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1782 unsigned prec = TYPE_PRECISION (rhs_type);
1783 if (conditional)
1785 if ((prec % limb_prec) == 0
1786 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1787 conditional = false;
1789 edge edge_true = NULL, edge_false = NULL;
1790 if (conditional)
1792 g = gimple_build_cond (NE_EXPR, idx,
1793 size_int (prec / limb_prec),
1794 NULL_TREE, NULL_TREE);
1795 if_then (g, profile_probability::likely (),
1796 edge_true, edge_false);
1798 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1799 if (m_upwards_2limb
1800 && !m_first
1801 && !m_bitfld_load
1802 && !tree_fits_uhwi_p (idx))
1803 iv2 = m_data[m_data_cnt + 1];
1804 else
1805 iv2 = make_ssa_name (m_limb_type);
1806 g = gimple_build_assign (iv2, t);
1807 insert_before (g);
1808 if (eh)
1810 maybe_duplicate_eh_stmt (g, stmt);
1811 if (eh_edge)
1813 edge e = split_block (gsi_bb (m_gsi), g);
1814 m_gsi = gsi_after_labels (e->dest);
1815 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1816 = profile_probability::very_unlikely ();
1819 if (conditional)
1821 tree iv3 = make_ssa_name (m_limb_type);
1822 if (eh)
1823 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1824 gphi *phi = create_phi_node (iv3, edge_true->dest);
1825 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1826 add_phi_arg (phi, build_zero_cst (m_limb_type),
1827 edge_false, UNKNOWN_LOCATION);
1828 m_gsi = gsi_after_labels (edge_true->dest);
1831 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1832 iv, build_int_cst (unsigned_type_node, bo_bit));
1833 insert_before (g);
1834 iv = gimple_assign_lhs (g);
1835 if (iv2)
1837 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1838 iv2, build_int_cst (unsigned_type_node,
1839 limb_prec - bo_bit));
1840 insert_before (g);
1841 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1842 gimple_assign_lhs (g), iv);
1843 insert_before (g);
1844 iv = gimple_assign_lhs (g);
1845 if (m_data[m_data_cnt])
1846 m_data[m_data_cnt] = iv2;
1848 if (tree_fits_uhwi_p (idx))
1850 tree atype = limb_access_type (rhs_type, idx);
1851 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1852 iv = add_cast (atype, iv);
1854 m_data_cnt += 3;
1855 return iv;
1858 normal_load:
1859 /* Use write_p = true for loads with EH edges to make
1860 sure limb_access doesn't add a cast as a separate
1861 statement after it. */
1862 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1863 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1864 g = gimple_build_assign (ret, rhs1);
1865 insert_before (g);
1866 if (eh)
1868 maybe_duplicate_eh_stmt (g, stmt);
1869 if (eh_edge)
1871 edge e = split_block (gsi_bb (m_gsi), g);
1872 m_gsi = gsi_after_labels (e->dest);
1873 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1874 = profile_probability::very_unlikely ();
1876 if (tree_fits_uhwi_p (idx))
1878 tree atype = limb_access_type (rhs_type, idx);
1879 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1880 ret = add_cast (atype, ret);
1883 return ret;
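/* Editorial sketch (not part of the pass, assuming a 64-bit limb): the
   bit-field path above reassembles limb IDX of a load whose value starts
   bo_bit bits into its representative, combining two adjacent source
   limbs exactly as the RSHIFT/LSHIFT/BIT_IOR sequence emitted above:  */
#if 0
uint64_t
load_straddling_limb (const uint64_t *repr, size_t idx,
                      size_t bo_idx, unsigned bo_bit)
{
  uint64_t lo = repr[bo_idx + idx];      /* iv: lower source limb.  */
  uint64_t hi = repr[bo_idx + idx + 1];  /* iv2: upper source limb.  */
  /* Well defined because this path is only taken for bo_bit != 0.  */
  return (lo >> bo_bit) | (hi << (64 - bo_bit));
}
#endif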
1886 /* Return a limb IDX from a mergeable statement STMT. */
1888 tree
1889 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1891 tree lhs, rhs1, rhs2 = NULL_TREE;
1892 gimple *g;
1893 switch (gimple_code (stmt))
1895 case GIMPLE_ASSIGN:
1896 if (gimple_assign_load_p (stmt))
1897 return handle_load (stmt, idx);
1898 switch (gimple_assign_rhs_code (stmt))
1900 case BIT_AND_EXPR:
1901 case BIT_IOR_EXPR:
1902 case BIT_XOR_EXPR:
1903 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1904 /* FALLTHRU */
1905 case BIT_NOT_EXPR:
1906 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1907 lhs = make_ssa_name (TREE_TYPE (rhs1));
1908 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1909 rhs1, rhs2);
1910 insert_before (g);
1911 return lhs;
1912 case PLUS_EXPR:
1913 case MINUS_EXPR:
1914 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1915 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1916 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1917 rhs1, rhs2, idx);
1918 case NEGATE_EXPR:
1919 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1920 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1921 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1922 case LSHIFT_EXPR:
1923 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1924 idx),
1925 gimple_assign_rhs2 (stmt), idx);
1926 case SSA_NAME:
1927 case INTEGER_CST:
1928 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1929 CASE_CONVERT:
1930 case VIEW_CONVERT_EXPR:
1931 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1932 gimple_assign_rhs1 (stmt), idx);
1933 default:
1934 break;
1936 break;
1937 default:
1938 break;
1940 gcc_unreachable ();
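/* Editorial sketch (not part of the pass): mergeable operations are
   lowered one limb at a time because each result limb depends only on
   the corresponding operand limbs (plus a carry for PLUS/MINUS); e.g.
   BIT_XOR_EXPR on N limbs behaves like, assuming a 64-bit limb:  */
#if 0
void
bitint_xor (uint64_t *dst, const uint64_t *a, const uint64_t *b,
            size_t nlimbs)
{
  for (size_t idx = 0; idx < nlimbs; ++idx)
    dst[idx] = a[idx] ^ b[idx];   /* handle_stmt for limb IDX.  */
}
#endif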
1943 /* Return the minimum precision of OP at STMT.
1944 A positive value is the minimum precision above which all bits
1945 are zero; a negative value means all bits above the negation of
1946 the value are copies of the sign bit. */
1948 static int
1949 range_to_prec (tree op, gimple *stmt)
1951 int_range_max r;
1952 wide_int w;
1953 tree type = TREE_TYPE (op);
1954 unsigned int prec = TYPE_PRECISION (type);
1956 if (!optimize
1957 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1958 || r.undefined_p ())
1960 if (TYPE_UNSIGNED (type))
1961 return prec;
1962 else
1963 return MIN ((int) -prec, -2);
1966 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1968 w = r.lower_bound ();
1969 if (wi::neg_p (w))
1971 int min_prec1 = wi::min_precision (w, SIGNED);
1972 w = r.upper_bound ();
1973 int min_prec2 = wi::min_precision (w, SIGNED);
1974 int min_prec = MAX (min_prec1, min_prec2);
1975 return MIN (-min_prec, -2);
1979 w = r.upper_bound ();
1980 int min_prec = wi::min_precision (w, UNSIGNED);
1981 return MAX (min_prec, 1);
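/* Editorial examples of the encoding above (not part of the pass): an
   operand with computed range [0, 1000] yields 10, since all bits above
   bit 10 are known zero; range [-5, 5] yields -4, since all bits above
   bit 4 are copies of the sign bit; with no usable range info the result
   is TYPE_PRECISION for unsigned types and -TYPE_PRECISION (clamped to
   at most -2) for signed ones.  */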
1984 /* Return the address of the first limb of OP and write its
1985 precision into *PREC. If *PREC is positive, the operand is
1986 zero-extended from that precision; if it is negative, the operand
1987 is sign-extended from -*PREC. If PREC_STORED is NULL, this is the
1988 toplevel call; otherwise *PREC_STORED is set to the precision from
1989 the innermost call without range optimizations. */
1991 tree
1992 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
1993 int *prec_stored, int *prec)
1995 wide_int w;
1996 location_t loc_save = m_loc;
1997 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
1998 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
1999 && TREE_CODE (op) != INTEGER_CST)
2001 do_int:
2002 *prec = range_to_prec (op, stmt);
2003 bitint_prec_kind kind = bitint_prec_small;
2004 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2005 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2006 kind = bitint_precision_kind (TREE_TYPE (op));
2007 if (kind == bitint_prec_middle)
2009 tree type = NULL_TREE;
2010 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2012 tree op_type = TREE_TYPE (op);
2013 unsigned HOST_WIDE_INT nelts
2014 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2015 /* Add support for 3 or more limbs filled in from a normal
2016 integral type if this assert fails. If no target chooses a
2017 limb mode smaller than half of the largest supported normal
2018 integral type, this will not be needed. */
2019 gcc_assert (nelts <= 2);
2020 if (prec_stored)
2021 *prec_stored = (TYPE_UNSIGNED (op_type)
2022 ? TYPE_PRECISION (op_type)
2023 : -TYPE_PRECISION (op_type));
2024 if (*prec <= limb_prec && *prec >= -limb_prec)
2026 nelts = 1;
2027 if (prec_stored)
2029 if (TYPE_UNSIGNED (op_type))
2031 if (*prec_stored > limb_prec)
2032 *prec_stored = limb_prec;
2034 else if (*prec_stored < -limb_prec)
2035 *prec_stored = -limb_prec;
2038 tree atype = build_array_type_nelts (m_limb_type, nelts);
2039 tree var = create_tmp_var (atype);
2040 tree t1 = op;
2041 if (!useless_type_conversion_p (m_limb_type, op_type))
2042 t1 = add_cast (m_limb_type, t1);
2043 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2044 NULL_TREE, NULL_TREE);
2045 gimple *g = gimple_build_assign (v, t1);
2046 insert_before (g);
2047 if (nelts > 1)
2049 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2050 g = gimple_build_assign (make_ssa_name (op_type),
2051 RSHIFT_EXPR, op, lp);
2052 insert_before (g);
2053 tree t2 = gimple_assign_lhs (g);
2054 t2 = add_cast (m_limb_type, t2);
2055 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2056 NULL_TREE, NULL_TREE);
2057 g = gimple_build_assign (v, t2);
2058 insert_before (g);
2060 tree ret = build_fold_addr_expr (var);
2061 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2063 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2064 g = gimple_build_assign (var, clobber);
2065 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2067 m_loc = loc_save;
2068 return ret;
2070 switch (TREE_CODE (op))
2072 case SSA_NAME:
2073 if (m_names == NULL
2074 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2076 gimple *g = SSA_NAME_DEF_STMT (op);
2077 tree ret;
2078 m_loc = gimple_location (g);
2079 if (gimple_assign_load_p (g))
2081 *prec = range_to_prec (op, NULL);
2082 if (prec_stored)
2083 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2084 ? TYPE_PRECISION (TREE_TYPE (op))
2085 : -TYPE_PRECISION (TREE_TYPE (op)));
2086 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2087 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2088 NULL_TREE, true, GSI_SAME_STMT);
2090 else if (gimple_code (g) == GIMPLE_NOP)
2092 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2093 if (prec_stored)
2094 *prec_stored = *prec;
2095 tree var = create_tmp_var (m_limb_type);
2096 TREE_ADDRESSABLE (var) = 1;
2097 ret = build_fold_addr_expr (var);
2098 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2100 tree clobber = build_clobber (m_limb_type,
2101 CLOBBER_STORAGE_END);
2102 g = gimple_build_assign (var, clobber);
2103 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2106 else
2108 gcc_assert (gimple_assign_cast_p (g));
2109 tree rhs1 = gimple_assign_rhs1 (g);
2110 bitint_prec_kind kind = bitint_prec_small;
2111 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2112 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2113 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2114 if (kind >= bitint_prec_large)
2116 tree lhs_type = TREE_TYPE (op);
2117 tree rhs_type = TREE_TYPE (rhs1);
2118 int prec_stored_val = 0;
2119 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2120 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2122 if (TYPE_UNSIGNED (lhs_type)
2123 && !TYPE_UNSIGNED (rhs_type))
2124 gcc_assert (*prec >= 0 || prec_stored == NULL);
2126 else
2128 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2130 else if (TYPE_UNSIGNED (lhs_type))
2132 gcc_assert (*prec > 0
2133 || prec_stored_val > 0
2134 || (-prec_stored_val
2135 >= TYPE_PRECISION (lhs_type)));
2136 *prec = TYPE_PRECISION (lhs_type);
2138 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2140 else
2141 *prec = -TYPE_PRECISION (lhs_type);
2144 else
2146 op = rhs1;
2147 stmt = g;
2148 goto do_int;
2151 m_loc = loc_save;
2152 return ret;
2154 else
2156 int p = var_to_partition (m_map, op);
2157 gcc_assert (m_vars[p] != NULL_TREE);
2158 *prec = range_to_prec (op, stmt);
2159 if (prec_stored)
2160 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2161 ? TYPE_PRECISION (TREE_TYPE (op))
2162 : -TYPE_PRECISION (TREE_TYPE (op)));
2163 return build_fold_addr_expr (m_vars[p]);
2165 case INTEGER_CST:
2166 unsigned int min_prec, mp;
2167 tree type;
2168 w = wi::to_wide (op);
2169 if (tree_int_cst_sgn (op) >= 0)
2171 min_prec = wi::min_precision (w, UNSIGNED);
2172 *prec = MAX (min_prec, 1);
2174 else
2176 min_prec = wi::min_precision (w, SIGNED);
2177 *prec = MIN ((int) -min_prec, -2);
2179 mp = CEIL (min_prec, limb_prec) * limb_prec;
2180 if (mp == 0)
2181 mp = 1;
2182 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2183 type = TREE_TYPE (op);
2184 else
2185 type = build_bitint_type (mp, 1);
2186 if (TREE_CODE (type) != BITINT_TYPE
2187 || bitint_precision_kind (type) == bitint_prec_small)
2189 if (TYPE_PRECISION (type) <= limb_prec)
2190 type = m_limb_type;
2191 else
2192 /* This case is for targets which e.g. have a 64-bit
2193 limb but categorize up to 128-bit _BitInts as
2194 small. We could use a type like m_limb_type[2] or
2195 similar instead to save space. */
2196 type = build_bitint_type (mid_min_prec, 1);
2198 if (prec_stored)
2200 if (tree_int_cst_sgn (op) >= 0)
2201 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2202 else
2203 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2205 op = tree_output_constant_def (fold_convert (type, op));
2206 return build_fold_addr_expr (op);
2207 default:
2208 gcc_unreachable ();
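/* Editorial sketch (not part of the pass): for a small/middle operand
   the do_int path above spills the value into an on-stack limb array so
   its address can be passed to the library helpers.  Assuming a 64-bit
   limb and a 128-bit operand (the nelts == 2 case):  */
#if 0
void
spill_to_limbs (uint64_t dst[2], unsigned __int128 op)
{
  dst[0] = (uint64_t) op;           /* Low limb: cast to limb type.  */
  dst[1] = (uint64_t) (op >> 64);   /* High limb: op >> limb_prec.  */
}
#endif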
2212 /* Helper function to create a loop before the current location,
2213 starting with sizetype INIT value on the preheader edge. Return
2214 the PHI result and set *IDX_NEXT to the SSA_NAME the PHI creates
2215 and uses from the latch edge. */
2217 tree
2218 bitint_large_huge::create_loop (tree init, tree *idx_next)
2220 if (!gsi_end_p (m_gsi))
2221 gsi_prev (&m_gsi);
2222 else
2223 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2224 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2225 edge e2 = split_block (e1->dest, (gimple *) NULL);
2226 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2227 e3->probability = profile_probability::very_unlikely ();
2228 e2->flags = EDGE_FALSE_VALUE;
2229 e2->probability = e3->probability.invert ();
2230 tree idx = make_ssa_name (sizetype);
2231 gphi *phi = create_phi_node (idx, e1->dest);
2232 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2233 *idx_next = make_ssa_name (sizetype);
2234 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2235 m_gsi = gsi_after_labels (e1->dest);
2236 m_bb = e1->dest;
2237 m_preheader_bb = e1->src;
2238 class loop *loop = alloc_loop ();
2239 loop->header = e1->dest;
2240 add_loop (loop, e1->src->loop_father);
2241 return idx;
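/* Editorial sketch of the CFG shape created above (not part of the
   pass): the header block is split out of the current block and made
   its own latch, with IDX as the header PHI:

     preheader (e1->src):  ...
     header (e1->dest):    idx = PHI <INIT (preheader), *IDX_NEXT (latch)>
                           ... statements emitted at m_gsi ...
                           if (cond) goto header;   (e3, back edge)
     exit (e2->dest):      ...

   The exit condition itself is emitted later by the callers.  */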
2244 /* Lower a mergeable or similar large/huge _BitInt statement STMT which can
2245 be lowered by iterating from the least significant limb up to the most
2246 significant limb. For large _BitInt it is emitted as straight line code
2247 before the current location; for huge _BitInt as a loop handling two limbs
2248 at once, followed by handling the remaining limbs in straight line code
2249 (at most one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2250 comparisons, in which case CMP_CODE should be the comparison code and
2251 CMP_OP1/CMP_OP2 the comparison operands. */
2253 tree
2254 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2255 tree cmp_op1, tree cmp_op2)
2257 bool eq_p = cmp_code != ERROR_MARK;
2258 tree type;
2259 if (eq_p)
2260 type = TREE_TYPE (cmp_op1);
2261 else
2262 type = TREE_TYPE (gimple_assign_lhs (stmt));
2263 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2264 bitint_prec_kind kind = bitint_precision_kind (type);
2265 gcc_assert (kind >= bitint_prec_large);
2266 gimple *g;
2267 tree lhs = gimple_get_lhs (stmt);
2268 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2269 if (lhs
2270 && TREE_CODE (lhs) == SSA_NAME
2271 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2272 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2274 int p = var_to_partition (m_map, lhs);
2275 gcc_assert (m_vars[p] != NULL_TREE);
2276 m_lhs = lhs = m_vars[p];
2278 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2279 bool sext = false;
2280 tree ext = NULL_TREE, store_operand = NULL_TREE;
2281 bool eh = false;
2282 basic_block eh_pad = NULL;
2283 tree nlhs = NULL_TREE;
2284 unsigned HOST_WIDE_INT bo_idx = 0;
2285 unsigned HOST_WIDE_INT bo_bit = 0;
2286 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2287 if (gimple_store_p (stmt))
2289 store_operand = gimple_assign_rhs1 (stmt);
2290 eh = stmt_ends_bb_p (stmt);
2291 if (eh)
2293 edge e;
2294 edge_iterator ei;
2295 basic_block bb = gimple_bb (stmt);
2297 FOR_EACH_EDGE (e, ei, bb->succs)
2298 if (e->flags & EDGE_EH)
2300 eh_pad = e->dest;
2301 break;
2304 if (TREE_CODE (lhs) == COMPONENT_REF
2305 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2307 tree fld = TREE_OPERAND (lhs, 1);
2308 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2309 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2310 poly_int64 bitoffset;
2311 poly_uint64 field_offset, repr_offset;
2312 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2313 nlhs = lhs;
2314 else
2316 bool var_field_off = false;
2317 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2318 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2319 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2320 else
2322 bitoffset = 0;
2323 var_field_off = true;
2325 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2326 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2327 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2328 TREE_OPERAND (lhs, 0), repr,
2329 var_field_off
2330 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2331 HOST_WIDE_INT bo = bitoffset.to_constant ();
2332 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2333 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2337 if ((store_operand
2338 && TREE_CODE (store_operand) == SSA_NAME
2339 && (m_names == NULL
2340 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2341 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2342 || gimple_assign_cast_p (stmt))
2344 rhs1 = gimple_assign_rhs1 (store_operand
2345 ? SSA_NAME_DEF_STMT (store_operand)
2346 : stmt);
2347 /* Optimize mergeable ops ending with a widening cast to _BitInt
2348 (or followed by a store). We can lower just the limbs of the
2349 cast operand and widen afterwards. */
2350 if (TREE_CODE (rhs1) == SSA_NAME
2351 && (m_names == NULL
2352 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2353 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2354 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2355 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2356 limb_prec) < CEIL (prec, limb_prec)
2357 || (kind == bitint_prec_huge
2358 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2360 store_operand = rhs1;
2361 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2362 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2363 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2364 sext = true;
2367 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2368 if (kind == bitint_prec_large)
2369 cnt = CEIL (prec, limb_prec);
2370 else
2372 rem = (prec % (2 * limb_prec));
2373 end = (prec - rem) / limb_prec;
2374 cnt = 2 + CEIL (rem, limb_prec);
2375 idx = idx_first = create_loop (size_zero_node, &idx_next);
2378 basic_block edge_bb = NULL;
2379 if (eq_p)
2381 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2382 gsi_prev (&gsi);
2383 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2384 edge_bb = e->src;
2385 if (kind == bitint_prec_large)
2386 m_gsi = gsi_end_bb (edge_bb);
2388 else
2389 m_after_stmt = stmt;
2390 if (kind != bitint_prec_large)
2391 m_upwards_2limb = end;
2392 m_upwards = true;
2394 bool separate_ext
2395 = (prec != (unsigned) TYPE_PRECISION (type)
2396 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2397 > CEIL (prec, limb_prec)));
2399 for (unsigned i = 0; i < cnt; i++)
2401 m_data_cnt = 0;
2402 if (kind == bitint_prec_large)
2403 idx = size_int (i);
2404 else if (i >= 2)
2405 idx = size_int (end + (i > 2));
2406 if (eq_p)
2408 rhs1 = handle_operand (cmp_op1, idx);
2409 tree rhs2 = handle_operand (cmp_op2, idx);
2410 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2411 insert_before (g);
2412 edge e1 = split_block (gsi_bb (m_gsi), g);
2413 e1->flags = EDGE_FALSE_VALUE;
2414 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2415 e1->probability = profile_probability::unlikely ();
2416 e2->probability = e1->probability.invert ();
2417 if (i == 0)
2418 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2419 m_gsi = gsi_after_labels (e1->dest);
2421 else
2423 if (store_operand)
2424 rhs1 = handle_operand (store_operand, idx);
2425 else
2426 rhs1 = handle_stmt (stmt, idx);
2427 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2428 rhs1 = add_cast (m_limb_type, rhs1);
2429 if (sext && i == cnt - 1)
2430 ext = rhs1;
2431 tree nidx = idx;
2432 if (bo_idx)
2434 if (tree_fits_uhwi_p (idx))
2435 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2436 else
2438 nidx = make_ssa_name (sizetype);
2439 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2440 size_int (bo_idx));
2441 insert_before (g);
2444 bool done = false;
2445 basic_block new_bb = NULL;
2446 /* Handle stores into bit-fields. */
2447 if (bo_bit)
2449 if (i == 0)
2451 edge e2 = NULL;
2452 if (kind != bitint_prec_large)
2454 prepare_data_in_out (build_zero_cst (m_limb_type),
2455 idx, &bf_next);
2456 bf_next = m_data.pop ();
2457 bf_cur = m_data.pop ();
2458 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2459 NULL_TREE, NULL_TREE);
2460 edge edge_true;
2461 if_then_else (g, profile_probability::unlikely (),
2462 edge_true, e2);
2463 new_bb = e2->dest;
2465 tree ftype
2466 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2467 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2468 bitsize_int (limb_prec - bo_bit),
2469 bitsize_int (bo_idx * limb_prec + bo_bit));
2470 tree t = add_cast (ftype, rhs1);
2471 g = gimple_build_assign (bfr, t);
2472 insert_before (g);
2473 if (eh)
2475 maybe_duplicate_eh_stmt (g, stmt);
2476 if (eh_pad)
2478 edge e = split_block (gsi_bb (m_gsi), g);
2479 m_gsi = gsi_after_labels (e->dest);
2480 make_edge (e->src, eh_pad, EDGE_EH)->probability
2481 = profile_probability::very_unlikely ();
2484 if (kind == bitint_prec_large)
2486 bf_cur = rhs1;
2487 done = true;
2489 else if (e2)
2490 m_gsi = gsi_after_labels (e2->src);
2492 if (!done)
2494 tree t1 = make_ssa_name (m_limb_type);
2495 tree t2 = make_ssa_name (m_limb_type);
2496 tree t3 = make_ssa_name (m_limb_type);
2497 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2498 build_int_cst (unsigned_type_node,
2499 limb_prec - bo_bit));
2500 insert_before (g);
2501 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2502 build_int_cst (unsigned_type_node,
2503 bo_bit));
2504 insert_before (g);
2505 bf_cur = rhs1;
2506 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2507 insert_before (g);
2508 rhs1 = t3;
2509 if (bf_next && i == 1)
2511 g = gimple_build_assign (bf_next, bf_cur);
2512 insert_before (g);
2516 if (!done)
2518 /* Handle bit-field access to partial last limb if needed. */
2519 if (nlhs
2520 && i == cnt - 1
2521 && !separate_ext
2522 && tree_fits_uhwi_p (idx))
2524 unsigned int tprec = TYPE_PRECISION (type);
2525 unsigned int rprec = tprec % limb_prec;
2526 if (rprec + bo_bit < (unsigned) limb_prec)
2528 tree ftype
2529 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2530 tree bfr = build3 (BIT_FIELD_REF, ftype,
2531 unshare_expr (nlhs),
2532 bitsize_int (rprec + bo_bit),
2533 bitsize_int ((bo_idx
2534 + tprec / limb_prec)
2535 * limb_prec));
2536 tree t = add_cast (ftype, rhs1);
2537 g = gimple_build_assign (bfr, t);
2538 done = true;
2539 bf_cur = NULL_TREE;
2541 else if (rprec + bo_bit == (unsigned) limb_prec)
2542 bf_cur = NULL_TREE;
2544 /* Otherwise, stores to any other lhs. */
2545 if (!done)
2547 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2548 nidx, true);
2549 g = gimple_build_assign (l, rhs1);
2551 insert_before (g);
2552 if (eh)
2554 maybe_duplicate_eh_stmt (g, stmt);
2555 if (eh_pad)
2557 edge e = split_block (gsi_bb (m_gsi), g);
2558 m_gsi = gsi_after_labels (e->dest);
2559 make_edge (e->src, eh_pad, EDGE_EH)->probability
2560 = profile_probability::very_unlikely ();
2563 if (new_bb)
2564 m_gsi = gsi_after_labels (new_bb);
2567 m_first = false;
2568 if (kind == bitint_prec_huge && i <= 1)
2570 if (i == 0)
2572 idx = make_ssa_name (sizetype);
2573 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2574 size_one_node);
2575 insert_before (g);
2577 else
2579 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2580 size_int (2));
2581 insert_before (g);
2582 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2583 NULL_TREE, NULL_TREE);
2584 insert_before (g);
2585 if (eq_p)
2586 m_gsi = gsi_after_labels (edge_bb);
2587 else
2588 m_gsi = gsi_for_stmt (stmt);
2593 if (separate_ext)
2595 if (sext)
2597 ext = add_cast (signed_type_for (m_limb_type), ext);
2598 tree lpm1 = build_int_cst (unsigned_type_node,
2599 limb_prec - 1);
2600 tree n = make_ssa_name (TREE_TYPE (ext));
2601 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2602 insert_before (g);
2603 ext = add_cast (m_limb_type, n);
2605 else
2606 ext = build_zero_cst (m_limb_type);
2607 kind = bitint_precision_kind (type);
2608 unsigned start = CEIL (prec, limb_prec);
2609 prec = TYPE_PRECISION (type);
2610 idx = idx_first = idx_next = NULL_TREE;
2611 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2612 kind = bitint_prec_large;
2613 if (kind == bitint_prec_large)
2614 cnt = CEIL (prec, limb_prec) - start;
2615 else
2617 rem = prec % limb_prec;
2618 end = (prec - rem) / limb_prec;
2619 cnt = (bo_bit != 0) + 1 + (rem != 0);
2621 for (unsigned i = 0; i < cnt; i++)
2623 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2624 idx = size_int (start + i);
2625 else if (i == cnt - 1 && (rem != 0))
2626 idx = size_int (end);
2627 else if (i == (bo_bit != 0))
2628 idx = create_loop (size_int (start + i), &idx_next);
2629 rhs1 = ext;
2630 if (bf_cur != NULL_TREE && bf_cur != ext)
2632 tree t1 = make_ssa_name (m_limb_type);
2633 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2634 build_int_cst (unsigned_type_node,
2635 limb_prec - bo_bit));
2636 insert_before (g);
2637 if (integer_zerop (ext))
2638 rhs1 = t1;
2639 else
2641 tree t2 = make_ssa_name (m_limb_type);
2642 rhs1 = make_ssa_name (m_limb_type);
2643 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2644 build_int_cst (unsigned_type_node,
2645 bo_bit));
2646 insert_before (g);
2647 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2648 insert_before (g);
2650 bf_cur = ext;
2652 tree nidx = idx;
2653 if (bo_idx)
2655 if (tree_fits_uhwi_p (idx))
2656 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2657 else
2659 nidx = make_ssa_name (sizetype);
2660 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2661 size_int (bo_idx));
2662 insert_before (g);
2665 bool done = false;
2666 /* Handle bit-field access to partial last limb if needed. */
2667 if (nlhs && i == cnt - 1)
2669 unsigned int tprec = TYPE_PRECISION (type);
2670 unsigned int rprec = tprec % limb_prec;
2671 if (rprec + bo_bit < (unsigned) limb_prec)
2673 tree ftype
2674 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2675 tree bfr = build3 (BIT_FIELD_REF, ftype,
2676 unshare_expr (nlhs),
2677 bitsize_int (rprec + bo_bit),
2678 bitsize_int ((bo_idx + tprec / limb_prec)
2679 * limb_prec));
2680 tree t = add_cast (ftype, rhs1);
2681 g = gimple_build_assign (bfr, t);
2682 done = true;
2683 bf_cur = NULL_TREE;
2685 else if (rprec + bo_bit == (unsigned) limb_prec)
2686 bf_cur = NULL_TREE;
2688 /* Otherwise, stores to any other lhs. */
2689 if (!done)
2691 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2692 g = gimple_build_assign (l, rhs1);
2694 insert_before (g);
2695 if (eh)
2697 maybe_duplicate_eh_stmt (g, stmt);
2698 if (eh_pad)
2700 edge e = split_block (gsi_bb (m_gsi), g);
2701 m_gsi = gsi_after_labels (e->dest);
2702 make_edge (e->src, eh_pad, EDGE_EH)->probability
2703 = profile_probability::very_unlikely ();
2706 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2708 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2709 size_one_node);
2710 insert_before (g);
2711 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2712 NULL_TREE, NULL_TREE);
2713 insert_before (g);
2714 m_gsi = gsi_for_stmt (stmt);
2718 if (bf_cur != NULL_TREE)
2720 unsigned int tprec = TYPE_PRECISION (type);
2721 unsigned int rprec = tprec % limb_prec;
2722 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2723 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2724 bitsize_int (rprec + bo_bit),
2725 bitsize_int ((bo_idx + tprec / limb_prec)
2726 * limb_prec));
2727 rhs1 = bf_cur;
2728 if (bf_cur != ext)
2730 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2731 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2732 build_int_cst (unsigned_type_node,
2733 limb_prec - bo_bit));
2734 insert_before (g);
2736 rhs1 = add_cast (ftype, rhs1);
2737 g = gimple_build_assign (bfr, rhs1);
2738 insert_before (g);
2739 if (eh)
2741 maybe_duplicate_eh_stmt (g, stmt);
2742 if (eh_pad)
2744 edge e = split_block (gsi_bb (m_gsi), g);
2745 m_gsi = gsi_after_labels (e->dest);
2746 make_edge (e->src, eh_pad, EDGE_EH)->probability
2747 = profile_probability::very_unlikely ();
2752 if (gimple_store_p (stmt))
2754 unlink_stmt_vdef (stmt);
2755 release_ssa_name (gimple_vdef (stmt));
2756 gsi_remove (&m_gsi, true);
2758 if (eq_p)
2760 lhs = make_ssa_name (boolean_type_node);
2761 basic_block bb = gimple_bb (stmt);
2762 gphi *phi = create_phi_node (lhs, bb);
2763 edge e = find_edge (gsi_bb (m_gsi), bb);
2764 unsigned int n = EDGE_COUNT (bb->preds);
2765 for (unsigned int i = 0; i < n; i++)
2767 edge e2 = EDGE_PRED (bb, i);
2768 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2769 e2, UNKNOWN_LOCATION);
2771 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2772 return lhs;
2774 else
2775 return NULL_TREE;
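/* Editorial sketch (not part of the pass): for a huge _BitInt the code
   above emits a loop over two limbs per iteration followed by a straight
   line tail; e.g. a mergeable AND on _BitInt(300) with 64-bit limbs
   (end == 4, one partial tail limb) behaves like:  */
#if 0
void
bitint300_and (uint64_t dst[5], const uint64_t *a, const uint64_t *b)
{
  for (size_t idx = 0; idx < 4; idx += 2)
    {
      dst[idx] = a[idx] & b[idx];
      dst[idx + 1] = a[idx + 1] & b[idx + 1];
    }
  dst[4] = a[4] & b[4];   /* Partial limb: 300 % 64 == 44 bits.  */
}
#endif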
2778 /* Handle a large/huge _BitInt comparison statement STMT other than
2779 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 have the same meaning
2780 as in lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2781 lowered by iterating from the most significant limb downwards to
2782 the least significant one, for large _BitInt in straight line code,
2783 otherwise with the most significant limb handled in
2784 straight line code followed by a loop handling one limb at a time.
2785 Comparisons with unsigned huge _BitInt with precisions which are
2786 multiples of the limb precision can use just the loop and don't need
2787 to handle the most significant limb before the loop. The loop or
2788 straight line code jumps to the final basic block if a particular
2789 pair of limbs is not equal. */
2791 tree
2792 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2793 tree cmp_op1, tree cmp_op2)
2795 tree type = TREE_TYPE (cmp_op1);
2796 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2797 bitint_prec_kind kind = bitint_precision_kind (type);
2798 gcc_assert (kind >= bitint_prec_large);
2799 gimple *g;
2800 if (!TYPE_UNSIGNED (type)
2801 && integer_zerop (cmp_op2)
2802 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2804 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2805 tree idx = size_int (end);
2806 m_data_cnt = 0;
2807 tree rhs1 = handle_operand (cmp_op1, idx);
2808 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2810 tree stype = signed_type_for (TREE_TYPE (rhs1));
2811 rhs1 = add_cast (stype, rhs1);
2813 tree lhs = make_ssa_name (boolean_type_node);
2814 g = gimple_build_assign (lhs, cmp_code, rhs1,
2815 build_zero_cst (TREE_TYPE (rhs1)));
2816 insert_before (g);
2817 cmp_code = NE_EXPR;
2818 return lhs;
2821 unsigned cnt, rem = 0, end = 0;
2822 tree idx = NULL_TREE, idx_next = NULL_TREE;
2823 if (kind == bitint_prec_large)
2824 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2825 else
2827 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2828 if (rem == 0 && !TYPE_UNSIGNED (type))
2829 rem = limb_prec;
2830 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2831 cnt = 1 + (rem != 0);
2834 basic_block edge_bb = NULL;
2835 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2836 gsi_prev (&gsi);
2837 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2838 edge_bb = e->src;
2839 m_gsi = gsi_end_bb (edge_bb);
2841 edge *edges = XALLOCAVEC (edge, cnt * 2);
2842 for (unsigned i = 0; i < cnt; i++)
2844 m_data_cnt = 0;
2845 if (kind == bitint_prec_large)
2846 idx = size_int (cnt - i - 1);
2847 else if (i == cnt - 1)
2848 idx = create_loop (size_int (end - 1), &idx_next);
2849 else
2850 idx = size_int (end);
2851 tree rhs1 = handle_operand (cmp_op1, idx);
2852 tree rhs2 = handle_operand (cmp_op2, idx);
2853 if (i == 0
2854 && !TYPE_UNSIGNED (type)
2855 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2857 tree stype = signed_type_for (TREE_TYPE (rhs1));
2858 rhs1 = add_cast (stype, rhs1);
2859 rhs2 = add_cast (stype, rhs2);
2861 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2862 insert_before (g);
2863 edge e1 = split_block (gsi_bb (m_gsi), g);
2864 e1->flags = EDGE_FALSE_VALUE;
2865 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2866 e1->probability = profile_probability::likely ();
2867 e2->probability = e1->probability.invert ();
2868 if (i == 0)
2869 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2870 m_gsi = gsi_after_labels (e1->dest);
2871 edges[2 * i] = e2;
2872 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2873 insert_before (g);
2874 e1 = split_block (gsi_bb (m_gsi), g);
2875 e1->flags = EDGE_FALSE_VALUE;
2876 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2877 e1->probability = profile_probability::unlikely ();
2878 e2->probability = e1->probability.invert ();
2879 m_gsi = gsi_after_labels (e1->dest);
2880 edges[2 * i + 1] = e2;
2881 m_first = false;
2882 if (kind == bitint_prec_huge && i == cnt - 1)
2884 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2885 insert_before (g);
2886 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2887 NULL_TREE, NULL_TREE);
2888 insert_before (g);
2889 edge true_edge, false_edge;
2890 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2891 &true_edge, &false_edge);
2892 m_gsi = gsi_after_labels (false_edge->dest);
2896 tree lhs = make_ssa_name (boolean_type_node);
2897 basic_block bb = gimple_bb (stmt);
2898 gphi *phi = create_phi_node (lhs, bb);
2899 for (unsigned int i = 0; i < cnt * 2; i++)
2901 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2902 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2903 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2905 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2906 ? boolean_true_node : boolean_false_node,
2907 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2908 cmp_code = NE_EXPR;
2909 return lhs;
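/* Editorial sketch (not part of the pass): the lowering above is
   equivalent to a limb-wise compare from the most significant limb
   down, jumping to the final block on the first unequal pair (the
   topmost, possibly partial, limb is compared signed for signed
   types):  */
#if 0
int
bitint_cmp (const uint64_t *a, const uint64_t *b, size_t nlimbs)
{
  for (size_t idx = nlimbs; idx-- > 0; )
    {
      if (a[idx] > b[idx])
        return 1;    /* Edge to final bb: "greater".  */
      if (a[idx] < b[idx])
        return -1;   /* Edge to final bb: "less".  */
    }
  return 0;          /* Fallthrough: all limb pairs equal.  */
}
#endif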
2912 /* Lower large/huge _BitInt left and right shifts, except for left
2913 shifts by a constant smaller than limb_prec. */
2915 void
2916 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2918 tree rhs1 = gimple_assign_rhs1 (stmt);
2919 tree lhs = gimple_assign_lhs (stmt);
2920 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2921 tree type = TREE_TYPE (rhs1);
2922 gimple *final_stmt = gsi_stmt (m_gsi);
2923 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2924 && bitint_precision_kind (type) >= bitint_prec_large);
2925 int prec = TYPE_PRECISION (type);
2926 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2927 gimple *g;
2928 if (obj == NULL_TREE)
2930 int part = var_to_partition (m_map, lhs);
2931 gcc_assert (m_vars[part] != NULL_TREE);
2932 obj = m_vars[part];
2934 /* Preparation code common for both left and right shifts.
2935 unsigned n1 = n % limb_prec;
2936 size_t n2 = n / limb_prec;
2937 size_t n3 = n1 != 0;
2938 unsigned n4 = (limb_prec - n1) % limb_prec;
2939 (for a power-of-2 limb_prec, n4 can be computed as -n1 & (limb_prec - 1)). */
2940 if (TREE_CODE (n) == INTEGER_CST)
2942 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2943 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2944 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2945 n3 = size_int (!integer_zerop (n1));
2946 n4 = int_const_binop (TRUNC_MOD_EXPR,
2947 int_const_binop (MINUS_EXPR, lp, n1), lp);
2949 else
2951 n1 = make_ssa_name (TREE_TYPE (n));
2952 n2 = make_ssa_name (sizetype);
2953 n3 = make_ssa_name (sizetype);
2954 n4 = make_ssa_name (TREE_TYPE (n));
2955 if (pow2p_hwi (limb_prec))
2957 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2958 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2959 insert_before (g);
2960 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2961 TREE_TYPE (n))
2962 ? n2 : make_ssa_name (TREE_TYPE (n)),
2963 RSHIFT_EXPR, n,
2964 build_int_cst (TREE_TYPE (n),
2965 exact_log2 (limb_prec)));
2966 insert_before (g);
2967 if (gimple_assign_lhs (g) != n2)
2969 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2970 insert_before (g);
2972 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2973 NEGATE_EXPR, n1);
2974 insert_before (g);
2975 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
2976 lpm1);
2977 insert_before (g);
2979 else
2981 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2982 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
2983 insert_before (g);
2984 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2985 TREE_TYPE (n))
2986 ? n2 : make_ssa_name (TREE_TYPE (n)),
2987 TRUNC_DIV_EXPR, n, lp);
2988 insert_before (g);
2989 if (gimple_assign_lhs (g) != n2)
2991 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2992 insert_before (g);
2994 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2995 MINUS_EXPR, lp, n1);
2996 insert_before (g);
2997 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
2998 lp);
2999 insert_before (g);
3001 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3002 build_zero_cst (TREE_TYPE (n)));
3003 insert_before (g);
3004 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3005 insert_before (g);
3007 tree p = build_int_cst (sizetype,
3008 prec / limb_prec - (prec % limb_prec == 0));
3009 if (rhs_code == RSHIFT_EXPR)
3011 /* Lower
3012 dst = src >> n;
3014 unsigned n1 = n % limb_prec;
3015 size_t n2 = n / limb_prec;
3016 size_t n3 = n1 != 0;
3017 unsigned n4 = (limb_prec - n1) % limb_prec;
3018 size_t idx;
3019 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3020 int signed_p = (typeof (src) -1) < 0;
3021 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3022 ? p : p - n3); ++idx)
3023 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3024 limb_type ext;
3025 if (prec % limb_prec == 0)
3026 ext = src[p];
3027 else if (signed_p)
3028 ext = ((signed limb_type) (src[p] << (limb_prec
3029 - (prec % limb_prec))))
3030 >> (limb_prec - (prec % limb_prec));
3031 else
3032 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3033 if (!signed_p && (prec % limb_prec == 0))
3035 else if (idx < prec / limb_prec)
3037 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3038 ++idx;
3040 idx -= n2;
3041 if (signed_p)
3043 dst[idx] = ((signed limb_type) ext) >> n1;
3044 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3046 else
3048 dst[idx] = ext >> n1;
3049 ext = 0;
3051 for (++idx; idx <= p; ++idx)
3052 dst[idx] = ext; */
3053 tree pmn3;
3054 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3055 pmn3 = p;
3056 else if (TREE_CODE (n3) == INTEGER_CST)
3057 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3058 else
3060 pmn3 = make_ssa_name (sizetype);
3061 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3062 insert_before (g);
3064 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3065 edge edge_true, edge_false;
3066 if_then (g, profile_probability::likely (), edge_true, edge_false);
3067 tree idx_next;
3068 tree idx = create_loop (n2, &idx_next);
3069 tree idxmn2 = make_ssa_name (sizetype);
3070 tree idxpn3 = make_ssa_name (sizetype);
3071 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3072 insert_before (g);
3073 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3074 insert_before (g);
3075 m_data_cnt = 0;
3076 tree t1 = handle_operand (rhs1, idx);
3077 m_first = false;
3078 g = gimple_build_assign (make_ssa_name (m_limb_type),
3079 RSHIFT_EXPR, t1, n1);
3080 insert_before (g);
3081 t1 = gimple_assign_lhs (g);
3082 if (!integer_zerop (n3))
3084 m_data_cnt = 0;
3085 tree t2 = handle_operand (rhs1, idxpn3);
3086 g = gimple_build_assign (make_ssa_name (m_limb_type),
3087 LSHIFT_EXPR, t2, n4);
3088 insert_before (g);
3089 t2 = gimple_assign_lhs (g);
3090 g = gimple_build_assign (make_ssa_name (m_limb_type),
3091 BIT_IOR_EXPR, t1, t2);
3092 insert_before (g);
3093 t1 = gimple_assign_lhs (g);
3095 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3096 g = gimple_build_assign (l, t1);
3097 insert_before (g);
3098 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3099 insert_before (g);
3100 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3101 insert_before (g);
3102 idx = make_ssa_name (sizetype);
3103 m_gsi = gsi_for_stmt (final_stmt);
3104 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3105 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3106 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3107 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3108 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3109 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3110 m_data_cnt = 0;
3111 tree ms = handle_operand (rhs1, p);
3112 tree ext = ms;
3113 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3114 ext = add_cast (m_limb_type, ms);
3115 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3116 && !integer_zerop (n3))
3118 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3119 if_then (g, profile_probability::likely (), edge_true, edge_false);
3120 m_data_cnt = 0;
3121 t1 = handle_operand (rhs1, idx);
3122 g = gimple_build_assign (make_ssa_name (m_limb_type),
3123 RSHIFT_EXPR, t1, n1);
3124 insert_before (g);
3125 t1 = gimple_assign_lhs (g);
3126 g = gimple_build_assign (make_ssa_name (m_limb_type),
3127 LSHIFT_EXPR, ext, n4);
3128 insert_before (g);
3129 tree t2 = gimple_assign_lhs (g);
3130 g = gimple_build_assign (make_ssa_name (m_limb_type),
3131 BIT_IOR_EXPR, t1, t2);
3132 insert_before (g);
3133 t1 = gimple_assign_lhs (g);
3134 idxmn2 = make_ssa_name (sizetype);
3135 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3136 insert_before (g);
3137 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3138 g = gimple_build_assign (l, t1);
3139 insert_before (g);
3140 idx_next = make_ssa_name (sizetype);
3141 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3142 insert_before (g);
3143 m_gsi = gsi_for_stmt (final_stmt);
3144 tree nidx = make_ssa_name (sizetype);
3145 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3146 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3147 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3148 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3149 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3150 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3151 idx = nidx;
3153 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3154 insert_before (g);
3155 idx = gimple_assign_lhs (g);
3156 tree sext = ext;
3157 if (!TYPE_UNSIGNED (type))
3158 sext = add_cast (signed_type_for (m_limb_type), ext);
3159 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3160 RSHIFT_EXPR, sext, n1);
3161 insert_before (g);
3162 t1 = gimple_assign_lhs (g);
3163 if (!TYPE_UNSIGNED (type))
3165 t1 = add_cast (m_limb_type, t1);
3166 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3167 RSHIFT_EXPR, sext,
3168 build_int_cst (TREE_TYPE (n),
3169 limb_prec - 1));
3170 insert_before (g);
3171 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3173 else
3174 ext = build_zero_cst (m_limb_type);
3175 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3176 g = gimple_build_assign (l, t1);
3177 insert_before (g);
3178 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3179 size_one_node);
3180 insert_before (g);
3181 idx = gimple_assign_lhs (g);
3182 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3183 if_then (g, profile_probability::likely (), edge_true, edge_false);
3184 idx = create_loop (idx, &idx_next);
3185 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3186 g = gimple_build_assign (l, ext);
3187 insert_before (g);
3188 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3189 insert_before (g);
3190 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3191 insert_before (g);
3193 else
3195 /* Lower
3196 dst = src << n;
3198 unsigned n1 = n % limb_prec;
3199 size_t n2 = n / limb_prec;
3200 size_t n3 = n1 != 0;
3201 unsigned n4 = (limb_prec - n1) % limb_prec;
3202 size_t idx;
3203 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3204 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3205 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3206 if (n1)
3208 dst[idx] = src[idx - n2] << n1;
3209 --idx;
3211 for (; (ssize_t) idx >= 0; --idx)
3212 dst[idx] = 0; */
3213 tree n2pn3;
3214 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3215 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3216 else
3218 n2pn3 = make_ssa_name (sizetype);
3219 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3220 insert_before (g);
3222 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3223 idx even to access the most significant partial limb. */
3224 m_var_msb = true;
3225 if (integer_zerop (n3))
3226 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3227 counts. Emit an if (true) condition that can be optimized away later. */
3228 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3229 NULL_TREE, NULL_TREE);
3230 else
3231 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3232 edge edge_true, edge_false;
3233 if_then (g, profile_probability::likely (), edge_true, edge_false);
3234 tree idx_next;
3235 tree idx = create_loop (p, &idx_next);
3236 tree idxmn2 = make_ssa_name (sizetype);
3237 tree idxmn2mn3 = make_ssa_name (sizetype);
3238 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3239 insert_before (g);
3240 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3241 insert_before (g);
3242 m_data_cnt = 0;
3243 tree t1 = handle_operand (rhs1, idxmn2);
3244 m_first = false;
3245 g = gimple_build_assign (make_ssa_name (m_limb_type),
3246 LSHIFT_EXPR, t1, n1);
3247 insert_before (g);
3248 t1 = gimple_assign_lhs (g);
3249 if (!integer_zerop (n3))
3251 m_data_cnt = 0;
3252 tree t2 = handle_operand (rhs1, idxmn2mn3);
3253 g = gimple_build_assign (make_ssa_name (m_limb_type),
3254 RSHIFT_EXPR, t2, n4);
3255 insert_before (g);
3256 t2 = gimple_assign_lhs (g);
3257 g = gimple_build_assign (make_ssa_name (m_limb_type),
3258 BIT_IOR_EXPR, t1, t2);
3259 insert_before (g);
3260 t1 = gimple_assign_lhs (g);
3262 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3263 g = gimple_build_assign (l, t1);
3264 insert_before (g);
3265 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3266 insert_before (g);
3267 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3268 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3269 NULL_TREE, NULL_TREE);
3270 insert_before (g);
3271 idx = make_ssa_name (sizetype);
3272 m_gsi = gsi_for_stmt (final_stmt);
3273 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3274 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3275 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3276 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3277 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3278 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3279 m_data_cnt = 0;
3280 if (!integer_zerop (n3))
3282 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3283 NULL_TREE, NULL_TREE);
3284 if_then (g, profile_probability::likely (), edge_true, edge_false);
3285 idxmn2 = make_ssa_name (sizetype);
3286 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3287 insert_before (g);
3288 m_data_cnt = 0;
3289 t1 = handle_operand (rhs1, idxmn2);
3290 g = gimple_build_assign (make_ssa_name (m_limb_type),
3291 LSHIFT_EXPR, t1, n1);
3292 insert_before (g);
3293 t1 = gimple_assign_lhs (g);
3294 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3295 g = gimple_build_assign (l, t1);
3296 insert_before (g);
3297 idx_next = make_ssa_name (sizetype);
3298 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3299 insert_before (g);
3300 m_gsi = gsi_for_stmt (final_stmt);
3301 tree nidx = make_ssa_name (sizetype);
3302 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3303 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3304 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3305 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3306 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3307 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3308 idx = nidx;
3310 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3311 ssize_int (0), NULL_TREE, NULL_TREE);
3312 if_then (g, profile_probability::likely (), edge_true, edge_false);
3313 idx = create_loop (idx, &idx_next);
3314 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3315 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3316 insert_before (g);
3317 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3318 insert_before (g);
3319 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3320 ssize_int (0), NULL_TREE, NULL_TREE);
3321 insert_before (g);
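/* Editorial worked example for the decomposition above, assuming a
   64-bit limb and shift count n == 100:
     n1 = 100 % 64         == 36   (shift within a limb)
     n2 = 100 / 64         == 1    (whole limbs skipped)
     n3 = (n1 != 0)        == 1
     n4 = (64 - 36) % 64   == 28   (complementary shift)
   so the right shift loop computes
     dst[idx - 1] = (src[idx] >> 36) | (src[idx + 1] << 28);  */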
3325 /* Lower large/huge _BitInt multiplication or division. */
3327 void
3328 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3330 tree rhs1 = gimple_assign_rhs1 (stmt);
3331 tree rhs2 = gimple_assign_rhs2 (stmt);
3332 tree lhs = gimple_assign_lhs (stmt);
3333 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3334 tree type = TREE_TYPE (rhs1);
3335 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3336 && bitint_precision_kind (type) >= bitint_prec_large);
3337 int prec = TYPE_PRECISION (type), prec1, prec2;
3338 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3339 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3340 if (obj == NULL_TREE)
3342 int part = var_to_partition (m_map, lhs);
3343 gcc_assert (m_vars[part] != NULL_TREE);
3344 obj = m_vars[part];
3345 lhs = build_fold_addr_expr (obj);
3347 else
3349 lhs = build_fold_addr_expr (obj);
3350 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3351 NULL_TREE, true, GSI_SAME_STMT);
3353 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3354 gimple *g;
3355 switch (rhs_code)
3357 case MULT_EXPR:
3358 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3359 lhs, build_int_cst (sitype, prec),
3360 rhs1, build_int_cst (sitype, prec1),
3361 rhs2, build_int_cst (sitype, prec2));
3362 insert_before (g);
3363 break;
3364 case TRUNC_DIV_EXPR:
3365 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3366 lhs, build_int_cst (sitype, prec),
3367 null_pointer_node,
3368 build_int_cst (sitype, 0),
3369 rhs1, build_int_cst (sitype, prec1),
3370 rhs2, build_int_cst (sitype, prec2));
3371 if (!stmt_ends_bb_p (stmt))
3372 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3373 insert_before (g);
3374 break;
3375 case TRUNC_MOD_EXPR:
3376 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3377 build_int_cst (sitype, 0),
3378 lhs, build_int_cst (sitype, prec),
3379 rhs1, build_int_cst (sitype, prec1),
3380 rhs2, build_int_cst (sitype, prec2));
3381 if (!stmt_ends_bb_p (stmt))
3382 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3383 insert_before (g);
3384 break;
3385 default:
3386 gcc_unreachable ();
3388 if (stmt_ends_bb_p (stmt))
3390 maybe_duplicate_eh_stmt (g, stmt);
3391 edge e1;
3392 edge_iterator ei;
3393 basic_block bb = gimple_bb (stmt);
3395 FOR_EACH_EDGE (e1, ei, bb->succs)
3396 if (e1->flags & EDGE_EH)
3397 break;
3398 if (e1)
3400 edge e2 = split_block (gsi_bb (m_gsi), g);
3401 m_gsi = gsi_after_labels (e2->dest);
3402 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3403 = profile_probability::very_unlikely ();
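/* Editorial note (not part of the pass): the SImode precision arguments
   above use the range_to_prec encoding, so e.g. a _BitInt(512) operand
   whose range is known to fit in [0, 255] is passed with precision 8,
   telling the library routine that all higher bits are zero.  */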
3408 /* Lower large/huge _BitInt conversion to/from floating point. */
3410 void
3411 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3413 tree rhs1 = gimple_assign_rhs1 (stmt);
3414 tree lhs = gimple_assign_lhs (stmt);
3415 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3416 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3417 gimple *g;
3418 if (rhs_code == FIX_TRUNC_EXPR)
3420 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3421 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3422 prec = -prec;
3423 if (obj == NULL_TREE)
3425 int part = var_to_partition (m_map, lhs);
3426 gcc_assert (m_vars[part] != NULL_TREE);
3427 obj = m_vars[part];
3428 lhs = build_fold_addr_expr (obj);
3430 else
3432 lhs = build_fold_addr_expr (obj);
3433 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3434 NULL_TREE, true, GSI_SAME_STMT);
3436 scalar_mode from_mode
3437 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3438 #ifdef HAVE_SFmode
3439 /* IEEE single is a full superset of both the IEEE half and
3440 bfloat formats, so convert to float first and then to _BitInt
3441 to avoid the need for another 2 library routines. */
3442 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3443 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3444 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3446 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3447 if (type)
3448 rhs1 = add_cast (type, rhs1);
3450 #endif
3451 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3452 lhs, build_int_cst (sitype, prec),
3453 rhs1);
3454 insert_before (g);
3456 else
3458 int prec;
3459 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3460 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3461 rhs1, build_int_cst (sitype, prec));
3462 gimple_call_set_lhs (g, lhs);
3463 if (!stmt_ends_bb_p (stmt))
3464 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3465 gsi_replace (&m_gsi, g, true);
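/* Editorial example (not part of the pass): with SFmode being IEEE
   single, converting a _Float16 or __bf16 value H to e.g. _BitInt(135)
   is emitted as H -> float -> IFN_FLOATTOBITINT, which is exact because
   IEEE single can represent every half and bfloat value.  */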
3469 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3470 If CHECK_ZERO is true, the caller wants to check whether all bits
3471 in [START, END) are zero, otherwise whether the bits in [START, END)
3472 are either all zero or all ones. L is the limb with index LIMB;
3473 START and END are measured in bits. */
3475 tree
3476 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3477 unsigned int end, tree l,
3478 unsigned int limb,
3479 bool check_zero)
3481 unsigned startlimb = start / limb_prec;
3482 unsigned endlimb = (end - 1) / limb_prec;
3483 gimple *g;
3485 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3486 return l;
3487 if (startlimb == endlimb && limb == startlimb)
3489 if (check_zero)
3491 wide_int w = wi::shifted_mask (start % limb_prec,
3492 end - start, false, limb_prec);
3493 g = gimple_build_assign (make_ssa_name (m_limb_type),
3494 BIT_AND_EXPR, l,
3495 wide_int_to_tree (m_limb_type, w));
3496 insert_before (g);
3497 return gimple_assign_lhs (g);
3499 unsigned int shift = start % limb_prec;
3500 if ((end % limb_prec) != 0)
3502 unsigned int lshift = (-end) % limb_prec;
3503 shift += lshift;
3504 g = gimple_build_assign (make_ssa_name (m_limb_type),
3505 LSHIFT_EXPR, l,
3506 build_int_cst (unsigned_type_node,
3507 lshift));
3508 insert_before (g);
3509 l = gimple_assign_lhs (g);
3511 l = add_cast (signed_type_for (m_limb_type), l);
3512 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3513 RSHIFT_EXPR, l,
3514 build_int_cst (unsigned_type_node, shift));
3515 insert_before (g);
3516 return add_cast (m_limb_type, gimple_assign_lhs (g));
3518 else if (limb == startlimb)
3520 if ((start % limb_prec) == 0)
3521 return l;
3522 if (!check_zero)
3523 l = add_cast (signed_type_for (m_limb_type), l);
3524 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3525 RSHIFT_EXPR, l,
3526 build_int_cst (unsigned_type_node,
3527 start % limb_prec));
3528 insert_before (g);
3529 l = gimple_assign_lhs (g);
3530 if (!check_zero)
3531 l = add_cast (m_limb_type, l);
3532 return l;
3534 else if (limb == endlimb)
3536 if ((end % limb_prec) == 0)
3537 return l;
3538 if (check_zero)
3540 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3541 g = gimple_build_assign (make_ssa_name (m_limb_type),
3542 BIT_AND_EXPR, l,
3543 wide_int_to_tree (m_limb_type, w));
3544 insert_before (g);
3545 return gimple_assign_lhs (g);
3547 unsigned int shift = (-end) % limb_prec;
3548 g = gimple_build_assign (make_ssa_name (m_limb_type),
3549 LSHIFT_EXPR, l,
3550 build_int_cst (unsigned_type_node, shift));
3551 insert_before (g);
3552 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3553 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3554 RSHIFT_EXPR, l,
3555 build_int_cst (unsigned_type_node, shift));
3556 insert_before (g);
3557 return add_cast (m_limb_type, gimple_assign_lhs (g));
3559 return l;
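/* Editorial worked example (not part of the pass), assuming a 64-bit
   limb and [START, END) == [70, 133), so startlimb == 1, endlimb == 2.
   For limb 1 the low 6 bits (70 % 64) are below the range, so the limb
   is shifted right by 6 (logically for check_zero, arithmetically
   otherwise).  For limb 2 only the low 5 bits (133 % 64) are in range:
   check_zero masks them with wi::mask (5, false, limb_prec); otherwise
   the 5-bit field is sign-extended to the full limb by the 59-bit
   shift-left/shift-right pair.  */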
3562 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3563 the result, including the overflow flag, into the right locations. */
3565 void
3566 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3567 tree ovf, tree lhs, tree orig_obj,
3568 gimple *stmt, tree_code code)
3570 gimple *g;
3572 if (obj == NULL_TREE
3573 && (TREE_CODE (type) != BITINT_TYPE
3574 || bitint_precision_kind (type) < bitint_prec_large))
3576 /* Add support for 3 or more limbs filled in from a normal integral
3577 type if this assert fails. If no target chooses a limb mode smaller
3578 than half of the largest supported normal integral type, this will
3579 not be needed. */
3580 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3581 tree lhs_type = type;
3582 if (TREE_CODE (type) == BITINT_TYPE
3583 && bitint_precision_kind (type) == bitint_prec_middle)
3584 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3585 TYPE_UNSIGNED (type));
3586 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3587 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3588 insert_before (g);
3589 r1 = gimple_assign_lhs (g);
3590 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3591 r1 = add_cast (lhs_type, r1);
3592 if (TYPE_PRECISION (lhs_type) > limb_prec)
3594 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3595 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3596 insert_before (g);
3597 r2 = gimple_assign_lhs (g);
3598 r2 = add_cast (lhs_type, r2);
3599 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3600 build_int_cst (unsigned_type_node,
3601 limb_prec));
3602 insert_before (g);
3603 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3604 gimple_assign_lhs (g));
3605 insert_before (g);
3606 r1 = gimple_assign_lhs (g);
3608 if (lhs_type != type)
3609 r1 = add_cast (type, r1);
3610 ovf = add_cast (lhs_type, ovf);
3611 if (lhs_type != type)
3612 ovf = add_cast (type, ovf);
3613 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3614 m_gsi = gsi_for_stmt (stmt);
3615 gsi_replace (&m_gsi, g, true);
3617 else
3619 unsigned HOST_WIDE_INT nelts = 0;
3620 tree atype = NULL_TREE;
3621 if (obj)
3623 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3624 if (orig_obj == NULL_TREE)
3625 nelts >>= 1;
3626 atype = build_array_type_nelts (m_limb_type, nelts);
3628 if (var && obj)
3630 tree v1, v2;
3631 tree zero;
3632 if (orig_obj == NULL_TREE)
3634 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3635 v1 = build2 (MEM_REF, atype,
3636 build_fold_addr_expr (unshare_expr (obj)), zero);
3638 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3639 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3640 else
3641 v1 = unshare_expr (obj);
3642 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3643 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3644 g = gimple_build_assign (v1, v2);
3645 insert_before (g);
3647 if (orig_obj == NULL_TREE && obj)
3649 ovf = add_cast (m_limb_type, ovf);
3650 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3651 g = gimple_build_assign (l, ovf);
3652 insert_before (g);
3653 if (nelts > 1)
3655 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3656 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3657 (nelts + 1) * m_limb_size);
3658 tree v1 = build2 (MEM_REF, atype,
3659 build_fold_addr_expr (unshare_expr (obj)),
3660 off);
3661 g = gimple_build_assign (v1, build_zero_cst (atype));
3662 insert_before (g);
3665 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3667 imm_use_iterator ui;
3668 use_operand_p use_p;
3669 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3671 g = USE_STMT (use_p);
3672 if (!is_gimple_assign (g)
3673 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3674 continue;
3675 tree lhs2 = gimple_assign_lhs (g);
3676 gimple *use_stmt;
3677 single_imm_use (lhs2, &use_p, &use_stmt);
3678 lhs2 = gimple_assign_lhs (use_stmt);
3679 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3680 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3681 g = gimple_build_assign (lhs2, ovf);
3682 else
3683 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3684 gsi_replace (&gsi, g, true);
3685 if (gsi_stmt (m_gsi) == use_stmt)
3686 m_gsi = gsi_for_stmt (g);
3687 break;
3690 else if (ovf != boolean_false_node)
3692 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3693 NULL_TREE, NULL_TREE);
3694 edge edge_true, edge_false;
3695 if_then (g, profile_probability::very_unlikely (),
3696 edge_true, edge_false);
3697 tree zero = build_zero_cst (TREE_TYPE (lhs));
3698 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3699 TREE_TYPE (lhs),
3700 zero, zero, NULL);
3701 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3702 true, GSI_SAME_STMT);
3703 m_gsi = gsi_after_labels (edge_true->dest);
3706 if (var)
3708 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3709 g = gimple_build_assign (var, clobber);
3710 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
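/* E.g., assuming 64-bit limbs, when the result of .ADD_OVERFLOW on
   _BitInt(256) is written into OBJ with ORIG_OBJ == NULL_TREE, OBJ
   covers twice the limbs of the result type and NELTS above is 4:
   limbs [0, 4) hold the REALPART_EXPR value, limb 4 holds the
   overflow flag and limbs [5, 8) are cleared, so a later
   IMAGPART_EXPR read sees a zero or one _BitInt(256) value.  */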
3714 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3715 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3716 argument 1 precision PREC1 and minimum precision for the result
3717 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3719 static tree
3720 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3721 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3723 *start = 0;
3724 *end = 0;
3725 *check_zero = true;
3726 /* Ignore this special rule for subtraction: even if both
3727 prec0 >= 0 and prec1 >= 0, their difference can be negative
3728 in infinite precision. */
3729 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3731 /* The result in bits [0, prec2) is unsigned; if prec > prec2,
3732 all bits above it will be zero. */
3733 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3734 return boolean_false_node;
3735 else
3737 /* ovf if any of bits in [start, end) is non-zero. */
3738 *start = prec - !TYPE_UNSIGNED (type);
3739 *end = prec2;
3742 else if (TYPE_UNSIGNED (type))
3744 /* The result in bits [0, prec2) is signed; if prec > prec2,
3745 all bits above it will be sign bit copies. */
3746 if (prec >= prec2)
3748 /* ovf if bit prec - 1 is non-zero. */
3749 *start = prec - 1;
3750 *end = prec;
3752 else
3754 /* ovf if any of bits in [start, end) is non-zero. */
3755 *start = prec;
3756 *end = prec2;
3759 else if (prec >= prec2)
3760 return boolean_false_node;
3761 else
3763 /* ovf if [start, end) bits aren't all zeros or all ones. */
3764 *start = prec - 1;
3765 *end = prec2;
3766 *check_zero = false;
3768 return NULL_TREE;
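/* Worked example: for .ADD_OVERFLOW on two unsigned _BitInt(192)
   arguments with a signed _BitInt(192) result, PREC == 192,
   PREC0 == PREC1 == 192 and the caller computes PREC2 == 193.
   prec - !TYPE_UNSIGNED (type) == 191 < 193, so NULL_TREE is
   returned with *START == 191, *END == 193 and *CHECK_ZERO true:
   there is overflow iff any of bits [191, 193) of the infinite
   precision result is non-zero.  */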
3771 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3772 argument or return type _Complex large/huge _BitInt. */
3774 void
3775 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3777 tree arg0 = gimple_call_arg (stmt, 0);
3778 tree arg1 = gimple_call_arg (stmt, 1);
3779 tree lhs = gimple_call_lhs (stmt);
3780 gimple *g;
3782 if (!lhs)
3784 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3785 gsi_remove (&gsi, true);
3786 return;
3788 gimple *final_stmt = gsi_stmt (m_gsi);
3789 tree type = TREE_TYPE (lhs);
3790 if (TREE_CODE (type) == COMPLEX_TYPE)
3791 type = TREE_TYPE (type);
3792 int prec = TYPE_PRECISION (type);
3793 int prec0 = range_to_prec (arg0, stmt);
3794 int prec1 = range_to_prec (arg1, stmt);
3795 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3796 the minimum unsigned precision of any possible operation's
3797 result, otherwise it is the minimum signed precision.
3798 Some examples:
3799 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3800 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3801 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3802 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3803 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3804 8 + 8 [0, 0x1fe] 9 UNSIGNED
3805 8 + 10 [0, 0x4fe] 11 UNSIGNED
3806 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3807 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3808 8 + -8 [-0x80, 0x17e] 10 SIGNED
3809 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3810 10 + -8 [-0x80, 0x47e] 12 SIGNED
3811 8 - 8 [-0xff, 0xff] 9 SIGNED
3812 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3813 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3814 -8 - -8 [-0xff, 0xff] 9 SIGNED
3815 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3816 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3817 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3818 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3819 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3820 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3821 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3822 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3823 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3824 prec1 < 0 ? -prec1 : prec1);
3825 /* If operands are either both signed or both unsigned,
3826 we need just one additional bit. */
3827 prec2 = (((prec0 < 0) == (prec1 < 0)
3828 /* If one operand is signed and one unsigned and
3829 the signed one has larger precision, we need
3830 just one extra bit, otherwise two. */
3831 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3832 : (prec2 == -prec1 && prec2 != prec0)))
3833 ? prec2 + 1 : prec2 + 2);
3834 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3835 prec1 < 0 ? -prec1 : prec1);
3836 prec3 = MAX (prec3, prec);
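/* E.g. for PREC0 == 8 and PREC1 == -10 the MAX gives 10; the signed
   operand has the larger precision, so a single extra bit suffices
   and PREC2 becomes 11.  For PREC0 == 8 and PREC1 == -8 the MAX
   gives 8, two extra bits are needed and PREC2 becomes 10, matching
   the table above.  */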
3837 tree var = NULL_TREE;
3838 tree orig_obj = obj;
3839 if (obj == NULL_TREE
3840 && TREE_CODE (type) == BITINT_TYPE
3841 && bitint_precision_kind (type) >= bitint_prec_large
3842 && m_names
3843 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3845 int part = var_to_partition (m_map, lhs);
3846 gcc_assert (m_vars[part] != NULL_TREE);
3847 obj = m_vars[part];
3848 if (TREE_TYPE (lhs) == type)
3849 orig_obj = obj;
3851 if (TREE_CODE (type) != BITINT_TYPE
3852 || bitint_precision_kind (type) < bitint_prec_large)
3854 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3855 tree atype = build_array_type_nelts (m_limb_type, nelts);
3856 var = create_tmp_var (atype);
3859 enum tree_code code;
3860 switch (gimple_call_internal_fn (stmt))
3862 case IFN_ADD_OVERFLOW:
3863 case IFN_UBSAN_CHECK_ADD:
3864 code = PLUS_EXPR;
3865 break;
3866 case IFN_SUB_OVERFLOW:
3867 case IFN_UBSAN_CHECK_SUB:
3868 code = MINUS_EXPR;
3869 break;
3870 default:
3871 gcc_unreachable ();
3873 unsigned start, end;
3874 bool check_zero;
3875 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3876 &start, &end, &check_zero);
3878 unsigned startlimb, endlimb;
3879 if (ovf)
3881 startlimb = ~0U;
3882 endlimb = ~0U;
3884 else
3886 startlimb = start / limb_prec;
3887 endlimb = (end - 1) / limb_prec;
3890 int prec4 = ovf != NULL_TREE ? prec : prec3;
3891 bitint_prec_kind kind = bitint_precision_kind (prec4);
3892 unsigned cnt, rem = 0, fin = 0;
3893 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3894 bool last_ovf = (ovf == NULL_TREE
3895 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3896 if (kind != bitint_prec_huge)
3897 cnt = CEIL (prec4, limb_prec) + last_ovf;
3898 else
3900 rem = (prec4 % (2 * limb_prec));
3901 fin = (prec4 - rem) / limb_prec;
3902 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3903 idx = idx_first = create_loop (size_zero_node, &idx_next);
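/* E.g., assuming 64-bit limbs, for PREC4 == 320 this computes
   REM == 64, FIN == 4 and CNT == 3 + LAST_OVF: a loop processes
   limbs [0, 4) two limbs per iteration, the remaining iterations
   handle limb 4 and, if LAST_OVF, one more limb of sign or zero
   extension as straight line code.  */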
3906 if (kind == bitint_prec_huge)
3907 m_upwards_2limb = fin;
3908 m_upwards = true;
3910 tree type0 = TREE_TYPE (arg0);
3911 tree type1 = TREE_TYPE (arg1);
3912 int prec5 = prec3;
3913 if (bitint_precision_kind (prec5) < bitint_prec_large)
3914 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3915 if (TYPE_PRECISION (type0) < prec5)
3917 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3918 if (TREE_CODE (arg0) == INTEGER_CST)
3919 arg0 = fold_convert (type0, arg0);
3921 if (TYPE_PRECISION (type1) < prec5)
3923 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3924 if (TREE_CODE (arg1) == INTEGER_CST)
3925 arg1 = fold_convert (type1, arg1);
3927 unsigned int data_cnt = 0;
3928 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3929 tree cmp = build_zero_cst (m_limb_type);
3930 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3931 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3932 for (unsigned i = 0; i < cnt; i++)
3934 m_data_cnt = 0;
3935 tree rhs1, rhs2;
3936 if (kind != bitint_prec_huge)
3937 idx = size_int (i);
3938 else if (i >= 2)
3939 idx = size_int (fin + (i > 2));
3940 if (!last_ovf || i < cnt - 1)
3942 if (type0 != TREE_TYPE (arg0))
3943 rhs1 = handle_cast (type0, arg0, idx);
3944 else
3945 rhs1 = handle_operand (arg0, idx);
3946 if (type1 != TREE_TYPE (arg1))
3947 rhs2 = handle_cast (type1, arg1, idx);
3948 else
3949 rhs2 = handle_operand (arg1, idx);
3950 if (i == 0)
3951 data_cnt = m_data_cnt;
3952 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3953 rhs1 = add_cast (m_limb_type, rhs1);
3954 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3955 rhs2 = add_cast (m_limb_type, rhs2);
3956 last_rhs1 = rhs1;
3957 last_rhs2 = rhs2;
3959 else
3961 m_data_cnt = data_cnt;
3962 if (TYPE_UNSIGNED (type0))
3963 rhs1 = build_zero_cst (m_limb_type);
3964 else
3966 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3967 if (TREE_CODE (rhs1) == INTEGER_CST)
3968 rhs1 = build_int_cst (m_limb_type,
3969 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3970 else
3972 tree lpm1 = build_int_cst (unsigned_type_node,
3973 limb_prec - 1);
3974 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
3975 RSHIFT_EXPR, rhs1, lpm1);
3976 insert_before (g);
3977 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
3980 if (TYPE_UNSIGNED (type1))
3981 rhs2 = build_zero_cst (m_limb_type);
3982 else
3984 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
3985 if (TREE_CODE (rhs2) == INTEGER_CST)
3986 rhs2 = build_int_cst (m_limb_type,
3987 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
3988 else
3990 tree lpm1 = build_int_cst (unsigned_type_node,
3991 limb_prec - 1);
3992 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
3993 RSHIFT_EXPR, rhs2, lpm1);
3994 insert_before (g);
3995 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
3999 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4000 if (ovf != boolean_false_node)
4002 if (tree_fits_uhwi_p (idx))
4004 unsigned limb = tree_to_uhwi (idx);
4005 if (limb >= startlimb && limb <= endlimb)
4007 tree l = arith_overflow_extract_bits (start, end, rhs,
4008 limb, check_zero);
4009 tree this_ovf = make_ssa_name (boolean_type_node);
4010 if (ovf == NULL_TREE && !check_zero)
4012 cmp = l;
4013 g = gimple_build_assign (make_ssa_name (m_limb_type),
4014 PLUS_EXPR, l,
4015 build_int_cst (m_limb_type, 1));
4016 insert_before (g);
4017 g = gimple_build_assign (this_ovf, GT_EXPR,
4018 gimple_assign_lhs (g),
4019 build_int_cst (m_limb_type, 1));
4021 else
4022 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4023 insert_before (g);
4024 if (ovf == NULL_TREE)
4025 ovf = this_ovf;
4026 else
4028 tree b = make_ssa_name (boolean_type_node);
4029 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4030 insert_before (g);
4031 ovf = b;
4035 else if (startlimb < fin)
4037 if (m_first && startlimb + 2 < fin)
4039 tree data_out;
4040 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4041 ovf_out = m_data.pop ();
4042 m_data.pop ();
4043 if (!check_zero)
4045 cmp = prepare_data_in_out (cmp, idx, &data_out);
4046 cmp_out = m_data.pop ();
4047 m_data.pop ();
4050 if (i != 0 || startlimb != fin - 1)
4052 tree_code cmp_code;
4053 bool single_comparison
4054 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4055 if (!single_comparison)
4057 cmp_code = GE_EXPR;
4058 if (!check_zero && (start % limb_prec) == 0)
4059 single_comparison = true;
4061 else if ((startlimb & 1) == (i & 1))
4062 cmp_code = EQ_EXPR;
4063 else
4064 cmp_code = GT_EXPR;
4065 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4066 NULL_TREE, NULL_TREE);
4067 edge edge_true_true, edge_true_false, edge_false;
4068 gimple *g2 = NULL;
4069 if (!single_comparison)
4070 g2 = gimple_build_cond (NE_EXPR, idx,
4071 size_int (startlimb), NULL_TREE,
4072 NULL_TREE);
4073 if_then_if_then_else (g, g2, profile_probability::likely (),
4074 profile_probability::likely (),
4075 edge_true_true, edge_true_false,
4076 edge_false);
4077 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4078 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4079 check_zero);
4080 tree this_ovf = make_ssa_name (boolean_type_node);
4081 if (cmp_code != GT_EXPR && !check_zero)
4083 g = gimple_build_assign (make_ssa_name (m_limb_type),
4084 PLUS_EXPR, l,
4085 build_int_cst (m_limb_type, 1));
4086 insert_before (g);
4087 g = gimple_build_assign (this_ovf, GT_EXPR,
4088 gimple_assign_lhs (g),
4089 build_int_cst (m_limb_type, 1));
4091 else
4092 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4093 insert_before (g);
4094 if (cmp_code == GT_EXPR)
4096 tree t = make_ssa_name (boolean_type_node);
4097 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4098 insert_before (g);
4099 this_ovf = t;
4101 tree this_ovf2 = NULL_TREE;
4102 if (!single_comparison)
4104 m_gsi = gsi_after_labels (edge_true_true->src);
4105 tree t = make_ssa_name (boolean_type_node);
4106 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4107 insert_before (g);
4108 this_ovf2 = make_ssa_name (boolean_type_node);
4109 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4110 ovf, t);
4111 insert_before (g);
4113 m_gsi = gsi_after_labels (edge_true_false->dest);
4114 tree t;
4115 if (i == 1 && ovf_out)
4116 t = ovf_out;
4117 else
4118 t = make_ssa_name (boolean_type_node);
4119 gphi *phi = create_phi_node (t, edge_true_false->dest);
4120 add_phi_arg (phi, this_ovf, edge_true_false,
4121 UNKNOWN_LOCATION);
4122 add_phi_arg (phi, ovf ? ovf
4123 : boolean_false_node, edge_false,
4124 UNKNOWN_LOCATION);
4125 if (edge_true_true)
4126 add_phi_arg (phi, this_ovf2, edge_true_true,
4127 UNKNOWN_LOCATION);
4128 ovf = t;
4129 if (!check_zero && cmp_code != GT_EXPR)
4131 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4132 phi = create_phi_node (t, edge_true_false->dest);
4133 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4134 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4135 if (edge_true_true)
4136 add_phi_arg (phi, cmp, edge_true_true,
4137 UNKNOWN_LOCATION);
4138 cmp = t;
4144 if (var || obj)
4146 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4147 ;
4148 else if (!tree_fits_uhwi_p (idx)
4149 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4151 bool single_comparison
4152 = (((unsigned) prec % limb_prec) == 0
4153 || prec_limbs + 1 >= fin
4154 || (prec_limbs & 1) == (i & 1));
4155 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4156 NULL_TREE, NULL_TREE);
4157 gimple *g2 = NULL;
4158 if (!single_comparison)
4159 g2 = gimple_build_cond (LT_EXPR, idx,
4160 size_int (prec_limbs - 1),
4161 NULL_TREE, NULL_TREE);
4162 edge edge_true_true, edge_true_false, edge_false;
4163 if_then_if_then_else (g, g2, profile_probability::likely (),
4164 profile_probability::likely (),
4165 edge_true_true, edge_true_false,
4166 edge_false);
4167 tree l = limb_access (type, var ? var : obj, idx, true);
4168 g = gimple_build_assign (l, rhs);
4169 insert_before (g);
4170 if (!single_comparison)
4172 m_gsi = gsi_after_labels (edge_true_true->src);
4173 l = limb_access (type, var ? var : obj,
4174 size_int (prec_limbs - 1), true);
4175 if (!useless_type_conversion_p (TREE_TYPE (l),
4176 TREE_TYPE (rhs)))
4177 rhs = add_cast (TREE_TYPE (l), rhs);
4178 g = gimple_build_assign (l, rhs);
4179 insert_before (g);
4181 m_gsi = gsi_after_labels (edge_true_false->dest);
4183 else
4185 tree l = limb_access (type, var ? var : obj, idx, true);
4186 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4187 rhs = add_cast (TREE_TYPE (l), rhs);
4188 g = gimple_build_assign (l, rhs);
4189 insert_before (g);
4192 m_first = false;
4193 if (kind == bitint_prec_huge && i <= 1)
4195 if (i == 0)
4197 idx = make_ssa_name (sizetype);
4198 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4199 size_one_node);
4200 insert_before (g);
4202 else
4204 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4205 size_int (2));
4206 insert_before (g);
4207 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4208 NULL_TREE, NULL_TREE);
4209 insert_before (g);
4210 m_gsi = gsi_for_stmt (final_stmt);
4215 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4218 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4219 argument or return type _Complex large/huge _BitInt. */
4221 void
4222 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4224 tree arg0 = gimple_call_arg (stmt, 0);
4225 tree arg1 = gimple_call_arg (stmt, 1);
4226 tree lhs = gimple_call_lhs (stmt);
4227 if (!lhs)
4229 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4230 gsi_remove (&gsi, true);
4231 return;
4233 gimple *final_stmt = gsi_stmt (m_gsi);
4234 tree type = TREE_TYPE (lhs);
4235 if (TREE_CODE (type) == COMPLEX_TYPE)
4236 type = TREE_TYPE (type);
4237 int prec = TYPE_PRECISION (type), prec0, prec1;
4238 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4239 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4240 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4241 + (prec1 < 0 ? -prec1 : prec1));
4242 if (prec0 == 1 || prec1 == 1)
4243 --prec2;
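/* E.g. for PREC0 == 8 and PREC1 == 10 (arguments in [0, 0xff] and
   [0, 0x3ff]) the product is at most 0xff * 0x3ff == 0x3fb01, so
   PREC2 == 18 bits suffice.  If one argument has precision 1, i.e.
   is 0 or 1, the product is no wider than the other argument, hence
   the --prec2 above.  */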
4244 tree var = NULL_TREE;
4245 tree orig_obj = obj;
4246 bool force_var = false;
4247 if (obj == NULL_TREE
4248 && TREE_CODE (type) == BITINT_TYPE
4249 && bitint_precision_kind (type) >= bitint_prec_large
4250 && m_names
4251 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4253 int part = var_to_partition (m_map, lhs);
4254 gcc_assert (m_vars[part] != NULL_TREE);
4255 obj = m_vars[part];
4256 if (TREE_TYPE (lhs) == type)
4257 orig_obj = obj;
4259 else if (obj != NULL_TREE && DECL_P (obj))
4261 for (int i = 0; i < 2; ++i)
4263 tree arg = i ? arg1 : arg0;
4264 if (TREE_CODE (arg) == ADDR_EXPR)
4265 arg = TREE_OPERAND (arg, 0);
4266 if (get_base_address (arg) == obj)
4268 force_var = true;
4269 break;
4273 if (obj == NULL_TREE
4274 || force_var
4275 || TREE_CODE (type) != BITINT_TYPE
4276 || bitint_precision_kind (type) < bitint_prec_large
4277 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4279 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4280 tree atype = build_array_type_nelts (m_limb_type, nelts);
4281 var = create_tmp_var (atype);
4283 tree addr = build_fold_addr_expr (var ? var : obj);
4284 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4285 NULL_TREE, true, GSI_SAME_STMT);
4286 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4287 gimple *g
4288 = gimple_build_call_internal (IFN_MULBITINT, 6,
4289 addr, build_int_cst (sitype,
4290 MAX (prec2, prec)),
4291 arg0, build_int_cst (sitype, prec0),
4292 arg1, build_int_cst (sitype, prec1));
4293 insert_before (g);
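/* The multiplication itself is done out of line: the IFN_MULBITINT
   call is expanded as a call to the libgcc routine __mulbitint3,
   which multiplies the PREC0- and PREC1-bit operands ARG0 and ARG1
   point to and stores the low MAX (prec2, prec) bits of the
   infinite precision product into the limb array at ADDR.  */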
4295 unsigned start, end;
4296 bool check_zero;
4297 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4298 &start, &end, &check_zero);
4299 if (ovf == NULL_TREE)
4301 unsigned startlimb = start / limb_prec;
4302 unsigned endlimb = (end - 1) / limb_prec;
4303 unsigned cnt;
4304 bool use_loop = false;
4305 if (startlimb == endlimb)
4306 cnt = 1;
4307 else if (startlimb + 1 == endlimb)
4308 cnt = 2;
4309 else if ((end % limb_prec) == 0)
4311 cnt = 2;
4312 use_loop = true;
4314 else
4316 cnt = 3;
4317 use_loop = startlimb + 2 < endlimb;
4319 if (cnt == 1)
4321 tree l = limb_access (NULL_TREE, var ? var : obj,
4322 size_int (startlimb), true);
4323 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4324 insert_before (g);
4325 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4326 startlimb, check_zero);
4327 ovf = make_ssa_name (boolean_type_node);
4328 if (check_zero)
4329 g = gimple_build_assign (ovf, NE_EXPR, l,
4330 build_zero_cst (m_limb_type));
4331 else
4333 g = gimple_build_assign (make_ssa_name (m_limb_type),
4334 PLUS_EXPR, l,
4335 build_int_cst (m_limb_type, 1));
4336 insert_before (g);
4337 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4338 build_int_cst (m_limb_type, 1));
4340 insert_before (g);
4342 else
4344 basic_block edge_bb = NULL;
4345 gimple_stmt_iterator gsi = m_gsi;
4346 gsi_prev (&gsi);
4347 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4348 edge_bb = e->src;
4349 m_gsi = gsi_end_bb (edge_bb);
4351 tree cmp = build_zero_cst (m_limb_type);
4352 for (unsigned i = 0; i < cnt; i++)
4354 tree idx, idx_next = NULL_TREE;
4355 if (i == 0)
4356 idx = size_int (startlimb);
4357 else if (i == 2)
4358 idx = size_int (endlimb);
4359 else if (use_loop)
4360 idx = create_loop (size_int (startlimb + 1), &idx_next);
4361 else
4362 idx = size_int (startlimb + 1);
4363 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4364 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4365 insert_before (g);
4366 l = gimple_assign_lhs (g);
4367 if (i == 0 || i == 2)
4368 l = arith_overflow_extract_bits (start, end, l,
4369 tree_to_uhwi (idx),
4370 check_zero);
4371 if (i == 0 && !check_zero)
4373 cmp = l;
4374 g = gimple_build_assign (make_ssa_name (m_limb_type),
4375 PLUS_EXPR, l,
4376 build_int_cst (m_limb_type, 1));
4377 insert_before (g);
4378 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4379 build_int_cst (m_limb_type, 1),
4380 NULL_TREE, NULL_TREE);
4382 else
4383 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4384 insert_before (g);
4385 edge e1 = split_block (gsi_bb (m_gsi), g);
4386 e1->flags = EDGE_FALSE_VALUE;
4387 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4388 EDGE_TRUE_VALUE);
4389 e1->probability = profile_probability::likely ();
4390 e2->probability = e1->probability.invert ();
4391 if (i == 0)
4392 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4393 m_gsi = gsi_after_labels (e1->dest);
4394 if (i == 1 && use_loop)
4396 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4397 size_one_node);
4398 insert_before (g);
4399 g = gimple_build_cond (NE_EXPR, idx_next,
4400 size_int (endlimb + (cnt == 2)),
4401 NULL_TREE, NULL_TREE);
4402 insert_before (g);
4403 edge true_edge, false_edge;
4404 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4405 &true_edge,
4406 &false_edge);
4407 m_gsi = gsi_after_labels (false_edge->dest);
4411 ovf = make_ssa_name (boolean_type_node);
4412 basic_block bb = gimple_bb (final_stmt);
4413 gphi *phi = create_phi_node (ovf, bb);
4414 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4415 edge_iterator ei;
4416 FOR_EACH_EDGE (e, ei, bb->preds)
4418 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4419 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4421 m_gsi = gsi_for_stmt (final_stmt);
4425 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4428 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4429 .{ADD,SUB,MUL}_OVERFLOW call. */
4431 void
4432 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4434 tree rhs1 = gimple_assign_rhs1 (stmt);
4435 rhs1 = TREE_OPERAND (rhs1, 0);
4436 if (obj == NULL_TREE)
4438 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4439 gcc_assert (m_vars[part] != NULL_TREE);
4440 obj = m_vars[part];
4442 if (TREE_CODE (rhs1) == SSA_NAME
4443 && (m_names == NULL
4444 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4446 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4447 return;
4449 int part = var_to_partition (m_map, rhs1);
4450 gcc_assert (m_vars[part] != NULL_TREE);
4451 tree var = m_vars[part];
4452 unsigned HOST_WIDE_INT nelts
4453 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4454 tree atype = build_array_type_nelts (m_limb_type, nelts);
4455 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4456 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4457 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4458 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4459 ? 0 : nelts * m_limb_size);
4460 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4461 gimple *g = gimple_build_assign (obj, v2);
4462 insert_before (g);
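/* E.g., assuming 64-bit limbs, for a _Complex unsigned _BitInt(256)
   .ADD_OVERFLOW result lowered into VAR, REALPART_EXPR copies limbs
   [0, 4) of VAR into OBJ and IMAGPART_EXPR copies limbs [4, 8),
   i.e. the overflow flag limb plus the zero padding stored by
   finish_arith_overflow.  */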
4465 /* Lower COMPLEX_EXPR stmt. */
4467 void
4468 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4470 tree lhs = gimple_assign_lhs (stmt);
4471 tree rhs1 = gimple_assign_rhs1 (stmt);
4472 tree rhs2 = gimple_assign_rhs2 (stmt);
4473 int part = var_to_partition (m_map, lhs);
4474 gcc_assert (m_vars[part] != NULL_TREE);
4475 lhs = m_vars[part];
4476 unsigned HOST_WIDE_INT nelts
4477 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4478 tree atype = build_array_type_nelts (m_limb_type, nelts);
4479 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4480 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4481 tree v2;
4482 if (TREE_CODE (rhs1) == SSA_NAME)
4484 part = var_to_partition (m_map, rhs1);
4485 gcc_assert (m_vars[part] != NULL_TREE);
4486 v2 = m_vars[part];
4488 else if (integer_zerop (rhs1))
4489 v2 = build_zero_cst (atype);
4490 else
4491 v2 = tree_output_constant_def (rhs1);
4492 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4493 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4494 gimple *g = gimple_build_assign (v1, v2);
4495 insert_before (g);
4496 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4497 TYPE_SIZE_UNIT (atype));
4498 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4499 if (TREE_CODE (rhs2) == SSA_NAME)
4501 part = var_to_partition (m_map, rhs2);
4502 gcc_assert (m_vars[part] != NULL_TREE);
4503 v2 = m_vars[part];
4505 else if (integer_zerop (rhs2))
4506 v2 = build_zero_cst (atype);
4507 else
4508 v2 = tree_output_constant_def (rhs2);
4509 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4510 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4511 g = gimple_build_assign (v1, v2);
4512 insert_before (g);
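/* E.g., assuming 64-bit limbs, for a _Complex unsigned _BitInt(256)
   lhs the 4 limbs of RHS1 are copied into limbs [0, 4) of the
   underlying variable and the 4 limbs of RHS2 into limbs [4, 8).  */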
4515 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4516 argument. */
4518 void
4519 bitint_large_huge::lower_bit_query (gimple *stmt)
4521 tree arg0 = gimple_call_arg (stmt, 0);
4522 tree arg1 = (gimple_call_num_args (stmt) == 2
4523 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4524 tree lhs = gimple_call_lhs (stmt);
4525 gimple *g;
4527 if (!lhs)
4529 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4530 gsi_remove (&gsi, true);
4531 return;
4533 tree type = TREE_TYPE (arg0);
4534 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4535 bitint_prec_kind kind = bitint_precision_kind (type);
4536 gcc_assert (kind >= bitint_prec_large);
4537 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4538 enum built_in_function fcode = END_BUILTINS;
4539 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4540 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4541 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4542 switch (ifn)
4544 case IFN_CLZ:
4545 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4546 fcode = BUILT_IN_CLZ;
4547 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4548 fcode = BUILT_IN_CLZL;
4549 else
4550 fcode = BUILT_IN_CLZLL;
4551 break;
4552 case IFN_FFS:
4553 /* .FFS (X) is .CTZ (X, -1) + 1; under the hood the + 1 is folded
4554 into the per-limb addends and into ARG1 (0 == -1 + 1 here). */
4555 arg1 = integer_zero_node;
4556 /* FALLTHRU */
4557 case IFN_CTZ:
4558 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4559 fcode = BUILT_IN_CTZ;
4560 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4561 fcode = BUILT_IN_CTZL;
4562 else
4563 fcode = BUILT_IN_CTZLL;
4564 m_upwards = true;
4565 break;
4566 case IFN_CLRSB:
4567 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4568 fcode = BUILT_IN_CLRSB;
4569 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4570 fcode = BUILT_IN_CLRSBL;
4571 else
4572 fcode = BUILT_IN_CLRSBLL;
4573 break;
4574 case IFN_PARITY:
4575 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4576 fcode = BUILT_IN_PARITY;
4577 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4578 fcode = BUILT_IN_PARITYL;
4579 else
4580 fcode = BUILT_IN_PARITYLL;
4581 m_upwards = true;
4582 break;
4583 case IFN_POPCOUNT:
4584 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4585 fcode = BUILT_IN_POPCOUNT;
4586 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4587 fcode = BUILT_IN_POPCOUNTL;
4588 else
4589 fcode = BUILT_IN_POPCOUNTLL;
4590 m_upwards = true;
4591 break;
4592 default:
4593 gcc_unreachable ();
4595 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
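/* E.g., assuming 64-bit limbs, .POPCOUNT of unsigned _BitInt(256)
   is lowered into a sum of __builtin_popcountll over the 4 limbs,
   while .CLZ searches downward for the first non-zero limb and adds
   __builtin_clzll of it to the number of bits in the limbs skipped
   above it.  */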
4596 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4597 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4598 basic_block edge_bb = NULL;
4599 if (m_upwards)
4601 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4602 if (kind == bitint_prec_large)
4603 cnt = CEIL (prec, limb_prec);
4604 else
4606 rem = (prec % (2 * limb_prec));
4607 end = (prec - rem) / limb_prec;
4608 cnt = 2 + CEIL (rem, limb_prec);
4609 idx = idx_first = create_loop (size_zero_node, &idx_next);
4612 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4614 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4615 gsi_prev (&gsi);
4616 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4617 edge_bb = e->src;
4618 if (kind == bitint_prec_large)
4619 m_gsi = gsi_end_bb (edge_bb);
4620 bqp = XALLOCAVEC (struct bq_details, cnt);
4622 else
4623 m_after_stmt = stmt;
4624 if (kind != bitint_prec_large)
4625 m_upwards_2limb = end;
4627 for (unsigned i = 0; i < cnt; i++)
4629 m_data_cnt = 0;
4630 if (kind == bitint_prec_large)
4631 idx = size_int (i);
4632 else if (i >= 2)
4633 idx = size_int (end + (i > 2));
4635 tree rhs1 = handle_operand (arg0, idx);
4636 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4638 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4639 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4640 rhs1 = add_cast (m_limb_type, rhs1);
4643 tree in, out, tem;
4644 if (ifn == IFN_PARITY)
4645 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4646 else if (ifn == IFN_FFS)
4647 in = prepare_data_in_out (integer_one_node, idx, &out);
4648 else
4649 in = prepare_data_in_out (integer_zero_node, idx, &out);
4651 switch (ifn)
4653 case IFN_CTZ:
4654 case IFN_FFS:
4655 g = gimple_build_cond (NE_EXPR, rhs1,
4656 build_zero_cst (m_limb_type),
4657 NULL_TREE, NULL_TREE);
4658 insert_before (g);
4659 edge e1, e2;
4660 e1 = split_block (gsi_bb (m_gsi), g);
4661 e1->flags = EDGE_FALSE_VALUE;
4662 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4663 e1->probability = profile_probability::unlikely ();
4664 e2->probability = e1->probability.invert ();
4665 if (i == 0)
4666 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4667 m_gsi = gsi_after_labels (e1->dest);
4668 bqp[i].e = e2;
4669 bqp[i].val = rhs1;
4670 if (tree_fits_uhwi_p (idx))
4671 bqp[i].addend
4672 = build_int_cst (integer_type_node,
4673 tree_to_uhwi (idx) * limb_prec
4674 + (ifn == IFN_FFS));
4675 else
4677 bqp[i].addend = in;
4678 if (i == 1)
4679 res = out;
4680 else
4681 res = make_ssa_name (integer_type_node);
4682 g = gimple_build_assign (res, PLUS_EXPR, in,
4683 build_int_cst (integer_type_node,
4684 limb_prec));
4685 insert_before (g);
4686 m_data[m_data_cnt] = res;
4688 break;
4689 case IFN_PARITY:
4690 if (!integer_zerop (in))
4692 if (kind == bitint_prec_huge && i == 1)
4693 res = out;
4694 else
4695 res = make_ssa_name (m_limb_type);
4696 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4697 insert_before (g);
4699 else
4700 res = rhs1;
4701 m_data[m_data_cnt] = res;
4702 break;
4703 case IFN_POPCOUNT:
4704 g = gimple_build_call (fndecl, 1, rhs1);
4705 tem = make_ssa_name (integer_type_node);
4706 gimple_call_set_lhs (g, tem);
4707 insert_before (g);
4708 if (!integer_zerop (in))
4710 if (kind == bitint_prec_huge && i == 1)
4711 res = out;
4712 else
4713 res = make_ssa_name (integer_type_node);
4714 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4715 insert_before (g);
4717 else
4718 res = tem;
4719 m_data[m_data_cnt] = res;
4720 break;
4721 default:
4722 gcc_unreachable ();
4725 m_first = false;
4726 if (kind == bitint_prec_huge && i <= 1)
4728 if (i == 0)
4730 idx = make_ssa_name (sizetype);
4731 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4732 size_one_node);
4733 insert_before (g);
4735 else
4737 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4738 size_int (2));
4739 insert_before (g);
4740 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4741 NULL_TREE, NULL_TREE);
4742 insert_before (g);
4743 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4744 m_gsi = gsi_after_labels (edge_bb);
4745 else
4746 m_gsi = gsi_for_stmt (stmt);
4751 else
4753 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4754 int sub_one = 0;
4755 if (kind == bitint_prec_large)
4756 cnt = CEIL (prec, limb_prec);
4757 else
4759 rem = prec % limb_prec;
4760 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4761 rem = limb_prec;
4762 end = (prec - rem) / limb_prec;
4763 cnt = 1 + (rem != 0);
4764 if (ifn == IFN_CLRSB)
4765 sub_one = 1;
4768 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4769 gsi_prev (&gsi);
4770 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4771 edge_bb = e->src;
4772 m_gsi = gsi_end_bb (edge_bb);
4774 if (ifn == IFN_CLZ)
4775 bqp = XALLOCAVEC (struct bq_details, cnt);
4776 else
4778 gsi = gsi_for_stmt (stmt);
4779 gsi_prev (&gsi);
4780 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4781 edge_bb = e->src;
4782 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4785 for (unsigned i = 0; i < cnt; i++)
4787 m_data_cnt = 0;
4788 if (kind == bitint_prec_large)
4789 idx = size_int (cnt - i - 1);
4790 else if (i == cnt - 1)
4791 idx = create_loop (size_int (end - 1), &idx_next);
4792 else
4793 idx = size_int (end);
4795 tree rhs1 = handle_operand (arg0, idx);
4796 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4798 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4799 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4800 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4801 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4802 rhs1 = add_cast (m_limb_type, rhs1);
4805 if (ifn == IFN_CLZ)
4807 g = gimple_build_cond (NE_EXPR, rhs1,
4808 build_zero_cst (m_limb_type),
4809 NULL_TREE, NULL_TREE);
4810 insert_before (g);
4811 edge e1 = split_block (gsi_bb (m_gsi), g);
4812 e1->flags = EDGE_FALSE_VALUE;
4813 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4814 e1->probability = profile_probability::unlikely ();
4815 e2->probability = e1->probability.invert ();
4816 if (i == 0)
4817 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4818 m_gsi = gsi_after_labels (e1->dest);
4819 bqp[i].e = e2;
4820 bqp[i].val = rhs1;
4822 else
4824 if (i == 0)
4826 first = rhs1;
4827 g = gimple_build_assign (make_ssa_name (m_limb_type),
4828 PLUS_EXPR, rhs1,
4829 build_int_cst (m_limb_type, 1));
4830 insert_before (g);
4831 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4832 build_int_cst (m_limb_type, 1),
4833 NULL_TREE, NULL_TREE);
4834 insert_before (g);
4836 else
4838 g = gimple_build_assign (make_ssa_name (m_limb_type),
4839 BIT_XOR_EXPR, rhs1, first);
4840 insert_before (g);
4841 tree stype = signed_type_for (m_limb_type);
4842 g = gimple_build_cond (LT_EXPR,
4843 add_cast (stype,
4844 gimple_assign_lhs (g)),
4845 build_zero_cst (stype),
4846 NULL_TREE, NULL_TREE);
4847 insert_before (g);
4848 edge e1 = split_block (gsi_bb (m_gsi), g);
4849 e1->flags = EDGE_FALSE_VALUE;
4850 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4851 EDGE_TRUE_VALUE);
4852 e1->probability = profile_probability::unlikely ();
4853 e2->probability = e1->probability.invert ();
4854 if (i == 1)
4855 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4856 e2->src);
4857 m_gsi = gsi_after_labels (e1->dest);
4858 bqp[2 * i].e = e2;
4859 g = gimple_build_cond (NE_EXPR, rhs1, first,
4860 NULL_TREE, NULL_TREE);
4861 insert_before (g);
4863 edge e1 = split_block (gsi_bb (m_gsi), g);
4864 e1->flags = EDGE_FALSE_VALUE;
4865 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4866 e1->probability = profile_probability::unlikely ();
4867 e2->probability = e1->probability.invert ();
4868 if (i == 0)
4869 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4870 m_gsi = gsi_after_labels (e1->dest);
4871 bqp[2 * i + 1].e = e2;
4872 bqp[i].val = rhs1;
4874 if (tree_fits_uhwi_p (idx))
4875 bqp[i].addend
4876 = build_int_cst (integer_type_node,
4877 (int) prec
4878 - (((int) tree_to_uhwi (idx) + 1)
4879 * limb_prec) - sub_one);
4880 else
4882 tree in, out;
4883 in = build_int_cst (integer_type_node, rem - sub_one);
4884 m_first = true;
4885 in = prepare_data_in_out (in, idx, &out);
4886 out = m_data[m_data_cnt + 1];
4887 bqp[i].addend = in;
4888 g = gimple_build_assign (out, PLUS_EXPR, in,
4889 build_int_cst (integer_type_node,
4890 limb_prec));
4891 insert_before (g);
4892 m_data[m_data_cnt] = out;
4895 m_first = false;
4896 if (kind == bitint_prec_huge && i == cnt - 1)
4898 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4899 size_int (-1));
4900 insert_before (g);
4901 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4902 NULL_TREE, NULL_TREE);
4903 insert_before (g);
4904 edge true_edge, false_edge;
4905 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4906 &true_edge, &false_edge);
4907 m_gsi = gsi_after_labels (false_edge->dest);
4911 switch (ifn)
4913 case IFN_CLZ:
4914 case IFN_CTZ:
4915 case IFN_FFS:
4916 gphi *phi1, *phi2, *phi3;
4917 basic_block bb;
4918 bb = gsi_bb (m_gsi);
4919 remove_edge (find_edge (bb, gimple_bb (stmt)));
4920 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4921 gimple_bb (stmt));
4922 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4923 gimple_bb (stmt));
4924 for (unsigned i = 0; i < cnt; i++)
4926 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4927 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4929 if (arg1 == NULL_TREE)
4931 g = gimple_build_builtin_unreachable (m_loc);
4932 insert_before (g);
4934 m_gsi = gsi_for_stmt (stmt);
4935 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4936 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4937 insert_before (g);
4938 if (arg1 == NULL_TREE)
4939 g = gimple_build_assign (lhs, PLUS_EXPR,
4940 gimple_phi_result (phi2),
4941 gimple_call_lhs (g));
4942 else
4944 g = gimple_build_assign (make_ssa_name (integer_type_node),
4945 PLUS_EXPR, gimple_phi_result (phi2),
4946 gimple_call_lhs (g));
4947 insert_before (g);
4948 edge e1 = split_block (gimple_bb (stmt), g);
4949 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
4950 e2->probability = profile_probability::always ();
4951 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
4952 get_immediate_dominator (CDI_DOMINATORS,
4953 e1->src));
4954 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
4955 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
4956 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
4957 m_gsi = gsi_for_stmt (stmt);
4958 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4960 gsi_replace (&m_gsi, g, true);
4961 break;
4962 case IFN_CLRSB:
4963 bb = gsi_bb (m_gsi);
4964 remove_edge (find_edge (bb, edge_bb));
4965 edge e;
4966 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
4967 e->probability = profile_probability::always ();
4968 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
4969 get_immediate_dominator (CDI_DOMINATORS,
4970 edge_bb));
4971 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4972 edge_bb);
4973 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4974 edge_bb);
4975 phi3 = create_phi_node (make_ssa_name (integer_type_node),
4976 gimple_bb (stmt));
4977 for (unsigned i = 0; i < cnt; i++)
4979 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
4980 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
4981 UNKNOWN_LOCATION);
4982 tree a = bqp[i].addend;
4983 if (i && kind == bitint_prec_large)
4984 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
4985 if (i)
4986 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
4988 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
4989 UNKNOWN_LOCATION);
4990 m_gsi = gsi_after_labels (edge_bb);
4991 g = gimple_build_call (fndecl, 1,
4992 add_cast (signed_type_for (m_limb_type),
4993 gimple_phi_result (phi1)));
4994 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4995 insert_before (g);
4996 g = gimple_build_assign (make_ssa_name (integer_type_node),
4997 PLUS_EXPR, gimple_call_lhs (g),
4998 gimple_phi_result (phi2));
4999 insert_before (g);
5000 if (kind != bitint_prec_large)
5002 g = gimple_build_assign (make_ssa_name (integer_type_node),
5003 PLUS_EXPR, gimple_assign_lhs (g),
5004 integer_one_node);
5005 insert_before (g);
5007 add_phi_arg (phi3, gimple_assign_lhs (g),
5008 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5009 m_gsi = gsi_for_stmt (stmt);
5010 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5011 gsi_replace (&m_gsi, g, true);
5012 break;
5013 case IFN_PARITY:
5014 g = gimple_build_call (fndecl, 1, res);
5015 gimple_call_set_lhs (g, lhs);
5016 gsi_replace (&m_gsi, g, true);
5017 break;
5018 case IFN_POPCOUNT:
5019 g = gimple_build_assign (lhs, res);
5020 gsi_replace (&m_gsi, g, true);
5021 break;
5022 default:
5023 gcc_unreachable ();
5027 /* Lower a call statement with one or more large/huge _BitInt
5028 arguments or large/huge _BitInt return value. */
5030 void
5031 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5033 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5034 unsigned int nargs = gimple_call_num_args (stmt);
5035 if (gimple_call_internal_p (stmt))
5036 switch (gimple_call_internal_fn (stmt))
5038 case IFN_ADD_OVERFLOW:
5039 case IFN_SUB_OVERFLOW:
5040 case IFN_UBSAN_CHECK_ADD:
5041 case IFN_UBSAN_CHECK_SUB:
5042 lower_addsub_overflow (obj, stmt);
5043 return;
5044 case IFN_MUL_OVERFLOW:
5045 case IFN_UBSAN_CHECK_MUL:
5046 lower_mul_overflow (obj, stmt);
5047 return;
5048 case IFN_CLZ:
5049 case IFN_CTZ:
5050 case IFN_CLRSB:
5051 case IFN_FFS:
5052 case IFN_PARITY:
5053 case IFN_POPCOUNT:
5054 lower_bit_query (stmt);
5055 return;
5056 default:
5057 break;
5059 for (unsigned int i = 0; i < nargs; ++i)
5061 tree arg = gimple_call_arg (stmt, i);
5062 if (TREE_CODE (arg) != SSA_NAME
5063 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5064 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5065 continue;
5066 int p = var_to_partition (m_map, arg);
5067 tree v = m_vars[p];
5068 gcc_assert (v != NULL_TREE);
5069 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5070 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5071 arg = make_ssa_name (TREE_TYPE (arg));
5072 gimple *g = gimple_build_assign (arg, v);
5073 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5074 gimple_call_set_arg (stmt, i, arg);
5075 if (m_preserved == NULL)
5076 m_preserved = BITMAP_ALLOC (NULL);
5077 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5079 tree lhs = gimple_call_lhs (stmt);
5080 if (lhs
5081 && TREE_CODE (lhs) == SSA_NAME
5082 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5083 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5085 int p = var_to_partition (m_map, lhs);
5086 tree v = m_vars[p];
5087 gcc_assert (v != NULL_TREE);
5088 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5089 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5090 gimple_call_set_lhs (stmt, v);
5091 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5093 update_stmt (stmt);
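/* E.g. with a hypothetical
     extern void bar (_BitInt(512));
   a call bar (a) gets its argument replaced by a fresh SSA_NAME
   loaded from the underlying limb array variable of a's partition,
   so the call itself still passes a whole _BitInt(512) value per
   the ABI while everything around it is lowered to limbs.  */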
5096 /* Lower __asm STMT which involves large/huge _BitInt values. */
5098 void
5099 bitint_large_huge::lower_asm (gimple *stmt)
5101 gasm *g = as_a <gasm *> (stmt);
5102 unsigned noutputs = gimple_asm_noutputs (g);
5103 unsigned ninputs = gimple_asm_ninputs (g);
5105 for (unsigned i = 0; i < noutputs; ++i)
5107 tree t = gimple_asm_output_op (g, i);
5108 tree s = TREE_VALUE (t);
5109 if (TREE_CODE (s) == SSA_NAME
5110 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5111 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5113 int part = var_to_partition (m_map, s);
5114 gcc_assert (m_vars[part] != NULL_TREE);
5115 TREE_VALUE (t) = m_vars[part];
5118 for (unsigned i = 0; i < ninputs; ++i)
5120 tree t = gimple_asm_input_op (g, i);
5121 tree s = TREE_VALUE (t);
5122 if (TREE_CODE (s) == SSA_NAME
5123 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5124 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5126 int part = var_to_partition (m_map, s);
5127 gcc_assert (m_vars[part] != NULL_TREE);
5128 TREE_VALUE (t) = m_vars[part];
5131 update_stmt (stmt);
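/* E.g. for
     unsigned _BitInt(256) x;
     __asm ("" : "=m" (x));
   the SSA_NAME output operand is replaced by the underlying variable
   of its partition, so the asm refers to the limb array in memory.  */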
5134 /* Lower statement STMT which involves large/huge _BitInt values
5135 into code accessing individual limbs. */
5137 void
5138 bitint_large_huge::lower_stmt (gimple *stmt)
5140 m_first = true;
5141 m_lhs = NULL_TREE;
5142 m_data.truncate (0);
5143 m_data_cnt = 0;
5144 m_gsi = gsi_for_stmt (stmt);
5145 m_after_stmt = NULL;
5146 m_bb = NULL;
5147 m_init_gsi = m_gsi;
5148 gsi_prev (&m_init_gsi);
5149 m_preheader_bb = NULL;
5150 m_upwards_2limb = 0;
5151 m_upwards = false;
5152 m_var_msb = false;
5153 m_cast_conditional = false;
5154 m_bitfld_load = 0;
5155 m_loc = gimple_location (stmt);
5156 if (is_gimple_call (stmt))
5158 lower_call (NULL_TREE, stmt);
5159 return;
5161 if (gimple_code (stmt) == GIMPLE_ASM)
5163 lower_asm (stmt);
5164 return;
5166 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5167 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5168 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5169 bool mergeable_cast_p = false;
5170 bool final_cast_p = false;
5171 if (gimple_assign_cast_p (stmt))
5173 lhs = gimple_assign_lhs (stmt);
5174 tree rhs1 = gimple_assign_rhs1 (stmt);
5175 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5176 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5177 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5178 mergeable_cast_p = true;
5179 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5180 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5181 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5183 final_cast_p = true;
5184 if (TREE_CODE (rhs1) == SSA_NAME
5185 && (m_names == NULL
5186 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5188 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5189 if (is_gimple_assign (g)
5190 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5192 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5193 if (TREE_CODE (rhs2) == SSA_NAME
5194 && (m_names == NULL
5195 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5197 g = SSA_NAME_DEF_STMT (rhs2);
5198 int ovf = optimizable_arith_overflow (g);
5199 if (ovf == 2)
5200 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5201 and IMAGPART_EXPR uses, where the latter is cast to
5202 non-_BitInt, it will be optimized when handling
5203 the REALPART_EXPR. */
5204 return;
5205 if (ovf == 1)
5207 lower_call (NULL_TREE, g);
5208 return;
5215 if (gimple_store_p (stmt))
5217 tree rhs1 = gimple_assign_rhs1 (stmt);
5218 if (TREE_CODE (rhs1) == SSA_NAME
5219 && (m_names == NULL
5220 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5222 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5223 m_loc = gimple_location (g);
5224 lhs = gimple_assign_lhs (stmt);
5225 if (is_gimple_assign (g) && !mergeable_op (g))
5226 switch (gimple_assign_rhs_code (g))
5228 case LSHIFT_EXPR:
5229 case RSHIFT_EXPR:
5230 lower_shift_stmt (lhs, g);
5231 handled:
5232 m_gsi = gsi_for_stmt (stmt);
5233 unlink_stmt_vdef (stmt);
5234 release_ssa_name (gimple_vdef (stmt));
5235 gsi_remove (&m_gsi, true);
5236 return;
5237 case MULT_EXPR:
5238 case TRUNC_DIV_EXPR:
5239 case TRUNC_MOD_EXPR:
5240 lower_muldiv_stmt (lhs, g);
5241 goto handled;
5242 case FIX_TRUNC_EXPR:
5243 lower_float_conv_stmt (lhs, g);
5244 goto handled;
5245 case REALPART_EXPR:
5246 case IMAGPART_EXPR:
5247 lower_cplxpart_stmt (lhs, g);
5248 goto handled;
5249 default:
5250 break;
5252 else if (optimizable_arith_overflow (g) == 3)
5254 lower_call (lhs, g);
5255 goto handled;
5257 m_loc = gimple_location (stmt);
5260 if (mergeable_op (stmt)
5261 || gimple_store_p (stmt)
5262 || gimple_assign_load_p (stmt)
5263 || eq_p
5264 || mergeable_cast_p)
5266 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5267 if (!eq_p)
5268 return;
5270 else if (cmp_code != ERROR_MARK)
5271 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5272 if (cmp_code != ERROR_MARK)
5274 if (gimple_code (stmt) == GIMPLE_COND)
5276 gcond *cstmt = as_a <gcond *> (stmt);
5277 gimple_cond_set_lhs (cstmt, lhs);
5278 gimple_cond_set_rhs (cstmt, boolean_false_node);
5279 gimple_cond_set_code (cstmt, cmp_code);
5280 update_stmt (stmt);
5281 return;
5283 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5285 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5286 boolean_false_node);
5287 gimple_assign_set_rhs1 (stmt, cond);
5288 lhs = gimple_assign_lhs (stmt);
5289 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5290 || (bitint_precision_kind (TREE_TYPE (lhs))
5291 <= bitint_prec_middle));
5292 update_stmt (stmt);
5293 return;
5295 gimple_assign_set_rhs1 (stmt, lhs);
5296 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5297 gimple_assign_set_rhs_code (stmt, cmp_code);
5298 update_stmt (stmt);
5299 return;
5301 if (final_cast_p)
5303 tree lhs_type = TREE_TYPE (lhs);
5304 /* Add support for 3 or more limbs filled in from a normal integral
5305 type if this assert fails. If no target chooses a limb mode smaller
5306 than half of the largest supported normal integral type, this will
5307 not be needed. */
5308 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5309 gimple *g;
5310 if (TREE_CODE (lhs_type) == BITINT_TYPE
5311 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5312 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5313 TYPE_UNSIGNED (lhs_type));
5314 m_data_cnt = 0;
5315 tree rhs1 = gimple_assign_rhs1 (stmt);
5316 tree r1 = handle_operand (rhs1, size_int (0));
5317 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5318 r1 = add_cast (lhs_type, r1);
5319 if (TYPE_PRECISION (lhs_type) > limb_prec)
5321 m_data_cnt = 0;
5322 m_first = false;
5323 tree r2 = handle_operand (rhs1, size_int (1));
5324 r2 = add_cast (lhs_type, r2);
5325 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5326 build_int_cst (unsigned_type_node,
5327 limb_prec));
5328 insert_before (g);
5329 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5330 gimple_assign_lhs (g));
5331 insert_before (g);
5332 r1 = gimple_assign_lhs (g);
5334 if (lhs_type != TREE_TYPE (lhs))
5335 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5336 else
5337 g = gimple_build_assign (lhs, r1);
5338 gsi_replace (&m_gsi, g, true);
5339 return;
5341 if (is_gimple_assign (stmt))
5342 switch (gimple_assign_rhs_code (stmt))
5344 case LSHIFT_EXPR:
5345 case RSHIFT_EXPR:
5346 lower_shift_stmt (NULL_TREE, stmt);
5347 return;
5348 case MULT_EXPR:
5349 case TRUNC_DIV_EXPR:
5350 case TRUNC_MOD_EXPR:
5351 lower_muldiv_stmt (NULL_TREE, stmt);
5352 return;
5353 case FIX_TRUNC_EXPR:
5354 case FLOAT_EXPR:
5355 lower_float_conv_stmt (NULL_TREE, stmt);
5356 return;
5357 case REALPART_EXPR:
5358 case IMAGPART_EXPR:
5359 lower_cplxpart_stmt (NULL_TREE, stmt);
5360 return;
5361 case COMPLEX_EXPR:
5362 lower_complexexpr_stmt (stmt);
5363 return;
5364 default:
5365 break;
5367 gcc_unreachable ();
5370 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5371 the desired memory state. */
5373 void *
5374 vuse_eq (ao_ref *, tree vuse1, void *data)
5376 tree vuse2 = (tree) data;
5377 if (vuse1 == vuse2)
5378 return data;
5380 return NULL;
5383 /* Return true if STMT is lowered into a library function call which
5384 needs to take the addresses of its inputs. We need to avoid
5385 bit-fields in those cases, as they don't have addresses. */
5387 bool
5388 stmt_needs_operand_addr (gimple *stmt)
5390 if (is_gimple_assign (stmt))
5391 switch (gimple_assign_rhs_code (stmt))
5393 case MULT_EXPR:
5394 case TRUNC_DIV_EXPR:
5395 case TRUNC_MOD_EXPR:
5396 case FLOAT_EXPR:
5397 return true;
5398 default:
5399 break;
5401 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5402 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5403 return true;
5404 return false;
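/* E.g. large/huge _BitInt multiplication, division, modulo and
   conversion to floating point are lowered into calls to libgcc
   routines such as __mulbitint3 or __divmodbitint4 which take
   addresses of the operand limb arrays, and a bit-field doesn't
   have one.  */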
5407 /* Dominator walker used to discover which large/huge _BitInt
5408 loads could be sunk into all their uses. */
5410 class bitint_dom_walker : public dom_walker
5412 public:
5413 bitint_dom_walker (bitmap names, bitmap loads)
5414 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5416 edge before_dom_children (basic_block) final override;
5418 private:
5419 bitmap m_names, m_loads;
5422 edge
5423 bitint_dom_walker::before_dom_children (basic_block bb)
5425 gphi *phi = get_virtual_phi (bb);
5426 tree vop;
5427 if (phi)
5428 vop = gimple_phi_result (phi);
5429 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5430 vop = NULL_TREE;
5431 else
5432 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5434 auto_vec<tree, 16> worklist;
5435 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5436 !gsi_end_p (gsi); gsi_next (&gsi))
5438 gimple *stmt = gsi_stmt (gsi);
5439 if (is_gimple_debug (stmt))
5440 continue;
5442 if (!vop && gimple_vuse (stmt))
5443 vop = gimple_vuse (stmt);
5445 tree cvop = vop;
5446 if (gimple_vdef (stmt))
5447 vop = gimple_vdef (stmt);
5449 tree lhs = gimple_get_lhs (stmt);
5450 if (lhs
5451 && TREE_CODE (lhs) == SSA_NAME
5452 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5453 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5454 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5455 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5456 it means it will be handled in a loop or straight line code
5457 at the location of its (ultimate) immediate use, so for
5458 vop checking purposes check these only at the ultimate
5459 immediate use. */
5460 continue;
5462 ssa_op_iter oi;
5463 use_operand_p use_p;
5464 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5466 tree s = USE_FROM_PTR (use_p);
5467 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5468 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5469 worklist.safe_push (s);
5472 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5473 while (worklist.length () > 0)
5475 tree s = worklist.pop ();
5477 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5479 gimple *g = SSA_NAME_DEF_STMT (s);
5480 needs_operand_addr |= stmt_needs_operand_addr (g);
5481 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5483 tree s2 = USE_FROM_PTR (use_p);
5484 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5485 && (bitint_precision_kind (TREE_TYPE (s2))
5486 >= bitint_prec_large))
5487 worklist.safe_push (s2);
5489 continue;
5491 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5492 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5494 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5495 if (TREE_CODE (rhs) == SSA_NAME
5496 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5497 s = rhs;
5498 else
5499 continue;
5501 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5502 continue;
5504 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5505 if (needs_operand_addr
5506 && TREE_CODE (rhs1) == COMPONENT_REF
5507 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5509 tree fld = TREE_OPERAND (rhs1, 1);
5510 /* For little-endian, we can allow as inputs bit-fields
5511 which start at a limb boundary. */
5512 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5513 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5514 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5515 % limb_prec) == 0)
5516 ;
5517 else
5519 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5520 continue;
5524 ao_ref ref;
5525 ao_ref_init (&ref, rhs1);
5526 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5527 unsigned limit = 64;
5528 tree vuse = cvop;
5529 if (vop != cvop
5530 && is_gimple_assign (stmt)
5531 && gimple_store_p (stmt)
5532 && !operand_equal_p (lhs,
5533 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5534 0))
5535 vuse = vop;
5536 if (vuse != lvop
5537 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5538 NULL, NULL, limit, lvop) == NULL)
5539 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5543 bb->aux = (void *) vop;
5544 return NULL;
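/* A sketch of what the walk above decides (hypothetical GIMPLE):
   given
     _1 = mem;
     ...
     _2 = _1 + _3;
   the load _1 may stay in m_loads, i.e. be re-read limb by limb at
   the point of _2, only if walk_non_aliased_vuses proves that no
   intervening store (tracked through the virtual operands) can
   clobber mem; otherwise its bit is cleared and the load is kept.  */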
5549 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5550 build_ssa_conflict_graph.
5551 The differences are:
5552 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5553 2) for large/huge _BitInt multiplication/division/modulo process def
5554 only after processing uses rather than before to make uses conflict
5555 with the definition
5556 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5557 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5558 the final statement. */
5560 void
5561 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5562 ssa_conflicts *graph, bitmap names,
5563 void (*def) (live_track *, tree,
5564 ssa_conflicts *),
5565 void (*use) (live_track *, tree))
5567 bool muldiv_p = false;
5568 tree lhs = NULL_TREE;
5569 if (is_gimple_assign (stmt))
5571 lhs = gimple_assign_lhs (stmt);
5572 if (TREE_CODE (lhs) == SSA_NAME
5573 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5574 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5576 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5577 return;
5578 switch (gimple_assign_rhs_code (stmt))
5580 case MULT_EXPR:
5581 case TRUNC_DIV_EXPR:
5582 case TRUNC_MOD_EXPR:
5583 muldiv_p = true;
5584 default:
5585 break;
5590 ssa_op_iter iter;
5591 tree var;
5592 if (!muldiv_p)
5594 /* For stmts with more than one SSA_NAME definition pretend all the
5595 SSA_NAME outputs but the first one are live at this point, so
5596 that conflicts are added between all of them even when they are
5597 not actually live after the asm, because expansion might
5598 copy those into pseudos after the asm and if multiple outputs
5599 share the same partition, it might overwrite those that should
5600 be live. E.g.
5601 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
5602 return a;
5603 See PR70593. */
5604 bool first = true;
5605 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5606 if (first)
5607 first = false;
5608 else
5609 use (live, var);
5611 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5612 def (live, var, graph);
5615 auto_vec<tree, 16> worklist;
5616 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5617 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5618 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5620 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5621 use (live, var);
5622 else
5623 worklist.safe_push (var);
5626 while (worklist.length () > 0)
5628 tree s = worklist.pop ();
5629 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5630 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5631 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5633 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5634 use (live, var);
5635 else
5636 worklist.safe_push (var);
5640 if (muldiv_p)
5641 def (live, lhs, graph);
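/* E.g. for a hypothetical
     r_1 = a_2 * b_3;
   processing the def r_1 only after the uses a_2 and b_3 makes r_1
   conflict with both, so coalescing can't place r_1 into the same
   backing storage as either operand; the library helper may write
   result limbs before it has read all of the operand limbs.  */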
5644 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5645 return the largest bitint_prec_kind of them, otherwise return
5646 bitint_prec_small. */
5648 static bitint_prec_kind
5649 arith_overflow_arg_kind (gimple *stmt)
5651 bitint_prec_kind ret = bitint_prec_small;
5652 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5653 switch (gimple_call_internal_fn (stmt))
5655 case IFN_ADD_OVERFLOW:
5656 case IFN_SUB_OVERFLOW:
5657 case IFN_MUL_OVERFLOW:
5658 for (int i = 0; i < 2; ++i)
5660 tree a = gimple_call_arg (stmt, i);
5661 if (TREE_CODE (a) == INTEGER_CST
5662 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5664 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5665 ret = MAX (ret, kind);
5668 break;
5669 default:
5670 break;
5672 return ret;
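/* E.g. a hypothetical call
     _3 = .ADD_OVERFLOW (cst1, cst2);
   where both arguments are INTEGER_CSTs of some large/huge _BitInt
   type has no large/huge SSA_NAME operands at all, yet still needs
   lowering; this helper reports the kind of the widest such constant
   argument so the scans below don't skip the statement.  */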
5675 /* Entry point for _BitInt(N) operation lowering during optimization. */
5677 static unsigned int
5678 gimple_lower_bitint (void)
5680 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5681 limb_prec = 0;
5683 unsigned int i;
5684 for (i = 0; i < num_ssa_names; ++i)
5686 tree s = ssa_name (i);
5687 if (s == NULL)
5688 continue;
5689 tree type = TREE_TYPE (s);
5690 if (TREE_CODE (type) == COMPLEX_TYPE)
5692 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5693 != bitint_prec_small)
5694 break;
5695 type = TREE_TYPE (type);
5697 if (TREE_CODE (type) == BITINT_TYPE
5698 && bitint_precision_kind (type) != bitint_prec_small)
5699 break;
5700 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5701 into memory. Such functions could have no large/huge SSA_NAMEs. */
5702 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5704 gimple *g = SSA_NAME_DEF_STMT (s);
5705 if (is_gimple_assign (g) && gimple_store_p (g))
5707 tree t = gimple_assign_rhs1 (g);
5708 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5709 && (bitint_precision_kind (TREE_TYPE (t))
5710 >= bitint_prec_large))
5711 break;
5714 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5715 to floating point types need to be rewritten. */
5716 else if (SCALAR_FLOAT_TYPE_P (type))
5718 gimple *g = SSA_NAME_DEF_STMT (s);
5719 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5721 tree t = gimple_assign_rhs1 (g);
5722 if (TREE_CODE (t) == INTEGER_CST
5723 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5724 && (bitint_precision_kind (TREE_TYPE (t))
5725 != bitint_prec_small))
5726 break;
5730 if (i == num_ssa_names)
5731 return 0;
5733 basic_block bb;
5734 auto_vec<gimple *, 4> switch_statements;
5735 FOR_EACH_BB_FN (bb, cfun)
5737 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5739 tree idx = gimple_switch_index (swtch);
5740 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5741 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5742 continue;
5744 if (optimize)
5745 group_case_labels_stmt (swtch);
5746 switch_statements.safe_push (swtch);
5750 if (!switch_statements.is_empty ())
5752 bool expanded = false;
5753 gimple *stmt;
5754 unsigned int j;
5755 i = 0;
5756 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5758 gswitch *swtch = as_a<gswitch *> (stmt);
5759 tree_switch_conversion::switch_decision_tree dt (swtch);
5760 expanded |= dt.analyze_switch_statement ();
5763 if (expanded)
5765 free_dominance_info (CDI_DOMINATORS);
5766 free_dominance_info (CDI_POST_DOMINATORS);
5767 mark_virtual_operands_for_renaming (cfun);
5768 cleanup_tree_cfg (TODO_update_ssa);
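/* A rough example of the effect: a switch such as
     switch (x)  // x of type _BitInt(256)
       { case 1: ...; case 7: ...; default: ...; }
   is expanded by the decision tree code into a series of GIMPLE_COND
   comparisons against the case constants, which the rest of this
   pass can lower, unlike a GIMPLE_SWITCH on a huge index.  */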
5772 struct bitint_large_huge large_huge;
5773 bool has_large_huge_parm_result = false;
5774 bool has_large_huge = false;
5775 unsigned int ret = 0, first_large_huge = ~0U;
5776 bool edge_insertions = false;
5777 for (; i < num_ssa_names; ++i)
5779 tree s = ssa_name (i);
5780 if (s == NULL)
5781 continue;
5782 tree type = TREE_TYPE (s);
5783 if (TREE_CODE (type) == COMPLEX_TYPE)
5785 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5786 >= bitint_prec_large)
5787 has_large_huge = true;
5788 type = TREE_TYPE (type);
5790 if (TREE_CODE (type) == BITINT_TYPE
5791 && bitint_precision_kind (type) >= bitint_prec_large)
5793 if (first_large_huge == ~0U)
5794 first_large_huge = i;
5795 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5796 gimple_stmt_iterator gsi;
5797 tree_code rhs_code;
5798 /* Unoptimize certain constructs to simpler alternatives to
5799 avoid having to lower all of them. */
5800 if (is_gimple_assign (stmt))
5801 switch (rhs_code = gimple_assign_rhs_code (stmt))
5803 default:
5804 break;
5805 case LROTATE_EXPR:
5806 case RROTATE_EXPR:
5808 first_large_huge = 0;
5809 location_t loc = gimple_location (stmt);
5810 gsi = gsi_for_stmt (stmt);
5811 tree rhs1 = gimple_assign_rhs1 (stmt);
5812 tree type = TREE_TYPE (rhs1);
5813 tree n = gimple_assign_rhs2 (stmt), m;
5814 tree p = build_int_cst (TREE_TYPE (n),
5815 TYPE_PRECISION (type));
5816 if (TREE_CODE (n) == INTEGER_CST)
5817 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5818 else
5820 m = make_ssa_name (TREE_TYPE (n));
5821 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5822 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5823 gimple_set_location (g, loc);
5825 if (!TYPE_UNSIGNED (type))
5827 tree utype = build_bitint_type (TYPE_PRECISION (type),
5828 1);
5829 if (TREE_CODE (rhs1) == INTEGER_CST)
5830 rhs1 = fold_convert (utype, rhs1);
5831 else
5833 tree t = make_ssa_name (utype);
5834 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5835 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5836 gimple_set_location (g, loc);
5837 rhs1 = t;
5839 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5840 rhs_code == LROTATE_EXPR
5841 ? LSHIFT_EXPR : RSHIFT_EXPR,
5842 rhs1, n);
5843 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5844 gimple_set_location (g, loc);
5845 tree op1 = gimple_assign_lhs (g);
5846 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5847 rhs_code == LROTATE_EXPR
5848 ? RSHIFT_EXPR : LSHIFT_EXPR,
5849 rhs1, m);
5850 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5851 gimple_set_location (g, loc);
5852 tree op2 = gimple_assign_lhs (g);
5853 tree lhs = gimple_assign_lhs (stmt);
5854 if (!TYPE_UNSIGNED (type))
5856 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5857 BIT_IOR_EXPR, op1, op2);
5858 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5859 gimple_set_location (g, loc);
5860 g = gimple_build_assign (lhs, NOP_EXPR,
5861 gimple_assign_lhs (g));
5863 else
5864 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5865 gsi_replace (&gsi, g, true);
5866 gimple_set_location (g, loc);
5868 break;
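/* Sketch of the rewrite just performed (hypothetical precision and
   names): a rotate
     _BitInt(256) r = x >>r n;  // RROTATE_EXPR
   becomes, with ux = (unsigned _BitInt(256)) x and m = 256 - n,
     r = (_BitInt(256)) ((ux >> n) | (ux << m));
   so only shifts and bitwise IOR remain to be lowered later.  */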
5869 case ABS_EXPR:
5870 case ABSU_EXPR:
5871 case MIN_EXPR:
5872 case MAX_EXPR:
5873 case COND_EXPR:
5874 first_large_huge = 0;
5875 gsi = gsi_for_stmt (stmt);
5876 tree lhs = gimple_assign_lhs (stmt);
5877 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5878 location_t loc = gimple_location (stmt);
5879 if (rhs_code == ABS_EXPR)
5880 g = gimple_build_cond (LT_EXPR, rhs1,
5881 build_zero_cst (TREE_TYPE (rhs1)),
5882 NULL_TREE, NULL_TREE);
5883 else if (rhs_code == ABSU_EXPR)
5885 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5886 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5887 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5888 gimple_set_location (g, loc);
5889 g = gimple_build_cond (LT_EXPR, rhs1,
5890 build_zero_cst (TREE_TYPE (rhs1)),
5891 NULL_TREE, NULL_TREE);
5892 rhs1 = rhs2;
5894 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5896 rhs2 = gimple_assign_rhs2 (stmt);
5897 if (TREE_CODE (rhs1) == INTEGER_CST)
5898 std::swap (rhs1, rhs2);
5899 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5900 NULL_TREE, NULL_TREE);
5901 if (rhs_code == MAX_EXPR)
5902 std::swap (rhs1, rhs2);
5904 else
5906 g = gimple_build_cond (NE_EXPR, rhs1,
5907 build_zero_cst (TREE_TYPE (rhs1)),
5908 NULL_TREE, NULL_TREE);
5909 rhs1 = gimple_assign_rhs2 (stmt);
5910 rhs2 = gimple_assign_rhs3 (stmt);
5912 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5913 gimple_set_location (g, loc);
5914 edge e1 = split_block (gsi_bb (gsi), g);
5915 edge e2 = split_block (e1->dest, (gimple *) NULL);
5916 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5917 e3->probability = profile_probability::even ();
5918 e1->flags = EDGE_TRUE_VALUE;
5919 e1->probability = e3->probability.invert ();
5920 if (dom_info_available_p (CDI_DOMINATORS))
5921 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5922 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5924 gsi = gsi_after_labels (e1->dest);
5925 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5926 NEGATE_EXPR, rhs1);
5927 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5928 gimple_set_location (g, loc);
5929 rhs2 = gimple_assign_lhs (g);
5930 std::swap (rhs1, rhs2);
5932 gsi = gsi_for_stmt (stmt);
5933 gsi_remove (&gsi, true);
5934 gphi *phi = create_phi_node (lhs, e2->dest);
5935 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5936 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5937 break;
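/* E.g. a hypothetical
     _BitInt(512) r = ABS_EXPR <a_1>;
   is turned into control flow roughly like
     if (a_1 < 0) goto bb_neg; else goto bb_join;
     bb_neg: a_2 = -a_1;
     bb_join: r = PHI <a_2 (bb_neg), a_1 (false edge)>;
   leaving only comparisons, negation and PHIs to lower;
   MIN/MAX/COND_EXPR get the analogous branch-and-PHI form.  */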
5940 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5941 into memory. Such functions could have no large/huge SSA_NAMEs. */
5942 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5944 gimple *g = SSA_NAME_DEF_STMT (s);
5945 if (is_gimple_assign (g) && gimple_store_p (g))
5947 tree t = gimple_assign_rhs1 (g);
5948 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5949 && (bitint_precision_kind (TREE_TYPE (t))
5950 >= bitint_prec_large))
5951 has_large_huge = true;
5954 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5955 to floating point types need to be rewritten. */
5956 else if (SCALAR_FLOAT_TYPE_P (type))
5958 gimple *g = SSA_NAME_DEF_STMT (s);
5959 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5961 tree t = gimple_assign_rhs1 (g);
5962 if (TREE_CODE (t) == INTEGER_CST
5963 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5964 && (bitint_precision_kind (TREE_TYPE (t))
5965 >= bitint_prec_large))
5966 has_large_huge = true;
5970 for (i = first_large_huge; i < num_ssa_names; ++i)
5972 tree s = ssa_name (i);
5973 if (s == NULL)
5974 continue;
5975 tree type = TREE_TYPE (s);
5976 if (TREE_CODE (type) == COMPLEX_TYPE)
5977 type = TREE_TYPE (type);
5978 if (TREE_CODE (type) == BITINT_TYPE
5979 && bitint_precision_kind (type) >= bitint_prec_large)
5981 use_operand_p use_p;
5982 gimple *use_stmt;
5983 has_large_huge = true;
5984 if (optimize
5985 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
5986 continue;
5987 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5988 the same bb and could be handled in the same loop with the
5989 immediate use. */
5990 if (optimize
5991 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5992 && single_imm_use (s, &use_p, &use_stmt)
5993 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
5995 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
5997 if (mergeable_op (use_stmt))
5998 continue;
5999 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6000 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6001 continue;
6002 if (gimple_assign_cast_p (use_stmt))
6004 tree lhs = gimple_assign_lhs (use_stmt);
6005 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6006 continue;
6008 else if (gimple_store_p (use_stmt)
6009 && is_gimple_assign (use_stmt)
6010 && !gimple_has_volatile_ops (use_stmt)
6011 && !stmt_ends_bb_p (use_stmt))
6012 continue;
6014 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6016 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6017 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6018 && ((is_gimple_assign (use_stmt)
6019 && (gimple_assign_rhs_code (use_stmt)
6020 != COMPLEX_EXPR))
6021 || gimple_code (use_stmt) == GIMPLE_COND)
6022 && (!gimple_store_p (use_stmt)
6023 || (is_gimple_assign (use_stmt)
6024 && !gimple_has_volatile_ops (use_stmt)
6025 && !stmt_ends_bb_p (use_stmt)))
6026 && (TREE_CODE (rhs1) != SSA_NAME
6027 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6029 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6030 || (bitint_precision_kind (TREE_TYPE (rhs1))
6031 < bitint_prec_large))
6032 continue;
6033 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6034 >= TYPE_PRECISION (TREE_TYPE (s)))
6035 && mergeable_op (use_stmt))
6036 continue;
6037 /* Prevent merging a widening non-mergeable cast
6038 on the result of some narrower mergeable op
6039 together with later mergeable operations. E.g.
6040 result of _BitInt(223) addition shouldn't be
6041 sign-extended to _BitInt(513) and have another
6042 _BitInt(513) added to it, as handle_plus_minus
6043 with its PHI node handling inside of handle_cast
6044 will not work correctly. An exception is if
6045 use_stmt is a store, this is handled directly
6046 in lower_mergeable_stmt. */
6047 if (TREE_CODE (rhs1) != SSA_NAME
6048 || !has_single_use (rhs1)
6049 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6050 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6051 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6052 || gimple_store_p (use_stmt))
6053 continue;
6054 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6055 < TYPE_PRECISION (TREE_TYPE (s)))
6056 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6058 /* Another exception is if the widening cast is
6059 from mergeable same precision cast from something
6060 not mergeable. */
6061 tree rhs2
6062 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6063 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6064 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6065 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6067 if (TREE_CODE (rhs2) != SSA_NAME
6068 || !has_single_use (rhs2)
6069 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6070 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6071 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6072 continue;
6077 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6078 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6080 case IMAGPART_EXPR:
6082 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6083 rhs1 = TREE_OPERAND (rhs1, 0);
6084 if (TREE_CODE (rhs1) == SSA_NAME)
6086 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6087 if (optimizable_arith_overflow (g))
6088 continue;
6091 /* FALLTHRU */
6092 case LSHIFT_EXPR:
6093 case RSHIFT_EXPR:
6094 case MULT_EXPR:
6095 case TRUNC_DIV_EXPR:
6096 case TRUNC_MOD_EXPR:
6097 case FIX_TRUNC_EXPR:
6098 case REALPART_EXPR:
6099 if (gimple_store_p (use_stmt)
6100 && is_gimple_assign (use_stmt)
6101 && !gimple_has_volatile_ops (use_stmt)
6102 && !stmt_ends_bb_p (use_stmt))
6104 tree lhs = gimple_assign_lhs (use_stmt);
6105 /* As multiplication/division passes the address of the lhs
6106 to a library function which assumes it can extend it
6107 to a whole number of limbs, avoid merging those
6108 with bit-field stores. Don't allow it for
6109 shifts etc. either, so that the bit-field store
6110 handling doesn't have to be done everywhere. */
6111 if (TREE_CODE (lhs) == COMPONENT_REF
6112 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6113 break;
6114 continue;
6116 break;
6117 default:
6118 break;
6122 /* Also ignore uninitialized uses. */
6123 if (SSA_NAME_IS_DEFAULT_DEF (s)
6124 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6125 continue;
6127 if (!large_huge.m_names)
6128 large_huge.m_names = BITMAP_ALLOC (NULL);
6129 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6130 if (has_single_use (s))
6132 if (!large_huge.m_single_use_names)
6133 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6134 bitmap_set_bit (large_huge.m_single_use_names,
6135 SSA_NAME_VERSION (s));
6137 if (SSA_NAME_VAR (s)
6138 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6139 && SSA_NAME_IS_DEFAULT_DEF (s))
6140 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6141 has_large_huge_parm_result = true;
6142 if (optimize
6143 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6144 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6145 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6146 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6148 use_operand_p use_p;
6149 imm_use_iterator iter;
6150 bool optimizable_load = true;
6151 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6153 gimple *use_stmt = USE_STMT (use_p);
6154 if (is_gimple_debug (use_stmt))
6155 continue;
6156 if (gimple_code (use_stmt) == GIMPLE_PHI
6157 || is_gimple_call (use_stmt))
6159 optimizable_load = false;
6160 break;
6164 ssa_op_iter oi;
6165 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6166 oi, SSA_OP_USE)
6168 tree s2 = USE_FROM_PTR (use_p);
6169 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6171 optimizable_load = false;
6172 break;
6176 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6178 if (!large_huge.m_loads)
6179 large_huge.m_loads = BITMAP_ALLOC (NULL);
6180 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6184 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6185 into memory. Such functions could have no large/huge SSA_NAMEs. */
6186 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6188 gimple *g = SSA_NAME_DEF_STMT (s);
6189 if (is_gimple_assign (g) && gimple_store_p (g))
6191 tree t = gimple_assign_rhs1 (g);
6192 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6193 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6194 has_large_huge = true;
6199 if (large_huge.m_names || has_large_huge)
6201 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6202 calculate_dominance_info (CDI_DOMINATORS);
6203 if (optimize)
6204 enable_ranger (cfun);
6205 if (large_huge.m_loads)
6207 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6208 entry->aux = NULL;
6209 bitint_dom_walker (large_huge.m_names,
6210 large_huge.m_loads).walk (entry);
6211 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6212 clear_aux_for_blocks ();
6213 BITMAP_FREE (large_huge.m_loads);
6215 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6216 large_huge.m_limb_size
6217 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6219 if (large_huge.m_names)
6221 large_huge.m_map
6222 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6223 coalesce_ssa_name (large_huge.m_map);
6224 partition_view_normal (large_huge.m_map);
6225 if (dump_file && (dump_flags & TDF_DETAILS))
6227 fprintf (dump_file, "After Coalescing:\n");
6228 dump_var_map (dump_file, large_huge.m_map);
6230 large_huge.m_vars
6231 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6232 bitmap_iterator bi;
6233 if (has_large_huge_parm_result)
6234 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6236 tree s = ssa_name (i);
6237 if (SSA_NAME_VAR (s)
6238 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6239 && SSA_NAME_IS_DEFAULT_DEF (s))
6240 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6242 int p = var_to_partition (large_huge.m_map, s);
6243 if (large_huge.m_vars[p] == NULL_TREE)
6245 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6246 mark_addressable (SSA_NAME_VAR (s));
6250 tree atype = NULL_TREE;
6251 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6253 tree s = ssa_name (i);
6254 int p = var_to_partition (large_huge.m_map, s);
6255 if (large_huge.m_vars[p] != NULL_TREE)
6256 continue;
6257 if (atype == NULL_TREE
6258 || !tree_int_cst_equal (TYPE_SIZE (atype),
6259 TYPE_SIZE (TREE_TYPE (s))))
6261 unsigned HOST_WIDE_INT nelts
6262 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6263 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6265 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6266 mark_addressable (large_huge.m_vars[p]);
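/* E.g. (illustrative numbers only) on a target with 64-bit limbs,
   all coalesced SSA_NAMEs of type _BitInt(512) in one partition end
   up sharing a single addressable array variable along the lines of
     unsigned long bitint.N[8];
   partitions with a different byte size get arrays with a
   correspondingly different number of limbs.  */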
6270 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6272 gimple_stmt_iterator prev;
6273 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6274 gsi = prev)
6276 prev = gsi;
6277 gsi_prev (&prev);
6278 ssa_op_iter iter;
6279 gimple *stmt = gsi_stmt (gsi);
6280 if (is_gimple_debug (stmt))
6281 continue;
6282 bitint_prec_kind kind = bitint_prec_small;
6283 tree t;
6284 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6285 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6287 bitint_prec_kind this_kind
6288 = bitint_precision_kind (TREE_TYPE (t));
6289 kind = MAX (kind, this_kind);
6291 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6293 t = gimple_assign_rhs1 (stmt);
6294 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6296 bitint_prec_kind this_kind
6297 = bitint_precision_kind (TREE_TYPE (t));
6298 kind = MAX (kind, this_kind);
6301 if (is_gimple_assign (stmt)
6302 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6304 t = gimple_assign_rhs1 (stmt);
6305 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6306 && TREE_CODE (t) == INTEGER_CST)
6308 bitint_prec_kind this_kind
6309 = bitint_precision_kind (TREE_TYPE (t));
6310 kind = MAX (kind, this_kind);
6313 if (is_gimple_call (stmt))
6315 t = gimple_call_lhs (stmt);
6316 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6318 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6319 kind = MAX (kind, this_kind);
6320 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6322 this_kind
6323 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6324 kind = MAX (kind, this_kind);
6328 if (kind == bitint_prec_small)
6329 continue;
6330 switch (gimple_code (stmt))
6332 case GIMPLE_CALL:
6333 /* For now. We'll need to handle some internal functions and
6334 perhaps some builtins. */
6335 if (kind == bitint_prec_middle)
6336 continue;
6337 break;
6338 case GIMPLE_ASM:
6339 if (kind == bitint_prec_middle)
6340 continue;
6341 break;
6342 case GIMPLE_RETURN:
6343 continue;
6344 case GIMPLE_ASSIGN:
6345 if (gimple_clobber_p (stmt))
6346 continue;
6347 if (kind >= bitint_prec_large)
6348 break;
6349 if (gimple_assign_single_p (stmt))
6350 /* No need to lower copies, loads or stores. */
6351 continue;
6352 if (gimple_assign_cast_p (stmt))
6354 tree lhs = gimple_assign_lhs (stmt);
6355 tree rhs = gimple_assign_rhs1 (stmt);
6356 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6357 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6358 && (TYPE_PRECISION (TREE_TYPE (lhs))
6359 == TYPE_PRECISION (TREE_TYPE (rhs))))
6360 /* No need to lower casts to same precision. */
6361 continue;
6363 break;
6364 default:
6365 break;
6368 if (kind == bitint_prec_middle)
6370 tree type = NULL_TREE;
6371 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6372 with the same precision and back. */
6373 unsigned int nops = gimple_num_ops (stmt);
6374 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6375 i < nops; ++i)
6376 if (tree op = gimple_op (stmt, i))
6378 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6379 if (nop != op)
6380 gimple_set_op (stmt, i, nop);
6381 else if (COMPARISON_CLASS_P (op))
6383 TREE_OPERAND (op, 0)
6384 = maybe_cast_middle_bitint (&gsi,
6385 TREE_OPERAND (op, 0),
6386 type);
6387 TREE_OPERAND (op, 1)
6388 = maybe_cast_middle_bitint (&gsi,
6389 TREE_OPERAND (op, 1),
6390 type);
6392 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6394 CASE_LOW (op)
6395 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6396 type);
6397 CASE_HIGH (op)
6398 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6399 type);
6402 if (tree lhs = gimple_get_lhs (stmt))
6403 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6404 && (bitint_precision_kind (TREE_TYPE (lhs))
6405 == bitint_prec_middle))
6407 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6408 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6409 type = build_nonstandard_integer_type (prec, uns);
6410 tree lhs2 = make_ssa_name (type);
6411 gimple_set_lhs (stmt, lhs2);
6412 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6413 if (stmt_ends_bb_p (stmt))
6415 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6416 gsi_insert_on_edge_immediate (e, g);
6418 else
6419 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6421 update_stmt (stmt);
6422 continue;
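/* A sketch on x86_64, where the limb is 64-bit but the target
   supports TImode arithmetic: a middle precision statement such as
     _BitInt(65) c = a + b;
   is rewritten above into casts through a same-precision,
   same-signedness INTEGER_TYPE (hypothetical SSA names)
     a_2 = (<int:65>) a; b_3 = (<int:65>) b;
     c_4 = a_2 + b_3;    c = (_BitInt(65)) c_4;
   which the backend can then expand like any other integer op.  */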
6425 if (tree lhs = gimple_get_lhs (stmt))
6426 if (TREE_CODE (lhs) == SSA_NAME)
6428 tree type = TREE_TYPE (lhs);
6429 if (TREE_CODE (type) == COMPLEX_TYPE)
6430 type = TREE_TYPE (type);
6431 if (TREE_CODE (type) == BITINT_TYPE
6432 && bitint_precision_kind (type) >= bitint_prec_large
6433 && (large_huge.m_names == NULL
6434 || !bitmap_bit_p (large_huge.m_names,
6435 SSA_NAME_VERSION (lhs))))
6436 continue;
6439 large_huge.lower_stmt (stmt);
6442 tree atype = NULL_TREE;
6443 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6444 gsi_next (&gsi))
6446 gphi *phi = gsi.phi ();
6447 tree lhs = gimple_phi_result (phi);
6448 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6449 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6450 continue;
6451 int p1 = var_to_partition (large_huge.m_map, lhs);
6452 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6453 tree v1 = large_huge.m_vars[p1];
6454 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6456 tree arg = gimple_phi_arg_def (phi, i);
6457 edge e = gimple_phi_arg_edge (phi, i);
6458 gimple *g;
6459 switch (TREE_CODE (arg))
6461 case INTEGER_CST:
6462 if (integer_zerop (arg) && VAR_P (v1))
6464 tree zero = build_zero_cst (TREE_TYPE (v1));
6465 g = gimple_build_assign (v1, zero);
6466 gsi_insert_on_edge (e, g);
6467 edge_insertions = true;
6468 break;
6470 int ext;
6471 unsigned int min_prec, prec, rem;
6472 tree c;
6473 prec = TYPE_PRECISION (TREE_TYPE (arg));
6474 rem = prec % (2 * limb_prec);
6475 min_prec = bitint_min_cst_precision (arg, ext);
6476 if (min_prec > prec - rem - 2 * limb_prec
6477 && min_prec > (unsigned) limb_prec)
6478 /* A constant which has enough significant bits that it
6479 isn't worth trying to save .rodata space by extending
6480 from a smaller number. */
6481 min_prec = prec;
6482 else
6483 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6484 if (min_prec == 0)
6485 c = NULL_TREE;
6486 else if (min_prec == prec)
6487 c = tree_output_constant_def (arg);
6488 else if (min_prec == (unsigned) limb_prec)
6489 c = fold_convert (large_huge.m_limb_type, arg);
6490 else
6492 tree ctype = build_bitint_type (min_prec, 1);
6493 c = tree_output_constant_def (fold_convert (ctype, arg));
6495 if (c)
6497 if (VAR_P (v1) && min_prec == prec)
6499 tree v2 = build1 (VIEW_CONVERT_EXPR,
6500 TREE_TYPE (v1), c);
6501 g = gimple_build_assign (v1, v2);
6502 gsi_insert_on_edge (e, g);
6503 edge_insertions = true;
6504 break;
6506 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6507 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6508 TREE_TYPE (c), v1),
6509 c);
6510 else
6512 unsigned HOST_WIDE_INT nelts
6513 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6514 / limb_prec;
6515 tree vtype
6516 = build_array_type_nelts (large_huge.m_limb_type,
6517 nelts);
6518 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6519 vtype, v1),
6520 build1 (VIEW_CONVERT_EXPR,
6521 vtype, c));
6523 gsi_insert_on_edge (e, g);
6525 if (ext == 0)
6527 unsigned HOST_WIDE_INT nelts
6528 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6529 - min_prec) / limb_prec;
6530 tree vtype
6531 = build_array_type_nelts (large_huge.m_limb_type,
6532 nelts);
6533 tree ptype = build_pointer_type (TREE_TYPE (v1));
6534 tree off = fold_convert (ptype,
6535 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6536 tree vd = build2 (MEM_REF, vtype,
6537 build_fold_addr_expr (v1), off);
6538 g = gimple_build_assign (vd, build_zero_cst (vtype));
6540 else
6542 tree vd = v1;
6543 if (c)
6545 tree ptype = build_pointer_type (TREE_TYPE (v1));
6546 tree off
6547 = fold_convert (ptype,
6548 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6549 vd = build2 (MEM_REF, large_huge.m_limb_type,
6550 build_fold_addr_expr (v1), off);
6552 vd = build_fold_addr_expr (vd);
6553 unsigned HOST_WIDE_INT nbytes
6554 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6555 if (c)
6556 nbytes
6557 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6558 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6559 g = gimple_build_call (fn, 3, vd,
6560 integer_minus_one_node,
6561 build_int_cst (sizetype,
6562 nbytes));
6564 gsi_insert_on_edge (e, g);
6565 edge_insertions = true;
6566 break;
6567 default:
6568 gcc_unreachable ();
6569 case SSA_NAME:
6570 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6572 if (large_huge.m_names == NULL
6573 || !bitmap_bit_p (large_huge.m_names,
6574 SSA_NAME_VERSION (arg)))
6575 continue;
6577 int p2 = var_to_partition (large_huge.m_map, arg);
6578 if (p1 == p2)
6579 continue;
6580 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6581 tree v2 = large_huge.m_vars[p2];
6582 if (VAR_P (v1) && VAR_P (v2))
6583 g = gimple_build_assign (v1, v2);
6584 else if (VAR_P (v1))
6585 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6586 TREE_TYPE (v1), v2));
6587 else if (VAR_P (v2))
6588 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6589 TREE_TYPE (v2), v1), v2);
6590 else
6592 if (atype == NULL_TREE
6593 || !tree_int_cst_equal (TYPE_SIZE (atype),
6594 TYPE_SIZE (TREE_TYPE (lhs))))
6596 unsigned HOST_WIDE_INT nelts
6597 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6598 / limb_prec;
6599 atype
6600 = build_array_type_nelts (large_huge.m_limb_type,
6601 nelts);
6603 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6604 atype, v1),
6605 build1 (VIEW_CONVERT_EXPR,
6606 atype, v2));
6608 gsi_insert_on_edge (e, g);
6609 edge_insertions = true;
6610 break;
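/* Two illustrative cases of the edge copies created above
   (hypothetical names and sizes): a constant PHI argument -1 of type
   _BitInt(512) needs no .rodata copy at all, the backing variable is
   simply filled on the edge with
     __builtin_memset (&bitint.1, -1, 64);
   while an SSA_NAME argument from another partition is copied as
   whole backing storage via VIEW_CONVERT_EXPRs, e.g.
     VIEW_CONVERT_EXPR<unsigned long[8]>(bitint.1)
       = VIEW_CONVERT_EXPR<unsigned long[8]>(bitint.2);  */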
6616 if (large_huge.m_names || has_large_huge)
6618 gimple *nop = NULL;
6619 for (i = 0; i < num_ssa_names; ++i)
6621 tree s = ssa_name (i);
6622 if (s == NULL_TREE)
6623 continue;
6624 tree type = TREE_TYPE (s);
6625 if (TREE_CODE (type) == COMPLEX_TYPE)
6626 type = TREE_TYPE (type);
6627 if (TREE_CODE (type) == BITINT_TYPE
6628 && bitint_precision_kind (type) >= bitint_prec_large)
6630 if (large_huge.m_preserved
6631 && bitmap_bit_p (large_huge.m_preserved,
6632 SSA_NAME_VERSION (s)))
6633 continue;
6634 gimple *g = SSA_NAME_DEF_STMT (s);
6635 if (gimple_code (g) == GIMPLE_NOP)
6637 if (SSA_NAME_VAR (s))
6638 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6639 release_ssa_name (s);
6640 continue;
6642 if (gimple_code (g) != GIMPLE_ASM)
6644 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6645 bool save_vta = flag_var_tracking_assignments;
6646 flag_var_tracking_assignments = false;
6647 gsi_remove (&gsi, true);
6648 flag_var_tracking_assignments = save_vta;
6650 if (nop == NULL)
6651 nop = gimple_build_nop ();
6652 SSA_NAME_DEF_STMT (s) = nop;
6653 release_ssa_name (s);
6656 if (optimize)
6657 disable_ranger (cfun);
6660 if (edge_insertions)
6661 gsi_commit_edge_inserts ();
6663 return ret;
6666 namespace {
6668 const pass_data pass_data_lower_bitint =
6670 GIMPLE_PASS, /* type */
6671 "bitintlower", /* name */
6672 OPTGROUP_NONE, /* optinfo_flags */
6673 TV_NONE, /* tv_id */
6674 PROP_ssa, /* properties_required */
6675 PROP_gimple_lbitint, /* properties_provided */
6676 0, /* properties_destroyed */
6677 0, /* todo_flags_start */
6678 0, /* todo_flags_finish */
6681 class pass_lower_bitint : public gimple_opt_pass
6683 public:
6684 pass_lower_bitint (gcc::context *ctxt)
6685 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6688 /* opt_pass methods: */
6689 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6690 unsigned int execute (function *) final override
6692 return gimple_lower_bitint ();
6695 }; // class pass_lower_bitint
6697 } // anon namespace
6699 gimple_opt_pass *
6700 make_pass_lower_bitint (gcc::context *ctxt)
6702 return new pass_lower_bitint (ctxt);
6706 namespace {
6708 const pass_data pass_data_lower_bitint_O0 =
6710 GIMPLE_PASS, /* type */
6711 "bitintlower0", /* name */
6712 OPTGROUP_NONE, /* optinfo_flags */
6713 TV_NONE, /* tv_id */
6714 PROP_cfg, /* properties_required */
6715 PROP_gimple_lbitint, /* properties_provided */
6716 0, /* properties_destroyed */
6717 0, /* todo_flags_start */
6718 0, /* todo_flags_finish */
6721 class pass_lower_bitint_O0 : public gimple_opt_pass
6723 public:
6724 pass_lower_bitint_O0 (gcc::context *ctxt)
6725 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6728 /* opt_pass methods: */
6729 bool gate (function *fun) final override
6731 /* With errors, normal optimization passes are not run. If we don't
6732 lower bitint operations at all, rtl expansion will abort. */
6733 return !(fun->curr_properties & PROP_gimple_lbitint);
6736 unsigned int execute (function *) final override
6738 return gimple_lower_bitint ();
6741 }; // class pass_lower_bitint_O0
6743 } // anon namespace
6745 gimple_opt_pass *
6746 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6748 return new pass_lower_bitint_O0 (ctxt);