/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"

/* Split BITINT_TYPE precisions into 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
        huge_min_prec = 4 * limb_prec;
      else
        huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
        mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}

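/* Illustration, assuming an x86_64-like target where the limb is
   64-bit DImode and MAX_FIXED_MODE_SIZE is 128 (TImode):
     bitint_precision_kind (17)  -> bitint_prec_small   (single limb)
     bitint_precision_kind (100) -> bitint_prec_middle  (TImode arithmetics)
     bitint_precision_kind (192) -> bitint_prec_large   (straight line code)
     bitint_precision_kind (512) -> bitint_prec_huge    (loops over limbs)  */
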
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
         in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
        {
          ext = -1;
          return min_prec2;
        }
    }
  return min_prec;
}

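/* Worked example: for a signed _BitInt(256) constant 5,
   wi::min_precision (w, SIGNED) is 4 and dropping the sign bit gives
   min_prec 3 with EXT 0, so all limbs above the constant are zeros.
   For an unsigned _BitInt(256) constant with all bits set,
   wi::min_precision (w, UNSIGNED) would be 256, but interpreted signed
   the value is -1 whose min_precision minus the sign bit is 0, so 0 is
   returned with EXT -1 and every limb is just the -1 extension.  */
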
namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
        return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}

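/* E.g. on x86_64 a middle _BitInt(65) operand OP is replaced here by a
   NOP_EXPR cast to the 65-bit nonstandard INTEGER_TYPE (which later
   expands using TImode); TYPE caches that type so repeated calls for
   operands of the same statement don't rebuild it.  */
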
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
        tree cnt = gimple_assign_rhs2 (stmt);
        if (tree_fits_uhwi_p (cnt)
            && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
          return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
        tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
        tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
            && TREE_CODE (lhs_type) == BITINT_TYPE
            && TREE_CODE (rhs_type) == BITINT_TYPE
            && bitint_precision_kind (lhs_type) >= bitint_prec_large
            && bitint_precision_kind (rhs_type) >= bitint_prec_large
            && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
          {
            if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
              return true;
            if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
              return true;
            if (bitint_precision_kind (lhs_type) == bitint_prec_large)
              return true;
          }
        break;
      }
    default:
      break;
    }
  return false;
}

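/* Illustration: for _BitInt(256) variables x and y (huge on targets
   with 64-bit limbs), statements like
     _1 = x_2(D) & y_3(D);
     _4 = x_2(D) + y_3(D);
     _5 = x_2(D) << 3;
   are all mergeable, i.e. computable limb by limb from least to most
   significant within a single loop, carrying state (carry bit, bits
   shifted out) between iterations; something like multiplication is
   not mergeable and is lowered separately.  */
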
/* Return non-zero if STMT is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
          || gimple_bb (use_stmt) != gimple_bb (stmt)
          || !gimple_store_p (use_stmt)
          || !is_gimple_assign (use_stmt)
          || gimple_has_volatile_ops (use_stmt)
          || stmt_ends_bb_p (use_stmt))
        return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
        continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
        return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
        {
          if ((seen & 1) != 0)
            return 0;
          seen |= 1;
        }
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
        {
          if ((seen & 2) != 0)
            return 0;
          seen |= 2;

          use_operand_p use2_p;
          gimple *use_stmt;
          tree lhs2 = gimple_assign_lhs (g);
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
            return 0;
          if (!single_imm_use (lhs2, &use2_p, &use_stmt)
              || gimple_bb (use_stmt) != gimple_bb (stmt)
              || !gimple_assign_cast_p (use_stmt))
            return 0;

          lhs2 = gimple_assign_lhs (use_stmt);
          if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
              || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
            return 0;
        }
      else
        return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  return seen == 3 ? 2 : 1;
}

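/* Illustration: the matched shape is the common expansion of e.g.
   __builtin_add_overflow on large/huge _BitInt operands:
     _5 = .ADD_OVERFLOW (x_1(D), y_2(D));
     _6 = REALPART_EXPR <_5>;   // optional; if present, return 2
     _7 = IMAGPART_EXPR <_5>;   // required, with a single use below
     ovf_8 = (int) _7;          // cast to non-BITINT_TYPE integral
   all in one basic block; the lowering can then compute the result and
   the overflow flag limb by limb without a stack temporary for _5.  */
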
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
          || TREE_CODE_CLASS (code) == tcc_binary)
        op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}

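/* Illustration: both of these forms match for _BitInt(512) a and b:
     if (a_1(D) < b_2(D)) goto ...;    // GIMPLE_COND
     cmp_3 = a_1(D) <= b_2(D);         // comparison assignment
   returning LT_EXPR resp. LE_EXPR with *POP1 = a_1(D), *POP2 = b_2(D)
   for lower_comparison_stmt to consume.  */
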
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
                             profile_probability, profile_probability,
                             edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
                                    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
                              tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, zero otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};

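/* A sketch of the m_first/m_data protocol described above: a handler
   needing two stashed trees per operand does roughly
     if (m_first)
       {
         m_data.safe_push (state1);
         m_data.safe_push (state2);
       }
     else
       state1 = m_data[m_data_cnt];     // and/or update it
     m_data_cnt += 2;                   // always the same amount
   so m_data_cnt acts as a cursor which every handler advances
   identically whether or not m_first is set.  */
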
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}

/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
                                           TYPE_UNSIGNED (type));
}

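/* E.g. for _BitInt(255) with 64-bit limbs, IDX 0 to 2 yield the full
   m_limb_type while IDX 3 covers only 255 % 64 == 63 bits, so a 63-bit
   nonstandard integer type is returned for the partial limb.  */
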
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
                ? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
                    build_fold_addr_expr (var),
                    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
        = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
                  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
                              build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
                                             tree_to_uhwi (idx)
                                             * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
          || !useless_type_conversion_p (m_limb_type,
                                         TREE_TYPE (TREE_TYPE (var))))
        {
          unsigned HOST_WIDE_INT nelts
            = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
          tree atype = build_array_type_nelts (m_limb_type, nelts);
          var = build1 (VIEW_CONVERT_EXPR, atype, var);
        }
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}

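/* E.g. for a VAR_DECL and constant IDX 1 this yields a MEM_REF like
   MEM[(unsigned long *)&var + 8B]; otherwise VAR is reinterpreted as
   an array of limbs and indexed, roughly
     VIEW_CONVERT_EXPR<unsigned long[4]>(var)[idx_5];
   with the limb type shown here assuming 64-bit limbs.  */
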
/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
                            edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}

/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
  new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
                                 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}

/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     |  / \
     | /   \
     |new_bb1 new_bb2
     |   |    /
     \   |   /
      \  |  /
       \ | /
        \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that COND1 is true.
   PROB2 is the probability that COND2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
                                         profile_probability prob1,
                                         profile_probability prob2,
                                         edge &edge_true_true,
                                         edge &edge_true_false,
                                         edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}

/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
          || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
        {
          if (SSA_NAME_IS_DEFAULT_DEF (op))
            {
              if (m_first)
                {
                  tree v = create_tmp_reg (m_limb_type);
                  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
                    {
                      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
                      DECL_SOURCE_LOCATION (v)
                        = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
                    }
                  v = get_or_create_ssa_default_def (cfun, v);
                  m_data.safe_push (v);
                }
              tree ret = m_data[m_data_cnt];
              m_data_cnt++;
              if (tree_fits_uhwi_p (idx))
                {
                  tree type = limb_access_type (TREE_TYPE (op), idx);
                  ret = add_cast (type, ret);
                }
              return ret;
            }
          location_t loc_save = m_loc;
          m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
          tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
          m_loc = loc_save;
          return ret;
        }
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
          && m_single_use_names
          && m_vars[p] != m_lhs
          && m_after_stmt
          && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
        {
          tree clobber = build_clobber (TREE_TYPE (m_vars[p]), CLOBBER_EOL);
          g = gimple_build_assign (m_vars[p], clobber);
          gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
          gsi_insert_after (&gsi, g, GSI_SAME_STMT);
        }
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
        {
          tree c, type = limb_access_type (TREE_TYPE (op), idx);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (limb_prec != HOST_BITS_PER_WIDE_INT)
            {
              wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
                                       TYPE_SIGN (TREE_TYPE (op)));
              c = wide_int_to_tree (type,
                                    wide_int::from (w, TYPE_PRECISION (type),
                                                    UNSIGNED));
            }
          else if (i >= TREE_INT_CST_EXT_NUNITS (op))
            c = build_int_cst (type,
                               tree_int_cst_sgn (op) < 0 ? -1 : 0);
          else
            c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
          m_data_cnt += 2;
          return c;
        }
      if (m_first
          || (m_data[m_data_cnt] == NULL_TREE
              && m_data[m_data_cnt + 1] == NULL_TREE))
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned int rem = prec % (2 * limb_prec);
          int ext;
          unsigned min_prec = bitint_min_cst_precision (op, ext);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (integer_zerop (op))
            {
              tree c = build_zero_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (integer_all_onesp (op))
            {
              tree c = build_all_ones_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
            {
              /* Single limb constant.  Use a phi with that limb from
                 the preheader edge and 0 or -1 constant from the other edge
                 and for the second limb in the loop.  */
              tree out;
              gcc_assert (m_first);
              m_data.pop ();
              m_data.pop ();
              prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out);
              g = gimple_build_assign (m_data[m_data_cnt + 1],
                                       build_int_cst (m_limb_type, ext));
              insert_before (g);
              m_data[m_data_cnt + 1] = gimple_assign_rhs1 (g);
            }
          else if (min_prec > prec - rem - 2 * limb_prec)
            {
              /* Constant which has enough significant bits that it isn't
                 worth trying to save .rodata space by extending from smaller
                 number.  */
              tree type;
              if (m_var_msb)
                type = TREE_TYPE (op);
              else
                /* If we have a guarantee the most significant partial limb
                   (if any) will be only accessed through handle_operand
                   with INTEGER_CST idx, we don't need to include the partial
                   limb in .rodata.  */
                type = build_bitint_type (prec - rem, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = NULL_TREE;
            }
          else if (m_upwards_2limb)
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.  */
              min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
              tree type = build_bitint_type (min_prec, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              tree idx2 = make_ssa_name (sizetype);
              g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
              insert_before (g);
              g = gimple_build_cond (LT_EXPR, idx,
                                     size_int (min_prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              edge edge_true, edge_false;
              if_then (g, (min_prec >= (prec - rem) / 2
                           ? profile_probability::likely ()
                           : profile_probability::unlikely ()),
                       edge_true, edge_false);
              tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
              insert_before (g);
              c1 = gimple_assign_lhs (g);
              tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
              insert_before (g);
              c2 = gimple_assign_lhs (g);
              tree c3 = build_int_cst (m_limb_type, ext);
              m_gsi = gsi_after_labels (edge_true->dest);
              m_data[m_data_cnt] = make_ssa_name (m_limb_type);
              m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
              gphi *phi = create_phi_node (m_data[m_data_cnt],
                                           edge_true->dest);
              add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
              phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
              add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
            }
          else
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.
                 Version for loops with random access to the limbs or
                 downwards loops.  */
              min_prec = CEIL (min_prec, limb_prec) * limb_prec;
              tree c;
              if (min_prec <= (unsigned) limb_prec)
                c = fold_convert (m_limb_type, op);
              else
                {
                  tree type = build_bitint_type (min_prec, 1);
                  c = tree_output_constant_def (fold_convert (type, op));
                }
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = integer_type_node;
            }
          t = m_data[m_data_cnt];
          if (m_data[m_data_cnt + 1] == NULL_TREE)
            {
              t = limb_access (TREE_TYPE (op), t, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
              insert_before (g);
              t = gimple_assign_lhs (g);
            }
        }
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
        {
          t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
          g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
          insert_before (g);
          t = gimple_assign_lhs (g);
        }
      else
        t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned rem = prec % (2 * limb_prec);
          int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
          tree c = m_data[m_data_cnt];
          unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
          g = gimple_build_cond (LT_EXPR, idx,
                                 size_int (min_prec / limb_prec),
                                 NULL_TREE, NULL_TREE);
          edge edge_true, edge_false;
          if_then (g, (min_prec >= (prec - rem) / 2
                       ? profile_probability::likely ()
                       : profile_probability::unlikely ()),
                   edge_true, edge_false);
          if (min_prec > (unsigned) limb_prec)
            {
              c = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
              insert_before (g);
              c = gimple_assign_lhs (g);
            }
          tree c2 = build_int_cst (m_limb_type, ext);
          m_gsi = gsi_after_labels (edge_true->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true->dest);
          add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
          add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
        }
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}

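/* Inside of a limb loop this creates the usual recurrence, e.g. for an
   addition carry initialized from the preheader:
     # carry_in_10 = PHI <0 (preheader), carry_out_11 (latch)>
   carry_in_10 is returned, and carry_out_11 is passed back through
   *DATA_OUT for the caller to define at the end of the iteration.  */
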
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}

/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
                                      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
                     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        {
          if (!TYPE_UNSIGNED (rhs1_type))
            {
              tree type = unsigned_type_for (rhs1_type);
              rhs1 = add_cast (type, rhs1);
              rhs2 = add_cast (type, rhs2);
            }
          rhs1 = add_cast (m_limb_type, rhs1);
          rhs2 = add_cast (m_limb_type, rhs2);
        }
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_UADDC : IFN_USUBC,
                                      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
                                      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
        {
          rhs1 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs1, REALPART_EXPR,
                                   build1 (REALPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          rhs2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs2, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          lhs = make_ssa_name (ctype);
          g = gimple_build_call_internal (code == PLUS_EXPR
                                          ? IFN_ADD_OVERFLOW
                                          : IFN_SUB_OVERFLOW,
                                          2, rhs1, data_in);
          gimple_call_set_lhs (g, lhs);
          insert_before (g);
          data_in = make_ssa_name (m_limb_type);
          g = gimple_build_assign (data_in, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
          insert_before (g);
        }
      else
        {
          g = gimple_build_assign (data_out, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
        }
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
                           build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}

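/* Per limb this emits a carry chain.  With the uaddc5/usubc5 optab a
   single internal call suffices:
     _8 = .UADDC (a_limb, b_limb, carry_in);  // REALPART sum, IMAGPART carry
   and without it two .ADD_OVERFLOW calls whose IMAGPART overflow bits
   are added to form carry_out; the final plain PLUS_EXPR/MINUS_EXPR
   branch handles the partial most significant limb whose type is
   narrower than m_limb_type.  */
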
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
                               build_int_cst (unsigned_type_node,
                                              limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
        {
          rhs1 = lhs;
          lhs = make_ssa_name (rhs1_type);
          g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
          insert_before (g);
        }
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}

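/* For shift count CNT below limb_prec, each limb combines the current
   limb with the bits shifted out of the previous one, roughly
     out[idx] = (in[idx] << CNT) | (in[idx - 1] >> (limb_prec - CNT));
   where the second term is the DATA_IN/DATA_OUT state carried between
   iterations by prepare_data_in_out.  */
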
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
          /* If lhs has bigger precision than rhs, we can use
             the simple case only if there is a guarantee that
             the most significant limb is handled in straight
             line code.  If m_var_msb (on left shifts) or
             if m_upwards_2limb * limb_prec is equal to
             lhs precision that is not the case.  */
          || (!m_var_msb
              && tree_int_cst_equal (TYPE_SIZE (rhs_type),
                                     TYPE_SIZE (lhs_type))
              && (!m_upwards_2limb
                  || (m_upwards_2limb * limb_prec
                      < TYPE_PRECISION (lhs_type)))))
        {
          rhs1 = handle_operand (rhs1, idx);
          if (tree_fits_uhwi_p (idx))
            {
              tree type = limb_access_type (lhs_type, idx);
              if (!types_compatible_p (type, TREE_TYPE (rhs1)))
                rhs1 = add_cast (type, rhs1);
            }
          return rhs1;
        }
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
        {
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          if (TYPE_UNSIGNED (rhs_type))
            /* No need to keep state between iterations.  */
            ;
          else if (m_upwards && !m_upwards_2limb)
            /* We need to keep state between iterations, but
               not within any loop, everything is straight line
               code with only increasing indexes.  */
            ;
          else if (!m_upwards_2limb)
            {
              unsigned save_data_cnt = m_data_cnt;
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              m_data_cnt = save_data_cnt + 3;
              t = handle_operand (rhs1, size_int (low));
              m_first = false;
              m_data[save_data_cnt + 2]
                = build_int_cst (NULL_TREE, m_data_cnt);
              m_data_cnt = save_data_cnt;
              t = add_cast (signed_type_for (m_limb_type), t);
              tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
              tree n = make_ssa_name (TREE_TYPE (t));
              g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
              insert_before (g);
              m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
              m_gsi = save_gsi;
            }
          else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
            /* We need to keep state between iterations, but
               fortunately not within the loop, only afterwards.  */
            ;
          else
            {
              tree out;
              m_data.truncate (m_data_cnt);
              prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
              m_data.safe_push (NULL_TREE);
            }
        }

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
        {
          if (m_upwards_2limb
              && (m_upwards_2limb * limb_prec
                  <= ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type))))
            {
              rhs1 = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              m_first = save_first;
              return rhs1;
            }
          bool single_comparison
            = low == high || (m_upwards_2limb && (low & 1) == m_first);
          g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
                                 idx, size_int (low), NULL_TREE, NULL_TREE);
          edge edge_true_true, edge_true_false, edge_false;
          if_then_if_then_else (g, (single_comparison ? NULL
                                    : gimple_build_cond (EQ_EXPR, idx,
                                                         size_int (low),
                                                         NULL_TREE,
                                                         NULL_TREE)),
                                profile_probability::likely (),
                                profile_probability::unlikely (),
                                edge_true_true, edge_true_false, edge_false);
          bool save_cast_conditional = m_cast_conditional;
          m_cast_conditional = true;
          m_bitfld_load = 0;
          tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
          if (m_first)
            m_data[save_data_cnt + 2]
              = build_int_cst (NULL_TREE, m_data_cnt);
          tree ext = NULL_TREE;
          tree bitfld = NULL_TREE;
          if (!single_comparison)
            {
              m_gsi = gsi_after_labels (edge_true_true->src);
              m_first = false;
              m_data_cnt = save_data_cnt + 3;
              if (m_bitfld_load)
                {
                  bitfld = m_data[m_bitfld_load];
                  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
                  m_bitfld_load = 0;
                }
              t2 = handle_operand (rhs1, size_int (low));
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
                t2 = add_cast (m_limb_type, t2);
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t2);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                }
            }
          tree t3;
          if (TYPE_UNSIGNED (rhs_type))
            t3 = build_zero_cst (m_limb_type);
          else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
            t3 = m_data[save_data_cnt];
          else
            t3 = m_data[save_data_cnt + 1];
          m_gsi = gsi_after_labels (edge_true_false->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true_false->dest);
          add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
          add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
          if (edge_true_true)
            add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
          if (ext)
            {
              tree t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[save_data_cnt], edge_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
              g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
              insert_before (g);
            }
          if (m_bitfld_load)
            {
              tree t4;
              if (!m_first)
                t4 = m_data[m_bitfld_load + 1];
              else
                t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi,
                           edge_true_true ? bitfld : m_data[m_bitfld_load],
                           edge_true_false, UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[m_bitfld_load + 2],
                           edge_false, UNKNOWN_LOCATION);
              if (edge_true_true)
                add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
                             UNKNOWN_LOCATION);
              m_data[m_bitfld_load] = t4;
              m_data[m_bitfld_load + 2] = t4;
              m_bitfld_load = 0;
            }
          m_cast_conditional = save_cast_conditional;
          m_first = save_first;
          return t;
        }
      else
        {
          if (tree_to_uhwi (idx) < low)
            {
              t = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
            }
          else if (tree_to_uhwi (idx) < high)
            {
              t = handle_operand (rhs1, size_int (low));
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
                t = add_cast (m_limb_type, t);
              tree ext = NULL_TREE;
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                  m_data[save_data_cnt + 1] = ext;
                }
            }
          else
            {
              if (TYPE_UNSIGNED (rhs_type) && m_first)
                {
                  handle_operand (rhs1, size_zero_node);
                  m_data[save_data_cnt + 2]
                    = build_int_cst (NULL_TREE, m_data_cnt);
                }
              else
                m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
              if (TYPE_UNSIGNED (rhs_type))
                t = build_zero_cst (m_limb_type);
              else
                t = m_data[save_data_cnt + 1];
            }
          tree type = limb_access_type (lhs_type, idx);
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_first = save_first;
          return t;
        }
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
           && bitint_precision_kind (lhs_type) >= bitint_prec_large
           && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
         type if this assert fails.  If no target chooses limb mode smaller
         than half of largest supported normal integral type, this will not
         be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
        {
          gimple_stmt_iterator save_gsi = m_gsi;
          m_gsi = m_init_gsi;
          if (gsi_end_p (m_gsi))
            m_gsi = gsi_after_labels (gsi_bb (m_gsi));
          else
            gsi_next (&m_gsi);
          if (TREE_CODE (rhs_type) == BITINT_TYPE
              && bitint_precision_kind (rhs_type) == bitint_prec_middle)
            {
              tree type = NULL_TREE;
              rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
              rhs_type = TREE_TYPE (rhs1);
            }
          r1 = rhs1;
          if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
            r1 = add_cast (m_limb_type, rhs1);
          if (TYPE_PRECISION (rhs_type) > limb_prec)
            {
              g = gimple_build_assign (make_ssa_name (rhs_type),
                                       RSHIFT_EXPR, rhs1,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec));
              insert_before (g);
              r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          if (TYPE_UNSIGNED (rhs_type))
            rext = build_zero_cst (m_limb_type);
          else
            {
              rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
                                       RSHIFT_EXPR, rext,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec - 1));
              insert_before (g);
              rext = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          m_gsi = save_gsi;
        }
      tree t;
      if (m_upwards_2limb)
        {
          if (m_first)
            {
              tree out1, out2;
              prepare_data_in_out (r1, idx, &out1);
              g = gimple_build_assign (m_data[m_data_cnt + 1], rext);
              insert_before (g);
              if (TYPE_PRECISION (rhs_type) > limb_prec)
                {
                  prepare_data_in_out (r2, idx, &out2);
                  g = gimple_build_assign (m_data[m_data_cnt + 3], rext);
                  insert_before (g);
                  m_data.pop ();
                  t = m_data.pop ();
                  m_data[m_data_cnt + 1] = t;
                }
              else
                m_data[m_data_cnt + 1] = rext;
              m_data.safe_push (rext);
              t = m_data[m_data_cnt];
            }
          else if (!tree_fits_uhwi_p (idx))
            t = m_data[m_data_cnt + 1];
          else
            {
              tree type = limb_access_type (lhs_type, idx);
              t = m_data[m_data_cnt + 2];
              if (!useless_type_conversion_p (type, m_limb_type))
                t = add_cast (type, t);
            }
          m_data_cnt += 3;
          return t;
        }
      else if (m_first)
        {
          m_data.safe_push (r1);
          m_data.safe_push (r2);
          m_data.safe_push (rext);
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree type = limb_access_type (lhs_type, idx);
          if (integer_zerop (idx))
            t = m_data[m_data_cnt];
          else if (TYPE_PRECISION (rhs_type) > limb_prec
                   && integer_onep (idx))
            t = m_data[m_data_cnt + 1];
          else
            t = m_data[m_data_cnt + 2];
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_data_cnt += 3;
          return t;
        }
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
                             NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
        {
          g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
                                 NULL_TREE, NULL_TREE);
          insert_before (g);
          edge e5 = split_block (gsi_bb (m_gsi), g);
          e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
          e2 = find_edge (e5->dest, e2->dest);
          e4->probability = profile_probability::unlikely ();
          e5->flags = EDGE_FALSE_VALUE;
          e5->probability = e4->probability.invert ();
        }
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
        add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}

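/* Illustration of the widening case above: for (_BitInt(512)) x with
   signed _BitInt(256) x and 64-bit limbs, limbs 0 to 3 (below LOW) are
   copied from x, the extension limb is computed once, roughly as
     ext = (unsigned long) ((long) x[3] >> 63);
   i.e. all zeros or all ones, and reused for limbs 4 to 7 (>= HIGH),
   with PHIs merging the copy and extension cases when IDX is not
   constant.  */
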
1612 /* Helper function for handle_stmt method, handle a load from memory. */
1614 tree
1615 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1617 tree rhs1 = gimple_assign_rhs1 (stmt);
1618 tree rhs_type = TREE_TYPE (rhs1);
1619 bool eh = stmt_ends_bb_p (stmt);
1620 edge eh_edge = NULL;
1621 gimple *g;
1623 if (eh)
1625 edge_iterator ei;
1626 basic_block bb = gimple_bb (stmt);
1628 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1629 if (eh_edge->flags & EDGE_EH)
1630 break;
1633 if (TREE_CODE (rhs1) == COMPONENT_REF
1634 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1636 tree fld = TREE_OPERAND (rhs1, 1);
1637 /* For little-endian, we can allow as inputs bit-fields
1638 which start at a limb boundary. */
1639 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1640 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1641 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1642 goto normal_load;
1643 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1644 handle it normally for now. */
1645 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1646 goto normal_load;
1647 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1648 poly_int64 bitoffset;
1649 poly_uint64 field_offset, repr_offset;
1650 bool var_field_off = false;
1651 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1652 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1653 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1654 else
1656 bitoffset = 0;
1657 var_field_off = true;
1659 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1660 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1661 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1662 TREE_OPERAND (rhs1, 0), repr,
1663 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1664 HOST_WIDE_INT bo = bitoffset.to_constant ();
1665 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1666 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1667 if (m_first)
1669 if (m_upwards)
1671 gimple_stmt_iterator save_gsi = m_gsi;
1672 m_gsi = m_init_gsi;
1673 if (gsi_end_p (m_gsi))
1674 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1675 else
1676 gsi_next (&m_gsi);
1677 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1678 tree iv = make_ssa_name (m_limb_type);
1679 g = gimple_build_assign (iv, t);
1680 insert_before (g);
1681 if (eh)
1683 maybe_duplicate_eh_stmt (g, stmt);
1684 if (eh_edge)
1686 edge e = split_block (gsi_bb (m_gsi), g);
1687 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1688 = profile_probability::very_unlikely ();
1689 m_init_gsi.bb = e->dest;
1692 m_gsi = save_gsi;
1693 tree out;
1694 prepare_data_in_out (iv, idx, &out);
1695 out = m_data[m_data_cnt];
1696 m_data.safe_push (out);
1698 else
1700 m_data.safe_push (NULL_TREE);
1701 m_data.safe_push (NULL_TREE);
1702 m_data.safe_push (NULL_TREE);
1706 tree nidx0 = NULL_TREE, nidx1;
1707 tree iv = m_data[m_data_cnt];
1708 if (m_cast_conditional && iv)
1710 gcc_assert (!m_bitfld_load);
1711 m_bitfld_load = m_data_cnt;
1713 if (tree_fits_uhwi_p (idx))
1715 unsigned prec = TYPE_PRECISION (rhs_type);
1716 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1717 gcc_assert (i * limb_prec < prec);
1718 nidx1 = size_int (i + bo_idx + 1);
1719 if ((i + 1) * limb_prec > prec)
1721 prec %= limb_prec;
1722 if (prec + bo_bit <= (unsigned) limb_prec)
1723 nidx1 = NULL_TREE;
1725 if (!iv)
1726 nidx0 = size_int (i + bo_idx);
1728 else
1730 if (!iv)
1732 if (bo_idx == 0)
1733 nidx0 = idx;
1734 else
1736 nidx0 = make_ssa_name (sizetype);
1737 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1738 size_int (bo_idx));
1739 insert_before (g);
1742 nidx1 = make_ssa_name (sizetype);
1743 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1744 size_int (bo_idx + 1));
1745 insert_before (g);
1748 tree iv2 = NULL_TREE;
1749 if (nidx0)
1751 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1752 iv = make_ssa_name (m_limb_type);
1753 g = gimple_build_assign (iv, t);
1754 insert_before (g);
1755 gcc_assert (!eh);
1757 if (nidx1)
1759 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1760 unsigned prec = TYPE_PRECISION (rhs_type);
1761 if (conditional)
1763 if ((prec % limb_prec) == 0
1764 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1765 conditional = false;
1767 edge edge_true = NULL, edge_false = NULL;
1768 if (conditional)
1770 g = gimple_build_cond (NE_EXPR, idx,
1771 size_int (prec / limb_prec),
1772 NULL_TREE, NULL_TREE);
1773 if_then (g, profile_probability::likely (),
1774 edge_true, edge_false);
1776 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1777 if (m_upwards_2limb
1778 && !m_first
1779 && !m_bitfld_load
1780 && !tree_fits_uhwi_p (idx))
1781 iv2 = m_data[m_data_cnt + 1];
1782 else
1783 iv2 = make_ssa_name (m_limb_type);
1784 g = gimple_build_assign (iv2, t);
1785 insert_before (g);
1786 if (eh)
1788 maybe_duplicate_eh_stmt (g, stmt);
1789 if (eh_edge)
1791 edge e = split_block (gsi_bb (m_gsi), g);
1792 m_gsi = gsi_after_labels (e->dest);
1793 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1794 = profile_probability::very_unlikely ();
1797 if (conditional)
1799 tree iv3 = make_ssa_name (m_limb_type);
1800 if (eh)
1801 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1802 gphi *phi = create_phi_node (iv3, edge_true->dest);
1803 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1804 add_phi_arg (phi, build_zero_cst (m_limb_type),
1805 edge_false, UNKNOWN_LOCATION);
1806 m_gsi = gsi_after_labels (edge_true->dest);
1809 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1810 iv, build_int_cst (unsigned_type_node, bo_bit));
1811 insert_before (g);
1812 iv = gimple_assign_lhs (g);
1813 if (iv2)
1815 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1816 iv2, build_int_cst (unsigned_type_node,
1817 limb_prec - bo_bit));
1818 insert_before (g);
1819 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1820 gimple_assign_lhs (g), iv);
1821 insert_before (g);
1822 iv = gimple_assign_lhs (g);
1823 if (m_data[m_data_cnt])
1824 m_data[m_data_cnt] = iv2;
1826 if (tree_fits_uhwi_p (idx))
1828 tree atype = limb_access_type (rhs_type, idx);
1829 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1830 iv = add_cast (atype, iv);
1832 m_data_cnt += 3;
1833 return iv;
1836 normal_load:
1837 /* Use write_p = true for loads with EH edges to make
1838 sure limb_access doesn't add a cast as a separate
1839 statement after it.  */
1840 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1841 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1842 g = gimple_build_assign (ret, rhs1);
1843 insert_before (g);
1844 if (eh)
1846 maybe_duplicate_eh_stmt (g, stmt);
1847 if (eh_edge)
1849 edge e = split_block (gsi_bb (m_gsi), g);
1850 m_gsi = gsi_after_labels (e->dest);
1851 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1852 = profile_probability::very_unlikely ();
1854 if (tree_fits_uhwi_p (idx))
1856 tree atype = limb_access_type (rhs_type, idx);
1857 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1858 ret = add_cast (atype, ret);
1861 return ret;
1864 /* Return limb IDX of the lowered value of a mergeable statement STMT.  */
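/* As an illustration (hypothetical operands): for a mergeable bitwise
   statement such as c = a & b on _BitInt(256) with 64-bit limbs, this
   is invoked with idx 0 .. 3 and each invocation emits roughly
     c_limb = a[idx] & b[idx];
   with handle_operand providing the per-limb accesses and the caller
   storing the returned value.  */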
1866 tree
1867 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1869 tree lhs, rhs1, rhs2 = NULL_TREE;
1870 gimple *g;
1871 switch (gimple_code (stmt))
1873 case GIMPLE_ASSIGN:
1874 if (gimple_assign_load_p (stmt))
1875 return handle_load (stmt, idx);
1876 switch (gimple_assign_rhs_code (stmt))
1878 case BIT_AND_EXPR:
1879 case BIT_IOR_EXPR:
1880 case BIT_XOR_EXPR:
1881 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1882 /* FALLTHRU */
1883 case BIT_NOT_EXPR:
1884 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1885 lhs = make_ssa_name (TREE_TYPE (rhs1));
1886 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1887 rhs1, rhs2);
1888 insert_before (g);
1889 return lhs;
1890 case PLUS_EXPR:
1891 case MINUS_EXPR:
1892 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1893 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1894 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1895 rhs1, rhs2, idx);
1896 case NEGATE_EXPR:
1897 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1898 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1899 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1900 case LSHIFT_EXPR:
1901 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1902 idx),
1903 gimple_assign_rhs2 (stmt), idx);
1904 case SSA_NAME:
1905 case INTEGER_CST:
1906 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1907 CASE_CONVERT:
1908 case VIEW_CONVERT_EXPR:
1909 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1910 gimple_assign_rhs1 (stmt), idx);
1911 default:
1912 break;
1914 break;
1915 default:
1916 break;
1918 gcc_unreachable ();
1921 /* Return the minimum precision of OP at STMT.
1922 A positive value is the minimum precision above which all bits
1923 are zero; a negative value means all bits above the negation of
1924 the value are copies of the sign bit.  */
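/* Worked example (with assumed ranges): if the range query proves OP
   is in [0, 1000], the result is 10, as bits 10 and above are known
   to be zero; if OP is in [-5, 5], the result is -4, as the value
   fits into 4 bits and bits 4 and above are copies of the sign bit.  */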
1926 static int
1927 range_to_prec (tree op, gimple *stmt)
1929 int_range_max r;
1930 wide_int w;
1931 tree type = TREE_TYPE (op);
1932 unsigned int prec = TYPE_PRECISION (type);
1934 if (!optimize
1935 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1936 || r.undefined_p ())
1938 if (TYPE_UNSIGNED (type))
1939 return prec;
1940 else
1941 return -prec;
1944 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1946 w = r.lower_bound ();
1947 if (wi::neg_p (w))
1949 int min_prec1 = wi::min_precision (w, SIGNED);
1950 w = r.upper_bound ();
1951 int min_prec2 = wi::min_precision (w, SIGNED);
1952 int min_prec = MAX (min_prec1, min_prec2);
1953 return MIN (-min_prec, -2);
1957 w = r.upper_bound ();
1958 int min_prec = wi::min_precision (w, UNSIGNED);
1959 return MAX (min_prec, 1);
1962 /* Return the address of the first limb of OP and write its
1963 precision into *PREC.  If positive, the operand is zero-extended
1964 from that precision; if negative, the operand is sign-extended
1965 from -*PREC.  If PREC_STORED is NULL, this is the toplevel call;
1966 otherwise *PREC_STORED is the precision from the innermost call
1967 without range optimizations.  */
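/* E.g. (a hypothetical call) for the INTEGER_CST 5 this returns the
   address of a constant pool entry holding 5 extended to a whole
   number of limbs and sets *PREC to 3, the minimum unsigned
   precision of that constant.  */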
1969 tree
1970 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
1971 int *prec_stored, int *prec)
1973 wide_int w;
1974 location_t loc_save = m_loc;
1975 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
1976 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
1977 && TREE_CODE (op) != INTEGER_CST)
1979 do_int:
1980 *prec = range_to_prec (op, stmt);
1981 bitint_prec_kind kind = bitint_prec_small;
1982 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
1983 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
1984 kind = bitint_precision_kind (TREE_TYPE (op));
1985 if (kind == bitint_prec_middle)
1987 tree type = NULL_TREE;
1988 op = maybe_cast_middle_bitint (&m_gsi, op, type);
1990 tree op_type = TREE_TYPE (op);
1991 unsigned HOST_WIDE_INT nelts
1992 = CEIL (TYPE_PRECISION (op_type), limb_prec);
1993 /* Add support for 3 or more limbs filled in from a normal
1994 integral type if this assert fails.  If no target chooses a
1995 limb mode smaller than half of the largest supported normal
1996 integral type, this will not be needed.  */
1997 gcc_assert (nelts <= 2);
1998 if (prec_stored)
1999 *prec_stored = (TYPE_UNSIGNED (op_type)
2000 ? TYPE_PRECISION (op_type)
2001 : -TYPE_PRECISION (op_type));
2002 if (*prec <= limb_prec && *prec >= -limb_prec)
2004 nelts = 1;
2005 if (prec_stored)
2007 if (TYPE_UNSIGNED (op_type))
2009 if (*prec_stored > limb_prec)
2010 *prec_stored = limb_prec;
2012 else if (*prec_stored < -limb_prec)
2013 *prec_stored = -limb_prec;
2016 tree atype = build_array_type_nelts (m_limb_type, nelts);
2017 tree var = create_tmp_var (atype);
2018 tree t1 = op;
2019 if (!useless_type_conversion_p (m_limb_type, op_type))
2020 t1 = add_cast (m_limb_type, t1);
2021 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2022 NULL_TREE, NULL_TREE);
2023 gimple *g = gimple_build_assign (v, t1);
2024 insert_before (g);
2025 if (nelts > 1)
2027 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2028 g = gimple_build_assign (make_ssa_name (op_type),
2029 RSHIFT_EXPR, op, lp);
2030 insert_before (g);
2031 tree t2 = gimple_assign_lhs (g);
2032 t2 = add_cast (m_limb_type, t2);
2033 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2034 NULL_TREE, NULL_TREE);
2035 g = gimple_build_assign (v, t2);
2036 insert_before (g);
2038 tree ret = build_fold_addr_expr (var);
2039 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2041 tree clobber = build_clobber (atype, CLOBBER_EOL);
2042 g = gimple_build_assign (var, clobber);
2043 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2045 m_loc = loc_save;
2046 return ret;
2048 switch (TREE_CODE (op))
2050 case SSA_NAME:
2051 if (m_names == NULL
2052 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2054 gimple *g = SSA_NAME_DEF_STMT (op);
2055 tree ret;
2056 m_loc = gimple_location (g);
2057 if (gimple_assign_load_p (g))
2059 *prec = range_to_prec (op, NULL);
2060 if (prec_stored)
2061 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2062 ? TYPE_PRECISION (TREE_TYPE (op))
2063 : -TYPE_PRECISION (TREE_TYPE (op)));
2064 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2065 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2066 NULL_TREE, true, GSI_SAME_STMT);
2068 else if (gimple_code (g) == GIMPLE_NOP)
2070 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2071 if (prec_stored)
2072 *prec_stored = *prec;
2073 tree var = create_tmp_var (m_limb_type);
2074 TREE_ADDRESSABLE (var) = 1;
2075 ret = build_fold_addr_expr (var);
2076 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2078 tree clobber = build_clobber (m_limb_type, CLOBBER_EOL);
2079 g = gimple_build_assign (var, clobber);
2080 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2083 else
2085 gcc_assert (gimple_assign_cast_p (g));
2086 tree rhs1 = gimple_assign_rhs1 (g);
2087 bitint_prec_kind kind = bitint_prec_small;
2088 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2089 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2090 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2091 if (kind >= bitint_prec_large)
2093 tree lhs_type = TREE_TYPE (op);
2094 tree rhs_type = TREE_TYPE (rhs1);
2095 int prec_stored_val = 0;
2096 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2097 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2099 if (TYPE_UNSIGNED (lhs_type)
2100 && !TYPE_UNSIGNED (rhs_type))
2101 gcc_assert (*prec >= 0 || prec_stored == NULL);
2103 else
2105 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2107 else if (TYPE_UNSIGNED (lhs_type))
2109 gcc_assert (*prec > 0
2110 || prec_stored_val > 0
2111 || (-prec_stored_val
2112 >= TYPE_PRECISION (lhs_type)));
2113 *prec = TYPE_PRECISION (lhs_type);
2115 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2117 else
2118 *prec = -TYPE_PRECISION (lhs_type);
2121 else
2123 op = rhs1;
2124 stmt = g;
2125 goto do_int;
2128 m_loc = loc_save;
2129 return ret;
2131 else
2133 int p = var_to_partition (m_map, op);
2134 gcc_assert (m_vars[p] != NULL_TREE);
2135 *prec = range_to_prec (op, stmt);
2136 if (prec_stored)
2137 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2138 ? TYPE_PRECISION (TREE_TYPE (op))
2139 : -TYPE_PRECISION (TREE_TYPE (op)));
2140 return build_fold_addr_expr (m_vars[p]);
2142 case INTEGER_CST:
2143 unsigned int min_prec, mp;
2144 tree type;
2145 w = wi::to_wide (op);
2146 if (tree_int_cst_sgn (op) >= 0)
2148 min_prec = wi::min_precision (w, UNSIGNED);
2149 *prec = MAX (min_prec, 1);
2151 else
2153 min_prec = wi::min_precision (w, SIGNED);
2154 *prec = MIN ((int) -min_prec, -2);
2156 mp = CEIL (min_prec, limb_prec) * limb_prec;
2157 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2158 type = TREE_TYPE (op);
2159 else
2160 type = build_bitint_type (mp, 1);
2161 if (TREE_CODE (type) != BITINT_TYPE
2162 || bitint_precision_kind (type) == bitint_prec_small)
2164 if (TYPE_PRECISION (type) <= limb_prec)
2165 type = m_limb_type;
2166 else
2167 /* This case is for targets which e.g. have a 64-bit
2168 limb but categorize up to 128-bit _BitInts as
2169 small.  We could use the type of m_limb_type[2] and
2170 similar instead to save space.  */
2171 type = build_bitint_type (mid_min_prec, 1);
2173 if (prec_stored)
2175 if (tree_int_cst_sgn (op) >= 0)
2176 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2177 else
2178 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2180 op = tree_output_constant_def (fold_convert (type, op));
2181 return build_fold_addr_expr (op);
2182 default:
2183 gcc_unreachable ();
2187 /* Helper function to create a loop before the current location,
2188 starting with the sizetype INIT value on the preheader edge.  Return
2189 the PHI result and set *IDX_NEXT to the SSA_NAME it creates and
2190 uses on the latch edge.  */
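/* A sketch of the control flow this emits (the exit condition and the
   computation of idx_next are left to the caller):

     preheader:
       goto header;
     header:
       idx = PHI <INIT (preheader), idx_next (latch)>
       ... loop body inserted at the returned m_gsi ...
       if (<caller's condition>) goto header; else goto exit;  */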
2192 tree
2193 bitint_large_huge::create_loop (tree init, tree *idx_next)
2195 if (!gsi_end_p (m_gsi))
2196 gsi_prev (&m_gsi);
2197 else
2198 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2199 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2200 edge e2 = split_block (e1->dest, (gimple *) NULL);
2201 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2202 e3->probability = profile_probability::very_unlikely ();
2203 e2->flags = EDGE_FALSE_VALUE;
2204 e2->probability = e3->probability.invert ();
2205 tree idx = make_ssa_name (sizetype);
2206 gphi *phi = create_phi_node (idx, e1->dest);
2207 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2208 *idx_next = make_ssa_name (sizetype);
2209 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2210 m_gsi = gsi_after_labels (e1->dest);
2211 m_bb = e1->dest;
2212 m_preheader_bb = e1->src;
2213 class loop *loop = alloc_loop ();
2214 loop->header = e1->dest;
2215 add_loop (loop, e1->src->loop_father);
2216 return idx;
2219 /* Lower a large/huge _BitInt mergeable or similar statement STMT which
2220 can be lowered using iteration from the least significant limb up to
2221 the most significant limb.  For large _BitInt it is emitted as straight
2222 line code before the current location, for huge _BitInt as a loop
2223 handling two limbs at once, followed by up to two limbs handled in
2224 straight line code (at most one full and one partial limb).  It can
2225 also handle EQ_EXPR/NE_EXPR comparisons, in which case CMP_CODE should
2226 be the comparison code and CMP_OP1/CMP_OP2 the comparison operands.  */
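/* E.g. (an illustrative sketch with hypothetical operands) a store
   c = a + b with _BitInt(575) operands handled as bitint_prec_huge
   and 64-bit limbs is lowered roughly as
     for (idx = 0; idx != 8; idx += 2)
       {
         c[idx] = a[idx] + b[idx];              // plus carry handling
         c[idx + 1] = a[idx + 1] + b[idx + 1];
       }
     c[8] = a[8] + b[8];        // partial most significant limb
   with handle_stmt/handle_operand emitting the per-limb work.  */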
2228 tree
2229 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2230 tree cmp_op1, tree cmp_op2)
2232 bool eq_p = cmp_code != ERROR_MARK;
2233 tree type;
2234 if (eq_p)
2235 type = TREE_TYPE (cmp_op1);
2236 else
2237 type = TREE_TYPE (gimple_assign_lhs (stmt));
2238 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2239 bitint_prec_kind kind = bitint_precision_kind (type);
2240 gcc_assert (kind >= bitint_prec_large);
2241 gimple *g;
2242 tree lhs = gimple_get_lhs (stmt);
2243 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2244 if (lhs
2245 && TREE_CODE (lhs) == SSA_NAME
2246 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2247 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2249 int p = var_to_partition (m_map, lhs);
2250 gcc_assert (m_vars[p] != NULL_TREE);
2251 m_lhs = lhs = m_vars[p];
2253 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2254 bool sext = false;
2255 tree ext = NULL_TREE, store_operand = NULL_TREE;
2256 bool eh = false;
2257 basic_block eh_pad = NULL;
2258 tree nlhs = NULL_TREE;
2259 unsigned HOST_WIDE_INT bo_idx = 0;
2260 unsigned HOST_WIDE_INT bo_bit = 0;
2261 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2262 if (gimple_store_p (stmt))
2264 store_operand = gimple_assign_rhs1 (stmt);
2265 eh = stmt_ends_bb_p (stmt);
2266 if (eh)
2268 edge e;
2269 edge_iterator ei;
2270 basic_block bb = gimple_bb (stmt);
2272 FOR_EACH_EDGE (e, ei, bb->succs)
2273 if (e->flags & EDGE_EH)
2275 eh_pad = e->dest;
2276 break;
2279 if (TREE_CODE (lhs) == COMPONENT_REF
2280 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2282 tree fld = TREE_OPERAND (lhs, 1);
2283 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2284 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2285 poly_int64 bitoffset;
2286 poly_uint64 field_offset, repr_offset;
2287 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2288 nlhs = lhs;
2289 else
2291 bool var_field_off = false;
2292 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2293 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2294 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2295 else
2297 bitoffset = 0;
2298 var_field_off = true;
2300 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2301 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2302 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2303 TREE_OPERAND (lhs, 0), repr,
2304 var_field_off
2305 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2306 HOST_WIDE_INT bo = bitoffset.to_constant ();
2307 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2308 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2312 if ((store_operand
2313 && TREE_CODE (store_operand) == SSA_NAME
2314 && (m_names == NULL
2315 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2316 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2317 || gimple_assign_cast_p (stmt))
2319 rhs1 = gimple_assign_rhs1 (store_operand
2320 ? SSA_NAME_DEF_STMT (store_operand)
2321 : stmt);
2322 /* Optimize mergeable ops ending with a widening cast to _BitInt
2323 (or followed by a store).  We can lower just the limbs of the
2324 cast operand and widen afterwards.  */
2325 if (TREE_CODE (rhs1) == SSA_NAME
2326 && (m_names == NULL
2327 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2328 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2329 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2330 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2331 limb_prec) < CEIL (prec, limb_prec)
2332 || (kind == bitint_prec_huge
2333 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2335 store_operand = rhs1;
2336 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2337 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2338 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2339 sext = true;
2342 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2343 if (kind == bitint_prec_large)
2344 cnt = CEIL (prec, limb_prec);
2345 else
2347 rem = (prec % (2 * limb_prec));
2348 end = (prec - rem) / limb_prec;
2349 cnt = 2 + CEIL (rem, limb_prec);
2350 idx = idx_first = create_loop (size_zero_node, &idx_next);
2353 basic_block edge_bb = NULL;
2354 if (eq_p)
2356 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2357 gsi_prev (&gsi);
2358 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2359 edge_bb = e->src;
2360 if (kind == bitint_prec_large)
2362 m_gsi = gsi_last_bb (edge_bb);
2363 if (!gsi_end_p (m_gsi))
2364 gsi_next (&m_gsi);
2367 else
2368 m_after_stmt = stmt;
2369 if (kind != bitint_prec_large)
2370 m_upwards_2limb = end;
2371 m_upwards = true;
2373 bool separate_ext
2374 = (prec != (unsigned) TYPE_PRECISION (type)
2375 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2376 > CEIL (prec, limb_prec)));
2378 for (unsigned i = 0; i < cnt; i++)
2380 m_data_cnt = 0;
2381 if (kind == bitint_prec_large)
2382 idx = size_int (i);
2383 else if (i >= 2)
2384 idx = size_int (end + (i > 2));
2385 if (eq_p)
2387 rhs1 = handle_operand (cmp_op1, idx);
2388 tree rhs2 = handle_operand (cmp_op2, idx);
2389 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2390 insert_before (g);
2391 edge e1 = split_block (gsi_bb (m_gsi), g);
2392 e1->flags = EDGE_FALSE_VALUE;
2393 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2394 e1->probability = profile_probability::unlikely ();
2395 e2->probability = e1->probability.invert ();
2396 if (i == 0)
2397 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2398 m_gsi = gsi_after_labels (e1->dest);
2400 else
2402 if (store_operand)
2403 rhs1 = handle_operand (store_operand, idx);
2404 else
2405 rhs1 = handle_stmt (stmt, idx);
2406 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2407 rhs1 = add_cast (m_limb_type, rhs1);
2408 if (sext && i == cnt - 1)
2409 ext = rhs1;
2410 tree nidx = idx;
2411 if (bo_idx)
2413 if (tree_fits_uhwi_p (idx))
2414 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2415 else
2417 nidx = make_ssa_name (sizetype);
2418 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2419 size_int (bo_idx));
2420 insert_before (g);
2423 bool done = false;
2424 basic_block new_bb = NULL;
2425 /* Handle stores into bit-fields. */
2426 if (bo_bit)
2428 if (i == 0)
2430 edge e2 = NULL;
2431 if (kind != bitint_prec_large)
2433 prepare_data_in_out (build_zero_cst (m_limb_type),
2434 idx, &bf_next);
2435 bf_next = m_data.pop ();
2436 bf_cur = m_data.pop ();
2437 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2438 NULL_TREE, NULL_TREE);
2439 edge edge_true;
2440 if_then_else (g, profile_probability::unlikely (),
2441 edge_true, e2);
2442 new_bb = e2->dest;
2444 tree ftype
2445 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2446 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2447 bitsize_int (limb_prec - bo_bit),
2448 bitsize_int (bo_idx * limb_prec + bo_bit));
2449 tree t = add_cast (ftype, rhs1);
2450 g = gimple_build_assign (bfr, t);
2451 insert_before (g);
2452 if (eh)
2454 maybe_duplicate_eh_stmt (g, stmt);
2455 if (eh_pad)
2457 edge e = split_block (gsi_bb (m_gsi), g);
2458 m_gsi = gsi_after_labels (e->dest);
2459 make_edge (e->src, eh_pad, EDGE_EH)->probability
2460 = profile_probability::very_unlikely ();
2463 if (kind == bitint_prec_large)
2465 bf_cur = rhs1;
2466 done = true;
2468 else if (e2)
2469 m_gsi = gsi_after_labels (e2->src);
2471 if (!done)
2473 tree t1 = make_ssa_name (m_limb_type);
2474 tree t2 = make_ssa_name (m_limb_type);
2475 tree t3 = make_ssa_name (m_limb_type);
2476 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2477 build_int_cst (unsigned_type_node,
2478 limb_prec - bo_bit));
2479 insert_before (g);
2480 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2481 build_int_cst (unsigned_type_node,
2482 bo_bit));
2483 insert_before (g);
2484 bf_cur = rhs1;
2485 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2486 insert_before (g);
2487 rhs1 = t3;
2488 if (bf_next && i == 1)
2490 g = gimple_build_assign (bf_next, bf_cur);
2491 insert_before (g);
2495 if (!done)
2497 /* Handle bit-field access to partial last limb if needed. */
2498 if (nlhs
2499 && i == cnt - 1
2500 && !separate_ext
2501 && tree_fits_uhwi_p (idx))
2503 unsigned int tprec = TYPE_PRECISION (type);
2504 unsigned int rprec = tprec % limb_prec;
2505 if (rprec + bo_bit < (unsigned) limb_prec)
2507 tree ftype
2508 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2509 tree bfr = build3 (BIT_FIELD_REF, ftype,
2510 unshare_expr (nlhs),
2511 bitsize_int (rprec + bo_bit),
2512 bitsize_int ((bo_idx
2513 + tprec / limb_prec)
2514 * limb_prec));
2515 tree t = add_cast (ftype, rhs1);
2516 g = gimple_build_assign (bfr, t);
2517 done = true;
2518 bf_cur = NULL_TREE;
2520 else if (rprec + bo_bit == (unsigned) limb_prec)
2521 bf_cur = NULL_TREE;
2523 /* Otherwise, stores to any other lhs. */
2524 if (!done)
2526 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2527 nidx, true);
2528 g = gimple_build_assign (l, rhs1);
2530 insert_before (g);
2531 if (eh)
2533 maybe_duplicate_eh_stmt (g, stmt);
2534 if (eh_pad)
2536 edge e = split_block (gsi_bb (m_gsi), g);
2537 m_gsi = gsi_after_labels (e->dest);
2538 make_edge (e->src, eh_pad, EDGE_EH)->probability
2539 = profile_probability::very_unlikely ();
2542 if (new_bb)
2543 m_gsi = gsi_after_labels (new_bb);
2546 m_first = false;
2547 if (kind == bitint_prec_huge && i <= 1)
2549 if (i == 0)
2551 idx = make_ssa_name (sizetype);
2552 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2553 size_one_node);
2554 insert_before (g);
2556 else
2558 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2559 size_int (2));
2560 insert_before (g);
2561 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2562 NULL_TREE, NULL_TREE);
2563 insert_before (g);
2564 if (eq_p)
2565 m_gsi = gsi_after_labels (edge_bb);
2566 else
2567 m_gsi = gsi_for_stmt (stmt);
2572 if (separate_ext)
2574 if (sext)
2576 ext = add_cast (signed_type_for (m_limb_type), ext);
2577 tree lpm1 = build_int_cst (unsigned_type_node,
2578 limb_prec - 1);
2579 tree n = make_ssa_name (TREE_TYPE (ext));
2580 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2581 insert_before (g);
2582 ext = add_cast (m_limb_type, n);
2584 else
2585 ext = build_zero_cst (m_limb_type);
2586 kind = bitint_precision_kind (type);
2587 unsigned start = CEIL (prec, limb_prec);
2588 prec = TYPE_PRECISION (type);
2589 idx = idx_first = idx_next = NULL_TREE;
2590 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2591 kind = bitint_prec_large;
2592 if (kind == bitint_prec_large)
2593 cnt = CEIL (prec, limb_prec) - start;
2594 else
2596 rem = prec % limb_prec;
2597 end = (prec - rem) / limb_prec;
2598 cnt = (bo_bit != 0) + 1 + (rem != 0);
2600 for (unsigned i = 0; i < cnt; i++)
2602 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2603 idx = size_int (start + i);
2604 else if (i == cnt - 1)
2605 idx = size_int (end);
2606 else if (i == (bo_bit != 0))
2607 idx = create_loop (size_int (start + i), &idx_next);
2608 rhs1 = ext;
2609 if (bf_cur != NULL_TREE && bf_cur != ext)
2611 tree t1 = make_ssa_name (m_limb_type);
2612 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2613 build_int_cst (unsigned_type_node,
2614 limb_prec - bo_bit));
2615 insert_before (g);
2616 if (integer_zerop (ext))
2617 rhs1 = t1;
2618 else
2620 tree t2 = make_ssa_name (m_limb_type);
2621 rhs1 = make_ssa_name (m_limb_type);
2622 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2623 build_int_cst (unsigned_type_node,
2624 bo_bit));
2625 insert_before (g);
2626 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2627 insert_before (g);
2629 bf_cur = ext;
2631 tree nidx = idx;
2632 if (bo_idx)
2634 if (tree_fits_uhwi_p (idx))
2635 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2636 else
2638 nidx = make_ssa_name (sizetype);
2639 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2640 size_int (bo_idx));
2641 insert_before (g);
2644 bool done = false;
2645 /* Handle bit-field access to partial last limb if needed. */
2646 if (nlhs && i == cnt - 1)
2648 unsigned int tprec = TYPE_PRECISION (type);
2649 unsigned int rprec = tprec % limb_prec;
2650 if (rprec + bo_bit < (unsigned) limb_prec)
2652 tree ftype
2653 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2654 tree bfr = build3 (BIT_FIELD_REF, ftype,
2655 unshare_expr (nlhs),
2656 bitsize_int (rprec + bo_bit),
2657 bitsize_int ((bo_idx + tprec / limb_prec)
2658 * limb_prec));
2659 tree t = add_cast (ftype, rhs1);
2660 g = gimple_build_assign (bfr, t);
2661 done = true;
2662 bf_cur = NULL_TREE;
2664 else if (rprec + bo_bit == (unsigned) limb_prec)
2665 bf_cur = NULL_TREE;
2667 /* Otherwise, stores to any other lhs. */
2668 if (!done)
2670 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2671 g = gimple_build_assign (l, rhs1);
2673 insert_before (g);
2674 if (eh)
2676 maybe_duplicate_eh_stmt (g, stmt);
2677 if (eh_pad)
2679 edge e = split_block (gsi_bb (m_gsi), g);
2680 m_gsi = gsi_after_labels (e->dest);
2681 make_edge (e->src, eh_pad, EDGE_EH)->probability
2682 = profile_probability::very_unlikely ();
2685 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2687 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2688 size_one_node);
2689 insert_before (g);
2690 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2691 NULL_TREE, NULL_TREE);
2692 insert_before (g);
2693 m_gsi = gsi_for_stmt (stmt);
2697 if (bf_cur != NULL_TREE)
2699 unsigned int tprec = TYPE_PRECISION (type);
2700 unsigned int rprec = tprec % limb_prec;
2701 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2702 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2703 bitsize_int (rprec + bo_bit),
2704 bitsize_int ((bo_idx + tprec / limb_prec)
2705 * limb_prec));
2706 rhs1 = bf_cur;
2707 if (bf_cur != ext)
2709 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2710 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2711 build_int_cst (unsigned_type_node,
2712 limb_prec - bo_bit));
2713 insert_before (g);
2715 rhs1 = add_cast (ftype, rhs1);
2716 g = gimple_build_assign (bfr, rhs1);
2717 insert_before (g);
2718 if (eh)
2720 maybe_duplicate_eh_stmt (g, stmt);
2721 if (eh_pad)
2723 edge e = split_block (gsi_bb (m_gsi), g);
2724 m_gsi = gsi_after_labels (e->dest);
2725 make_edge (e->src, eh_pad, EDGE_EH)->probability
2726 = profile_probability::very_unlikely ();
2731 if (gimple_store_p (stmt))
2733 unlink_stmt_vdef (stmt);
2734 release_ssa_name (gimple_vdef (stmt));
2735 gsi_remove (&m_gsi, true);
2737 if (eq_p)
2739 lhs = make_ssa_name (boolean_type_node);
2740 basic_block bb = gimple_bb (stmt);
2741 gphi *phi = create_phi_node (lhs, bb);
2742 edge e = find_edge (gsi_bb (m_gsi), bb);
2743 unsigned int n = EDGE_COUNT (bb->preds);
2744 for (unsigned int i = 0; i < n; i++)
2746 edge e2 = EDGE_PRED (bb, i);
2747 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2748 e2, UNKNOWN_LOCATION);
2750 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2751 return lhs;
2753 else
2754 return NULL_TREE;
2757 /* Handle a large/huge _BitInt comparison statement STMT other than
2758 EQ_EXPR/NE_EXPR.  The meaning of CMP_CODE, CMP_OP1 and CMP_OP2 is
2759 the same as in lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR
2760 comparisons are lowered by iteration from the most significant limb
2761 downwards to the least significant one, for large _BitInt in straight
2762 line code, otherwise with the most significant limb handled in
2763 straight line code followed by a loop handling one limb at a time.
2764 Comparisons with unsigned huge _BitInt with precisions which are
2765 multiples of limb precision can use just the loop and don't need to
2766 handle the most significant limb before the loop.  The loop or
2767 straight line code jumps to the final basic block if a particular
2768 pair of limbs is not equal.  */
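/* Illustrative shape (hypothetical operands) for a < b on unsigned
   _BitInt(512) with 64-bit limbs, where the precision is a multiple
   of limb_prec and so no limb is handled before the loop:
     for (idx = 7; ; --idx)
       {
         if (a[idx] > b[idx]) goto done;        // result false
         if (a[idx] < b[idx]) goto done;        // result true
         if (idx == 0) break;
       }
     // all limbs equal: result false
   with the result selected by a PHI in the final basic block.  */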
2770 tree
2771 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2772 tree cmp_op1, tree cmp_op2)
2774 tree type = TREE_TYPE (cmp_op1);
2775 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2776 bitint_prec_kind kind = bitint_precision_kind (type);
2777 gcc_assert (kind >= bitint_prec_large);
2778 gimple *g;
2779 if (!TYPE_UNSIGNED (type)
2780 && integer_zerop (cmp_op2)
2781 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2783 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2784 tree idx = size_int (end);
2785 m_data_cnt = 0;
2786 tree rhs1 = handle_operand (cmp_op1, idx);
2787 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2789 tree stype = signed_type_for (TREE_TYPE (rhs1));
2790 rhs1 = add_cast (stype, rhs1);
2792 tree lhs = make_ssa_name (boolean_type_node);
2793 g = gimple_build_assign (lhs, cmp_code, rhs1,
2794 build_zero_cst (TREE_TYPE (rhs1)));
2795 insert_before (g);
2796 cmp_code = NE_EXPR;
2797 return lhs;
2800 unsigned cnt, rem = 0, end = 0;
2801 tree idx = NULL_TREE, idx_next = NULL_TREE;
2802 if (kind == bitint_prec_large)
2803 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2804 else
2806 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2807 if (rem == 0 && !TYPE_UNSIGNED (type))
2808 rem = limb_prec;
2809 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2810 cnt = 1 + (rem != 0);
2813 basic_block edge_bb = NULL;
2814 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2815 gsi_prev (&gsi);
2816 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2817 edge_bb = e->src;
2818 m_gsi = gsi_last_bb (edge_bb);
2819 if (!gsi_end_p (m_gsi))
2820 gsi_next (&m_gsi);
2822 edge *edges = XALLOCAVEC (edge, cnt * 2);
2823 for (unsigned i = 0; i < cnt; i++)
2825 m_data_cnt = 0;
2826 if (kind == bitint_prec_large)
2827 idx = size_int (cnt - i - 1);
2828 else if (i == cnt - 1)
2829 idx = create_loop (size_int (end - 1), &idx_next);
2830 else
2831 idx = size_int (end);
2832 tree rhs1 = handle_operand (cmp_op1, idx);
2833 tree rhs2 = handle_operand (cmp_op2, idx);
2834 if (i == 0
2835 && !TYPE_UNSIGNED (type)
2836 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2838 tree stype = signed_type_for (TREE_TYPE (rhs1));
2839 rhs1 = add_cast (stype, rhs1);
2840 rhs2 = add_cast (stype, rhs2);
2842 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2843 insert_before (g);
2844 edge e1 = split_block (gsi_bb (m_gsi), g);
2845 e1->flags = EDGE_FALSE_VALUE;
2846 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2847 e1->probability = profile_probability::likely ();
2848 e2->probability = e1->probability.invert ();
2849 if (i == 0)
2850 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2851 m_gsi = gsi_after_labels (e1->dest);
2852 edges[2 * i] = e2;
2853 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2854 insert_before (g);
2855 e1 = split_block (gsi_bb (m_gsi), g);
2856 e1->flags = EDGE_FALSE_VALUE;
2857 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2858 e1->probability = profile_probability::unlikely ();
2859 e2->probability = e1->probability.invert ();
2860 m_gsi = gsi_after_labels (e1->dest);
2861 edges[2 * i + 1] = e2;
2862 m_first = false;
2863 if (kind == bitint_prec_huge && i == cnt - 1)
2865 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2866 insert_before (g);
2867 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2868 NULL_TREE, NULL_TREE);
2869 insert_before (g);
2870 edge true_edge, false_edge;
2871 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2872 &true_edge, &false_edge);
2873 m_gsi = gsi_after_labels (false_edge->dest);
2877 tree lhs = make_ssa_name (boolean_type_node);
2878 basic_block bb = gimple_bb (stmt);
2879 gphi *phi = create_phi_node (lhs, bb);
2880 for (unsigned int i = 0; i < cnt * 2; i++)
2882 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2883 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2884 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2886 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2887 ? boolean_true_node : boolean_false_node,
2888 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2889 cmp_code = NE_EXPR;
2890 return lhs;
2893 /* Lower large/huge _BitInt left and right shifts except for left
2894 shifts by a constant smaller than limb_prec.  */
2896 void
2897 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2899 tree rhs1 = gimple_assign_rhs1 (stmt);
2900 tree lhs = gimple_assign_lhs (stmt);
2901 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2902 tree type = TREE_TYPE (rhs1);
2903 gimple *final_stmt = gsi_stmt (m_gsi);
2904 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2905 && bitint_precision_kind (type) >= bitint_prec_large);
2906 int prec = TYPE_PRECISION (type);
2907 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2908 gimple *g;
2909 if (obj == NULL_TREE)
2911 int part = var_to_partition (m_map, lhs);
2912 gcc_assert (m_vars[part] != NULL_TREE);
2913 obj = m_vars[part];
2915 /* Preparation code common for both left and right shifts.
2916 unsigned n1 = n % limb_prec;
2917 size_t n2 = n / limb_prec;
2918 size_t n3 = n1 != 0;
2919 unsigned n4 = (limb_prec - n1) % limb_prec;
2920 (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)).  */
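/* E.g. (hypothetical values) for limb_prec 64 and n 200 this computes
   n1 = 8, n2 = 3, n3 = 1 and n4 = 56.  */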
2921 if (TREE_CODE (n) == INTEGER_CST)
2923 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2924 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2925 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2926 n3 = size_int (!integer_zerop (n1));
2927 n4 = int_const_binop (TRUNC_MOD_EXPR,
2928 int_const_binop (MINUS_EXPR, lp, n1), lp);
2930 else
2932 n1 = make_ssa_name (TREE_TYPE (n));
2933 n2 = make_ssa_name (sizetype);
2934 n3 = make_ssa_name (sizetype);
2935 n4 = make_ssa_name (TREE_TYPE (n));
2936 if (pow2p_hwi (limb_prec))
2938 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2939 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2940 insert_before (g);
2941 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2942 TREE_TYPE (n))
2943 ? n2 : make_ssa_name (TREE_TYPE (n)),
2944 RSHIFT_EXPR, n,
2945 build_int_cst (TREE_TYPE (n),
2946 exact_log2 (limb_prec)));
2947 insert_before (g);
2948 if (gimple_assign_lhs (g) != n2)
2950 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2951 insert_before (g);
2953 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2954 NEGATE_EXPR, n1);
2955 insert_before (g);
2956 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
2957 lpm1);
2958 insert_before (g);
2960 else
2962 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2963 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
2964 insert_before (g);
2965 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2966 TREE_TYPE (n))
2967 ? n2 : make_ssa_name (TREE_TYPE (n)),
2968 TRUNC_DIV_EXPR, n, lp);
2969 insert_before (g);
2970 if (gimple_assign_lhs (g) != n2)
2972 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2973 insert_before (g);
2975 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2976 MINUS_EXPR, lp, n1);
2977 insert_before (g);
2978 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
2979 lp);
2980 insert_before (g);
2982 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
2983 build_zero_cst (TREE_TYPE (n)));
2984 insert_before (g);
2985 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
2986 insert_before (g);
2988 tree p = build_int_cst (sizetype,
2989 prec / limb_prec - (prec % limb_prec == 0));
2990 if (rhs_code == RSHIFT_EXPR)
2992 /* Lower
2993 dst = src >> n;
2995 unsigned n1 = n % limb_prec;
2996 size_t n2 = n / limb_prec;
2997 size_t n3 = n1 != 0;
2998 unsigned n4 = (limb_prec - n1) % limb_prec;
2999 size_t idx;
3000 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3001 int signed_p = (typeof (src) -1) < 0;
3002 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3003 ? p : p - n3); ++idx)
3004 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3005 limb_type ext;
3006 if (prec % limb_prec == 0)
3007 ext = src[p];
3008 else if (signed_p)
3009 ext = ((signed limb_type) (src[p] << (limb_prec
3010 - (prec % limb_prec))))
3011 >> (limb_prec - (prec % limb_prec));
3012 else
3013 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3014 if (!signed_p && (prec % limb_prec == 0))
3016 else if (idx < p)
3018 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3019 ++idx;
3021 idx -= n2;
3022 if (signed_p)
3024 dst[idx] = ((signed limb_type) ext) >> n1;
3025 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3027 else
3029 dst[idx] = ext >> n1;
3030 ext = 0;
3032 for (++idx; idx <= p; ++idx)
3033 dst[idx] = ext; */
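/* A worked instance of the above (hypothetical values): for unsigned
   _BitInt(256) >> 200 with 64-bit limbs, n1 = 8, n2 = 3, n3 = 1,
   n4 = 56 and p = 3; the first loop runs zero times, ext is src[3],
   dst[0] = ext >> 8 and dst[1] .. dst[3] are zeroed.  */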
3034 tree pmn3;
3035 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3036 pmn3 = p;
3037 else if (TREE_CODE (n3) == INTEGER_CST)
3038 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3039 else
3041 pmn3 = make_ssa_name (sizetype);
3042 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3043 insert_before (g);
3045 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3046 edge edge_true, edge_false;
3047 if_then (g, profile_probability::likely (), edge_true, edge_false);
3048 tree idx_next;
3049 tree idx = create_loop (n2, &idx_next);
3050 tree idxmn2 = make_ssa_name (sizetype);
3051 tree idxpn3 = make_ssa_name (sizetype);
3052 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3053 insert_before (g);
3054 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3055 insert_before (g);
3056 m_data_cnt = 0;
3057 tree t1 = handle_operand (rhs1, idx);
3058 m_first = false;
3059 g = gimple_build_assign (make_ssa_name (m_limb_type),
3060 RSHIFT_EXPR, t1, n1);
3061 insert_before (g);
3062 t1 = gimple_assign_lhs (g);
3063 if (!integer_zerop (n3))
3065 m_data_cnt = 0;
3066 tree t2 = handle_operand (rhs1, idxpn3);
3067 g = gimple_build_assign (make_ssa_name (m_limb_type),
3068 LSHIFT_EXPR, t2, n4);
3069 insert_before (g);
3070 t2 = gimple_assign_lhs (g);
3071 g = gimple_build_assign (make_ssa_name (m_limb_type),
3072 BIT_IOR_EXPR, t1, t2);
3073 insert_before (g);
3074 t1 = gimple_assign_lhs (g);
3076 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3077 g = gimple_build_assign (l, t1);
3078 insert_before (g);
3079 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3080 insert_before (g);
3081 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3082 insert_before (g);
3083 idx = make_ssa_name (sizetype);
3084 m_gsi = gsi_for_stmt (final_stmt);
3085 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3086 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3087 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3088 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3089 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3090 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3091 m_data_cnt = 0;
3092 tree ms = handle_operand (rhs1, p);
3093 tree ext = ms;
3094 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3095 ext = add_cast (m_limb_type, ms);
3096 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3097 && !integer_zerop (n3))
3099 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3100 if_then (g, profile_probability::likely (), edge_true, edge_false);
3101 m_data_cnt = 0;
3102 t1 = handle_operand (rhs1, idx);
3103 g = gimple_build_assign (make_ssa_name (m_limb_type),
3104 RSHIFT_EXPR, t1, n1);
3105 insert_before (g);
3106 t1 = gimple_assign_lhs (g);
3107 g = gimple_build_assign (make_ssa_name (m_limb_type),
3108 LSHIFT_EXPR, ext, n4);
3109 insert_before (g);
3110 tree t2 = gimple_assign_lhs (g);
3111 g = gimple_build_assign (make_ssa_name (m_limb_type),
3112 BIT_IOR_EXPR, t1, t2);
3113 insert_before (g);
3114 t1 = gimple_assign_lhs (g);
3115 idxmn2 = make_ssa_name (sizetype);
3116 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3117 insert_before (g);
3118 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3119 g = gimple_build_assign (l, t1);
3120 insert_before (g);
3121 idx_next = make_ssa_name (sizetype);
3122 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3123 insert_before (g);
3124 m_gsi = gsi_for_stmt (final_stmt);
3125 tree nidx = make_ssa_name (sizetype);
3126 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3127 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3128 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3129 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3130 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3131 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3132 idx = nidx;
3134 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3135 insert_before (g);
3136 idx = gimple_assign_lhs (g);
3137 tree sext = ext;
3138 if (!TYPE_UNSIGNED (type))
3139 sext = add_cast (signed_type_for (m_limb_type), ext);
3140 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3141 RSHIFT_EXPR, sext, n1);
3142 insert_before (g);
3143 t1 = gimple_assign_lhs (g);
3144 if (!TYPE_UNSIGNED (type))
3146 t1 = add_cast (m_limb_type, t1);
3147 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3148 RSHIFT_EXPR, sext,
3149 build_int_cst (TREE_TYPE (n),
3150 limb_prec - 1));
3151 insert_before (g);
3152 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3154 else
3155 ext = build_zero_cst (m_limb_type);
3156 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3157 g = gimple_build_assign (l, t1);
3158 insert_before (g);
3159 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3160 size_one_node);
3161 insert_before (g);
3162 idx = gimple_assign_lhs (g);
3163 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3164 if_then (g, profile_probability::likely (), edge_true, edge_false);
3165 idx = create_loop (idx, &idx_next);
3166 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3167 g = gimple_build_assign (l, ext);
3168 insert_before (g);
3169 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3170 insert_before (g);
3171 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3172 insert_before (g);
3174 else
3176 /* Lower
3177 dst = src << n;
3179 unsigned n1 = n % limb_prec;
3180 size_t n2 = n / limb_prec;
3181 size_t n3 = n1 != 0;
3182 unsigned n4 = (limb_prec - n1) % limb_prec;
3183 size_t idx;
3184 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3185 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3186 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3187 if (n1)
3189 dst[idx] = src[idx - n2] << n1;
3190 --idx;
3192 for (; (ssize_t) idx >= 0; --idx)
3193 dst[idx] = 0; */
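/* Correspondingly (hypothetical values), _BitInt(256) << 200 with
   64-bit limbs gives n1 = 8, n2 = 3, n3 = 1, n4 = 56 and p = 3; the
   first loop runs zero times, dst[3] = src[0] << 8 and dst[2], dst[1]
   and dst[0] are zeroed.  */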
3194 tree n2pn3;
3195 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3196 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3197 else
3199 n2pn3 = make_ssa_name (sizetype);
3200 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3201 insert_before (g);
3203 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3204 idx even to access the most significant partial limb. */
3205 m_var_msb = true;
3206 if (integer_zerop (n3))
3207 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3208 counts.  Emit an if (true) condition that can be optimized away later.  */
3209 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3210 NULL_TREE, NULL_TREE);
3211 else
3212 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3213 edge edge_true, edge_false;
3214 if_then (g, profile_probability::likely (), edge_true, edge_false);
3215 tree idx_next;
3216 tree idx = create_loop (p, &idx_next);
3217 tree idxmn2 = make_ssa_name (sizetype);
3218 tree idxmn2mn3 = make_ssa_name (sizetype);
3219 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3220 insert_before (g);
3221 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3222 insert_before (g);
3223 m_data_cnt = 0;
3224 tree t1 = handle_operand (rhs1, idxmn2);
3225 m_first = false;
3226 g = gimple_build_assign (make_ssa_name (m_limb_type),
3227 LSHIFT_EXPR, t1, n1);
3228 insert_before (g);
3229 t1 = gimple_assign_lhs (g);
3230 if (!integer_zerop (n3))
3232 m_data_cnt = 0;
3233 tree t2 = handle_operand (rhs1, idxmn2mn3);
3234 g = gimple_build_assign (make_ssa_name (m_limb_type),
3235 RSHIFT_EXPR, t2, n4);
3236 insert_before (g);
3237 t2 = gimple_assign_lhs (g);
3238 g = gimple_build_assign (make_ssa_name (m_limb_type),
3239 BIT_IOR_EXPR, t1, t2);
3240 insert_before (g);
3241 t1 = gimple_assign_lhs (g);
3243 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3244 g = gimple_build_assign (l, t1);
3245 insert_before (g);
3246 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3247 insert_before (g);
3248 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3249 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3250 NULL_TREE, NULL_TREE);
3251 insert_before (g);
3252 idx = make_ssa_name (sizetype);
3253 m_gsi = gsi_for_stmt (final_stmt);
3254 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3255 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3256 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3257 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3258 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3259 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3260 m_data_cnt = 0;
3261 if (!integer_zerop (n3))
3263 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3264 NULL_TREE, NULL_TREE);
3265 if_then (g, profile_probability::likely (), edge_true, edge_false);
3266 idxmn2 = make_ssa_name (sizetype);
3267 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3268 insert_before (g);
3269 m_data_cnt = 0;
3270 t1 = handle_operand (rhs1, idxmn2);
3271 g = gimple_build_assign (make_ssa_name (m_limb_type),
3272 LSHIFT_EXPR, t1, n1);
3273 insert_before (g);
3274 t1 = gimple_assign_lhs (g);
3275 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3276 g = gimple_build_assign (l, t1);
3277 insert_before (g);
3278 idx_next = make_ssa_name (sizetype);
3279 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3280 insert_before (g);
3281 m_gsi = gsi_for_stmt (final_stmt);
3282 tree nidx = make_ssa_name (sizetype);
3283 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3284 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3285 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3286 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3287 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3288 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3289 idx = nidx;
3291 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3292 ssize_int (0), NULL_TREE, NULL_TREE);
3293 if_then (g, profile_probability::likely (), edge_true, edge_false);
3294 idx = create_loop (idx, &idx_next);
3295 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3296 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3297 insert_before (g);
3298 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3299 insert_before (g);
3300 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3301 ssize_int (0), NULL_TREE, NULL_TREE);
3302 insert_before (g);
3306 /* Lower large/huge _BitInt multiplication or division. */
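/* A sketch of the emitted call (illustrative operands): c = a * b with
   _BitInt(512) operands becomes roughly
     .MULBITINT (&c, 512, &a, prec1, &b, prec2);
   where prec1/prec2 come from handle_operand_addr, may be narrowed by
   range information and are negative for sign-extended operands.  */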
3308 void
3309 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3311 tree rhs1 = gimple_assign_rhs1 (stmt);
3312 tree rhs2 = gimple_assign_rhs2 (stmt);
3313 tree lhs = gimple_assign_lhs (stmt);
3314 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3315 tree type = TREE_TYPE (rhs1);
3316 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3317 && bitint_precision_kind (type) >= bitint_prec_large);
3318 int prec = TYPE_PRECISION (type), prec1, prec2;
3319 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3320 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3321 if (obj == NULL_TREE)
3323 int part = var_to_partition (m_map, lhs);
3324 gcc_assert (m_vars[part] != NULL_TREE);
3325 obj = m_vars[part];
3326 lhs = build_fold_addr_expr (obj);
3328 else
3330 lhs = build_fold_addr_expr (obj);
3331 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3332 NULL_TREE, true, GSI_SAME_STMT);
3334 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3335 gimple *g;
3336 switch (rhs_code)
3338 case MULT_EXPR:
3339 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3340 lhs, build_int_cst (sitype, prec),
3341 rhs1, build_int_cst (sitype, prec1),
3342 rhs2, build_int_cst (sitype, prec2));
3343 insert_before (g);
3344 break;
3345 case TRUNC_DIV_EXPR:
3346 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3347 lhs, build_int_cst (sitype, prec),
3348 null_pointer_node,
3349 build_int_cst (sitype, 0),
3350 rhs1, build_int_cst (sitype, prec1),
3351 rhs2, build_int_cst (sitype, prec2));
3352 if (!stmt_ends_bb_p (stmt))
3353 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3354 insert_before (g);
3355 break;
3356 case TRUNC_MOD_EXPR:
3357 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3358 build_int_cst (sitype, 0),
3359 lhs, build_int_cst (sitype, prec),
3360 rhs1, build_int_cst (sitype, prec1),
3361 rhs2, build_int_cst (sitype, prec2));
3362 if (!stmt_ends_bb_p (stmt))
3363 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3364 insert_before (g);
3365 break;
3366 default:
3367 gcc_unreachable ();
3369 if (stmt_ends_bb_p (stmt))
3371 maybe_duplicate_eh_stmt (g, stmt);
3372 edge e1;
3373 edge_iterator ei;
3374 basic_block bb = gimple_bb (stmt);
3376 FOR_EACH_EDGE (e1, ei, bb->succs)
3377 if (e1->flags & EDGE_EH)
3378 break;
3379 if (e1)
3381 edge e2 = split_block (gsi_bb (m_gsi), g);
3382 m_gsi = gsi_after_labels (e2->dest);
3383 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3384 = profile_probability::very_unlikely ();
3389 /* Lower large/huge _BitInt conversion to/from floating point. */
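/* A sketch with illustrative types: for signed _BitInt(256) b and
   double d,
     b = (_BitInt(256)) d;  is lowered to  .FLOATTOBITINT (&b, -256, d);
     d = (double) b;        is lowered to  d = .BITINTTOFLOAT (&b, -256);
   where the sign of the precision argument encodes signedness and the
   precision in the second call may be narrowed by range information.  */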
3391 void
3392 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3394 tree rhs1 = gimple_assign_rhs1 (stmt);
3395 tree lhs = gimple_assign_lhs (stmt);
3396 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3397 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3398 gimple *g;
3399 if (rhs_code == FIX_TRUNC_EXPR)
3401 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3402 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3403 prec = -prec;
3404 if (obj == NULL_TREE)
3406 int part = var_to_partition (m_map, lhs);
3407 gcc_assert (m_vars[part] != NULL_TREE);
3408 obj = m_vars[part];
3409 lhs = build_fold_addr_expr (obj);
3411 else
3413 lhs = build_fold_addr_expr (obj);
3414 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3415 NULL_TREE, true, GSI_SAME_STMT);
3417 scalar_mode from_mode
3418 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3419 #ifdef HAVE_SFmode
3420 /* IEEE single is a full superset of both the IEEE half and
3421 bfloat formats, so convert to float first and then to _BitInt
3422 to avoid the need for another two library routines.  */
3423 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3424 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3425 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3427 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3428 if (type)
3429 rhs1 = add_cast (type, rhs1);
3431 #endif
3432 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3433 lhs, build_int_cst (sitype, prec),
3434 rhs1);
3435 insert_before (g);
3437 else
3439 int prec;
3440 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3441 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3442 rhs1, build_int_cst (sitype, prec));
3443 gimple_call_set_lhs (g, lhs);
3444 if (!stmt_ends_bb_p (stmt))
3445 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3446 gsi_replace (&m_gsi, g, true);
3450 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3451 If CHECK_ZERO is true, the caller wants to check whether all bits
3452 in [START, END) are zero, otherwise whether the bits in [START, END)
3453 are either all zero or all ones.  L is the limb with index LIMB;
3454 START and END are measured in bits.  */
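/* E.g. (hypothetical arguments) with 64-bit limbs, START 3, END 64,
   LIMB 0 and CHECK_ZERO true, this emits just
     l & 0xfffffffffffffff8
   masking away the low 3 bits, which are outside [START, END).  */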
3456 tree
3457 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3458 unsigned int end, tree l,
3459 unsigned int limb,
3460 bool check_zero)
3462 unsigned startlimb = start / limb_prec;
3463 unsigned endlimb = (end - 1) / limb_prec;
3464 gimple *g;
3466 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3467 return l;
3468 if (startlimb == endlimb && limb == startlimb)
3470 if (check_zero)
3472 wide_int w = wi::shifted_mask (start % limb_prec,
3473 end - start, false, limb_prec);
3474 g = gimple_build_assign (make_ssa_name (m_limb_type),
3475 BIT_AND_EXPR, l,
3476 wide_int_to_tree (m_limb_type, w));
3477 insert_before (g);
3478 return gimple_assign_lhs (g);
3480 unsigned int shift = start % limb_prec;
3481 if ((end % limb_prec) != 0)
3483 unsigned int lshift = (-end) % limb_prec;
3484 shift += lshift;
3485 g = gimple_build_assign (make_ssa_name (m_limb_type),
3486 LSHIFT_EXPR, l,
3487 build_int_cst (unsigned_type_node,
3488 lshift));
3489 insert_before (g);
3490 l = gimple_assign_lhs (g);
3492 l = add_cast (signed_type_for (m_limb_type), l);
3493 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3494 RSHIFT_EXPR, l,
3495 build_int_cst (unsigned_type_node, shift));
3496 insert_before (g);
3497 return add_cast (m_limb_type, gimple_assign_lhs (g));
3499 else if (limb == startlimb)
3501 if ((start % limb_prec) == 0)
3502 return l;
3503 if (!check_zero)
3504 l = add_cast (signed_type_for (m_limb_type), l);
3505 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3506 RSHIFT_EXPR, l,
3507 build_int_cst (unsigned_type_node,
3508 start % limb_prec));
3509 insert_before (g);
3510 l = gimple_assign_lhs (g);
3511 if (!check_zero)
3512 l = add_cast (m_limb_type, l);
3513 return l;
3515 else if (limb == endlimb)
3517 if ((end % limb_prec) == 0)
3518 return l;
3519 if (check_zero)
3521 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3522 g = gimple_build_assign (make_ssa_name (m_limb_type),
3523 BIT_AND_EXPR, l,
3524 wide_int_to_tree (m_limb_type, w));
3525 insert_before (g);
3526 return gimple_assign_lhs (g);
3528 unsigned int shift = (-end) % limb_prec;
3529 g = gimple_build_assign (make_ssa_name (m_limb_type),
3530 LSHIFT_EXPR, l,
3531 build_int_cst (unsigned_type_node, shift));
3532 insert_before (g);
3533 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3534 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3535 RSHIFT_EXPR, l,
3536 build_int_cst (unsigned_type_node, shift));
3537 insert_before (g);
3538 return add_cast (m_limb_type, gimple_assign_lhs (g));
3540 return l;
3543 /* Helper method for lower_addsub_overflow and lower_mul_overflow.  Store
3544 the result, including the overflow flag, into the right locations.  */
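/* E.g. for a LHS of complex type, as produced by .{ADD,SUB,MUL}_OVERFLOW
   internal calls, the small-precision path builds
     lhs = COMPLEX_EXPR <result, (cast) ovf>;
   while for large/huge results the limbs are copied through OBJ and
   only the IMAGPART_EXPR uses of LHS are rewritten to use OVF.  */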
3546 void
3547 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3548 tree ovf, tree lhs, tree orig_obj,
3549 gimple *stmt, tree_code code)
3551 gimple *g;
3553 if (obj == NULL_TREE
3554 && (TREE_CODE (type) != BITINT_TYPE
3555 || bitint_precision_kind (type) < bitint_prec_large))
3557 /* Add support for 3 or more limbs filled in from a normal integral
3558 type if this assert fails.  If no target chooses a limb mode smaller
3559 than half of the largest supported normal integral type, this will
3560 not be needed.  */
3561 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3562 tree lhs_type = type;
3563 if (TREE_CODE (type) == BITINT_TYPE
3564 && bitint_precision_kind (type) == bitint_prec_middle)
3565 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3566 TYPE_UNSIGNED (type));
3567 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3568 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3569 insert_before (g);
3570 r1 = gimple_assign_lhs (g);
3571 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3572 r1 = add_cast (lhs_type, r1);
3573 if (TYPE_PRECISION (lhs_type) > limb_prec)
3575 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3576 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3577 insert_before (g);
3578 r2 = gimple_assign_lhs (g);
3579 r2 = add_cast (lhs_type, r2);
3580 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3581 build_int_cst (unsigned_type_node,
3582 limb_prec));
3583 insert_before (g);
3584 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3585 gimple_assign_lhs (g));
3586 insert_before (g);
3587 r1 = gimple_assign_lhs (g);
3589 if (lhs_type != type)
3590 r1 = add_cast (type, r1);
3591 ovf = add_cast (lhs_type, ovf);
3592 if (lhs_type != type)
3593 ovf = add_cast (type, ovf);
3594 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3595 m_gsi = gsi_for_stmt (stmt);
3596 gsi_replace (&m_gsi, g, true);
3598 else
3600 unsigned HOST_WIDE_INT nelts = 0;
3601 tree atype = NULL_TREE;
3602 if (obj)
3604 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3605 if (orig_obj == NULL_TREE)
3606 nelts >>= 1;
3607 atype = build_array_type_nelts (m_limb_type, nelts);
3609 if (var && obj)
3611 tree v1, v2;
3612 tree zero;
3613 if (orig_obj == NULL_TREE)
3615 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3616 v1 = build2 (MEM_REF, atype,
3617 build_fold_addr_expr (unshare_expr (obj)), zero);
3619 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3620 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3621 else
3622 v1 = unshare_expr (obj);
3623 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3624 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3625 g = gimple_build_assign (v1, v2);
3626 insert_before (g);
3628 if (orig_obj == NULL_TREE && obj)
3630 ovf = add_cast (m_limb_type, ovf);
3631 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3632 g = gimple_build_assign (l, ovf);
3633 insert_before (g);
3634 if (nelts > 1)
3636 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3637 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3638 (nelts + 1) * m_limb_size);
3639 tree v1 = build2 (MEM_REF, atype,
3640 build_fold_addr_expr (unshare_expr (obj)),
3641 off);
3642 g = gimple_build_assign (v1, build_zero_cst (atype));
3643 insert_before (g);
3646 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3648 imm_use_iterator ui;
3649 use_operand_p use_p;
3650 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3652 g = USE_STMT (use_p);
3653 if (!is_gimple_assign (g)
3654 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3655 continue;
3656 tree lhs2 = gimple_assign_lhs (g);
3657 gimple *use_stmt;
3658 single_imm_use (lhs2, &use_p, &use_stmt);
3659 lhs2 = gimple_assign_lhs (use_stmt);
3660 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3661 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3662 g = gimple_build_assign (lhs2, ovf);
3663 else
3664 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3665 gsi_replace (&gsi, g, true);
3666 break;
3669 else if (ovf != boolean_false_node)
3671 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3672 NULL_TREE, NULL_TREE);
3673 edge edge_true, edge_false;
3674 if_then (g, profile_probability::very_unlikely (),
3675 edge_true, edge_false);
3676 tree zero = build_zero_cst (TREE_TYPE (lhs));
3677 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3678 TREE_TYPE (lhs),
3679 zero, zero, NULL);
3680 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3681 true, GSI_SAME_STMT);
3682 m_gsi = gsi_after_labels (edge_true->dest);
3685 if (var)
3687 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_EOL);
3688 g = gimple_build_assign (var, clobber);
3689 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
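/* A worked illustration (sketch): for a _Complex _BitInt(128)
   .ADD_OVERFLOW result with 64-bit limbs and ORIG_OBJ == NULL_TREE,
   OBJ is a 4-limb array and NELTS becomes 2; the code above copies
   the 2 result limbs from VAR, stores the overflow flag zero-extended
   to a limb as limb 2 and clears limb 3 at byte offset
   3 * m_limb_size, the layout that lower_cplxpart_stmt reads back.  */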
3693 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3694 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3695 argument 1 precision PREC1 and minimum precision for the result
3696 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3698 static tree
3699 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3700 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3702 *start = 0;
3703 *end = 0;
3704 *check_zero = true;
3705 /* Ignore this special rule for subtraction: even if both
3706 prec0 >= 0 and prec1 >= 0, their difference can be negative
3707 in infinite precision.  */
3708 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3710 /* Result in [0, prec2) is unsigned; if prec > prec2,
3711 all bits above it will be zero.  */
3712 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3713 return boolean_false_node;
3714 else
3716 /* ovf if any of the bits in [start, end) is non-zero.  */
3717 *start = prec - !TYPE_UNSIGNED (type);
3718 *end = prec2;
3721 else if (TYPE_UNSIGNED (type))
3723 /* If result in [0, prec2) is signed and prec >= prec2,
3724 all bits above it will be sign bit copies.  */
3725 if (prec >= prec2)
3727 /* ovf if bit prec - 1 is non-zero. */
3728 *start = prec - 1;
3729 *end = prec;
3731 else
3733 /* ovf if any of the bits in [start, end) is non-zero.  */
3734 *start = prec;
3735 *end = prec2;
3738 else if (prec >= prec2)
3739 return boolean_false_node;
3740 else
3742 /* ovf if [start, end) bits aren't all zeros or all ones. */
3743 *start = prec - 1;
3744 *end = prec2;
3745 *check_zero = false;
3747 return NULL_TREE;
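/* A worked example (illustration only): for .ADD_OVERFLOW with an
   unsigned _BitInt(256) result and both arguments unsigned of
   precision 256, PREC2 == MAX (256, 256) + 1 == 257.  Since
   prec - !TYPE_UNSIGNED (type) == 256 < 257, *START == 256 and
   *END == 257 with *CHECK_ZERO left true: overflow iff the single
   carry bit above the top limb of the infinite precision result is
   non-zero.  */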
3750 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3751 argument or return type _Complex large/huge _BitInt. */
3753 void
3754 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3756 tree arg0 = gimple_call_arg (stmt, 0);
3757 tree arg1 = gimple_call_arg (stmt, 1);
3758 tree lhs = gimple_call_lhs (stmt);
3759 gimple *g;
3761 if (!lhs)
3763 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3764 gsi_remove (&gsi, true);
3765 return;
3767 gimple *final_stmt = gsi_stmt (m_gsi);
3768 tree type = TREE_TYPE (lhs);
3769 if (TREE_CODE (type) == COMPLEX_TYPE)
3770 type = TREE_TYPE (type);
3771 int prec = TYPE_PRECISION (type);
3772 int prec0 = range_to_prec (arg0, stmt);
3773 int prec1 = range_to_prec (arg1, stmt);
3774 int prec2 = ((prec0 < 0) == (prec1 < 0)
3775 ? MAX (prec0 < 0 ? -prec0 : prec0,
3776 prec1 < 0 ? -prec1 : prec1) + 1
3777 : MAX (prec0 < 0 ? -prec0 : prec0 + 1,
3778 prec1 < 0 ? -prec1 : prec1 + 1) + 1);
3779 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3780 prec1 < 0 ? -prec1 : prec1);
3781 prec3 = MAX (prec3, prec);
3782 tree var = NULL_TREE;
3783 tree orig_obj = obj;
3784 if (obj == NULL_TREE
3785 && TREE_CODE (type) == BITINT_TYPE
3786 && bitint_precision_kind (type) >= bitint_prec_large
3787 && m_names
3788 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3790 int part = var_to_partition (m_map, lhs);
3791 gcc_assert (m_vars[part] != NULL_TREE);
3792 obj = m_vars[part];
3793 if (TREE_TYPE (lhs) == type)
3794 orig_obj = obj;
3796 if (TREE_CODE (type) != BITINT_TYPE
3797 || bitint_precision_kind (type) < bitint_prec_large)
3799 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3800 tree atype = build_array_type_nelts (m_limb_type, nelts);
3801 var = create_tmp_var (atype);
3804 enum tree_code code;
3805 switch (gimple_call_internal_fn (stmt))
3807 case IFN_ADD_OVERFLOW:
3808 case IFN_UBSAN_CHECK_ADD:
3809 code = PLUS_EXPR;
3810 break;
3811 case IFN_SUB_OVERFLOW:
3812 case IFN_UBSAN_CHECK_SUB:
3813 code = MINUS_EXPR;
3814 break;
3815 default:
3816 gcc_unreachable ();
3818 unsigned start, end;
3819 bool check_zero;
3820 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3821 &start, &end, &check_zero);
3823 unsigned startlimb, endlimb;
3824 if (ovf)
3826 startlimb = ~0U;
3827 endlimb = ~0U;
3829 else
3831 startlimb = start / limb_prec;
3832 endlimb = (end - 1) / limb_prec;
3835 int prec4 = ovf != NULL_TREE ? prec : prec3;
3836 bitint_prec_kind kind = bitint_precision_kind (prec4);
3837 unsigned cnt, rem = 0, fin = 0;
3838 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3839 bool last_ovf = (ovf == NULL_TREE
3840 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3841 if (kind != bitint_prec_huge)
3842 cnt = CEIL (prec4, limb_prec) + last_ovf;
3843 else
3845 rem = (prec4 % (2 * limb_prec));
3846 fin = (prec4 - rem) / limb_prec;
3847 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3848 idx = idx_first = create_loop (size_zero_node, &idx_next);
3851 if (kind == bitint_prec_huge)
3852 m_upwards_2limb = fin;
3853 m_upwards = true;
3855 tree type0 = TREE_TYPE (arg0);
3856 tree type1 = TREE_TYPE (arg1);
3857 if (TYPE_PRECISION (type0) < prec3)
3859 type0 = build_bitint_type (prec3, TYPE_UNSIGNED (type0));
3860 if (TREE_CODE (arg0) == INTEGER_CST)
3861 arg0 = fold_convert (type0, arg0);
3863 if (TYPE_PRECISION (type1) < prec3)
3865 type1 = build_bitint_type (prec3, TYPE_UNSIGNED (type1));
3866 if (TREE_CODE (arg1) == INTEGER_CST)
3867 arg1 = fold_convert (type1, arg1);
3869 unsigned int data_cnt = 0;
3870 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3871 tree cmp = build_zero_cst (m_limb_type);
3872 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3873 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3874 for (unsigned i = 0; i < cnt; i++)
3876 m_data_cnt = 0;
3877 tree rhs1, rhs2;
3878 if (kind != bitint_prec_huge)
3879 idx = size_int (i);
3880 else if (i >= 2)
3881 idx = size_int (fin + (i > 2));
3882 if (!last_ovf || i < cnt - 1)
3884 if (type0 != TREE_TYPE (arg0))
3885 rhs1 = handle_cast (type0, arg0, idx);
3886 else
3887 rhs1 = handle_operand (arg0, idx);
3888 if (type1 != TREE_TYPE (arg1))
3889 rhs2 = handle_cast (type1, arg1, idx);
3890 else
3891 rhs2 = handle_operand (arg1, idx);
3892 if (i == 0)
3893 data_cnt = m_data_cnt;
3894 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3895 rhs1 = add_cast (m_limb_type, rhs1);
3896 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3897 rhs2 = add_cast (m_limb_type, rhs2);
3898 last_rhs1 = rhs1;
3899 last_rhs2 = rhs2;
3901 else
3903 m_data_cnt = data_cnt;
3904 if (TYPE_UNSIGNED (type0))
3905 rhs1 = build_zero_cst (m_limb_type);
3906 else
3908 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3909 if (TREE_CODE (rhs1) == INTEGER_CST)
3910 rhs1 = build_int_cst (m_limb_type,
3911 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3912 else
3914 tree lpm1 = build_int_cst (unsigned_type_node,
3915 limb_prec - 1);
3916 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
3917 RSHIFT_EXPR, rhs1, lpm1);
3918 insert_before (g);
3919 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
3922 if (TYPE_UNSIGNED (type1))
3923 rhs2 = build_zero_cst (m_limb_type);
3924 else
3926 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
3927 if (TREE_CODE (rhs2) == INTEGER_CST)
3928 rhs2 = build_int_cst (m_limb_type,
3929 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
3930 else
3932 tree lpm1 = build_int_cst (unsigned_type_node,
3933 limb_prec - 1);
3934 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
3935 RSHIFT_EXPR, rhs2, lpm1);
3936 insert_before (g);
3937 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
3941 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
3942 if (ovf != boolean_false_node)
3944 if (tree_fits_uhwi_p (idx))
3946 unsigned limb = tree_to_uhwi (idx);
3947 if (limb >= startlimb && limb <= endlimb)
3949 tree l = arith_overflow_extract_bits (start, end, rhs,
3950 limb, check_zero);
3951 tree this_ovf = make_ssa_name (boolean_type_node);
3952 if (ovf == NULL_TREE && !check_zero)
3954 cmp = l;
3955 g = gimple_build_assign (make_ssa_name (m_limb_type),
3956 PLUS_EXPR, l,
3957 build_int_cst (m_limb_type, 1));
3958 insert_before (g);
3959 g = gimple_build_assign (this_ovf, GT_EXPR,
3960 gimple_assign_lhs (g),
3961 build_int_cst (m_limb_type, 1));
3963 else
3964 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
3965 insert_before (g);
3966 if (ovf == NULL_TREE)
3967 ovf = this_ovf;
3968 else
3970 tree b = make_ssa_name (boolean_type_node);
3971 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
3972 insert_before (g);
3973 ovf = b;
3977 else if (startlimb < fin)
3979 if (m_first && startlimb + 2 < fin)
3981 tree data_out;
3982 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
3983 ovf_out = m_data.pop ();
3984 m_data.pop ();
3985 if (!check_zero)
3987 cmp = prepare_data_in_out (cmp, idx, &data_out);
3988 cmp_out = m_data.pop ();
3989 m_data.pop ();
3992 if (i != 0 || startlimb != fin - 1)
3994 tree_code cmp_code;
3995 bool single_comparison
3996 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
3997 if (!single_comparison)
3999 cmp_code = GE_EXPR;
4000 if (!check_zero && (start % limb_prec) == 0)
4001 single_comparison = true;
4003 else if ((startlimb & 1) == (i & 1))
4004 cmp_code = EQ_EXPR;
4005 else
4006 cmp_code = GT_EXPR;
4007 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4008 NULL_TREE, NULL_TREE);
4009 edge edge_true_true, edge_true_false, edge_false;
4010 gimple *g2 = NULL;
4011 if (!single_comparison)
4012 g2 = gimple_build_cond (EQ_EXPR, idx,
4013 size_int (startlimb), NULL_TREE,
4014 NULL_TREE);
4015 if_then_if_then_else (g, g2, profile_probability::likely (),
4016 profile_probability::unlikely (),
4017 edge_true_true, edge_true_false,
4018 edge_false);
4019 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4020 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4021 check_zero);
4022 tree this_ovf = make_ssa_name (boolean_type_node);
4023 if (cmp_code != GT_EXPR && !check_zero)
4025 g = gimple_build_assign (make_ssa_name (m_limb_type),
4026 PLUS_EXPR, l,
4027 build_int_cst (m_limb_type, 1));
4028 insert_before (g);
4029 g = gimple_build_assign (this_ovf, GT_EXPR,
4030 gimple_assign_lhs (g),
4031 build_int_cst (m_limb_type, 1));
4033 else
4034 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4035 insert_before (g);
4036 if (cmp_code == GT_EXPR)
4038 tree t = make_ssa_name (boolean_type_node);
4039 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4040 insert_before (g);
4041 this_ovf = t;
4043 tree this_ovf2 = NULL_TREE;
4044 if (!single_comparison)
4046 m_gsi = gsi_after_labels (edge_true_true->src);
4047 tree t = make_ssa_name (boolean_type_node);
4048 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4049 insert_before (g);
4050 this_ovf2 = make_ssa_name (boolean_type_node);
4051 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4052 ovf, t);
4053 insert_before (g);
4055 m_gsi = gsi_after_labels (edge_true_false->dest);
4056 tree t;
4057 if (i == 1 && ovf_out)
4058 t = ovf_out;
4059 else
4060 t = make_ssa_name (boolean_type_node);
4061 gphi *phi = create_phi_node (t, edge_true_false->dest);
4062 add_phi_arg (phi, this_ovf, edge_true_false,
4063 UNKNOWN_LOCATION);
4064 add_phi_arg (phi, ovf ? ovf
4065 : boolean_false_node, edge_false,
4066 UNKNOWN_LOCATION);
4067 if (edge_true_true)
4068 add_phi_arg (phi, this_ovf2, edge_true_true,
4069 UNKNOWN_LOCATION);
4070 ovf = t;
4071 if (!check_zero && cmp_code != GT_EXPR)
4073 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4074 phi = create_phi_node (t, edge_true_false->dest);
4075 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4076 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4077 if (edge_true_true)
4078 add_phi_arg (phi, cmp, edge_true_true,
4079 UNKNOWN_LOCATION);
4080 cmp = t;
4086 if (var || obj)
4088 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4090 else if (!tree_fits_uhwi_p (idx)
4091 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4093 bool single_comparison
4094 = (((unsigned) prec % limb_prec) == 0
4095 || prec_limbs + 1 >= fin
4096 || (prec_limbs & 1) == (i & 1));
4097 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4098 NULL_TREE, NULL_TREE);
4099 gimple *g2 = NULL;
4100 if (!single_comparison)
4101 g2 = gimple_build_cond (LT_EXPR, idx,
4102 size_int (prec_limbs - 1),
4103 NULL_TREE, NULL_TREE);
4104 edge edge_true_true, edge_true_false, edge_false;
4105 if_then_if_then_else (g, g2, profile_probability::likely (),
4106 profile_probability::likely (),
4107 edge_true_true, edge_true_false,
4108 edge_false);
4109 tree l = limb_access (type, var ? var : obj, idx, true);
4110 g = gimple_build_assign (l, rhs);
4111 insert_before (g);
4112 if (!single_comparison)
4114 m_gsi = gsi_after_labels (edge_true_true->src);
4115 l = limb_access (type, var ? var : obj,
4116 size_int (prec_limbs - 1), true);
4117 if (!useless_type_conversion_p (TREE_TYPE (l),
4118 TREE_TYPE (rhs)))
4119 rhs = add_cast (TREE_TYPE (l), rhs);
4120 g = gimple_build_assign (l, rhs);
4121 insert_before (g);
4123 m_gsi = gsi_after_labels (edge_true_false->dest);
4125 else
4127 tree l = limb_access (type, var ? var : obj, idx, true);
4128 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4129 rhs = add_cast (TREE_TYPE (l), rhs);
4130 g = gimple_build_assign (l, rhs);
4131 insert_before (g);
4134 m_first = false;
4135 if (kind == bitint_prec_huge && i <= 1)
4137 if (i == 0)
4139 idx = make_ssa_name (sizetype);
4140 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4141 size_one_node);
4142 insert_before (g);
4144 else
4146 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4147 size_int (2));
4148 insert_before (g);
4149 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4150 NULL_TREE, NULL_TREE);
4151 insert_before (g);
4152 m_gsi = gsi_for_stmt (final_stmt);
4157 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
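/* An end-to-end illustration (hypothetical input, not from the
   sources):
     _BitInt(512) a, b, r;
     int ovf = __builtin_add_overflow (a, b, &r);
   With 64-bit limbs the addition runs as a loop handling two limbs
   per iteration (m_upwards_2limb), storing the limbs of R, and as
   prec2 == 513 > prec == 512 for the signed arguments, arith_overflow
   requests !check_zero with [start, end) == [511, 513): overflow iff
   those bits are neither all zeros nor all ones.  */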
4160 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4161 argument or return type _Complex large/huge _BitInt. */
4163 void
4164 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4166 tree arg0 = gimple_call_arg (stmt, 0);
4167 tree arg1 = gimple_call_arg (stmt, 1);
4168 tree lhs = gimple_call_lhs (stmt);
4169 if (!lhs)
4171 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4172 gsi_remove (&gsi, true);
4173 return;
4175 gimple *final_stmt = gsi_stmt (m_gsi);
4176 tree type = TREE_TYPE (lhs);
4177 if (TREE_CODE (type) == COMPLEX_TYPE)
4178 type = TREE_TYPE (type);
4179 int prec = TYPE_PRECISION (type), prec0, prec1;
4180 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4181 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4182 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4183 + (prec1 < 0 ? -prec1 : prec1)
4184 + ((prec0 < 0) != (prec1 < 0)));
4185 tree var = NULL_TREE;
4186 tree orig_obj = obj;
4187 bool force_var = false;
4188 if (obj == NULL_TREE
4189 && TREE_CODE (type) == BITINT_TYPE
4190 && bitint_precision_kind (type) >= bitint_prec_large
4191 && m_names
4192 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4194 int part = var_to_partition (m_map, lhs);
4195 gcc_assert (m_vars[part] != NULL_TREE);
4196 obj = m_vars[part];
4197 if (TREE_TYPE (lhs) == type)
4198 orig_obj = obj;
4200 else if (obj != NULL_TREE && DECL_P (obj))
4202 for (int i = 0; i < 2; ++i)
4204 tree arg = i ? arg1 : arg0;
4205 if (TREE_CODE (arg) == ADDR_EXPR)
4206 arg = TREE_OPERAND (arg, 0);
4207 if (get_base_address (arg) == obj)
4209 force_var = true;
4210 break;
4214 if (obj == NULL_TREE
4215 || force_var
4216 || TREE_CODE (type) != BITINT_TYPE
4217 || bitint_precision_kind (type) < bitint_prec_large
4218 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4220 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4221 tree atype = build_array_type_nelts (m_limb_type, nelts);
4222 var = create_tmp_var (atype);
4224 tree addr = build_fold_addr_expr (var ? var : obj);
4225 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4226 NULL_TREE, true, GSI_SAME_STMT);
4227 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4228 gimple *g
4229 = gimple_build_call_internal (IFN_MULBITINT, 6,
4230 addr, build_int_cst (sitype,
4231 MAX (prec2, prec)),
4232 arg0, build_int_cst (sitype, prec0),
4233 arg1, build_int_cst (sitype, prec1));
4234 insert_before (g);
4236 unsigned start, end;
4237 bool check_zero;
4238 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4239 &start, &end, &check_zero);
4240 if (ovf == NULL_TREE)
4242 unsigned startlimb = start / limb_prec;
4243 unsigned endlimb = (end - 1) / limb_prec;
4244 unsigned cnt;
4245 bool use_loop = false;
4246 if (startlimb == endlimb)
4247 cnt = 1;
4248 else if (startlimb + 1 == endlimb)
4249 cnt = 2;
4250 else if ((end % limb_prec) == 0)
4252 cnt = 2;
4253 use_loop = true;
4255 else
4257 cnt = 3;
4258 use_loop = startlimb + 2 < endlimb;
4260 if (cnt == 1)
4262 tree l = limb_access (NULL_TREE, var ? var : obj,
4263 size_int (startlimb), true);
4264 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4265 insert_before (g);
4266 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4267 startlimb, check_zero);
4268 ovf = make_ssa_name (boolean_type_node);
4269 if (check_zero)
4270 g = gimple_build_assign (ovf, NE_EXPR, l,
4271 build_zero_cst (m_limb_type));
4272 else
4274 g = gimple_build_assign (make_ssa_name (m_limb_type),
4275 PLUS_EXPR, l,
4276 build_int_cst (m_limb_type, 1));
4277 insert_before (g);
4278 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4279 build_int_cst (m_limb_type, 1));
4281 insert_before (g);
4283 else
4285 basic_block edge_bb = NULL;
4286 gimple_stmt_iterator gsi = m_gsi;
4287 gsi_prev (&gsi);
4288 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4289 edge_bb = e->src;
4290 m_gsi = gsi_last_bb (edge_bb);
4291 if (!gsi_end_p (m_gsi))
4292 gsi_next (&m_gsi);
4294 tree cmp = build_zero_cst (m_limb_type);
4295 for (unsigned i = 0; i < cnt; i++)
4297 tree idx, idx_next = NULL_TREE;
4298 if (i == 0)
4299 idx = size_int (startlimb);
4300 else if (i == 2)
4301 idx = size_int (endlimb);
4302 else if (use_loop)
4303 idx = create_loop (size_int (startlimb + 1), &idx_next);
4304 else
4305 idx = size_int (startlimb + 1);
4306 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4307 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4308 insert_before (g);
4309 l = gimple_assign_lhs (g);
4310 if (i == 0 || i == 2)
4311 l = arith_overflow_extract_bits (start, end, l,
4312 tree_to_uhwi (idx),
4313 check_zero);
4314 if (i == 0 && !check_zero)
4316 cmp = l;
4317 g = gimple_build_assign (make_ssa_name (m_limb_type),
4318 PLUS_EXPR, l,
4319 build_int_cst (m_limb_type, 1));
4320 insert_before (g);
4321 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4322 build_int_cst (m_limb_type, 1),
4323 NULL_TREE, NULL_TREE);
4325 else
4326 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4327 insert_before (g);
4328 edge e1 = split_block (gsi_bb (m_gsi), g);
4329 e1->flags = EDGE_FALSE_VALUE;
4330 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4331 EDGE_TRUE_VALUE);
4332 e1->probability = profile_probability::likely ();
4333 e2->probability = e1->probability.invert ();
4334 if (i == 0)
4335 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4336 m_gsi = gsi_after_labels (e1->dest);
4337 if (i == 1 && use_loop)
4339 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4340 size_one_node);
4341 insert_before (g);
4342 g = gimple_build_cond (NE_EXPR, idx_next,
4343 size_int (endlimb + (cnt == 2)),
4344 NULL_TREE, NULL_TREE);
4345 insert_before (g);
4346 edge true_edge, false_edge;
4347 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4348 &true_edge,
4349 &false_edge);
4350 m_gsi = gsi_after_labels (false_edge->dest);
4354 ovf = make_ssa_name (boolean_type_node);
4355 basic_block bb = gimple_bb (final_stmt);
4356 gphi *phi = create_phi_node (ovf, bb);
4357 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4358 edge_iterator ei;
4359 FOR_EACH_EDGE (e, ei, bb->preds)
4361 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4362 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4364 m_gsi = gsi_for_stmt (final_stmt);
4368 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
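/* Illustration (hypothetical input): for
     unsigned _BitInt(256) a, b, r;
     int ovf = __builtin_mul_overflow (a, b, &r);
   prec2 == 256 + 256 == 512, so the IFN_MULBITINT call above computes
   the full 512-bit product into an 8-limb buffer and overflow is then
   detected by testing whether any bit in [256, 512) of the buffer is
   non-zero.  */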
4371 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4372 .{ADD,SUB,MUL}_OVERFLOW call. */
4374 void
4375 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4377 tree rhs1 = gimple_assign_rhs1 (stmt);
4378 rhs1 = TREE_OPERAND (rhs1, 0);
4379 if (obj == NULL_TREE)
4381 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4382 gcc_assert (m_vars[part] != NULL_TREE);
4383 obj = m_vars[part];
4385 if (TREE_CODE (rhs1) == SSA_NAME
4386 && (m_names == NULL
4387 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4389 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4390 return;
4392 int part = var_to_partition (m_map, rhs1);
4393 gcc_assert (m_vars[part] != NULL_TREE);
4394 tree var = m_vars[part];
4395 unsigned HOST_WIDE_INT nelts
4396 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4397 tree atype = build_array_type_nelts (m_limb_type, nelts);
4398 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4399 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4400 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4401 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4402 ? 0 : nelts * m_limb_size);
4403 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4404 gimple *g = gimple_build_assign (obj, v2);
4405 insert_before (g);
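/* E.g. (sketch) lhs = IMAGPART_EXPR <c_5> with _Complex _BitInt(128)
   and 64-bit limbs copies the 2 limbs starting at byte offset
   2 * m_limb_size of c_5's partition variable into OBJ.  */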
4408 /* Lower COMPLEX_EXPR stmt. */
4410 void
4411 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4413 tree lhs = gimple_assign_lhs (stmt);
4414 tree rhs1 = gimple_assign_rhs1 (stmt);
4415 tree rhs2 = gimple_assign_rhs2 (stmt);
4416 int part = var_to_partition (m_map, lhs);
4417 gcc_assert (m_vars[part] != NULL_TREE);
4418 lhs = m_vars[part];
4419 unsigned HOST_WIDE_INT nelts
4420 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4421 tree atype = build_array_type_nelts (m_limb_type, nelts);
4422 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4423 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4424 tree v2;
4425 if (TREE_CODE (rhs1) == SSA_NAME)
4427 part = var_to_partition (m_map, rhs1);
4428 gcc_assert (m_vars[part] != NULL_TREE);
4429 v2 = m_vars[part];
4431 else if (integer_zerop (rhs1))
4432 v2 = build_zero_cst (atype);
4433 else
4434 v2 = tree_output_constant_def (rhs1);
4435 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4436 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4437 gimple *g = gimple_build_assign (v1, v2);
4438 insert_before (g);
4439 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4440 TYPE_SIZE_UNIT (atype));
4441 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4442 if (TREE_CODE (rhs2) == SSA_NAME)
4444 part = var_to_partition (m_map, rhs2);
4445 gcc_assert (m_vars[part] != NULL_TREE);
4446 v2 = m_vars[part];
4448 else if (integer_zerop (rhs2))
4449 v2 = build_zero_cst (atype);
4450 else
4451 v2 = tree_output_constant_def (rhs2);
4452 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4453 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4454 g = gimple_build_assign (v1, v2);
4455 insert_before (g);
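/* The complex value is thus laid out as the real part limbs followed
   immediately by the imaginary part limbs; e.g. (sketch) for
   _Complex _BitInt(128) with 64-bit limbs the partition variable
   holds { re0, re1, im0, im1 } and each half is copied above with a
   single aggregate assignment.  */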
4458 /* Lower a call statement with one or more large/huge _BitInt
4459 arguments or large/huge _BitInt return value. */
4461 void
4462 bitint_large_huge::lower_call (tree obj, gimple *stmt)
4464 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4465 unsigned int nargs = gimple_call_num_args (stmt);
4466 if (gimple_call_internal_p (stmt))
4467 switch (gimple_call_internal_fn (stmt))
4469 case IFN_ADD_OVERFLOW:
4470 case IFN_SUB_OVERFLOW:
4471 case IFN_UBSAN_CHECK_ADD:
4472 case IFN_UBSAN_CHECK_SUB:
4473 lower_addsub_overflow (obj, stmt);
4474 return;
4475 case IFN_MUL_OVERFLOW:
4476 case IFN_UBSAN_CHECK_MUL:
4477 lower_mul_overflow (obj, stmt);
4478 return;
4479 default:
4480 break;
4482 for (unsigned int i = 0; i < nargs; ++i)
4484 tree arg = gimple_call_arg (stmt, i);
4485 if (TREE_CODE (arg) != SSA_NAME
4486 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
4487 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
4488 continue;
4489 int p = var_to_partition (m_map, arg);
4490 tree v = m_vars[p];
4491 gcc_assert (v != NULL_TREE);
4492 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
4493 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
4494 arg = make_ssa_name (TREE_TYPE (arg));
4495 gimple *g = gimple_build_assign (arg, v);
4496 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
4497 gimple_call_set_arg (stmt, i, arg);
4498 if (m_preserved == NULL)
4499 m_preserved = BITMAP_ALLOC (NULL);
4500 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
4502 tree lhs = gimple_call_lhs (stmt);
4503 if (lhs
4504 && TREE_CODE (lhs) == SSA_NAME
4505 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
4506 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
4508 int p = var_to_partition (m_map, lhs);
4509 tree v = m_vars[p];
4510 gcc_assert (v != NULL_TREE);
4511 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
4512 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
4513 gimple_call_set_lhs (stmt, v);
4514 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4516 update_stmt (stmt);
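/* E.g. (sketch) a call foo (x_3) with large _BitInt x_3 living in
   partition variable V becomes
     x_N = VIEW_CONVERT_EXPR<_BitInt(256)>(V);
     foo (x_N);
   with x_N recorded in m_preserved, while a large/huge _BitInt lhs is
   redirected so the callee's return value is stored directly into the
   lhs's partition variable.  */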
4519 /* Lower __asm STMT which involves large/huge _BitInt values. */
4521 void
4522 bitint_large_huge::lower_asm (gimple *stmt)
4524 gasm *g = as_a <gasm *> (stmt);
4525 unsigned noutputs = gimple_asm_noutputs (g);
4526 unsigned ninputs = gimple_asm_ninputs (g);
4528 for (unsigned i = 0; i < noutputs; ++i)
4530 tree t = gimple_asm_output_op (g, i);
4531 tree s = TREE_VALUE (t);
4532 if (TREE_CODE (s) == SSA_NAME
4533 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
4534 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
4536 int part = var_to_partition (m_map, s);
4537 gcc_assert (m_vars[part] != NULL_TREE);
4538 TREE_VALUE (t) = m_vars[part];
4541 for (unsigned i = 0; i < ninputs; ++i)
4543 tree t = gimple_asm_input_op (g, i);
4544 tree s = TREE_VALUE (t);
4545 if (TREE_CODE (s) == SSA_NAME
4546 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
4547 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
4549 int part = var_to_partition (m_map, s);
4550 gcc_assert (m_vars[part] != NULL_TREE);
4551 TREE_VALUE (t) = m_vars[part];
4554 update_stmt (stmt);
4557 /* Lower statement STMT which involves large/huge _BitInt values
4558 into code accessing individual limbs. */
4560 void
4561 bitint_large_huge::lower_stmt (gimple *stmt)
4563 m_first = true;
4564 m_lhs = NULL_TREE;
4565 m_data.truncate (0);
4566 m_data_cnt = 0;
4567 m_gsi = gsi_for_stmt (stmt);
4568 m_after_stmt = NULL;
4569 m_bb = NULL;
4570 m_init_gsi = m_gsi;
4571 gsi_prev (&m_init_gsi);
4572 m_preheader_bb = NULL;
4573 m_upwards_2limb = 0;
4574 m_upwards = false;
4575 m_var_msb = false;
4576 m_cast_conditional = false;
4577 m_bitfld_load = 0;
4578 m_loc = gimple_location (stmt);
4579 if (is_gimple_call (stmt))
4581 lower_call (NULL_TREE, stmt);
4582 return;
4584 if (gimple_code (stmt) == GIMPLE_ASM)
4586 lower_asm (stmt);
4587 return;
4589 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
4590 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
4591 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
4592 bool mergeable_cast_p = false;
4593 bool final_cast_p = false;
4594 if (gimple_assign_cast_p (stmt))
4596 lhs = gimple_assign_lhs (stmt);
4597 tree rhs1 = gimple_assign_rhs1 (stmt);
4598 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
4599 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
4600 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
4601 mergeable_cast_p = true;
4602 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
4603 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
4604 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
4606 final_cast_p = true;
4607 if (TREE_CODE (rhs1) == SSA_NAME
4608 && (m_names == NULL
4609 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4611 gimple *g = SSA_NAME_DEF_STMT (rhs1);
4612 if (is_gimple_assign (g)
4613 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
4615 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
4616 if (TREE_CODE (rhs2) == SSA_NAME
4617 && (m_names == NULL
4618 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
4620 g = SSA_NAME_DEF_STMT (rhs2);
4621 int ovf = optimizable_arith_overflow (g);
4622 if (ovf == 2)
4623 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
4624 and IMAGPART_EXPR uses, where the latter is cast to
4625 non-_BitInt, it will be optimized when handling
4626 the REALPART_EXPR. */
4627 return;
4628 if (ovf == 1)
4630 lower_call (NULL_TREE, g);
4631 return;
4638 if (gimple_store_p (stmt))
4640 tree rhs1 = gimple_assign_rhs1 (stmt);
4641 if (TREE_CODE (rhs1) == SSA_NAME
4642 && (m_names == NULL
4643 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4645 gimple *g = SSA_NAME_DEF_STMT (rhs1);
4646 m_loc = gimple_location (g);
4647 lhs = gimple_assign_lhs (stmt);
4648 if (is_gimple_assign (g) && !mergeable_op (g))
4649 switch (gimple_assign_rhs_code (g))
4651 case LSHIFT_EXPR:
4652 case RSHIFT_EXPR:
4653 lower_shift_stmt (lhs, g);
4654 handled:
4655 m_gsi = gsi_for_stmt (stmt);
4656 unlink_stmt_vdef (stmt);
4657 release_ssa_name (gimple_vdef (stmt));
4658 gsi_remove (&m_gsi, true);
4659 return;
4660 case MULT_EXPR:
4661 case TRUNC_DIV_EXPR:
4662 case TRUNC_MOD_EXPR:
4663 lower_muldiv_stmt (lhs, g);
4664 goto handled;
4665 case FIX_TRUNC_EXPR:
4666 lower_float_conv_stmt (lhs, g);
4667 goto handled;
4668 case REALPART_EXPR:
4669 case IMAGPART_EXPR:
4670 lower_cplxpart_stmt (lhs, g);
4671 goto handled;
4672 default:
4673 break;
4675 else if (optimizable_arith_overflow (g) == 3)
4677 lower_call (lhs, g);
4678 goto handled;
4680 m_loc = gimple_location (stmt);
4683 if (mergeable_op (stmt)
4684 || gimple_store_p (stmt)
4685 || gimple_assign_load_p (stmt)
4686 || eq_p
4687 || mergeable_cast_p)
4689 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
4690 if (!eq_p)
4691 return;
4693 else if (cmp_code != ERROR_MARK)
4694 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
4695 if (cmp_code != ERROR_MARK)
4697 if (gimple_code (stmt) == GIMPLE_COND)
4699 gcond *cstmt = as_a <gcond *> (stmt);
4700 gimple_cond_set_lhs (cstmt, lhs);
4701 gimple_cond_set_rhs (cstmt, boolean_false_node);
4702 gimple_cond_set_code (cstmt, cmp_code);
4703 update_stmt (stmt);
4704 return;
4706 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
4708 tree cond = build2 (cmp_code, boolean_type_node, lhs,
4709 boolean_false_node);
4710 gimple_assign_set_rhs1 (stmt, cond);
4711 lhs = gimple_assign_lhs (stmt);
4712 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
4713 || (bitint_precision_kind (TREE_TYPE (lhs))
4714 <= bitint_prec_middle));
4715 update_stmt (stmt);
4716 return;
4718 gimple_assign_set_rhs1 (stmt, lhs);
4719 gimple_assign_set_rhs2 (stmt, boolean_false_node);
4720 gimple_assign_set_rhs_code (stmt, cmp_code);
4721 update_stmt (stmt);
4722 return;
4724 if (final_cast_p)
4726 tree lhs_type = TREE_TYPE (lhs);
4727 /* Add support for 3 or more limbs filled in from normal integral
4728 type if this assert fails. If no target chooses limb mode smaller
4729 than half of largest supported normal integral type, this will not
4730 be needed. */
4731 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
4732 gimple *g;
4733 if (TREE_CODE (lhs_type) == BITINT_TYPE
4734 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
4735 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
4736 TYPE_UNSIGNED (lhs_type));
4737 m_data_cnt = 0;
4738 tree rhs1 = gimple_assign_rhs1 (stmt);
4739 tree r1 = handle_operand (rhs1, size_int (0));
4740 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
4741 r1 = add_cast (lhs_type, r1);
4742 if (TYPE_PRECISION (lhs_type) > limb_prec)
4744 m_data_cnt = 0;
4745 m_first = false;
4746 tree r2 = handle_operand (rhs1, size_int (1));
4747 r2 = add_cast (lhs_type, r2);
4748 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
4749 build_int_cst (unsigned_type_node,
4750 limb_prec));
4751 insert_before (g);
4752 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
4753 gimple_assign_lhs (g));
4754 insert_before (g);
4755 r1 = gimple_assign_lhs (g);
4757 if (lhs_type != TREE_TYPE (lhs))
4758 g = gimple_build_assign (lhs, NOP_EXPR, r1);
4759 else
4760 g = gimple_build_assign (lhs, r1);
4761 gsi_replace (&m_gsi, g, true);
4762 return;
4764 if (is_gimple_assign (stmt))
4765 switch (gimple_assign_rhs_code (stmt))
4767 case LSHIFT_EXPR:
4768 case RSHIFT_EXPR:
4769 lower_shift_stmt (NULL_TREE, stmt);
4770 return;
4771 case MULT_EXPR:
4772 case TRUNC_DIV_EXPR:
4773 case TRUNC_MOD_EXPR:
4774 lower_muldiv_stmt (NULL_TREE, stmt);
4775 return;
4776 case FIX_TRUNC_EXPR:
4777 case FLOAT_EXPR:
4778 lower_float_conv_stmt (NULL_TREE, stmt);
4779 return;
4780 case REALPART_EXPR:
4781 case IMAGPART_EXPR:
4782 lower_cplxpart_stmt (NULL_TREE, stmt);
4783 return;
4784 case COMPLEX_EXPR:
4785 lower_complexexpr_stmt (stmt);
4786 return;
4787 default:
4788 break;
4790 gcc_unreachable ();
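/* Illustration of the final_cast_p path above (hypothetical input):
     _BitInt(256) x;
     unsigned long long l = (unsigned long long) x;
   reads only limb 0 of X; for a 128-bit integral lhs on a 64-bit-limb
   target, limb 1 is additionally shifted left by 64 and ORed in, per
   the TYPE_PRECISION (lhs_type) > limb_prec branch.  */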
4793 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
4794 the desired memory state. */
4796 void *
4797 vuse_eq (ao_ref *, tree vuse1, void *data)
4799 tree vuse2 = (tree) data;
4800 if (vuse1 == vuse2)
4801 return data;
4803 return NULL;
4806 /* Return true if STMT uses a library function and needs to take
4807 address of its inputs. We need to avoid bit-fields in those
4808 cases. */
4810 bool
4811 stmt_needs_operand_addr (gimple *stmt)
4813 if (is_gimple_assign (stmt))
4814 switch (gimple_assign_rhs_code (stmt))
4816 case MULT_EXPR:
4817 case TRUNC_DIV_EXPR:
4818 case TRUNC_MOD_EXPR:
4819 case FLOAT_EXPR:
4820 return true;
4821 default:
4822 break;
4824 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
4825 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
4826 return true;
4827 return false;
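/* E.g. large/huge _BitInt multiplication, division and conversion to
   floating point are lowered to library calls taking the address of
   each operand's limb array, so such operands must not live in
   bit-fields; see the corresponding check in
   bitint_dom_walker::before_dom_children below.  */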
4830 /* Dominator walker used to discover which large/huge _BitInt
4831 loads could be sunk into all their uses. */
4833 class bitint_dom_walker : public dom_walker
4835 public:
4836 bitint_dom_walker (bitmap names, bitmap loads)
4837 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
4839 edge before_dom_children (basic_block) final override;
4841 private:
4842 bitmap m_names, m_loads;
4845 edge
4846 bitint_dom_walker::before_dom_children (basic_block bb)
4848 gphi *phi = get_virtual_phi (bb);
4849 tree vop;
4850 if (phi)
4851 vop = gimple_phi_result (phi);
4852 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
4853 vop = NULL_TREE;
4854 else
4855 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
4857 auto_vec<tree, 16> worklist;
4858 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4859 !gsi_end_p (gsi); gsi_next (&gsi))
4861 gimple *stmt = gsi_stmt (gsi);
4862 if (is_gimple_debug (stmt))
4863 continue;
4865 if (!vop && gimple_vuse (stmt))
4866 vop = gimple_vuse (stmt);
4868 tree cvop = vop;
4869 if (gimple_vdef (stmt))
4870 vop = gimple_vdef (stmt);
4872 tree lhs = gimple_get_lhs (stmt);
4873 if (lhs
4874 && TREE_CODE (lhs) == SSA_NAME
4875 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
4876 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
4877 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4878 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
4879 it means it will be handled in a loop or straight line code
4880 at the location of its (ultimate) immediate use, so for
4881 vop checking purposes check these only at the ultimate
4882 immediate use. */
4883 continue;
4885 ssa_op_iter oi;
4886 use_operand_p use_p;
4887 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
4889 tree s = USE_FROM_PTR (use_p);
4890 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
4891 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
4892 worklist.safe_push (s);
4895 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
4896 while (worklist.length () > 0)
4898 tree s = worklist.pop ();
4900 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
4902 gimple *g = SSA_NAME_DEF_STMT (s);
4903 needs_operand_addr |= stmt_needs_operand_addr (g);
4904 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
4906 tree s2 = USE_FROM_PTR (use_p);
4907 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
4908 && (bitint_precision_kind (TREE_TYPE (s2))
4909 >= bitint_prec_large))
4910 worklist.safe_push (s2);
4912 continue;
4914 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
4915 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
4917 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
4918 if (TREE_CODE (rhs) == SSA_NAME
4919 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
4920 s = rhs;
4921 else
4922 continue;
4924 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
4925 continue;
4927 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
4928 if (needs_operand_addr
4929 && TREE_CODE (rhs1) == COMPONENT_REF
4930 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
4932 tree fld = TREE_OPERAND (rhs1, 1);
4933 /* For little-endian, we can allow as inputs bit-fields
4934 which start at a limb boundary. */
4935 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
4936 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
4937 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
4938 % limb_prec) == 0)
4940 else
4942 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
4943 continue;
4947 ao_ref ref;
4948 ao_ref_init (&ref, rhs1);
4949 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
4950 unsigned limit = 64;
4951 tree vuse = cvop;
4952 if (vop != cvop
4953 && is_gimple_assign (stmt)
4954 && gimple_store_p (stmt)
4955 && !operand_equal_p (lhs,
4956 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
4957 0))
4958 vuse = vop;
4959 if (vuse != lvop
4960 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
4961 NULL, NULL, limit, lvop) == NULL)
4962 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
4966 bb->aux = (void *) vop;
4967 return NULL;
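/* E.g. (sketch) given
     x_1 = mem; ... other = y_2; ... use (x_1);
   the load into x_1 only stays sinkable to its use if
   walk_non_aliased_vuses can walk from the use's virtual operand back
   to the load's VUSE without meeting a clobber of MEM; otherwise the
   bit for x_1 is cleared in m_loads above.  */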
4972 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
4973 build_ssa_conflict_graph.
4974 The differences are:
4975 1) don't process assignments with large/huge _BitInt lhs not in NAMES
4976 2) for large/huge _BitInt multiplication/division/modulo process def
4977 only after processing uses rather than before to make uses conflict
4978 with the definition
4979 3) for large/huge _BitInt uses not in NAMES mark the uses of their
4980 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
4981 the final statement. */
4983 void
4984 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
4985 ssa_conflicts *graph, bitmap names,
4986 void (*def) (live_track *, tree,
4987 ssa_conflicts *),
4988 void (*use) (live_track *, tree))
4990 bool muldiv_p = false;
4991 tree lhs = NULL_TREE;
4992 if (is_gimple_assign (stmt))
4994 lhs = gimple_assign_lhs (stmt);
4995 if (TREE_CODE (lhs) == SSA_NAME
4996 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
4997 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
4999 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5000 return;
5001 switch (gimple_assign_rhs_code (stmt))
5003 case MULT_EXPR:
5004 case TRUNC_DIV_EXPR:
5005 case TRUNC_MOD_EXPR:
5006 muldiv_p = true;
5007 default:
5008 break;
5013 ssa_op_iter iter;
5014 tree var;
5015 if (!muldiv_p)
5017 /* For stmts with more than one SSA_NAME definition pretend all the
5018 SSA_NAME outputs but the first one are live at this point, so
5019 that conflicts are added in between all those even when they are
5020 actually not really live after the asm, because expansion might
5021 copy those into pseudos after the asm and if multiple outputs
5022 share the same partition, it might overwrite those that should
5023 be live. E.g.
5024 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
5025 return a;
5026 See PR70593. */
5027 bool first = true;
5028 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5029 if (first)
5030 first = false;
5031 else
5032 use (live, var);
5034 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5035 def (live, var, graph);
5038 auto_vec<tree, 16> worklist;
5039 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5040 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5041 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5043 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5044 use (live, var);
5045 else
5046 worklist.safe_push (var);
5049 while (worklist.length () > 0)
5051 tree s = worklist.pop ();
5052 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5053 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5054 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5056 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5057 use (live, var);
5058 else
5059 worklist.safe_push (var);
5063 if (muldiv_p)
5064 def (live, lhs, graph);
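/* E.g. (sketch) for r_1 = a_2 * b_3 with huge _BitInt operands the
   lowered form passes the address of r_1's partition variable to the
   multiplication library call, which may write result limbs before
   all limbs of a_2 and b_3 have been read; processing the definition
   of r_1 only after its uses therefore makes r_1 conflict with a_2
   and b_3, so coalescing never assigns them the same partition.  */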
5067 /* Entry point for _BitInt(N) operation lowering during optimization. */
5069 static unsigned int
5070 gimple_lower_bitint (void)
5072 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5073 limb_prec = 0;
5075 unsigned int i;
5076 for (i = 0; i < num_ssa_names; ++i)
5078 tree s = ssa_name (i);
5079 if (s == NULL)
5080 continue;
5081 tree type = TREE_TYPE (s);
5082 if (TREE_CODE (type) == COMPLEX_TYPE)
5083 type = TREE_TYPE (type);
5084 if (TREE_CODE (type) == BITINT_TYPE
5085 && bitint_precision_kind (type) != bitint_prec_small)
5086 break;
5087 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5088 into memory. Such functions could have no large/huge SSA_NAMEs. */
5089 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5091 gimple *g = SSA_NAME_DEF_STMT (s);
5092 if (is_gimple_assign (g) && gimple_store_p (g))
5094 tree t = gimple_assign_rhs1 (g);
5095 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5096 && (bitint_precision_kind (TREE_TYPE (t))
5097 >= bitint_prec_large))
5098 break;
5102 if (i == num_ssa_names)
5103 return 0;
5105 basic_block bb;
5106 auto_vec<gimple *, 4> switch_statements;
5107 FOR_EACH_BB_FN (bb, cfun)
5109 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5111 tree idx = gimple_switch_index (swtch);
5112 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5113 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5114 continue;
5116 if (optimize)
5117 group_case_labels_stmt (swtch);
5118 switch_statements.safe_push (swtch);
5122 if (!switch_statements.is_empty ())
5124 bool expanded = false;
5125 gimple *stmt;
5126 unsigned int j;
5127 i = 0;
5128 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5130 gswitch *swtch = as_a<gswitch *> (stmt);
5131 tree_switch_conversion::switch_decision_tree dt (swtch);
5132 expanded |= dt.analyze_switch_statement ();
5135 if (expanded)
5137 free_dominance_info (CDI_DOMINATORS);
5138 free_dominance_info (CDI_POST_DOMINATORS);
5139 mark_virtual_operands_for_renaming (cfun);
5140 cleanup_tree_cfg (TODO_update_ssa);
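/* E.g. (sketch) a switch on a _BitInt(256) index expanded by
   switch_decision_tree becomes a tree of equality and range
   comparisons, which the code below then lowers like any other
   large/huge _BitInt comparisons.  */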
5144 struct bitint_large_huge large_huge;
5145 bool has_large_huge_parm_result = false;
5146 bool has_large_huge = false;
5147 unsigned int ret = 0, first_large_huge = ~0U;
5148 bool edge_insertions = false;
5149 for (; i < num_ssa_names; ++i)
5151 tree s = ssa_name (i);
5152 if (s == NULL)
5153 continue;
5154 tree type = TREE_TYPE (s);
5155 if (TREE_CODE (type) == COMPLEX_TYPE)
5156 type = TREE_TYPE (type);
5157 if (TREE_CODE (type) == BITINT_TYPE
5158 && bitint_precision_kind (type) >= bitint_prec_large)
5160 if (first_large_huge == ~0U)
5161 first_large_huge = i;
5162 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5163 gimple_stmt_iterator gsi;
5164 tree_code rhs_code;
5165 /* Unoptimize certain constructs to simpler alternatives to
5166 avoid having to lower all of them. */
5167 if (is_gimple_assign (stmt))
5168 switch (rhs_code = gimple_assign_rhs_code (stmt))
5170 default:
5171 break;
5172 case LROTATE_EXPR:
5173 case RROTATE_EXPR:
5175 first_large_huge = 0;
5176 location_t loc = gimple_location (stmt);
5177 gsi = gsi_for_stmt (stmt);
5178 tree rhs1 = gimple_assign_rhs1 (stmt);
5179 tree type = TREE_TYPE (rhs1);
5180 tree n = gimple_assign_rhs2 (stmt), m;
5181 tree p = build_int_cst (TREE_TYPE (n),
5182 TYPE_PRECISION (type));
5183 if (TREE_CODE (n) == INTEGER_CST)
5184 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5185 else
5187 m = make_ssa_name (TREE_TYPE (n));
5188 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5189 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5190 gimple_set_location (g, loc);
5192 if (!TYPE_UNSIGNED (type))
5194 tree utype = build_bitint_type (TYPE_PRECISION (type),
5195 1);
5196 if (TREE_CODE (rhs1) == INTEGER_CST)
5197 rhs1 = fold_convert (utype, rhs1);
5198 else
5200 tree t = make_ssa_name (utype);
5201 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5202 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5203 gimple_set_location (g, loc);
5204 rhs1 = t;
5206 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5207 rhs_code == LROTATE_EXPR
5208 ? LSHIFT_EXPR : RSHIFT_EXPR,
5209 rhs1, n);
5210 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5211 gimple_set_location (g, loc);
5212 tree op1 = gimple_assign_lhs (g);
5213 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5214 rhs_code == LROTATE_EXPR
5215 ? RSHIFT_EXPR : LSHIFT_EXPR,
5216 rhs1, m);
5217 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5218 gimple_set_location (g, loc);
5219 tree op2 = gimple_assign_lhs (g);
5220 tree lhs = gimple_assign_lhs (stmt);
5221 if (!TYPE_UNSIGNED (type))
5223 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5224 BIT_IOR_EXPR, op1, op2);
5225 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5226 gimple_set_location (g, loc);
5227 g = gimple_build_assign (lhs, NOP_EXPR,
5228 gimple_assign_lhs (g));
5230 else
5231 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5232 gsi_replace (&gsi, g, true);
5233 gimple_set_location (g, loc);
5235 break;
5236 case ABS_EXPR:
5237 case ABSU_EXPR:
5238 case MIN_EXPR:
5239 case MAX_EXPR:
5240 case COND_EXPR:
5241 first_large_huge = 0;
5242 gsi = gsi_for_stmt (stmt);
5243 tree lhs = gimple_assign_lhs (stmt);
5244 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5245 location_t loc = gimple_location (stmt);
5246 if (rhs_code == ABS_EXPR)
5247 g = gimple_build_cond (LT_EXPR, rhs1,
5248 build_zero_cst (TREE_TYPE (rhs1)),
5249 NULL_TREE, NULL_TREE);
5250 else if (rhs_code == ABSU_EXPR)
5252 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5253 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5254 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5255 gimple_set_location (g, loc);
5256 g = gimple_build_cond (LT_EXPR, rhs1,
5257 build_zero_cst (TREE_TYPE (rhs1)),
5258 NULL_TREE, NULL_TREE);
5259 rhs1 = rhs2;
5261 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5263 rhs2 = gimple_assign_rhs2 (stmt);
5264 if (TREE_CODE (rhs1) == INTEGER_CST)
5265 std::swap (rhs1, rhs2);
5266 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5267 NULL_TREE, NULL_TREE);
5268 if (rhs_code == MAX_EXPR)
5269 std::swap (rhs1, rhs2);
5271 else
5273 g = gimple_build_cond (NE_EXPR, rhs1,
5274 build_zero_cst (TREE_TYPE (rhs1)),
5275 NULL_TREE, NULL_TREE);
5276 rhs1 = gimple_assign_rhs2 (stmt);
5277 rhs2 = gimple_assign_rhs3 (stmt);
5279 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5280 gimple_set_location (g, loc);
5281 edge e1 = split_block (gsi_bb (gsi), g);
5282 edge e2 = split_block (e1->dest, (gimple *) NULL);
5283 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5284 e3->probability = profile_probability::even ();
5285 e1->flags = EDGE_TRUE_VALUE;
5286 e1->probability = e3->probability.invert ();
5287 if (dom_info_available_p (CDI_DOMINATORS))
5288 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5289 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5291 gsi = gsi_after_labels (e1->dest);
5292 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5293 NEGATE_EXPR, rhs1);
5294 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5295 gimple_set_location (g, loc);
5296 rhs2 = gimple_assign_lhs (g);
5297 std::swap (rhs1, rhs2);
5299 gsi = gsi_for_stmt (stmt);
5300 gsi_remove (&gsi, true);
5301 gphi *phi = create_phi_node (lhs, e2->dest);
5302 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5303 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5304 break;
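/* E.g. (sketch) lhs = ABS_EXPR <x_1> on _BitInt(256) is rewritten
   above as
     if (x_1 < 0) goto neg; else goto join;
     neg: x_2 = -x_1;
     join: lhs = PHI <x_2 (neg), x_1 (else)>;
   so only the comparison, negation and PHI need lowering later.  */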
5307 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5308 into memory. Such functions could have no large/huge SSA_NAMEs. */
5309 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5311 gimple *g = SSA_NAME_DEF_STMT (s);
5312 if (is_gimple_assign (g) && gimple_store_p (g))
5314 tree t = gimple_assign_rhs1 (g);
5315 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5316 && (bitint_precision_kind (TREE_TYPE (t))
5317 >= bitint_prec_large))
5318 has_large_huge = true;
5322 for (i = first_large_huge; i < num_ssa_names; ++i)
5324 tree s = ssa_name (i);
5325 if (s == NULL)
5326 continue;
5327 tree type = TREE_TYPE (s);
5328 if (TREE_CODE (type) == COMPLEX_TYPE)
5329 type = TREE_TYPE (type);
5330 if (TREE_CODE (type) == BITINT_TYPE
5331 && bitint_precision_kind (type) >= bitint_prec_large)
5333 use_operand_p use_p;
5334 gimple *use_stmt;
5335 has_large_huge = true;
5336 if (optimize
5337 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
5338 continue;
5339 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5340 the same bb and could be handled in the same loop with the
5341 immediate use. */
5342 if (optimize
5343 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5344 && single_imm_use (s, &use_p, &use_stmt)
5345 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
5347 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
5349 if (mergeable_op (use_stmt))
5350 continue;
5351 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
5352 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
5353 continue;
5354 if (gimple_assign_cast_p (use_stmt))
5356 tree lhs = gimple_assign_lhs (use_stmt);
5357 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5358 continue;
5360 else if (gimple_store_p (use_stmt)
5361 && is_gimple_assign (use_stmt)
5362 && !gimple_has_volatile_ops (use_stmt)
5363 && !stmt_ends_bb_p (use_stmt))
5364 continue;
5366 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5368 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5369 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5370 && ((is_gimple_assign (use_stmt)
5371 && (gimple_assign_rhs_code (use_stmt)
5372 != COMPLEX_EXPR))
5373 || gimple_code (use_stmt) == GIMPLE_COND)
5374 && (!gimple_store_p (use_stmt)
5375 || (is_gimple_assign (use_stmt)
5376 && !gimple_has_volatile_ops (use_stmt)
5377 && !stmt_ends_bb_p (use_stmt)))
5378 && (TREE_CODE (rhs1) != SSA_NAME
5379 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5381 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
5382 || (bitint_precision_kind (TREE_TYPE (rhs1))
5383 < bitint_prec_large)
5384 || (TYPE_PRECISION (TREE_TYPE (rhs1))
5385 >= TYPE_PRECISION (TREE_TYPE (s)))
5386 || mergeable_op (SSA_NAME_DEF_STMT (s)))
5387 continue;
5388 /* Prevent merging a widening non-mergeable cast
5389 on result of some narrower mergeable op
5390 together with later mergeable operations. E.g.
5391 result of _BitInt(223) addition shouldn't be
5392 sign-extended to _BitInt(513) and have another
5393 _BitInt(513) added to it, as handle_plus_minus
5394 with its PHI node handling inside of handle_cast
5395 will not work correctly. An exception is if
5396 use_stmt is a store, this is handled directly
5397 in lower_mergeable_stmt. */
5398 if (TREE_CODE (rhs1) != SSA_NAME
5399 || !has_single_use (rhs1)
5400 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
5401 != gimple_bb (SSA_NAME_DEF_STMT (s)))
5402 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
5403 || gimple_store_p (use_stmt))
5404 continue;
5405 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
5407 /* Another exception is if the widening cast is
5408 from mergeable same precision cast from something
5409 not mergeable. */
5410 tree rhs2
5411 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
5412 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
5413 && (TYPE_PRECISION (TREE_TYPE (rhs1))
5414 == TYPE_PRECISION (TREE_TYPE (rhs2))))
5416 if (TREE_CODE (rhs2) != SSA_NAME
5417 || !has_single_use (rhs2)
5418 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
5419 != gimple_bb (SSA_NAME_DEF_STMT (s)))
5420 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
5421 continue;
5426 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
5427 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
5429 case IMAGPART_EXPR:
5431 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5432 rhs1 = TREE_OPERAND (rhs1, 0);
5433 if (TREE_CODE (rhs1) == SSA_NAME)
5435 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5436 if (optimizable_arith_overflow (g))
5437 continue;
5440 /* FALLTHRU */
5441 case LSHIFT_EXPR:
5442 case RSHIFT_EXPR:
5443 case MULT_EXPR:
5444 case TRUNC_DIV_EXPR:
5445 case TRUNC_MOD_EXPR:
5446 case FIX_TRUNC_EXPR:
5447 case REALPART_EXPR:
5448 if (gimple_store_p (use_stmt)
5449 && is_gimple_assign (use_stmt)
5450 && !gimple_has_volatile_ops (use_stmt)
5451 && !stmt_ends_bb_p (use_stmt))
5453 tree lhs = gimple_assign_lhs (use_stmt);
5454 /* As multiply/division passes address of the lhs
5455 to library function and that assumes it can extend
5456 it to whole number of limbs, avoid merging those
5457 with bit-field stores. Don't allow it for
5458 shifts etc. either, so that the bit-field store
5459 handling doesn't have to be done everywhere. */
5460 if (TREE_CODE (lhs) == COMPONENT_REF
5461 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
5462 break;
5463 continue;
5465 break;
5466 default:
5467 break;
          /* Also ignore uninitialized uses.  */
          if (SSA_NAME_IS_DEFAULT_DEF (s)
              && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
            continue;

          if (!large_huge.m_names)
            large_huge.m_names = BITMAP_ALLOC (NULL);
          bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
          if (has_single_use (s))
            {
              if (!large_huge.m_single_use_names)
                large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
              bitmap_set_bit (large_huge.m_single_use_names,
                              SSA_NAME_VERSION (s));
            }
          if (SSA_NAME_VAR (s)
              && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
                   && SSA_NAME_IS_DEFAULT_DEF (s))
                  || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
            has_large_huge_parm_result = true;
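          /* Collect candidate loads that might be lowered at their use
             site instead of through a partition variable; the dominator
             walk below makes the final decision.  */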
          if (optimize
              && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
              && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
              && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
              && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
            {
              use_operand_p use_p;
              imm_use_iterator iter;
              bool optimizable_load = true;
              FOR_EACH_IMM_USE_FAST (use_p, iter, s)
                {
                  gimple *use_stmt = USE_STMT (use_p);
                  if (is_gimple_debug (use_stmt))
                    continue;
                  if (gimple_code (use_stmt) == GIMPLE_PHI
                      || is_gimple_call (use_stmt))
                    {
                      optimizable_load = false;
                      break;
                    }
                }

              ssa_op_iter oi;
              FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
                                        oi, SSA_OP_USE)
                {
                  tree s2 = USE_FROM_PTR (use_p);
                  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
                    {
                      optimizable_load = false;
                      break;
                    }
                }

              if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
                {
                  if (!large_huge.m_loads)
                    large_huge.m_loads = BITMAP_ALLOC (NULL);
                  bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
                }
            }
        }
      /* We also need to rewrite stores of large/huge _BitInt INTEGER_CSTs
         into memory; such functions might not contain any large/huge
         SSA_NAMEs at all.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
        {
          gimple *g = SSA_NAME_DEF_STMT (s);
          if (is_gimple_assign (g) && gimple_store_p (g))
            {
              tree t = gimple_assign_rhs1 (g);
              if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
                  && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
                has_large_huge = true;
            }
        }
    }

  if (large_huge.m_names || has_large_huge)
    {
      ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
      calculate_dominance_info (CDI_DOMINATORS);
      if (optimize)
        enable_ranger (cfun);
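      /* The dominator walk decides which of the candidate loads can be
         handled when lowering their uses; drop those from m_names so
         they don't get a backing variable of their own.  */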
      if (large_huge.m_loads)
        {
          basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
          entry->aux = NULL;
          bitint_dom_walker (large_huge.m_names,
                             large_huge.m_loads).walk (entry);
          bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
          clear_aux_for_blocks ();
          BITMAP_FREE (large_huge.m_loads);
        }
      large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
      large_huge.m_limb_size
        = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));

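      /* Coalesce the remaining large/huge _BitInt SSA_NAMEs into
         partitions, so that names which can share backing storage
         do so.  */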
      if (large_huge.m_names)
        {
          large_huge.m_map
            = init_var_map (num_ssa_names, NULL, large_huge.m_names);
          coalesce_ssa_name (large_huge.m_map);
          partition_view_normal (large_huge.m_map);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "After Coalescing:\n");
              dump_var_map (dump_file, large_huge.m_map);
            }
          large_huge.m_vars
            = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
          bitmap_iterator bi;
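          /* Partitions containing a PARM_DECL default definition or a
             RESULT_DECL use that decl directly as backing storage.  */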
          if (has_large_huge_parm_result)
            EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
              {
                tree s = ssa_name (i);
                if (SSA_NAME_VAR (s)
                    && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
                         && SSA_NAME_IS_DEFAULT_DEF (s))
                        || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
                  {
                    int p = var_to_partition (large_huge.m_map, s);
                    if (large_huge.m_vars[p] == NULL_TREE)
                      {
                        large_huge.m_vars[p] = SSA_NAME_VAR (s);
                        mark_addressable (SSA_NAME_VAR (s));
                      }
                  }
              }
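          /* Every remaining partition gets a fresh addressable array of
             limbs sized to match its _BitInt type.  */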
          tree atype = NULL_TREE;
          EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
            {
              tree s = ssa_name (i);
              int p = var_to_partition (large_huge.m_map, s);
              if (large_huge.m_vars[p] != NULL_TREE)
                continue;
              if (atype == NULL_TREE
                  || !tree_int_cst_equal (TYPE_SIZE (atype),
                                          TYPE_SIZE (TREE_TYPE (s))))
                {
                  unsigned HOST_WIDE_INT nelts
                    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
                  atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
                }
              large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
              mark_addressable (large_huge.m_vars[p]);
            }
        }

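      /* Main lowering walk; basic blocks and the statements within them
         are processed in reverse order.  */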
      FOR_EACH_BB_REVERSE_FN (bb, cfun)
        {
          gimple_stmt_iterator prev;
          for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
               gsi = prev)
            {
              prev = gsi;
              gsi_prev (&prev);
              ssa_op_iter iter;
              gimple *stmt = gsi_stmt (gsi);
              if (is_gimple_debug (stmt))
                continue;
              bitint_prec_kind kind = bitint_prec_small;
              tree t;
              FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
                if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
                  {
                    bitint_prec_kind this_kind
                      = bitint_precision_kind (TREE_TYPE (t));
                    if (this_kind > kind)
                      kind = this_kind;
                  }
              if (is_gimple_assign (stmt) && gimple_store_p (stmt))
                {
                  t = gimple_assign_rhs1 (stmt);
                  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
                    {
                      bitint_prec_kind this_kind
                        = bitint_precision_kind (TREE_TYPE (t));
                      if (this_kind > kind)
                        kind = this_kind;
                    }
                }
              if (is_gimple_call (stmt))
                {
                  t = gimple_call_lhs (stmt);
                  if (t
                      && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
                    {
                      bitint_prec_kind this_kind
                        = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
                      if (this_kind > kind)
                        kind = this_kind;
                    }
                }
              if (kind == bitint_prec_small)
                continue;
              switch (gimple_code (stmt))
                {
                case GIMPLE_CALL:
                  /* For now.  We'll need to handle some internal functions
                     and perhaps some builtins.  */
                  if (kind == bitint_prec_middle)
                    continue;
                  break;
                case GIMPLE_ASM:
                  if (kind == bitint_prec_middle)
                    continue;
                  break;
                case GIMPLE_RETURN:
                  continue;
                case GIMPLE_ASSIGN:
                  if (gimple_clobber_p (stmt))
                    continue;
                  if (kind >= bitint_prec_large)
                    break;
                  if (gimple_assign_single_p (stmt))
                    /* No need to lower copies, loads or stores.  */
                    continue;
                  if (gimple_assign_cast_p (stmt))
                    {
                      tree lhs = gimple_assign_lhs (stmt);
                      tree rhs = gimple_assign_rhs1 (stmt);
                      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                          && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
                          && (TYPE_PRECISION (TREE_TYPE (lhs))
                              == TYPE_PRECISION (TREE_TYPE (rhs))))
                        /* No need to lower casts to the same precision.  */
                        continue;
                    }
                  break;
                default:
                  break;
                }

              if (kind == bitint_prec_middle)
                {
                  tree type = NULL_TREE;
                  /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
                     with the same precision and back.  */
                  if (tree lhs = gimple_get_lhs (stmt))
                    if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
                        && (bitint_precision_kind (TREE_TYPE (lhs))
                            == bitint_prec_middle))
                      {
                        int prec = TYPE_PRECISION (TREE_TYPE (lhs));
                        int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
                        type = build_nonstandard_integer_type (prec, uns);
                        tree lhs2 = make_ssa_name (type);
                        gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
                        gsi_insert_after (&gsi, g, GSI_SAME_STMT);
                        gimple_set_lhs (stmt, lhs2);
                      }
                  unsigned int nops = gimple_num_ops (stmt);
                  for (unsigned int i = 0; i < nops; ++i)
                    if (tree op = gimple_op (stmt, i))
                      {
                        tree nop = maybe_cast_middle_bitint (&gsi, op, type);
                        if (nop != op)
                          gimple_set_op (stmt, i, nop);
                        else if (COMPARISON_CLASS_P (op))
                          {
                            TREE_OPERAND (op, 0)
                              = maybe_cast_middle_bitint (&gsi,
                                                          TREE_OPERAND (op, 0),
                                                          type);
                            TREE_OPERAND (op, 1)
                              = maybe_cast_middle_bitint (&gsi,
                                                          TREE_OPERAND (op, 1),
                                                          type);
                          }
                        else if (TREE_CODE (op) == CASE_LABEL_EXPR)
                          {
                            CASE_LOW (op)
                              = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
                                                          type);
                            CASE_HIGH (op)
                              = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
                                                          type);
                          }
                      }
                  update_stmt (stmt);
                  continue;
                }

              if (tree lhs = gimple_get_lhs (stmt))
                if (TREE_CODE (lhs) == SSA_NAME)
                  {
                    tree type = TREE_TYPE (lhs);
                    if (TREE_CODE (type) == COMPLEX_TYPE)
                      type = TREE_TYPE (type);
                    if (TREE_CODE (type) == BITINT_TYPE
                        && bitint_precision_kind (type) >= bitint_prec_large
                        && (large_huge.m_names == NULL
                            || !bitmap_bit_p (large_huge.m_names,
                                              SSA_NAME_VERSION (lhs))))
                      continue;
                  }

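              /* Lower any remaining large/huge _BitInt statement.  */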
              large_huge.lower_stmt (stmt);
            }

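          /* Lower PHIs of large/huge _BitInt by inserting copies into the
             partition backing variables on the incoming edges.  */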
          tree atype = NULL_TREE;
          for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gphi *phi = gsi.phi ();
              tree lhs = gimple_phi_result (phi);
              if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
                  || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
                continue;
              int p1 = var_to_partition (large_huge.m_map, lhs);
              gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
              tree v1 = large_huge.m_vars[p1];
              for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
                {
                  tree arg = gimple_phi_arg_def (phi, i);
                  edge e = gimple_phi_arg_edge (phi, i);
                  gimple *g;
                  switch (TREE_CODE (arg))
                    {
                    case INTEGER_CST:
                      if (integer_zerop (arg) && VAR_P (v1))
                        {
                          tree zero = build_zero_cst (TREE_TYPE (v1));
                          g = gimple_build_assign (v1, zero);
                          gsi_insert_on_edge (e, g);
                          edge_insertions = true;
                          break;
                        }
                      int ext;
                      unsigned int min_prec, prec, rem;
                      tree c;
                      prec = TYPE_PRECISION (TREE_TYPE (arg));
                      rem = prec % (2 * limb_prec);
                      min_prec = bitint_min_cst_precision (arg, ext);
                      if (min_prec > prec - rem - 2 * limb_prec
                          && min_prec > (unsigned) limb_prec)
                        /* A constant with enough significant bits that it
                           isn't worth trying to save .rodata space by
                           extending from a smaller number.  */
                        min_prec = prec;
                      else
                        min_prec = CEIL (min_prec, limb_prec) * limb_prec;
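                      /* Emit the smallest constant that still works:
                         nothing at all, a single limb, a narrower _BitInt
                         in .rodata, or the full value.  */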
                      if (min_prec == 0)
                        c = NULL_TREE;
                      else if (min_prec == prec)
                        c = tree_output_constant_def (arg);
                      else if (min_prec == (unsigned) limb_prec)
                        c = fold_convert (large_huge.m_limb_type, arg);
                      else
                        {
                          tree ctype = build_bitint_type (min_prec, 1);
                          c = tree_output_constant_def (fold_convert (ctype,
                                                                      arg));
                        }
                      if (c)
                        {
                          if (VAR_P (v1) && min_prec == prec)
                            {
                              tree v2 = build1 (VIEW_CONVERT_EXPR,
                                                TREE_TYPE (v1), c);
                              g = gimple_build_assign (v1, v2);
                              gsi_insert_on_edge (e, g);
                              edge_insertions = true;
                              break;
                            }
                          if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
                            g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                             TREE_TYPE (c), v1),
                                                     c);
                          else
                            {
                              unsigned HOST_WIDE_INT nelts
                                = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
                                  / limb_prec;
                              tree vtype
                                = build_array_type_nelts (large_huge.m_limb_type,
                                                          nelts);
                              g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                               vtype, v1),
                                                       build1 (VIEW_CONVERT_EXPR,
                                                               vtype, c));
                            }
                          gsi_insert_on_edge (e, g);
                        }
                      if (ext == 0)
                        {
                          unsigned HOST_WIDE_INT nelts
                            = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
                               - min_prec) / limb_prec;
                          tree vtype
                            = build_array_type_nelts (large_huge.m_limb_type,
                                                      nelts);
                          tree ptype = build_pointer_type (TREE_TYPE (v1));
                          tree off
                            = fold_convert (ptype,
                                            TYPE_SIZE_UNIT (TREE_TYPE (c)));
                          tree vd = build2 (MEM_REF, vtype,
                                            build_fold_addr_expr (v1), off);
                          g = gimple_build_assign (vd, build_zero_cst (vtype));
                        }
                      else
                        {
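                          /* All bits above min_prec are ones; fill the
                             remaining bytes with (char) -1 via memset.  */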
                          tree vd = v1;
                          if (c)
                            {
                              tree ptype
                                = build_pointer_type (TREE_TYPE (v1));
                              tree off
                                = fold_convert (ptype,
                                                TYPE_SIZE_UNIT (TREE_TYPE (c)));
                              vd = build2 (MEM_REF, large_huge.m_limb_type,
                                           build_fold_addr_expr (v1), off);
                            }
                          vd = build_fold_addr_expr (vd);
                          unsigned HOST_WIDE_INT nbytes
                            = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
                          if (c)
                            nbytes
                              -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
                          tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
                          g = gimple_build_call (fn, 3, vd,
                                                 integer_minus_one_node,
                                                 build_int_cst (sizetype,
                                                                nbytes));
                        }
                      gsi_insert_on_edge (e, g);
                      edge_insertions = true;
                      break;
                    default:
                      gcc_unreachable ();
                    case SSA_NAME:
                      if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
                        {
                          if (large_huge.m_names == NULL
                              || !bitmap_bit_p (large_huge.m_names,
                                                SSA_NAME_VERSION (arg)))
                            continue;
                        }
                      int p2 = var_to_partition (large_huge.m_map, arg);
                      if (p1 == p2)
                        continue;
                      gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
                      tree v2 = large_huge.m_vars[p2];
                      if (VAR_P (v1) && VAR_P (v2))
                        g = gimple_build_assign (v1, v2);
                      else if (VAR_P (v1))
                        g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
                                                             TREE_TYPE (v1), v2));
                      else if (VAR_P (v2))
                        g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                         TREE_TYPE (v2), v1), v2);
                      else
                        {
                          if (atype == NULL_TREE
                              || !tree_int_cst_equal (TYPE_SIZE (atype),
                                                      TYPE_SIZE (TREE_TYPE (lhs))))
                            {
                              unsigned HOST_WIDE_INT nelts
                                = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
                                  / limb_prec;
                              atype
                                = build_array_type_nelts (large_huge.m_limb_type,
                                                          nelts);
                            }
                          g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
                                                           atype, v1),
                                                   build1 (VIEW_CONVERT_EXPR,
                                                           atype, v2));
                        }
                      gsi_insert_on_edge (e, g);
                      edge_insertions = true;
                      break;
                    }
                }
            }
        }
    }

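  /* The lowered code no longer uses the original large/huge SSA_NAMEs;
     release them and remove their definition statements.  */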
  if (large_huge.m_names || has_large_huge)
    {
      gimple *nop = NULL;
      for (i = 0; i < num_ssa_names; ++i)
        {
          tree s = ssa_name (i);
          if (s == NULL_TREE)
            continue;
          tree type = TREE_TYPE (s);
          if (TREE_CODE (type) == COMPLEX_TYPE)
            type = TREE_TYPE (type);
          if (TREE_CODE (type) == BITINT_TYPE
              && bitint_precision_kind (type) >= bitint_prec_large)
            {
              if (large_huge.m_preserved
                  && bitmap_bit_p (large_huge.m_preserved,
                                   SSA_NAME_VERSION (s)))
                continue;
              gimple *g = SSA_NAME_DEF_STMT (s);
              if (gimple_code (g) == GIMPLE_NOP)
                {
                  if (SSA_NAME_VAR (s))
                    set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
                  release_ssa_name (s);
                  continue;
                }
              if (gimple_code (g) != GIMPLE_ASM)
                {
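                  /* Temporarily disable debug stmt generation so that
                     removing the definition doesn't create debug binds
                     for the released name.  */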
                  gimple_stmt_iterator gsi = gsi_for_stmt (g);
                  bool save_vta = flag_var_tracking_assignments;
                  flag_var_tracking_assignments = false;
                  gsi_remove (&gsi, true);
                  flag_var_tracking_assignments = save_vta;
                }
              if (nop == NULL)
                nop = gimple_build_nop ();
              SSA_NAME_DEF_STMT (s) = nop;
              release_ssa_name (s);
            }
        }
      if (optimize)
        disable_ranger (cfun);
    }

  if (edge_insertions)
    gsi_commit_edge_inserts ();

  return ret;
}
namespace {

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }
}; // class pass_lower_bitint

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}
namespace {

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }
}; // class pass_lower_bitint_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}