d: Merge upstream dmd, druntime 2bbf64907c, phobos b64bfbf91
[official-gcc.git] / gcc / gimple-lower-bitint.cc
blobc55c32fb40d644dce091ff3608af9a1b8d6be416
1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "fold-const.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "cfgloop.h"
37 #include "cfganal.h"
38 #include "target.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
41 #include "domwalk.h"
42 #include "memmodel.h"
43 #include "optabs.h"
44 #include "varasm.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
50 #include "tree-eh.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
56 #include "ubsan.h"
57 #include "gimple-lower-bitint.h"
59 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
60 target hook says it is a single limb, middle _BitInt which per ABI
61 does not, but there is some INTEGER_TYPE in which arithmetics can be
62 performed (operations on such _BitInt are lowered to casts to that
63 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
64 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
65 ones), large _BitInt which should by straight line code and
66 finally huge _BitInt which should be handled by loops over the limbs. */
/* The four categories of _BitInt precision, ordered from smallest to
   largest.  Comparisons like ">= bitint_prec_large" below rely on this
   ordering.  */
enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
75 /* Caches to speed up bitint_precision_kind. */
/* small_max_prec is the largest precision seen so far that categorized as
   small; mid_min_prec/large_min_prec/huge_min_prec are the smallest
   precisions known to fall in the middle/large/huge categories.  A value
   of zero means "not computed yet" (the code below tests them for
   truth before trusting them).  */
77 static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
/* Number of bits in one limb, taken from the target's limb mode; zero
   until first initialized by bitint_precision_kind.  */
78 static int limb_prec;
80 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
82 static bitint_prec_kind
83 bitint_precision_kind (int prec)
85 if (prec <= small_max_prec)
86 return bitint_prec_small;
87 if (huge_min_prec && prec >= huge_min_prec)
88 return bitint_prec_huge;
89 if (large_min_prec && prec >= large_min_prec)
90 return bitint_prec_large;
91 if (mid_min_prec && prec >= mid_min_prec)
92 return bitint_prec_middle;
94 struct bitint_info info;
95 bool ok = targetm.c.bitint_type_info (prec, &info);
96 gcc_assert (ok);
97 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
98 if (prec <= GET_MODE_PRECISION (limb_mode))
100 small_max_prec = prec;
101 return bitint_prec_small;
103 if (!large_min_prec
104 && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
105 large_min_prec = MAX_FIXED_MODE_SIZE + 1;
106 if (!limb_prec)
107 limb_prec = GET_MODE_PRECISION (limb_mode);
108 if (!huge_min_prec)
110 if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
111 huge_min_prec = 4 * limb_prec;
112 else
113 huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
115 if (prec <= MAX_FIXED_MODE_SIZE)
117 if (!mid_min_prec || prec < mid_min_prec)
118 mid_min_prec = prec;
119 return bitint_prec_middle;
121 if (large_min_prec && prec <= large_min_prec)
122 return bitint_prec_large;
123 return bitint_prec_huge;
126 /* Same for a TYPE. */
128 static bitint_prec_kind
129 bitint_precision_kind (tree type)
131 return bitint_precision_kind (TYPE_PRECISION (type));
134 /* Return minimum precision needed to describe INTEGER_CST
135 CST. All bits above that precision up to precision of
136 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
137 if EXT is set to -1. */
139 static unsigned
140 bitint_min_cst_precision (tree cst, int &ext)
142 ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
143 wide_int w = wi::to_wide (cst);
144 unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
145 /* For signed values, we don't need to count the sign bit,
146 we'll use constant 0 or -1 for the upper bits. */
147 if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
148 --min_prec;
149 else
151 /* For unsigned values, also try signed min_precision
152 in case the constant has lots of most significant bits set. */
153 unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
154 if (min_prec2 < min_prec)
156 ext = -1;
157 return min_prec2;
160 return min_prec;
163 namespace {
165 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
166 cached in TYPE and return it. */
168 tree
169 maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
171 if (op == NULL_TREE
172 || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
173 || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
174 return op;
176 int prec = TYPE_PRECISION (TREE_TYPE (op));
177 int uns = TYPE_UNSIGNED (TREE_TYPE (op));
178 if (type == NULL_TREE
179 || TYPE_PRECISION (type) != prec
180 || TYPE_UNSIGNED (type) != uns)
181 type = build_nonstandard_integer_type (prec, uns);
183 if (TREE_CODE (op) != SSA_NAME)
185 tree nop = fold_convert (type, op);
186 if (is_gimple_val (nop))
187 return nop;
190 tree nop = make_ssa_name (type);
191 gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
192 gsi_insert_before (gsi, g, GSI_SAME_STMT);
193 return nop;
196 /* Return true if STMT can be handled in a loop from least to most
197 significant limb together with its dependencies. */
199 bool
200 mergeable_op (gimple *stmt)
202 if (!is_gimple_assign (stmt))
203 return false;
204 switch (gimple_assign_rhs_code (stmt))
206 case PLUS_EXPR:
207 case MINUS_EXPR:
208 case NEGATE_EXPR:
209 case BIT_AND_EXPR:
210 case BIT_IOR_EXPR:
211 case BIT_XOR_EXPR:
212 case BIT_NOT_EXPR:
213 case SSA_NAME:
214 case INTEGER_CST:
215 return true;
216 case LSHIFT_EXPR:
218 tree cnt = gimple_assign_rhs2 (stmt);
219 if (tree_fits_uhwi_p (cnt)
220 && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
221 return true;
223 break;
224 CASE_CONVERT:
225 case VIEW_CONVERT_EXPR:
227 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
228 tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
229 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
230 && TREE_CODE (lhs_type) == BITINT_TYPE
231 && TREE_CODE (rhs_type) == BITINT_TYPE
232 && bitint_precision_kind (lhs_type) >= bitint_prec_large
233 && bitint_precision_kind (rhs_type) >= bitint_prec_large
234 && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
236 if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
237 return true;
238 if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
239 return true;
240 if (bitint_precision_kind (lhs_type) == bitint_prec_large)
241 return true;
243 break;
245 default:
246 break;
248 return false;
251 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
252 _Complex large/huge _BitInt lhs which has at most two immediate uses,
253 at most one use in REALPART_EXPR stmt in the same bb and exactly one
254 IMAGPART_EXPR use in the same bb with a single use which casts it to
255 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
256 return 2. Such cases (most common uses of those builtins) can be
257 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
258 of REALPART_EXPR as not needed to be backed up by a stack variable.
259 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
262 optimizable_arith_overflow (gimple *stmt)
264 bool is_ubsan = false;
265 if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
266 return false;
267 switch (gimple_call_internal_fn (stmt))
269 case IFN_ADD_OVERFLOW:
270 case IFN_SUB_OVERFLOW:
271 case IFN_MUL_OVERFLOW:
272 break;
273 case IFN_UBSAN_CHECK_ADD:
274 case IFN_UBSAN_CHECK_SUB:
275 case IFN_UBSAN_CHECK_MUL:
276 is_ubsan = true;
277 break;
278 default:
279 return 0;
281 tree lhs = gimple_call_lhs (stmt);
282 if (!lhs)
283 return 0;
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
285 return 0;
286 tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
287 if (TREE_CODE (type) != BITINT_TYPE
288 || bitint_precision_kind (type) < bitint_prec_large)
289 return 0;
291 if (is_ubsan)
293 use_operand_p use_p;
294 gimple *use_stmt;
295 if (!single_imm_use (lhs, &use_p, &use_stmt)
296 || gimple_bb (use_stmt) != gimple_bb (stmt)
297 || !gimple_store_p (use_stmt)
298 || !is_gimple_assign (use_stmt)
299 || gimple_has_volatile_ops (use_stmt)
300 || stmt_ends_bb_p (use_stmt))
301 return 0;
302 return 3;
305 imm_use_iterator ui;
306 use_operand_p use_p;
307 int seen = 0;
308 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
310 gimple *g = USE_STMT (use_p);
311 if (is_gimple_debug (g))
312 continue;
313 if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
314 return 0;
315 if (gimple_assign_rhs_code (g) == REALPART_EXPR)
317 if ((seen & 1) != 0)
318 return 0;
319 seen |= 1;
321 else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
323 if ((seen & 2) != 0)
324 return 0;
325 seen |= 2;
327 use_operand_p use2_p;
328 gimple *use_stmt;
329 tree lhs2 = gimple_assign_lhs (g);
330 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
331 return 0;
332 if (!single_imm_use (lhs2, &use2_p, &use_stmt)
333 || gimple_bb (use_stmt) != gimple_bb (stmt)
334 || !gimple_assign_cast_p (use_stmt))
335 return 0;
337 lhs2 = gimple_assign_lhs (use_stmt);
338 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
339 || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
340 return 0;
342 else
343 return 0;
345 if ((seen & 2) == 0)
346 return 0;
347 return seen == 3 ? 2 : 1;
350 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
351 comparing large/huge _BitInt types, return the comparison code and if
352 non-NULL fill in the comparison operands to *POP1 and *POP2. */
354 tree_code
355 comparison_op (gimple *stmt, tree *pop1, tree *pop2)
357 tree op1 = NULL_TREE, op2 = NULL_TREE;
358 tree_code code = ERROR_MARK;
359 if (gimple_code (stmt) == GIMPLE_COND)
361 code = gimple_cond_code (stmt);
362 op1 = gimple_cond_lhs (stmt);
363 op2 = gimple_cond_rhs (stmt);
365 else if (is_gimple_assign (stmt))
367 code = gimple_assign_rhs_code (stmt);
368 op1 = gimple_assign_rhs1 (stmt);
369 if (TREE_CODE_CLASS (code) == tcc_comparison
370 || TREE_CODE_CLASS (code) == tcc_binary)
371 op2 = gimple_assign_rhs2 (stmt);
373 if (TREE_CODE_CLASS (code) != tcc_comparison)
374 return ERROR_MARK;
375 tree type = TREE_TYPE (op1);
376 if (TREE_CODE (type) != BITINT_TYPE
377 || bitint_precision_kind (type) < bitint_prec_large)
378 return ERROR_MARK;
379 if (pop1)
381 *pop1 = op1;
382 *pop2 = op2;
384 return code;
387 /* Class used during large/huge _BitInt lowering containing all the
388 state for the methods. */
390 struct bitint_large_huge
/* Constructor: all bitmaps/maps/caches start empty; they are allocated
   lazily during analysis and released in the destructor below.  */
392 bitint_large_huge ()
393 : m_names (NULL), m_loads (NULL), m_preserved (NULL),
394 m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
395 m_limb_type (NULL_TREE), m_data (vNULL) {}
397 ~bitint_large_huge ();
/* Lowering helper methods; each is defined later in this file.  */
399 void insert_before (gimple *);
400 tree limb_access_type (tree, tree);
401 tree limb_access (tree, tree, tree, bool);
402 void if_then (gimple *, profile_probability, edge &, edge &);
403 void if_then_else (gimple *, profile_probability, edge &, edge &);
404 void if_then_if_then_else (gimple *g, gimple *,
405 profile_probability, profile_probability,
406 edge &, edge &, edge &);
407 tree handle_operand (tree, tree);
408 tree prepare_data_in_out (tree, tree, tree *);
409 tree add_cast (tree, tree);
410 tree handle_plus_minus (tree_code, tree, tree, tree);
411 tree handle_lshift (tree, tree, tree);
412 tree handle_cast (tree, tree, tree);
413 tree handle_load (gimple *, tree);
414 tree handle_stmt (gimple *, tree);
415 tree handle_operand_addr (tree, gimple *, int *, int *);
416 tree create_loop (tree, tree *);
417 tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
418 tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
419 void lower_shift_stmt (tree, gimple *);
420 void lower_muldiv_stmt (tree, gimple *);
421 void lower_float_conv_stmt (tree, gimple *);
422 tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
423 unsigned int, bool);
424 void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
425 tree_code);
426 void lower_addsub_overflow (tree, gimple *);
427 void lower_mul_overflow (tree, gimple *);
428 void lower_cplxpart_stmt (tree, gimple *);
429 void lower_complexexpr_stmt (gimple *);
430 void lower_bit_query (gimple *);
431 void lower_call (tree, gimple *);
432 void lower_asm (gimple *);
433 void lower_stmt (gimple *);
435 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
436 merged with their uses. */
437 bitmap m_names;
438 /* Subset of those for lhs of load statements. These will be
439 cleared in m_names if the loads will be mergeable with all
440 their uses. */
441 bitmap m_loads;
442 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
443 to later passes (arguments or return values of calls). */
444 bitmap m_preserved;
445 /* Subset of m_names which have a single use. As the lowering
446 can replace various original statements with their lowered
447 form even before it is done iterating over all basic blocks,
448 testing has_single_use for the purpose of emitting clobbers
449 doesn't work properly. */
450 bitmap m_single_use_names;
451 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
452 set in m_names. */
453 var_map m_map;
454 /* Mapping of the partitions to corresponding decls. */
455 tree *m_vars;
456 /* Unsigned integer type with limb precision. */
457 tree m_limb_type;
458 /* Its TYPE_SIZE_UNIT. */
459 unsigned HOST_WIDE_INT m_limb_size;
460 /* Location of a gimple stmt which is being currently lowered. */
461 location_t m_loc;
462 /* Current stmt iterator where code is being lowered currently. */
463 gimple_stmt_iterator m_gsi;
464 /* Statement after which any clobbers should be added if non-NULL. */
465 gimple *m_after_stmt;
466 /* Set when creating loops to the loop header bb and its preheader. */
467 basic_block m_bb, m_preheader_bb;
468 /* Stmt iterator after which initialization statements should be emitted. */
469 gimple_stmt_iterator m_init_gsi;
470 /* Decl into which a mergeable statement stores result. */
471 tree m_lhs;
472 /* handle_operand/handle_stmt can be invoked in various ways.
474 lower_mergeable_stmt for large _BitInt calls those with constant
475 idx only, expanding to straight line code, for huge _BitInt
476 emits a loop from least significant limb upwards, where each loop
477 iteration handles 2 limbs, plus there can be up to one full limb
478 and one partial limb processed after the loop, where handle_operand
479 and/or handle_stmt are called with constant idx. m_upwards_2limb
480 is set for this case, false otherwise. m_upwards is true if it
481 is either large or huge _BitInt handled by lower_mergeable_stmt,
482 i.e. indexes always increase.
484 Another way is used by lower_comparison_stmt, which walks limbs
485 from most significant to least significant, partial limb if any
486 processed first with constant idx and then loop processing a single
487 limb per iteration with non-constant idx.
489 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
490 destination limbs are processed from most significant to least
491 significant or for RSHIFT_EXPR the other way around, in loops or
492 straight line code, but idx usually is non-constant (so from
493 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
494 handling there can access even partial limbs using non-constant
495 idx (then m_var_msb should be true, for all the other cases
496 including lower_mergeable_stmt/lower_comparison_stmt that is
497 not the case and so m_var_msb should be false.
499 m_first should be set the first time handle_operand/handle_stmt
500 is called and clear when it is called for some other limb with
501 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
502 or statement (e.g. +/-/<< with < limb_prec constant) needs some
503 state between the different calls, when m_first is true it should
504 push some trees to m_data vector and also make sure m_data_cnt is
505 incremented by how many trees were pushed, and when m_first is
506 false, it can use the m_data[m_data_cnt] etc. data or update them,
507 just needs to bump m_data_cnt by the same amount as when it was
508 called with m_first set. The toplevel calls to
509 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
510 m_data vector when setting m_first to true.
512 m_cast_conditional and m_bitfld_load are used when handling a
513 bit-field load inside of a widening cast. handle_cast sometimes
514 needs to do runtime comparisons and handle_operand only conditionally
515 or even in two separate conditional blocks for one idx (once with
516 constant index after comparing the runtime one for equality with the
517 constant). In these cases, m_cast_conditional is set to true and
518 the bit-field load then communicates its m_data_cnt to handle_cast
519 using m_bitfld_load. */
520 bool m_first;
521 bool m_var_msb;
522 unsigned m_upwards_2limb;
523 bool m_upwards;
524 bool m_cast_conditional;
525 unsigned m_bitfld_load;
/* Per-operand lowering state; see the large comment above for the
   m_first/m_data_cnt protocol that governs these two members.  */
526 vec<tree> m_data;
527 unsigned int m_data_cnt;
530 bitint_large_huge::~bitint_large_huge ()
532 BITMAP_FREE (m_names);
533 BITMAP_FREE (m_loads);
534 BITMAP_FREE (m_preserved);
535 BITMAP_FREE (m_single_use_names);
536 if (m_map)
537 delete_var_map (m_map);
538 XDELETEVEC (m_vars);
539 m_data.release ();
542 /* Insert gimple statement G before current location
543 and set its gimple_location. */
545 void
546 bitint_large_huge::insert_before (gimple *g)
548 gimple_set_location (g, m_loc);
549 gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
552 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
553 This is normally m_limb_type, except for a partial most
554 significant limb if any. */
556 tree
557 bitint_large_huge::limb_access_type (tree type, tree idx)
559 if (type == NULL_TREE)
560 return m_limb_type;
561 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
562 unsigned int prec = TYPE_PRECISION (type);
563 gcc_assert (i * limb_prec < prec);
564 if ((i + 1) * limb_prec <= prec)
565 return m_limb_type;
566 else
567 return build_nonstandard_integer_type (prec % limb_prec,
568 TYPE_UNSIGNED (type));
571 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
572 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
574 tree
575 bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
577 tree atype = (tree_fits_uhwi_p (idx)
578 ? limb_access_type (type, idx) : m_limb_type);
579 tree ret;
580 if (DECL_P (var) && tree_fits_uhwi_p (idx))
582 tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
583 unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
584 ret = build2 (MEM_REF, m_limb_type,
585 build_fold_addr_expr (var),
586 build_int_cst (ptype, off));
587 TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
588 TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
590 else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
593 = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
594 size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
595 build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
596 tree_to_uhwi (idx)
597 * m_limb_size)));
598 TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
599 TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
600 TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
602 else
604 var = unshare_expr (var);
605 if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
606 || !useless_type_conversion_p (m_limb_type,
607 TREE_TYPE (TREE_TYPE (var))))
609 unsigned HOST_WIDE_INT nelts
610 = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
611 tree atype = build_array_type_nelts (m_limb_type, nelts);
612 var = build1 (VIEW_CONVERT_EXPR, atype, var);
614 ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
616 if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
618 gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
619 insert_before (g);
620 ret = gimple_assign_lhs (g);
621 ret = build1 (NOP_EXPR, atype, ret);
623 return ret;
/* Emit a half diamond:

       if (COND)
       |      \
       |    new_bb1
       |      /
       successor

   i.e. "if (COND) new_bb1;".
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
641 void
642 bitint_large_huge::if_then (gimple *cond, profile_probability prob,
643 edge &edge_true, edge &edge_false)
645 insert_before (cond);
646 edge e1 = split_block (gsi_bb (m_gsi), cond);
647 edge e2 = split_block (e1->dest, (gimple *) NULL);
648 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
649 e1->flags = EDGE_TRUE_VALUE;
650 e1->probability = prob;
651 e3->probability = prob.invert ();
652 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
653 edge_true = e2;
654 edge_false = e3;
655 m_gsi = gsi_after_labels (e1->dest);
/* Emit a full diamond:

        if (COND)
        /      \
    new_bb1  new_bb2
        \      /
       successor

   i.e. "if (COND) new_bb2; else new_bb1;".
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
673 void
674 bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
675 edge &edge_true, edge &edge_false)
677 insert_before (cond);
678 edge e1 = split_block (gsi_bb (m_gsi), cond);
679 edge e2 = split_block (e1->dest, (gimple *) NULL);
680 basic_block bb = create_empty_bb (e1->dest);
681 add_bb_to_loop (bb, e1->dest->loop_father);
682 edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
683 e1->flags = EDGE_FALSE_VALUE;
684 e3->probability = prob;
685 e1->probability = prob.invert ();
686 bb->count = e1->src->count.apply_probability (prob);
687 set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
688 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
689 edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
690 edge_false = e2;
691 m_gsi = gsi_after_labels (bb);
/* Emit a half diamond with a full diamond inside it:

       if (COND1)
       |       \
       |     if (COND2)
       |     /      \
       | new_bb1  new_bb2
       |     \      /
        \     \    /
         successor

   i.e. "if (COND1) { if (COND2) new_bb2; else new_bb1; }".
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL; */
719 void
720 bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
721 profile_probability prob1,
722 profile_probability prob2,
723 edge &edge_true_true,
724 edge &edge_true_false,
725 edge &edge_false)
727 edge e2, e3, e4 = NULL;
728 if_then (cond1, prob1, e2, e3);
729 if (cond2 == NULL)
731 edge_true_true = NULL;
732 edge_true_false = e2;
733 edge_false = e3;
734 return;
736 insert_before (cond2);
737 e2 = split_block (gsi_bb (m_gsi), cond2);
738 basic_block bb = create_empty_bb (e2->dest);
739 add_bb_to_loop (bb, e2->dest->loop_father);
740 e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
741 set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
742 e4->probability = prob2;
743 e2->flags = EDGE_FALSE_VALUE;
744 e2->probability = prob2.invert ();
745 bb->count = e2->src->count.apply_probability (prob2);
746 e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
747 e2 = find_edge (e2->dest, e3->dest);
748 edge_true_true = e4;
749 edge_true_false = e2;
750 edge_false = e3;
751 m_gsi = gsi_after_labels (e2->src);
754 /* Emit code to access limb IDX from OP. */
756 tree
757 bitint_large_huge::handle_operand (tree op, tree idx)
759 switch (TREE_CODE (op))
/* SSA_NAME operands: either recurse into the defining statement (for
   names merged with their uses), use the default-def backing register,
   or load the limb from the variable backing the name's partition.  */
761 case SSA_NAME:
762 if (m_names == NULL
763 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
765 if (SSA_NAME_IS_DEFAULT_DEF (op))
767 if (m_first)
/* First call: create (once) a limb-typed default-def proxy, keeping
   the original variable's name and location for debug info.  */
769 tree v = create_tmp_reg (m_limb_type);
770 if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
772 DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
773 DECL_SOURCE_LOCATION (v)
774 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
776 v = get_or_create_ssa_default_def (cfun, v);
777 m_data.safe_push (v);
779 tree ret = m_data[m_data_cnt];
780 m_data_cnt++;
781 if (tree_fits_uhwi_p (idx))
783 tree type = limb_access_type (TREE_TYPE (op), idx);
784 ret = add_cast (type, ret);
786 return ret;
/* Mergeable name: lower its defining stmt for this limb, temporarily
   switching m_loc to that stmt's location.  */
788 location_t loc_save = m_loc;
789 m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
790 tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
791 m_loc = loc_save;
792 return ret;
/* Non-mergeable name: read limb IDX from the decl of its partition.  */
794 int p;
795 gimple *g;
796 tree t;
797 p = var_to_partition (m_map, op);
798 gcc_assert (m_vars[p] != NULL_TREE);
799 t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
800 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
801 insert_before (g);
802 t = gimple_assign_lhs (g);
/* For single-use names, clobber the backing variable after its last
   (only) use so the stack slot can be reused.  */
803 if (m_first
804 && m_single_use_names
805 && m_vars[p] != m_lhs
806 && m_after_stmt
807 && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
809 tree clobber = build_clobber (TREE_TYPE (m_vars[p]), CLOBBER_EOL);
810 g = gimple_build_assign (m_vars[p], clobber);
811 gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
812 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
814 return t;
/* INTEGER_CST operands: with a constant IDX the limb is folded at
   compile time; with a variable IDX the constant may have to be
   materialized (possibly shortened) in .rodata.  */
815 case INTEGER_CST:
816 if (tree_fits_uhwi_p (idx))
818 tree c, type = limb_access_type (TREE_TYPE (op), idx);
819 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
820 if (m_first)
822 m_data.safe_push (NULL_TREE);
823 m_data.safe_push (NULL_TREE);
825 if (limb_prec != HOST_BITS_PER_WIDE_INT)
827 wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
828 TYPE_SIGN (TREE_TYPE (op)));
829 c = wide_int_to_tree (type,
830 wide_int::from (w, TYPE_PRECISION (type),
831 UNSIGNED));
/* When a limb is exactly a HOST_WIDE_INT, the i-th element of the
   constant can be used directly (sign-extending past the last one).  */
833 else if (i >= TREE_INT_CST_EXT_NUNITS (op))
834 c = build_int_cst (type,
835 tree_int_cst_sgn (op) < 0 ? -1 : 0);
836 else
837 c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
838 m_data_cnt += 2;
839 return c;
841 if (m_first
842 || (m_data[m_data_cnt] == NULL_TREE
843 && m_data[m_data_cnt + 1] == NULL_TREE))
845 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
846 unsigned int rem = prec % (2 * limb_prec);
847 int ext;
848 unsigned min_prec = bitint_min_cst_precision (op, ext);
849 if (m_first)
851 m_data.safe_push (NULL_TREE);
852 m_data.safe_push (NULL_TREE);
854 if (integer_zerop (op))
856 tree c = build_zero_cst (m_limb_type);
857 m_data[m_data_cnt] = c;
858 m_data[m_data_cnt + 1] = c;
860 else if (integer_all_onesp (op))
862 tree c = build_all_ones_cst (m_limb_type);
863 m_data[m_data_cnt] = c;
864 m_data[m_data_cnt + 1] = c;
866 else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
868 /* Single limb constant. Use a phi with that limb from
869 the preheader edge and 0 or -1 constant from the other edge
870 and for the second limb in the loop. */
871 tree out;
872 gcc_assert (m_first);
873 m_data.pop ();
874 m_data.pop ();
875 prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out);
876 g = gimple_build_assign (m_data[m_data_cnt + 1],
877 build_int_cst (m_limb_type, ext));
878 insert_before (g);
879 m_data[m_data_cnt + 1] = gimple_assign_rhs1 (g);
881 else if (min_prec > prec - rem - 2 * limb_prec)
883 /* Constant which has enough significant bits that it isn't
884 worth trying to save .rodata space by extending from smaller
885 number. */
886 tree type;
887 if (m_var_msb)
888 type = TREE_TYPE (op);
889 else
890 /* If we have a guarantee the most significant partial limb
891 (if any) will be only accessed through handle_operand
892 with INTEGER_CST idx, we don't need to include the partial
893 limb in .rodata. */
894 type = build_bitint_type (prec - rem, 1);
895 tree c = tree_output_constant_def (fold_convert (type, op));
896 m_data[m_data_cnt] = c;
897 m_data[m_data_cnt + 1] = NULL_TREE;
899 else if (m_upwards_2limb)
901 /* Constant with smaller number of bits. Trade conditional
902 code for .rodata space by extending from smaller number. */
903 min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
904 tree type = build_bitint_type (min_prec, 1);
905 tree c = tree_output_constant_def (fold_convert (type, op));
906 tree idx2 = make_ssa_name (sizetype);
907 g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
908 insert_before (g);
909 g = gimple_build_cond (LT_EXPR, idx,
910 size_int (min_prec / limb_prec),
911 NULL_TREE, NULL_TREE);
912 edge edge_true, edge_false;
913 if_then (g, (min_prec >= (prec - rem) / 2
914 ? profile_probability::likely ()
915 : profile_probability::unlikely ()),
916 edge_true, edge_false);
/* Inside the guarded block read both limbs IDX and IDX+1 from the
   shortened .rodata constant; outside it use the extension value.  */
917 tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
918 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
919 insert_before (g);
920 c1 = gimple_assign_lhs (g);
921 tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
922 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
923 insert_before (g);
924 c2 = gimple_assign_lhs (g);
925 tree c3 = build_int_cst (m_limb_type, ext);
926 m_gsi = gsi_after_labels (edge_true->dest);
927 m_data[m_data_cnt] = make_ssa_name (m_limb_type);
928 m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
929 gphi *phi = create_phi_node (m_data[m_data_cnt],
930 edge_true->dest);
931 add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
932 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
933 phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
934 add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
935 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
937 else
939 /* Constant with smaller number of bits. Trade conditional
940 code for .rodata space by extending from smaller number.
941 Version for loops with random access to the limbs or
942 downwards loops. */
943 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
944 tree c;
945 if (min_prec <= (unsigned) limb_prec)
946 c = fold_convert (m_limb_type, op);
947 else
949 tree type = build_bitint_type (min_prec, 1);
950 c = tree_output_constant_def (fold_convert (type, op));
/* integer_type_node in slot + 1 marks the "random access, shortened
   constant" variant handled after this if.  */
952 m_data[m_data_cnt] = c;
953 m_data[m_data_cnt + 1] = integer_type_node;
955 t = m_data[m_data_cnt];
956 if (m_data[m_data_cnt + 1] == NULL_TREE)
958 t = limb_access (TREE_TYPE (op), t, idx, false);
959 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
960 insert_before (g);
961 t = gimple_assign_lhs (g);
964 else if (m_data[m_data_cnt + 1] == NULL_TREE)
966 t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
967 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
968 insert_before (g);
969 t = gimple_assign_lhs (g);
971 else
972 t = m_data[m_data_cnt + 1];
973 if (m_data[m_data_cnt + 1] == integer_type_node)
/* Random-access variant: guard the in-range limb read with an IDX
   comparison and phi it with the 0/-1 extension value.  */
975 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
976 unsigned rem = prec % (2 * limb_prec);
977 int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
978 tree c = m_data[m_data_cnt];
979 unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
980 g = gimple_build_cond (LT_EXPR, idx,
981 size_int (min_prec / limb_prec),
982 NULL_TREE, NULL_TREE);
983 edge edge_true, edge_false;
984 if_then (g, (min_prec >= (prec - rem) / 2
985 ? profile_probability::likely ()
986 : profile_probability::unlikely ()),
987 edge_true, edge_false);
988 if (min_prec > (unsigned) limb_prec)
990 c = limb_access (TREE_TYPE (op), c, idx, false);
991 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
992 insert_before (g);
993 c = gimple_assign_lhs (g);
995 tree c2 = build_int_cst (m_limb_type, ext);
996 m_gsi = gsi_after_labels (edge_true->dest);
997 t = make_ssa_name (m_limb_type);
998 gphi *phi = create_phi_node (t, edge_true->dest);
999 add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
1000 add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
1002 m_data_cnt += 2;
1003 return t;
1004 default:
1005 gcc_unreachable ();
1009 /* Helper method, add a PHI node with VAL from preheader edge if
1010 inside of a loop and m_first. Keep state in a pair of m_data
1011 elements. */
1013 tree
1014 bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out)
1016 if (!m_first)
1018 *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
1019 return m_data[m_data_cnt];
1022 *data_out = NULL_TREE;
1023 if (tree_fits_uhwi_p (idx))
1025 m_data.safe_push (val);
1026 m_data.safe_push (NULL_TREE);
1027 return val;
1030 tree in = make_ssa_name (TREE_TYPE (val));
1031 gphi *phi = create_phi_node (in, m_bb);
1032 edge e1 = find_edge (m_preheader_bb, m_bb);
1033 edge e2 = EDGE_PRED (m_bb, 0);
1034 if (e1 == e2)
1035 e2 = EDGE_PRED (m_bb, 1);
1036 add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
1037 tree out = make_ssa_name (TREE_TYPE (val));
1038 add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
1039 m_data.safe_push (in);
1040 m_data.safe_push (out);
1041 return in;
1044 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1045 convert it without emitting any code, otherwise emit
1046 the conversion statement before the current location. */
1048 tree
1049 bitint_large_huge::add_cast (tree type, tree val)
1051 if (TREE_CODE (val) == INTEGER_CST)
1052 return fold_convert (type, val);
1054 tree lhs = make_ssa_name (type);
1055 gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
1056 insert_before (g);
1057 return lhs;
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.
   CODE is PLUS_EXPR or MINUS_EXPR, RHS1 and RHS2 the already lowered
   operands for limb number IDX; the value of the corresponding result
   limb is returned.  The carry (or borrow) between limbs is threaded
   through a pair of m_data slots, seeded with zero by
   prepare_data_in_out.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  /* data_in is the incoming carry/borrow; data_out, if non-NULL, is the
     SSA name the outgoing one has to be stored into.  */
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      /* The target has an add-with-carry (resp. subtract-with-borrow)
	 pattern for the limb mode; emit IFN_UADDC/IFN_USUBC, which
	 consumes the incoming carry directly.  The result is a complex
	 value: REALPART is the result limb, IMAGPART the outgoing
	 carry.  */
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  /* Presumably a partial precision (last) limb -- widen the
	     operands to the limb type, going through the corresponding
	     unsigned type first so the extension is zero based.  */
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      /* No add-with-carry pattern; fall back to
	 IFN_ADD_OVERFLOW/IFN_SUB_OVERFLOW.  */
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  /* The incoming carry might be non-zero at runtime: apply it
	     with a second overflow operation and combine the two
	     overflow flags into the outgoing carry.  */
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  /* NOTE(review): the two overflow flags are combined with a
	     PLUS -- this looks like it relies on at most one of them
	     being set per limb; confirm.  */
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  /* Known zero incoming carry: the overflow flag is the
	     outgoing carry directly.  */
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      /* Limb in a type incompatible with the limb type (presumably the
	 partial precision most significant limb): plain arithmetic
	 plus the incoming carry; no outgoing carry is recorded.  */
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  /* Extract the result limb from the complex value and record the
     outgoing carry for the next limb.  */
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
1165 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1166 count in [0, limb_prec - 1] range. */
1168 tree
1169 bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
1171 unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
1172 gcc_checking_assert (cnt < (unsigned) limb_prec);
1173 if (cnt == 0)
1174 return rhs1;
1176 tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
1177 gimple *g;
1178 tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
1179 &data_out);
1181 if (!integer_zerop (data_in))
1183 lhs = make_ssa_name (m_limb_type);
1184 g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
1185 build_int_cst (unsigned_type_node,
1186 limb_prec - cnt));
1187 insert_before (g);
1188 if (!types_compatible_p (rhs1_type, m_limb_type))
1189 lhs = add_cast (rhs1_type, lhs);
1190 data_in = lhs;
1192 if (types_compatible_p (rhs1_type, m_limb_type))
1194 if (data_out == NULL_TREE)
1195 data_out = make_ssa_name (m_limb_type);
1196 g = gimple_build_assign (data_out, rhs1);
1197 insert_before (g);
1199 if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
1201 lhs = make_ssa_name (rhs1_type);
1202 g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
1203 insert_before (g);
1204 if (!integer_zerop (data_in))
1206 rhs1 = lhs;
1207 lhs = make_ssa_name (rhs1_type);
1208 g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
1209 insert_before (g);
1212 else
1213 lhs = data_in;
1214 m_data[m_data_cnt] = data_out;
1215 m_data_cnt += 2;
1216 return lhs;
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  LHS_TYPE is the type of the result, RHS1
   the operand being converted and IDX the limb number to produce.
   Three m_data slots are reserved per lowered cast; their exact use
   depends on the iteration order chosen in the m_first block below,
   but slot + 1 generally caches the sign extension limb and slot + 2
   (an INTEGER_CST) the m_data_cnt value to resume at on non-first
   passes.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      /* Large/huge _BitInt to large/huge _BitInt cast.  */
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision that is not the case.  */
	  || (!m_var_msb
	      && tree_int_cst_equal (TYPE_SIZE (rhs_type),
				     TYPE_SIZE (lhs_type))
	      && (!m_upwards_2limb
		  || (m_upwards_2limb * limb_prec
		      < TYPE_PRECISION (lhs_type)))))
	{
	  /* Simple case: every requested limb exists in the operand,
	     just forward it (with a possible partial limb cast).  */
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  /* Reserve the three state slots and, depending on the
	     iteration style, precompute what can be precomputed.  */
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      /* Downwards or random access: compute the sign extension
	         limb once up front, emitting into the init sequence
	         (m_init_gsi) rather than at the current position.  */
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      /* Arithmetic right shift of the most significant operand
	         limb by limb_prec - ... well, by limb_prec - 1 bits
	         broadcasts its sign bit into a whole limb.  */
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      /* The extension limb is computed inside of the loop;
	         thread it through a PHI.  */
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  /* Variable index, i.e. inside of a loop.  */
	  if (m_upwards_2limb
	      && (m_upwards_2limb * limb_prec
		  <= ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type))))
	    {
	      /* The whole loop range is below the extension boundary;
	         no runtime comparison needed.  */
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  /* Emit a runtime check whether IDX is below the boundary
	     (operand limb available), at the boundary (partial limb
	     plus extension) or above it (pure extension).  */
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idx, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      /* Handle the IDX == low case (the boundary limb) in the
	         second arm of the condition.  */
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  /* Broadcast the sign bit of the boundary limb into a
		     whole extension limb.  */
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  /* T3 is the value used above the boundary: zero for unsigned,
	     otherwise the cached/threaded extension limb.  */
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  /* Merge the two or three possible values with PHIs.  */
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      /* Also merge the newly computed extension limb into the
	         threaded state.  */
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
	      insert_before (g);
	    }
	  if (m_bitfld_load)
	    {
	      /* A bit-field load was lowered conditionally above; merge
	         its loop-carried limb state with PHIs as well.  */
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  /* Constant index: decide at compile time which of the three
	     regions IDX falls into.  */
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      /* Boundary limb: take the most significant operand limb
	         and remember its sign extension for the limbs above.  */
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      /* Pure extension limb.  */
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Cast from a normal integral (or small/middle _BitInt) type to
	 a large/huge _BitInt; at most two limbs come from the operand,
	 the rest is its extension.  */
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  /* Compute the low limb R1, the (optional) second limb R2 and
	     the extension limb REXT once, in the init sequence.  */
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      /* Sign-broadcast the most significant operand limb.  */
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  /* Upwards 2-limbs-at-a-time loop: thread the limbs through
	     PHIs whose latch value is REXT after the first iteration.  */
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1);
	      g = gimple_build_assign (m_data[m_data_cnt + 1], rext);
	      insert_before (g);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2);
		  g = gimple_build_assign (m_data[m_data_cnt + 3], rext);
		  insert_before (g);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  /* Constant index: select the right precomputed limb.  */
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      /* Variable index: select the limb at runtime with conditions
	 on IDX == 0 (and IDX == 1 when a second limb exists) and
	 merge with a PHI.  */
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
/* Helper function for handle_stmt method, handle a load from memory.
   Returns limb number IDX of the loaded value.  Bit-field loads whose
   offset within DECL_BIT_FIELD_REPRESENTATIVE is not a multiple of
   BITS_PER_UNIT are assembled from two adjacent limbs of the
   representative; everything else takes the normal_load path.  For
   loads that may throw (stmt_ends_bb_p), the emitted per-limb loads
   get duplicated EH edges.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      /* Find the EH edge of STMT's block so the split per-limb loads
	 can be given EH edges to the same landing pad.  */
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
	  break;
    }
  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
	goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
	 handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	goto normal_load;
      /* Re-express the access in terms of the representative field and
	 compute the bit offset of FLD within it.  */
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
	{
	  bitoffset = 0;
	  var_field_off = true;
	}
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      /* bo_idx is the representative limb the field starts in, bo_bit
	 the bit position within that limb.  */
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      if (m_first)
	{
	  if (m_upwards)
	    {
	      /* Upwards iteration: load the first needed representative
		 limb already in the init sequence and thread it through
		 a PHI so each iteration needs only one new load.  */
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
	      tree iv = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv, t);
	      insert_before (g);
	      if (eh)
		{
		  /* This load can throw too; give it an EH edge and
		     fix up the iterators/preheader if the split moved
		     them.  */
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_edge)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
			= profile_probability::very_unlikely ();
		      m_gsi = gsi_after_labels (e->dest);
		      if (gsi_bb (save_gsi) == e->src)
			{
			  if (gsi_end_p (save_gsi))
			    save_gsi = gsi_end_bb (e->dest);
			  else
			    save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
			}
		      if (m_preheader_bb == e->src)
			m_preheader_bb = e->dest;
		    }
		}
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	      tree out;
	      prepare_data_in_out (iv, idx, &out);
	      out = m_data[m_data_cnt];
	      m_data.safe_push (out);
	    }
	  else
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      /* nidx0/nidx1 are the representative limb indexes to load; nidx0
	 is NULL when the low part is already available in IV.  */
      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
	{
	  /* Inside of a conditional cast, record where this load's
	     state lives so handle_cast can merge it with PHIs.  */
	  gcc_assert (!m_bitfld_load);
	  m_bitfld_load = m_data_cnt;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  gcc_assert (i * limb_prec < prec);
	  nidx1 = size_int (i + bo_idx + 1);
	  if ((i + 1) * limb_prec > prec)
	    {
	      /* Last, partial limb: the second representative limb is
		 only needed if the field spills into it.  */
	      prec %= limb_prec;
	      if (prec + bo_bit <= (unsigned) limb_prec)
		nidx1 = NULL_TREE;
	    }
	  if (!iv)
	    nidx0 = size_int (i + bo_idx);
	}
      else
	{
	  if (!iv)
	    {
	      if (bo_idx == 0)
		nidx0 = idx;
	      else
		{
		  nidx0 = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  nidx1 = make_ssa_name (sizetype);
	  g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
				   size_int (bo_idx + 1));
	  insert_before (g);
	}

      tree iv2 = NULL_TREE;
      if (nidx0)
	{
	  /* Load the lower representative limb.  */
	  tree t = limb_access (rhs_type, nrhs1, nidx0, true);
	  iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	  insert_before (g);
	  gcc_assert (!eh);
	}
      if (nidx1)
	{
	  /* Load the upper representative limb, possibly guarded so
	     the limb past the most significant one isn't read when it
	     isn't needed (m_var_msb case).  */
	  bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  if (conditional)
	    {
	      if ((prec % limb_prec) == 0
		  || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
		conditional = false;
	    }
	  edge edge_true = NULL, edge_false = NULL;
	  if (conditional)
	    {
	      g = gimple_build_cond (NE_EXPR, idx,
				     size_int (prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      if_then (g, profile_probability::likely (),
		       edge_true, edge_false);
	    }
	  tree t = limb_access (rhs_type, nrhs1, nidx1, true);
	  if (m_upwards_2limb
	      && !m_first
	      && !m_bitfld_load
	      && !tree_fits_uhwi_p (idx))
	    iv2 = m_data[m_data_cnt + 1];
	  else
	    iv2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv2, t);
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_edge)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
		    = profile_probability::very_unlikely ();
		}
	    }
	  if (conditional)
	    {
	      /* Merge the loaded limb with zero from the skipped path.  */
	      tree iv3 = make_ssa_name (m_limb_type);
	      if (eh)
		edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
	      gphi *phi = create_phi_node (iv3, edge_true->dest);
	      add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, build_zero_cst (m_limb_type),
			   edge_false, UNKNOWN_LOCATION);
	      m_gsi = gsi_after_labels (edge_true->dest);
	    }
	}
      /* Combine the two parts: low limb shifted down by bo_bit, high
	 limb shifted up by limb_prec - bo_bit.  */
      g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
			       iv, build_int_cst (unsigned_type_node, bo_bit));
      insert_before (g);
      iv = gimple_assign_lhs (g);
      if (iv2)
	{
	  g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
				   iv2, build_int_cst (unsigned_type_node,
						       limb_prec - bo_bit));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
				   gimple_assign_lhs (g), iv);
	  insert_before (g);
	  iv = gimple_assign_lhs (g);
	  /* The upper limb becomes the lower limb of the next
	     iteration.  */
	  if (m_data[m_data_cnt])
	    m_data[m_data_cnt] = iv2;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
	    iv = add_cast (atype, iv);
	}
      m_data_cnt += 3;
      return iv;
    }

 normal_load:
  /* Use write_p = true for loads with EH edges to make
     sure limb_access doesn't add a cast as separate
     statement after it.  */
  rhs1 = limb_access (rhs_type, rhs1, idx, eh);
  tree ret = make_ssa_name (TREE_TYPE (rhs1));
  g = gimple_build_assign (ret, rhs1);
  insert_before (g);
  if (eh)
    {
      maybe_duplicate_eh_stmt (g, stmt);
      if (eh_edge)
	{
	  edge e = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e->dest);
	  make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
	    = profile_probability::very_unlikely ();
	}
      /* limb_access was told not to emit the partial limb cast (see
	 above), so add it here when needed.  */
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
	    ret = add_cast (atype, ret);
	}
    }
  return ret;
}
1889 /* Return a limb IDX from a mergeable statement STMT. */
1891 tree
1892 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1894 tree lhs, rhs1, rhs2 = NULL_TREE;
1895 gimple *g;
1896 switch (gimple_code (stmt))
1898 case GIMPLE_ASSIGN:
1899 if (gimple_assign_load_p (stmt))
1900 return handle_load (stmt, idx);
1901 switch (gimple_assign_rhs_code (stmt))
1903 case BIT_AND_EXPR:
1904 case BIT_IOR_EXPR:
1905 case BIT_XOR_EXPR:
1906 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1907 /* FALLTHRU */
1908 case BIT_NOT_EXPR:
1909 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1910 lhs = make_ssa_name (TREE_TYPE (rhs1));
1911 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1912 rhs1, rhs2);
1913 insert_before (g);
1914 return lhs;
1915 case PLUS_EXPR:
1916 case MINUS_EXPR:
1917 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1918 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1919 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1920 rhs1, rhs2, idx);
1921 case NEGATE_EXPR:
1922 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1923 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1924 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1925 case LSHIFT_EXPR:
1926 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1927 idx),
1928 gimple_assign_rhs2 (stmt), idx);
1929 case SSA_NAME:
1930 case INTEGER_CST:
1931 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1932 CASE_CONVERT:
1933 case VIEW_CONVERT_EXPR:
1934 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1935 gimple_assign_rhs1 (stmt), idx);
1936 default:
1937 break;
1939 break;
1940 default:
1941 break;
1943 gcc_unreachable ();
1946 /* Return minimum precision of OP at STMT.
1947 Positive value is minimum precision above which all bits
1948 are zero, negative means all bits above negation of the
1949 value are copies of the sign bit. */
1951 static int
1952 range_to_prec (tree op, gimple *stmt)
1954 int_range_max r;
1955 wide_int w;
1956 tree type = TREE_TYPE (op);
1957 unsigned int prec = TYPE_PRECISION (type);
1959 if (!optimize
1960 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1961 || r.undefined_p ())
1963 if (TYPE_UNSIGNED (type))
1964 return prec;
1965 else
1966 return MIN ((int) -prec, -2);
1969 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1971 w = r.lower_bound ();
1972 if (wi::neg_p (w))
1974 int min_prec1 = wi::min_precision (w, SIGNED);
1975 w = r.upper_bound ();
1976 int min_prec2 = wi::min_precision (w, SIGNED);
1977 int min_prec = MAX (min_prec1, min_prec2);
1978 return MIN (-min_prec, -2);
1982 w = r.upper_bound ();
1983 int min_prec = wi::min_precision (w, UNSIGNED);
1984 return MAX (min_prec, 1);
1987 /* Return address of the first limb of OP and write into *PREC
1988 its precision. If positive, the operand is zero extended
1989 from that precision, if it is negative, the operand is sign-extended
1990 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
1991 otherwise *PREC_STORED is prec from the innermost call without
1992 range optimizations. */
1994 tree
1995 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
1996 int *prec_stored, int *prec)
1998 wide_int w;
1999 location_t loc_save = m_loc;
/* Small/middle _BitInt or other integral operands (also reached via the
   do_int label below for casts from such types) are spilled into a fresh
   1- or 2-limb temporary array so callers can address limbs uniformly.  */
2000 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2001 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2002 && TREE_CODE (op) != INTEGER_CST)
2004 do_int:
2005 *prec = range_to_prec (op, stmt);
2006 bitint_prec_kind kind = bitint_prec_small;
2007 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2008 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2009 kind = bitint_precision_kind (TREE_TYPE (op));
/* Middle _BitInt is first converted to an ordinary integral type.  */
2010 if (kind == bitint_prec_middle)
2012 tree type = NULL_TREE;
2013 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2015 tree op_type = TREE_TYPE (op);
2016 unsigned HOST_WIDE_INT nelts
2017 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2018 /* Add support for 3 or more limbs filled in from normal
2019 integral type if this assert fails. If no target chooses
2020 limb mode smaller than half of largest supported normal
2021 integral type, this will not be needed. */
2022 gcc_assert (nelts <= 2);
2023 if (prec_stored)
2024 *prec_stored = (TYPE_UNSIGNED (op_type)
2025 ? TYPE_PRECISION (op_type)
2026 : -TYPE_PRECISION (op_type));
/* If the value range fits into a single limb, one limb is enough even
   when the type would need two; clamp *prec_stored accordingly.  */
2027 if (*prec <= limb_prec && *prec >= -limb_prec)
2029 nelts = 1;
2030 if (prec_stored)
2032 if (TYPE_UNSIGNED (op_type))
2034 if (*prec_stored > limb_prec)
2035 *prec_stored = limb_prec;
2037 else if (*prec_stored < -limb_prec)
2038 *prec_stored = -limb_prec;
/* Fill the temporary array: low limb always, high limb via a right
   shift by limb_prec when two limbs are needed.  */
2041 tree atype = build_array_type_nelts (m_limb_type, nelts);
2042 tree var = create_tmp_var (atype);
2043 tree t1 = op;
2044 if (!useless_type_conversion_p (m_limb_type, op_type))
2045 t1 = add_cast (m_limb_type, t1);
2046 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2047 NULL_TREE, NULL_TREE);
2048 gimple *g = gimple_build_assign (v, t1);
2049 insert_before (g);
2050 if (nelts > 1)
2052 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2053 g = gimple_build_assign (make_ssa_name (op_type),
2054 RSHIFT_EXPR, op, lp);
2055 insert_before (g);
2056 tree t2 = gimple_assign_lhs (g);
2057 t2 = add_cast (m_limb_type, t2);
2058 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2059 NULL_TREE, NULL_TREE);
2060 g = gimple_build_assign (v, t2);
2061 insert_before (g);
/* Clobber the temporary right after the current stmt unless that stmt
   ends its basic block, and return the temporary's address.  */
2063 tree ret = build_fold_addr_expr (var);
2064 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2066 tree clobber = build_clobber (atype, CLOBBER_EOL);
2067 g = gimple_build_assign (var, clobber);
2068 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2070 m_loc = loc_save;
2071 return ret;
2073 switch (TREE_CODE (op))
2075 case SSA_NAME:
/* SSA names not tracked in m_names are defined by a load, a GIMPLE_NOP
   (i.e. uninitialized default definition) or a cast; handle each.  */
2076 if (m_names == NULL
2077 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2079 gimple *g = SSA_NAME_DEF_STMT (op);
2080 tree ret;
2081 m_loc = gimple_location (g);
/* Loads: hand back the address of what is being loaded.  */
2082 if (gimple_assign_load_p (g))
2084 *prec = range_to_prec (op, NULL);
2085 if (prec_stored)
2086 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2087 ? TYPE_PRECISION (TREE_TYPE (op))
2088 : -TYPE_PRECISION (TREE_TYPE (op)));
2089 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2090 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2091 NULL_TREE, true, GSI_SAME_STMT);
/* Uninitialized operand: represent it as a single addressable limb.  */
2093 else if (gimple_code (g) == GIMPLE_NOP)
2095 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2096 if (prec_stored)
2097 *prec_stored = *prec;
2098 tree var = create_tmp_var (m_limb_type);
2099 TREE_ADDRESSABLE (var) = 1;
2100 ret = build_fold_addr_expr (var);
2101 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2103 tree clobber = build_clobber (m_limb_type, CLOBBER_EOL);
2104 g = gimple_build_assign (var, clobber);
2105 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
/* Casts: recurse on the cast operand.  */
2108 else
2110 gcc_assert (gimple_assign_cast_p (g));
2111 tree rhs1 = gimple_assign_rhs1 (g);
2112 bitint_prec_kind kind = bitint_prec_small;
2113 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2114 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2115 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2116 if (kind >= bitint_prec_large)
2118 tree lhs_type = TREE_TYPE (op);
2119 tree rhs_type = TREE_TYPE (rhs1);
2120 int prec_stored_val = 0;
2121 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
/* For widening casts adjust *prec so the caller knows how the stored
   value extends up to the (wider) lhs precision.  */
2122 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2124 if (TYPE_UNSIGNED (lhs_type)
2125 && !TYPE_UNSIGNED (rhs_type))
2126 gcc_assert (*prec >= 0 || prec_stored == NULL)
2128 else
2130 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2132 else if (TYPE_UNSIGNED (lhs_type))
2134 gcc_assert (*prec > 0
2135 || prec_stored_val > 0
2136 || (-prec_stored_val
2137 >= TYPE_PRECISION (lhs_type)));
2138 *prec = TYPE_PRECISION (lhs_type);
2140 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2142 else
2143 *prec = -TYPE_PRECISION (lhs_type);
/* Cast from small/middle _BitInt or ordinary integer: spill it via the
   integral-case code at the top of the function.  */
2146 else
2148 op = rhs1;
2149 stmt = g;
2150 goto do_int;
2153 m_loc = loc_save;
2154 return ret;
/* Tracked SSA names: use the variable of the coalesced partition.  */
2156 else
2158 int p = var_to_partition (m_map, op);
2159 gcc_assert (m_vars[p] != NULL_TREE);
2160 *prec = range_to_prec (op, stmt);
2161 if (prec_stored)
2162 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2163 ? TYPE_PRECISION (TREE_TYPE (op))
2164 : -TYPE_PRECISION (TREE_TYPE (op)));
2165 return build_fold_addr_expr (m_vars[p]);
2167 case INTEGER_CST:
/* Constants are emitted into the constant pool using the narrowest type
   with a whole number of limbs that still represents the value.  */
2168 unsigned int min_prec, mp;
2169 tree type;
2170 w = wi::to_wide (op);
2171 if (tree_int_cst_sgn (op) >= 0)
2173 min_prec = wi::min_precision (w, UNSIGNED);
2174 *prec = MAX (min_prec, 1);
2176 else
2178 min_prec = wi::min_precision (w, SIGNED);
2179 *prec = MIN ((int) -min_prec, -2);
2181 mp = CEIL (min_prec, limb_prec) * limb_prec;
2182 if (mp == 0)
2183 mp = 1;
2184 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2185 type = TREE_TYPE (op);
2186 else
2187 type = build_bitint_type (mp, 1);
2188 if (TREE_CODE (type) != BITINT_TYPE
2189 || bitint_precision_kind (type) == bitint_prec_small)
2191 if (TYPE_PRECISION (type) <= limb_prec)
2192 type = m_limb_type;
2193 else
2194 /* This case is for targets which e.g. have 64-bit
2195 limb but categorize up to 128-bits _BitInts as
2196 small. We could use type of m_limb_type[2] and
2197 similar instead to save space. */
2198 type = build_bitint_type (mid_min_prec, 1);
2200 if (prec_stored)
2202 if (tree_int_cst_sgn (op) >= 0)
2203 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2204 else
2205 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2207 op = tree_output_constant_def (fold_convert (type, op));
2208 return build_fold_addr_expr (op);
2209 default:
2210 gcc_unreachable ();
2214 /* Helper function, create a loop before the current location,
2215 start with sizetype INIT value from the preheader edge. Return
2216 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2217 from the latch edge. */
2219 tree
2220 bitint_large_huge::create_loop (tree init, tree *idx_next)
2222 if (!gsi_end_p (m_gsi))
2223 gsi_prev (&m_gsi);
2224 else
2225 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2226 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2227 edge e2 = split_block (e1->dest, (gimple *) NULL);
2228 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2229 e3->probability = profile_probability::very_unlikely ();
2230 e2->flags = EDGE_FALSE_VALUE;
2231 e2->probability = e3->probability.invert ();
2232 tree idx = make_ssa_name (sizetype);
2233 gphi *phi = create_phi_node (idx, e1->dest);
2234 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2235 *idx_next = make_ssa_name (sizetype);
2236 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2237 m_gsi = gsi_after_labels (e1->dest);
2238 m_bb = e1->dest;
2239 m_preheader_bb = e1->src;
2240 class loop *loop = alloc_loop ();
2241 loop->header = e1->dest;
2242 add_loop (loop, e1->src->loop_father);
2243 return idx;
2246 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2247 lowered using iteration from the least significant limb up to the most
2248 significant limb. For large _BitInt it is emitted as straight line code
2249 before current location, for huge _BitInt as a loop handling two limbs
2250 at once, followed by handling up to limbs in straight line code (at most
2251 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2252 comparisons, in that case CMP_CODE should be the comparison code and
2253 CMP_OP1/CMP_OP2 the comparison operands. */
2255 tree
2256 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2257 tree cmp_op1, tree cmp_op2)
/* eq_p is true when lowering a mergeable EQ_EXPR/NE_EXPR comparison
   rather than an assignment or store.  */
2259 bool eq_p = cmp_code != ERROR_MARK;
2260 tree type;
2261 if (eq_p)
2262 type = TREE_TYPE (cmp_op1);
2263 else
2264 type = TREE_TYPE (gimple_assign_lhs (stmt));
2265 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2266 bitint_prec_kind kind = bitint_precision_kind (type);
2267 gcc_assert (kind >= bitint_prec_large);
2268 gimple *g;
2269 tree lhs = gimple_get_lhs (stmt);
2270 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
/* Large/huge _BitInt SSA_NAME lhs is replaced by the underlying
   variable of its coalesced partition.  */
2271 if (lhs
2272 && TREE_CODE (lhs) == SSA_NAME
2273 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2274 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2276 int p = var_to_partition (m_map, lhs);
2277 gcc_assert (m_vars[p] != NULL_TREE);
2278 m_lhs = lhs = m_vars[p];
/* State for the lowering: limb counts, sign extension, EH landing pad
   and bit-field store bookkeeping (bo_idx/bo_bit are the limb index and
   bit position of the representative-relative start of a bit-field).  */
2280 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2281 bool sext = false;
2282 tree ext = NULL_TREE, store_operand = NULL_TREE;
2283 bool eh = false;
2284 basic_block eh_pad = NULL;
2285 tree nlhs = NULL_TREE;
2286 unsigned HOST_WIDE_INT bo_idx = 0;
2287 unsigned HOST_WIDE_INT bo_bit = 0;
2288 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
/* For stores, find the EH pad (if the store can throw) and, for stores
   into bit-fields, rewrite the lhs to the bit-field representative.  */
2289 if (gimple_store_p (stmt))
2291 store_operand = gimple_assign_rhs1 (stmt);
2292 eh = stmt_ends_bb_p (stmt);
2293 if (eh)
2295 edge e;
2296 edge_iterator ei;
2297 basic_block bb = gimple_bb (stmt);
2299 FOR_EACH_EDGE (e, ei, bb->succs)
2300 if (e->flags & EDGE_EH)
2302 eh_pad = e->dest;
2303 break;
2306 if (TREE_CODE (lhs) == COMPONENT_REF
2307 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2309 tree fld = TREE_OPERAND (lhs, 1);
2310 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2311 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2312 poly_int64 bitoffset;
2313 poly_uint64 field_offset, repr_offset;
/* Byte-aligned bit-fields can be stored through lhs directly.  */
2314 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2315 nlhs = lhs;
2316 else
2318 bool var_field_off = false;
2319 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2320 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2321 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2322 else
2324 bitoffset = 0;
2325 var_field_off = true;
2327 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2328 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2329 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2330 TREE_OPERAND (lhs, 0), repr,
2331 var_field_off
2332 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2333 HOST_WIDE_INT bo = bitoffset.to_constant ();
2334 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2335 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
/* Detect a widening cast feeding this statement (directly or through
   the stored SSA_NAME's definition).  */
2339 if ((store_operand
2340 && TREE_CODE (store_operand) == SSA_NAME
2341 && (m_names == NULL
2342 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2343 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2344 || gimple_assign_cast_p (stmt))
2346 rhs1 = gimple_assign_rhs1 (store_operand
2347 ? SSA_NAME_DEF_STMT (store_operand)
2348 : stmt);
2349 /* Optimize mergeable ops ending with widening cast to _BitInt
2350 (or followed by store). We can lower just the limbs of the
2351 cast operand and widen afterwards. */
2352 if (TREE_CODE (rhs1) == SSA_NAME
2353 && (m_names == NULL
2354 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2355 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2356 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2357 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2358 limb_prec) < CEIL (prec, limb_prec)
2359 || (kind == bitint_prec_huge
2360 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2362 store_operand = rhs1;
2363 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2364 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2365 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2366 sext = true;
/* Iteration setup: large _BitInt is straight line code over cnt limbs;
   huge _BitInt uses a loop over pairs of limbs (created here) plus up
   to two trailing limbs handled in straight line code.  */
2369 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2370 if (kind == bitint_prec_large)
2371 cnt = CEIL (prec, limb_prec);
2372 else
2374 rem = (prec % (2 * limb_prec));
2375 end = (prec - rem) / limb_prec;
2376 cnt = 2 + CEIL (rem, limb_prec);
2377 idx = idx_first = create_loop (size_zero_node, &idx_next);
/* For comparisons, split before STMT so unequal limbs can branch
   straight to STMT's block.  */
2380 basic_block edge_bb = NULL;
2381 if (eq_p)
2383 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2384 gsi_prev (&gsi);
2385 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2386 edge_bb = e->src;
2387 if (kind == bitint_prec_large)
2388 m_gsi = gsi_end_bb (edge_bb);
2390 else
2391 m_after_stmt = stmt;
2392 if (kind != bitint_prec_large)
2393 m_upwards_2limb = end;
2394 m_upwards = true;
/* separate_ext: the full type has more limbs than prec covers, so the
   extension limbs are emitted separately after the main loop.  */
2396 bool separate_ext
2397 = (prec != (unsigned) TYPE_PRECISION (type)
2398 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2399 > CEIL (prec, limb_prec)));
/* Main lowering loop over limbs (or limb pairs for huge kind).  */
2401 for (unsigned i = 0; i < cnt; i++)
2403 m_data_cnt = 0;
2404 if (kind == bitint_prec_large)
2405 idx = size_int (i);
2406 else if (i >= 2)
2407 idx = size_int (end + (i > 2));
2408 if (eq_p)
2410 rhs1 = handle_operand (cmp_op1, idx);
2411 tree rhs2 = handle_operand (cmp_op2, idx);
2412 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2413 insert_before (g);
2414 edge e1 = split_block (gsi_bb (m_gsi), g);
2415 e1->flags = EDGE_FALSE_VALUE;
2416 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2417 e1->probability = profile_probability::unlikely ();
2418 e2->probability = e1->probability.invert ();
2419 if (i == 0)
2420 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2421 m_gsi = gsi_after_labels (e1->dest);
2423 else
2425 if (store_operand)
2426 rhs1 = handle_operand (store_operand, idx);
2427 else
2428 rhs1 = handle_stmt (stmt, idx);
2429 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2430 rhs1 = add_cast (m_limb_type, rhs1);
2431 if (sext && i == cnt - 1)
2432 ext = rhs1;
2433 tree nidx = idx;
2434 if (bo_idx)
2436 if (tree_fits_uhwi_p (idx))
2437 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2438 else
2440 nidx = make_ssa_name (sizetype);
2441 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2442 size_int (bo_idx));
2443 insert_before (g);
2446 bool done = false;
2447 basic_block new_bb = NULL;
2448 /* Handle stores into bit-fields. */
2451 if (bo_bit)
2453 if (i == 0)
2453 edge e2 = NULL;
2454 if (kind != bitint_prec_large)
2456 prepare_data_in_out (build_zero_cst (m_limb_type),
2457 idx, &bf_next);
2458 bf_next = m_data.pop ();
2459 bf_cur = m_data.pop ();
2460 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2461 NULL_TREE, NULL_TREE);
2462 edge edge_true;
2463 if_then_else (g, profile_probability::unlikely (),
2464 edge_true, e2);
2465 new_bb = e2->dest;
2467 tree ftype
2468 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2469 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2470 bitsize_int (limb_prec - bo_bit),
2471 bitsize_int (bo_idx * limb_prec + bo_bit));
2472 tree t = add_cast (ftype, rhs1);
2473 g = gimple_build_assign (bfr, t);
2474 insert_before (g);
2475 if (eh)
2477 maybe_duplicate_eh_stmt (g, stmt);
2478 if (eh_pad)
2480 edge e = split_block (gsi_bb (m_gsi), g);
2481 m_gsi = gsi_after_labels (e->dest);
2482 make_edge (e->src, eh_pad, EDGE_EH)->probability
2483 = profile_probability::very_unlikely ();
2486 if (kind == bitint_prec_large)
2488 bf_cur = rhs1;
2489 done = true;
2491 else if (e2)
2492 m_gsi = gsi_after_labels (e2->src);
/* Subsequent limbs splice bo_bit bits carried over in bf_cur with
   the new limb shifted into position.  */
2494 if (!done)
2496 tree t1 = make_ssa_name (m_limb_type);
2497 tree t2 = make_ssa_name (m_limb_type);
2498 tree t3 = make_ssa_name (m_limb_type);
2499 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2500 build_int_cst (unsigned_type_node,
2501 limb_prec - bo_bit));
2502 insert_before (g);
2503 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2504 build_int_cst (unsigned_type_node,
2505 bo_bit));
2506 insert_before (g);
2507 bf_cur = rhs1;
2508 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2509 insert_before (g);
2510 rhs1 = t3;
2511 if (bf_next && i == 1)
2513 g = gimple_build_assign (bf_next, bf_cur);
2514 insert_before (g);
2518 if (!done)
2520 /* Handle bit-field access to partial last limb if needed. */
2521 if (nlhs
2522 && i == cnt - 1
2523 && !separate_ext
2524 && tree_fits_uhwi_p (idx))
2526 unsigned int tprec = TYPE_PRECISION (type);
2527 unsigned int rprec = tprec % limb_prec;
2528 if (rprec + bo_bit < (unsigned) limb_prec)
2530 tree ftype
2531 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2532 tree bfr = build3 (BIT_FIELD_REF, ftype,
2533 unshare_expr (nlhs),
2534 bitsize_int (rprec + bo_bit),
2535 bitsize_int ((bo_idx
2536 + tprec / limb_prec)
2537 * limb_prec));
2538 tree t = add_cast (ftype, rhs1);
2539 g = gimple_build_assign (bfr, t);
2540 done = true;
2541 bf_cur = NULL_TREE;
2543 else if (rprec + bo_bit == (unsigned) limb_prec)
2544 bf_cur = NULL_TREE;
2546 /* Otherwise, stores to any other lhs. */
2547 if (!done)
2549 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2550 nidx, true);
2551 g = gimple_build_assign (l, rhs1);
2553 insert_before (g);
2554 if (eh)
2556 maybe_duplicate_eh_stmt (g, stmt);
2557 if (eh_pad)
2559 edge e = split_block (gsi_bb (m_gsi), g);
2560 m_gsi = gsi_after_labels (e->dest);
2561 make_edge (e->src, eh_pad, EDGE_EH)->probability
2562 = profile_probability::very_unlikely ();
2565 if (new_bb)
2566 m_gsi = gsi_after_labels (new_bb);
2569 m_first = false;
/* For huge kind, i == 0 advances within the loop body and i == 1
   closes the two-limbs-per-iteration loop.  */
2570 if (kind == bitint_prec_huge && i <= 1)
2572 if (i == 0)
2574 idx = make_ssa_name (sizetype);
2575 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2576 size_one_node);
2577 insert_before (g);
2579 else
2581 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2582 size_int (2));
2583 insert_before (g);
2584 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2585 NULL_TREE, NULL_TREE);
2586 insert_before (g);
2587 if (eq_p)
2588 m_gsi = gsi_after_labels (edge_bb);
2589 else
2590 m_gsi = gsi_for_stmt (stmt);
/* Emit the extension limbs: the sign mask for sext, zero otherwise,
   stored into all limbs above the lowered precision.  */
2595 if (separate_ext)
2597 if (sext)
2599 ext = add_cast (signed_type_for (m_limb_type), ext);
2600 tree lpm1 = build_int_cst (unsigned_type_node,
2601 limb_prec - 1);
2602 tree n = make_ssa_name (TREE_TYPE (ext));
2603 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2604 insert_before (g);
2605 ext = add_cast (m_limb_type, n);
2607 else
2608 ext = build_zero_cst (m_limb_type);
2609 kind = bitint_precision_kind (type);
2610 unsigned start = CEIL (prec, limb_prec);
2611 prec = TYPE_PRECISION (type);
2612 idx = idx_first = idx_next = NULL_TREE;
2613 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2614 kind = bitint_prec_large;
2615 if (kind == bitint_prec_large)
2616 cnt = CEIL (prec, limb_prec) - start;
2617 else
2619 rem = prec % limb_prec;
2620 end = (prec - rem) / limb_prec;
2621 cnt = (bo_bit != 0) + 1 + (rem != 0);
2623 for (unsigned i = 0; i < cnt; i++)
2625 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2626 idx = size_int (start + i);
2627 else if (i == cnt - 1 && (rem != 0))
2628 idx = size_int (end);
2629 else if (i == (bo_bit != 0))
2630 idx = create_loop (size_int (start + i), &idx_next);
2631 rhs1 = ext;
/* Splice the pending bit-field bits with the extension value.  */
2632 if (bf_cur != NULL_TREE && bf_cur != ext)
2634 tree t1 = make_ssa_name (m_limb_type);
2635 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2636 build_int_cst (unsigned_type_node,
2637 limb_prec - bo_bit));
2638 insert_before (g);
2639 if (integer_zerop (ext))
2640 rhs1 = t1;
2641 else
2643 tree t2 = make_ssa_name (m_limb_type);
2644 rhs1 = make_ssa_name (m_limb_type);
2645 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2646 build_int_cst (unsigned_type_node,
2647 bo_bit));
2648 insert_before (g);
2649 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2650 insert_before (g);
2652 bf_cur = ext;
2654 tree nidx = idx;
2655 if (bo_idx)
2657 if (tree_fits_uhwi_p (idx))
2658 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2659 else
2661 nidx = make_ssa_name (sizetype);
2662 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2663 size_int (bo_idx));
2664 insert_before (g);
2667 bool done = false;
2668 /* Handle bit-field access to partial last limb if needed. */
2669 if (nlhs && i == cnt - 1)
2671 unsigned int tprec = TYPE_PRECISION (type);
2672 unsigned int rprec = tprec % limb_prec;
2673 if (rprec + bo_bit < (unsigned) limb_prec)
2675 tree ftype
2676 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2677 tree bfr = build3 (BIT_FIELD_REF, ftype,
2678 unshare_expr (nlhs),
2679 bitsize_int (rprec + bo_bit),
2680 bitsize_int ((bo_idx + tprec / limb_prec)
2681 * limb_prec));
2682 tree t = add_cast (ftype, rhs1);
2683 g = gimple_build_assign (bfr, t);
2684 done = true;
2685 bf_cur = NULL_TREE;
2687 else if (rprec + bo_bit == (unsigned) limb_prec)
2688 bf_cur = NULL_TREE;
2690 /* Otherwise, stores to any other lhs. */
2691 if (!done)
2693 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2694 g = gimple_build_assign (l, rhs1);
2696 insert_before (g);
2697 if (eh)
2699 maybe_duplicate_eh_stmt (g, stmt);
2700 if (eh_pad)
2702 edge e = split_block (gsi_bb (m_gsi), g);
2703 m_gsi = gsi_after_labels (e->dest);
2704 make_edge (e->src, eh_pad, EDGE_EH)->probability
2705 = profile_probability::very_unlikely ();
2708 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2710 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2711 size_one_node);
2712 insert_before (g);
2713 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2714 NULL_TREE, NULL_TREE);
2715 insert_before (g);
2716 m_gsi = gsi_for_stmt (stmt);
/* Flush any remaining carried bit-field bits into the final partial
   bit-field word.  */
2720 if (bf_cur != NULL_TREE)
2722 unsigned int tprec = TYPE_PRECISION (type);
2723 unsigned int rprec = tprec % limb_prec;
2724 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2725 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2726 bitsize_int (rprec + bo_bit),
2727 bitsize_int ((bo_idx + tprec / limb_prec)
2728 * limb_prec));
2729 rhs1 = bf_cur;
2730 if (bf_cur != ext)
2732 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2733 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2734 build_int_cst (unsigned_type_node,
2735 limb_prec - bo_bit));
2736 insert_before (g);
2738 rhs1 = add_cast (ftype, rhs1);
2739 g = gimple_build_assign (bfr, rhs1);
2740 insert_before (g);
2741 if (eh)
2743 maybe_duplicate_eh_stmt (g, stmt);
2744 if (eh_pad)
2746 edge e = split_block (gsi_bb (m_gsi), g);
2747 m_gsi = gsi_after_labels (e->dest);
2748 make_edge (e->src, eh_pad, EDGE_EH)->probability
2749 = profile_probability::very_unlikely ();
/* The original store is now fully replaced; remove it.  */
2754 if (gimple_store_p (stmt))
2756 unlink_stmt_vdef (stmt);
2757 release_ssa_name (gimple_vdef (stmt));
2758 gsi_remove (&m_gsi, true);
/* For comparisons, build a PHI in STMT's block: true on the edges taken
   when some limb pair differed, false when all limbs compared equal.  */
2760 if (eq_p)
2762 lhs = make_ssa_name (boolean_type_node);
2763 basic_block bb = gimple_bb (stmt);
2764 gphi *phi = create_phi_node (lhs, bb);
2765 edge e = find_edge (gsi_bb (m_gsi), bb);
2766 unsigned int n = EDGE_COUNT (bb->preds);
2767 for (unsigned int i = 0; i < n; i++)
2769 edge e2 = EDGE_PRED (bb, i);
2770 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2771 e2, UNKNOWN_LOCATION);
2773 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2774 return lhs;
2776 else
2777 return NULL_TREE;
2780 /* Handle a large/huge _BitInt comparison statement STMT other than
2781 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2782 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2783 lowered by iteration from the most significant limb downwards to
2784 the least significant one, for large _BitInt in straight line code,
2785 otherwise with most significant limb handled in
2786 straight line code followed by a loop handling one limb at a time.
2787 Comparisons with unsigned huge _BitInt with precisions which are
2788 multiples of limb precision can use just the loop and don't need to
2789 handle most significant limb before the loop. The loop or straight
2790 line code jumps to final basic block if a particular pair of limbs
2791 is not equal. */
2793 tree
2794 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2795 tree cmp_op1, tree cmp_op2)
2797 tree type = TREE_TYPE (cmp_op1);
2798 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2799 bitint_prec_kind kind = bitint_precision_kind (type);
2800 gcc_assert (kind >= bitint_prec_large);
2801 gimple *g;
/* Fast path: signed >= 0 or < 0 comparisons only need to test the sign
   of the most significant limb.  */
2802 if (!TYPE_UNSIGNED (type)
2803 && integer_zerop (cmp_op2)
2804 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2806 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2807 tree idx = size_int (end);
2808 m_data_cnt = 0;
2809 tree rhs1 = handle_operand (cmp_op1, idx);
2810 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2812 tree stype = signed_type_for (TREE_TYPE (rhs1));
2813 rhs1 = add_cast (stype, rhs1);
2815 tree lhs = make_ssa_name (boolean_type_node);
2816 g = gimple_build_assign (lhs, cmp_code, rhs1,
2817 build_zero_cst (TREE_TYPE (rhs1)));
2818 insert_before (g);
2819 cmp_code = NE_EXPR;
2820 return lhs;
/* General case: compare limbs from the most significant downwards.
   Large kind is fully unrolled; huge kind handles the top limb(s) in
   straight line code followed by a downward-counting loop.  */
2823 unsigned cnt, rem = 0, end = 0;
2824 tree idx = NULL_TREE, idx_next = NULL_TREE;
2825 if (kind == bitint_prec_large)
2826 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2827 else
2829 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2830 if (rem == 0 && !TYPE_UNSIGNED (type))
2831 rem = limb_prec;
2832 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2833 cnt = 1 + (rem != 0);
/* Split before STMT so limb comparisons can jump to its block as soon
   as the result is decided.  */
2836 basic_block edge_bb = NULL;
2837 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2838 gsi_prev (&gsi);
2839 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2840 edge_bb = e->src;
2841 m_gsi = gsi_end_bb (edge_bb);
2843 edge *edges = XALLOCAVEC (edge, cnt * 2);
2844 for (unsigned i = 0; i < cnt; i++)
2846 m_data_cnt = 0;
2847 if (kind == bitint_prec_large)
2848 idx = size_int (cnt - i - 1);
2849 else if (i == cnt - 1)
2850 idx = create_loop (size_int (end - 1), &idx_next);
2851 else
2852 idx = size_int (end);
2853 tree rhs1 = handle_operand (cmp_op1, idx);
2854 tree rhs2 = handle_operand (cmp_op2, idx);
/* Only the most significant limb of a signed type is compared as
   signed; all other limbs compare unsigned.  */
2855 if (i == 0
2856 && !TYPE_UNSIGNED (type)
2857 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2859 tree stype = signed_type_for (TREE_TYPE (rhs1));
2860 rhs1 = add_cast (stype, rhs1);
2861 rhs2 = add_cast (stype, rhs2);
/* Branch to STMT's block if rhs1 > rhs2 (even edge index) ...  */
2863 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2864 insert_before (g);
2865 edge e1 = split_block (gsi_bb (m_gsi), g);
2866 e1->flags = EDGE_FALSE_VALUE;
2867 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2868 e1->probability = profile_probability::likely ();
2869 e2->probability = e1->probability.invert ();
2870 if (i == 0)
2871 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2872 m_gsi = gsi_after_labels (e1->dest);
2873 edges[2 * i] = e2;
/* ... or if rhs1 < rhs2 (odd edge index); otherwise the limbs are
   equal and control falls through to the next less significant one.  */
2874 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2875 insert_before (g);
2876 e1 = split_block (gsi_bb (m_gsi), g);
2877 e1->flags = EDGE_FALSE_VALUE;
2878 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2879 e1->probability = profile_probability::unlikely ();
2880 e2->probability = e1->probability.invert ();
2881 m_gsi = gsi_after_labels (e1->dest);
2882 edges[2 * i + 1] = e2;
2883 m_first = false;
/* Close the downward-counting loop for huge _BitInt.  */
2884 if (kind == bitint_prec_huge && i == cnt - 1)
2886 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2887 insert_before (g);
2888 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2889 NULL_TREE, NULL_TREE);
2890 insert_before (g);
2891 edge true_edge, false_edge;
2892 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2893 &true_edge, &false_edge);
2894 m_gsi = gsi_after_labels (false_edge->dest);
/* Merge the outcomes with a PHI: GT edges yield the result of the
   greater-than direction, LT edges the opposite; the fall-through
   (all limbs equal) edge yields true only for GE/LE.  */
2898 tree lhs = make_ssa_name (boolean_type_node);
2899 basic_block bb = gimple_bb (stmt);
2900 gphi *phi = create_phi_node (lhs, bb);
2901 for (unsigned int i = 0; i < cnt * 2; i++)
2903 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2904 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2905 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2907 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2908 ? boolean_true_node : boolean_false_node,
2909 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2910 cmp_code = NE_EXPR;
2911 return lhs;
2914 /* Lower large/huge _BitInt left and right shift except for left
2915 shift by < limb_prec constant. */
2917 void
2918 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2920 tree rhs1 = gimple_assign_rhs1 (stmt);
2921 tree lhs = gimple_assign_lhs (stmt);
2922 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2923 tree type = TREE_TYPE (rhs1);
2924 gimple *final_stmt = gsi_stmt (m_gsi);
2925 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2926 && bitint_precision_kind (type) >= bitint_prec_large);
2927 int prec = TYPE_PRECISION (type);
2928 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2929 gimple *g;
2930 if (obj == NULL_TREE)
2932 int part = var_to_partition (m_map, lhs);
2933 gcc_assert (m_vars[part] != NULL_TREE);
2934 obj = m_vars[part];
2936 /* Preparation code common for both left and right shifts.
2937 unsigned n1 = n % limb_prec;
2938 size_t n2 = n / limb_prec;
2939 size_t n3 = n1 != 0;
2940 unsigned n4 = (limb_prec - n1) % limb_prec;
2941 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
2942 if (TREE_CODE (n) == INTEGER_CST)
2944 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2945 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2946 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2947 n3 = size_int (!integer_zerop (n1));
2948 n4 = int_const_binop (TRUNC_MOD_EXPR,
2949 int_const_binop (MINUS_EXPR, lp, n1), lp);
2951 else
2953 n1 = make_ssa_name (TREE_TYPE (n));
2954 n2 = make_ssa_name (sizetype);
2955 n3 = make_ssa_name (sizetype);
2956 n4 = make_ssa_name (TREE_TYPE (n));
2957 if (pow2p_hwi (limb_prec))
2959 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2960 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2961 insert_before (g);
2962 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2963 TREE_TYPE (n))
2964 ? n2 : make_ssa_name (TREE_TYPE (n)),
2965 RSHIFT_EXPR, n,
2966 build_int_cst (TREE_TYPE (n),
2967 exact_log2 (limb_prec)));
2968 insert_before (g);
2969 if (gimple_assign_lhs (g) != n2)
2971 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2972 insert_before (g);
2974 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2975 NEGATE_EXPR, n1);
2976 insert_before (g);
2977 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
2978 lpm1);
2979 insert_before (g);
2981 else
2983 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2984 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
2985 insert_before (g);
2986 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2987 TREE_TYPE (n))
2988 ? n2 : make_ssa_name (TREE_TYPE (n)),
2989 TRUNC_DIV_EXPR, n, lp);
2990 insert_before (g);
2991 if (gimple_assign_lhs (g) != n2)
2993 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2994 insert_before (g);
2996 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2997 MINUS_EXPR, lp, n1);
2998 insert_before (g);
2999 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3000 lp);
3001 insert_before (g);
3003 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3004 build_zero_cst (TREE_TYPE (n)));
3005 insert_before (g);
3006 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3007 insert_before (g);
3009 tree p = build_int_cst (sizetype,
3010 prec / limb_prec - (prec % limb_prec == 0));
3011 if (rhs_code == RSHIFT_EXPR)
3013 /* Lower
3014 dst = src >> n;
3016 unsigned n1 = n % limb_prec;
3017 size_t n2 = n / limb_prec;
3018 size_t n3 = n1 != 0;
3019 unsigned n4 = (limb_prec - n1) % limb_prec;
3020 size_t idx;
3021 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3022 int signed_p = (typeof (src) -1) < 0;
3023 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3024 ? p : p - n3); ++idx)
3025 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3026 limb_type ext;
3027 if (prec % limb_prec == 0)
3028 ext = src[p];
3029 else if (signed_p)
3030 ext = ((signed limb_type) (src[p] << (limb_prec
3031 - (prec % limb_prec))))
3032 >> (limb_prec - (prec % limb_prec));
3033 else
3034 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3035 if (!signed_p && (prec % limb_prec == 0))
3037 else if (idx < prec / 64)
3039 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3040 ++idx;
3042 idx -= n2;
3043 if (signed_p)
3045 dst[idx] = ((signed limb_type) ext) >> n1;
3046 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3048 else
3050 dst[idx] = ext >> n1;
3051 ext = 0;
3053 for (++idx; idx <= p; ++idx)
3054 dst[idx] = ext; */
3055 tree pmn3;
3056 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3057 pmn3 = p;
3058 else if (TREE_CODE (n3) == INTEGER_CST)
3059 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3060 else
3062 pmn3 = make_ssa_name (sizetype);
3063 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3064 insert_before (g);
3066 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3067 edge edge_true, edge_false;
3068 if_then (g, profile_probability::likely (), edge_true, edge_false);
3069 tree idx_next;
3070 tree idx = create_loop (n2, &idx_next);
3071 tree idxmn2 = make_ssa_name (sizetype);
3072 tree idxpn3 = make_ssa_name (sizetype);
3073 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3074 insert_before (g);
3075 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3076 insert_before (g);
3077 m_data_cnt = 0;
3078 tree t1 = handle_operand (rhs1, idx);
3079 m_first = false;
3080 g = gimple_build_assign (make_ssa_name (m_limb_type),
3081 RSHIFT_EXPR, t1, n1);
3082 insert_before (g);
3083 t1 = gimple_assign_lhs (g);
3084 if (!integer_zerop (n3))
3086 m_data_cnt = 0;
3087 tree t2 = handle_operand (rhs1, idxpn3);
3088 g = gimple_build_assign (make_ssa_name (m_limb_type),
3089 LSHIFT_EXPR, t2, n4);
3090 insert_before (g);
3091 t2 = gimple_assign_lhs (g);
3092 g = gimple_build_assign (make_ssa_name (m_limb_type),
3093 BIT_IOR_EXPR, t1, t2);
3094 insert_before (g);
3095 t1 = gimple_assign_lhs (g);
3097 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3098 g = gimple_build_assign (l, t1);
3099 insert_before (g);
3100 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3101 insert_before (g);
3102 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3103 insert_before (g);
3104 idx = make_ssa_name (sizetype);
3105 m_gsi = gsi_for_stmt (final_stmt);
3106 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3107 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3108 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3109 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3110 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3111 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3112 m_data_cnt = 0;
3113 tree ms = handle_operand (rhs1, p);
3114 tree ext = ms;
3115 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3116 ext = add_cast (m_limb_type, ms);
3117 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3118 && !integer_zerop (n3))
3120 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3121 if_then (g, profile_probability::likely (), edge_true, edge_false);
3122 m_data_cnt = 0;
3123 t1 = handle_operand (rhs1, idx);
3124 g = gimple_build_assign (make_ssa_name (m_limb_type),
3125 RSHIFT_EXPR, t1, n1);
3126 insert_before (g);
3127 t1 = gimple_assign_lhs (g);
3128 g = gimple_build_assign (make_ssa_name (m_limb_type),
3129 LSHIFT_EXPR, ext, n4);
3130 insert_before (g);
3131 tree t2 = gimple_assign_lhs (g);
3132 g = gimple_build_assign (make_ssa_name (m_limb_type),
3133 BIT_IOR_EXPR, t1, t2);
3134 insert_before (g);
3135 t1 = gimple_assign_lhs (g);
3136 idxmn2 = make_ssa_name (sizetype);
3137 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3138 insert_before (g);
3139 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3140 g = gimple_build_assign (l, t1);
3141 insert_before (g);
3142 idx_next = make_ssa_name (sizetype);
3143 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3144 insert_before (g);
3145 m_gsi = gsi_for_stmt (final_stmt);
3146 tree nidx = make_ssa_name (sizetype);
3147 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3148 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3149 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3150 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3151 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3152 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3153 idx = nidx;
3155 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3156 insert_before (g);
3157 idx = gimple_assign_lhs (g);
3158 tree sext = ext;
3159 if (!TYPE_UNSIGNED (type))
3160 sext = add_cast (signed_type_for (m_limb_type), ext);
3161 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3162 RSHIFT_EXPR, sext, n1);
3163 insert_before (g);
3164 t1 = gimple_assign_lhs (g);
3165 if (!TYPE_UNSIGNED (type))
3167 t1 = add_cast (m_limb_type, t1);
3168 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3169 RSHIFT_EXPR, sext,
3170 build_int_cst (TREE_TYPE (n),
3171 limb_prec - 1));
3172 insert_before (g);
3173 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3175 else
3176 ext = build_zero_cst (m_limb_type);
3177 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3178 g = gimple_build_assign (l, t1);
3179 insert_before (g);
3180 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3181 size_one_node);
3182 insert_before (g);
3183 idx = gimple_assign_lhs (g);
3184 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3185 if_then (g, profile_probability::likely (), edge_true, edge_false);
3186 idx = create_loop (idx, &idx_next);
3187 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3188 g = gimple_build_assign (l, ext);
3189 insert_before (g);
3190 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3191 insert_before (g);
3192 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3193 insert_before (g);
3195 else
3197 /* Lower
3198 dst = src << n;
3200 unsigned n1 = n % limb_prec;
3201 size_t n2 = n / limb_prec;
3202 size_t n3 = n1 != 0;
3203 unsigned n4 = (limb_prec - n1) % limb_prec;
3204 size_t idx;
3205 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3206 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3207 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3208 if (n1)
3210 dst[idx] = src[idx - n2] << n1;
3211 --idx;
3213 for (; (ssize_t) idx >= 0; --idx)
3214 dst[idx] = 0; */
3215 tree n2pn3;
3216 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3217 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3218 else
3220 n2pn3 = make_ssa_name (sizetype);
3221 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3222 insert_before (g);
3224 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3225 idx even to access the most significant partial limb. */
3226 m_var_msb = true;
3227 if (integer_zerop (n3))
3228 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3229 counts. Emit if (true) condition that can be optimized later. */
3230 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3231 NULL_TREE, NULL_TREE);
3232 else
3233 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3234 edge edge_true, edge_false;
3235 if_then (g, profile_probability::likely (), edge_true, edge_false);
3236 tree idx_next;
3237 tree idx = create_loop (p, &idx_next);
3238 tree idxmn2 = make_ssa_name (sizetype);
3239 tree idxmn2mn3 = make_ssa_name (sizetype);
3240 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3241 insert_before (g);
3242 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3243 insert_before (g);
3244 m_data_cnt = 0;
3245 tree t1 = handle_operand (rhs1, idxmn2);
3246 m_first = false;
3247 g = gimple_build_assign (make_ssa_name (m_limb_type),
3248 LSHIFT_EXPR, t1, n1);
3249 insert_before (g);
3250 t1 = gimple_assign_lhs (g);
3251 if (!integer_zerop (n3))
3253 m_data_cnt = 0;
3254 tree t2 = handle_operand (rhs1, idxmn2mn3);
3255 g = gimple_build_assign (make_ssa_name (m_limb_type),
3256 RSHIFT_EXPR, t2, n4);
3257 insert_before (g);
3258 t2 = gimple_assign_lhs (g);
3259 g = gimple_build_assign (make_ssa_name (m_limb_type),
3260 BIT_IOR_EXPR, t1, t2);
3261 insert_before (g);
3262 t1 = gimple_assign_lhs (g);
3264 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3265 g = gimple_build_assign (l, t1);
3266 insert_before (g);
3267 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3268 insert_before (g);
3269 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3270 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3271 NULL_TREE, NULL_TREE);
3272 insert_before (g);
3273 idx = make_ssa_name (sizetype);
3274 m_gsi = gsi_for_stmt (final_stmt);
3275 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3276 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3277 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3278 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3279 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3280 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3281 m_data_cnt = 0;
3282 if (!integer_zerop (n3))
3284 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3285 NULL_TREE, NULL_TREE);
3286 if_then (g, profile_probability::likely (), edge_true, edge_false);
3287 idxmn2 = make_ssa_name (sizetype);
3288 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3289 insert_before (g);
3290 m_data_cnt = 0;
3291 t1 = handle_operand (rhs1, idxmn2);
3292 g = gimple_build_assign (make_ssa_name (m_limb_type),
3293 LSHIFT_EXPR, t1, n1);
3294 insert_before (g);
3295 t1 = gimple_assign_lhs (g);
3296 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3297 g = gimple_build_assign (l, t1);
3298 insert_before (g);
3299 idx_next = make_ssa_name (sizetype);
3300 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3301 insert_before (g);
3302 m_gsi = gsi_for_stmt (final_stmt);
3303 tree nidx = make_ssa_name (sizetype);
3304 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3305 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3306 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3307 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3308 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3309 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3310 idx = nidx;
3312 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3313 ssize_int (0), NULL_TREE, NULL_TREE);
3314 if_then (g, profile_probability::likely (), edge_true, edge_false);
3315 idx = create_loop (idx, &idx_next);
3316 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3317 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3318 insert_before (g);
3319 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3320 insert_before (g);
3321 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3322 ssize_int (0), NULL_TREE, NULL_TREE);
3323 insert_before (g);
3327 /* Lower large/huge _BitInt multiplication or division. */
3329 void
3330 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
/* OBJ, if non-NULL, is the memory the caller wants the result stored
   into; otherwise the variable backing LHS's coalesced partition is
   used.  The arithmetic itself is delegated to the IFN_MULBITINT /
   IFN_DIVMODBITINT internal function libcalls, which operate on limb
   arrays passed by address.  */
3332 tree rhs1 = gimple_assign_rhs1 (stmt);
3333 tree rhs2 = gimple_assign_rhs2 (stmt);
3334 tree lhs = gimple_assign_lhs (stmt);
3335 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3336 tree type = TREE_TYPE (rhs1);
3337 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3338 && bitint_precision_kind (type) >= bitint_prec_large);
3339 int prec = TYPE_PRECISION (type), prec1, prec2;
/* Lower both operands to addresses of limb arrays; PREC1 and PREC2
   receive the precisions handle_operand_addr determined for them.  */
3340 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3341 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3342 if (obj == NULL_TREE)
/* No result object supplied: store into the variable of LHS's
   partition and pass its address to the libcall.  */
3344 int part = var_to_partition (m_map, lhs);
3345 gcc_assert (m_vars[part] != NULL_TREE);
3346 obj = m_vars[part];
3347 lhs = build_fold_addr_expr (obj);
3349 else
3351 lhs = build_fold_addr_expr (obj);
3352 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3353 NULL_TREE, true, GSI_SAME_STMT);
/* The internal functions encode each precision as a 32-bit (SImode)
   integer argument.  */
3355 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3356 gimple *g;
3357 switch (rhs_code)
3359 case MULT_EXPR:
/* lhs = rhs1 * rhs2, with all three precisions passed explicitly.  */
3360 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3361 lhs, build_int_cst (sitype, prec),
3362 rhs1, build_int_cst (sitype, prec1),
3363 rhs2, build_int_cst (sitype, prec2));
3364 insert_before (g);
3365 break;
3366 case TRUNC_DIV_EXPR:
/* Quotient only: the remainder slot is null_pointer_node with zero
   precision, so the libcall does not store a remainder.  */
3367 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3368 lhs, build_int_cst (sitype, prec),
3369 null_pointer_node,
3370 build_int_cst (sitype, 0),
3371 rhs1, build_int_cst (sitype, prec1),
3372 rhs2, build_int_cst (sitype, prec2));
/* Unless STMT ends its basic block (and so might have EH edges),
   the new call can be marked nothrow.  */
3373 if (!stmt_ends_bb_p (stmt))
3374 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3375 insert_before (g);
3376 break;
3377 case TRUNC_MOD_EXPR:
/* Remainder only: here the quotient slot is null_pointer_node with
   zero precision.  */
3378 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3379 build_int_cst (sitype, 0),
3380 lhs, build_int_cst (sitype, prec),
3381 rhs1, build_int_cst (sitype, prec1),
3382 rhs2, build_int_cst (sitype, prec2));
3383 if (!stmt_ends_bb_p (stmt))
3384 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3385 insert_before (g);
3386 break;
3387 default:
3388 gcc_unreachable ();
3390 if (stmt_ends_bb_p (stmt))
/* If STMT could throw (it ends its block), propagate its EH info to
   the new call; when STMT's block has an EH successor, split the
   block right after the call and recreate an unlikely EH edge to the
   same handler.  */
3392 maybe_duplicate_eh_stmt (g, stmt);
3393 edge e1;
3394 edge_iterator ei;
3395 basic_block bb = gimple_bb (stmt);
3397 FOR_EACH_EDGE (e1, ei, bb->succs)
3398 if (e1->flags & EDGE_EH)
3399 break;
3400 if (e1)
3402 edge e2 = split_block (gsi_bb (m_gsi), g);
3403 m_gsi = gsi_after_labels (e2->dest);
3404 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3405 = profile_probability::very_unlikely ();
3410 /* Lower large/huge _BitInt conversion to/from floating point. */
3412 void
3413 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3415 tree rhs1 = gimple_assign_rhs1 (stmt);
3416 tree lhs = gimple_assign_lhs (stmt);
3417 tree_code rhs_code = gimple_assign_rhs_code (stmt);
/* Precisions are passed to the libcalls as 32-bit (SImode) ints.  */
3418 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3419 gimple *g;
3420 if (rhs_code == FIX_TRUNC_EXPR)
/* Floating point -> _BitInt: IFN_FLOATTOBITINT stores the result
   through LHS's address.  A signed result type is encoded by
   negating the precision argument.  */
3422 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3423 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3424 prec = -prec;
3425 if (obj == NULL_TREE)
/* No result object supplied: store into the variable backing LHS's
   coalesced partition.  */
3427 int part = var_to_partition (m_map, lhs);
3428 gcc_assert (m_vars[part] != NULL_TREE);
3429 obj = m_vars[part];
3430 lhs = build_fold_addr_expr (obj);
3432 else
3434 lhs = build_fold_addr_expr (obj);
3435 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3436 NULL_TREE, true, GSI_SAME_STMT);
3438 scalar_mode from_mode
3439 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3440 #ifdef HAVE_SFmode
3441 /* IEEE single is a full superset of both IEEE half and
3442 bfloat formats, convert to float first and then to _BitInt
3443 to avoid the need of another 2 library routines. */
3444 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3445 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3446 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3448 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3449 if (type)
3450 rhs1 = add_cast (type, rhs1);
3452 #endif
3453 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3454 lhs, build_int_cst (sitype, prec),
3455 rhs1);
3456 insert_before (g);
3458 else
/* _BitInt -> floating point: IFN_BITINTTOFLOAT takes the operand by
   address plus its precision and returns the float value; the call
   replaces the original statement.  */
3460 int prec;
3461 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3462 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3463 rhs1, build_int_cst (sitype, prec));
3464 gimple_call_set_lhs (g, lhs);
3465 if (!stmt_ends_bb_p (stmt))
3466 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3467 gsi_replace (&m_gsi, g, true);
3471 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3472 If check_zero is true, caller wants to check if all bits in [start, end)
3473 are zero, otherwise if bits in [start, end) are either all zero or
3474 all ones. L is the limb with index LIMB, START and END are measured
3475 in bits. */
3477 tree
3478 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3479 unsigned int end, tree l,
3480 unsigned int limb,
3481 bool check_zero)
3483 unsigned startlimb = start / limb_prec;
3484 unsigned endlimb = (end - 1) / limb_prec;
3485 gimple *g;
3487 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
/* [START, END) covers whole limbs, so limb L can be tested as is.  */
3488 return l;
3489 if (startlimb == endlimb && limb == startlimb)
/* The whole bit range lives within this single limb.  */
3491 if (check_zero)
/* Mask out just the [START, END) bits; caller compares with 0.  */
3493 wide_int w = wi::shifted_mask (start % limb_prec,
3494 end - start, false, limb_prec);
3495 g = gimple_build_assign (make_ssa_name (m_limb_type),
3496 BIT_AND_EXPR, l,
3497 wide_int_to_tree (m_limb_type, w));
3498 insert_before (g);
3499 return gimple_assign_lhs (g);
/* All-zeros-or-all-ones check: shift the range up to the most
   significant bits, then arithmetic-shift back down on the signed
   type, so the result is 0 or -1 iff the range was uniform.  */
3501 unsigned int shift = start % limb_prec;
3502 if ((end % limb_prec) != 0)
3504 unsigned int lshift = (-end) % limb_prec;
3505 shift += lshift;
3506 g = gimple_build_assign (make_ssa_name (m_limb_type),
3507 LSHIFT_EXPR, l,
3508 build_int_cst (unsigned_type_node,
3509 lshift));
3510 insert_before (g);
3511 l = gimple_assign_lhs (g);
3513 l = add_cast (signed_type_for (m_limb_type), l);
3514 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3515 RSHIFT_EXPR, l,
3516 build_int_cst (unsigned_type_node, shift));
3517 insert_before (g);
3518 return add_cast (m_limb_type, gimple_assign_lhs (g));
3520 else if (limb == startlimb)
/* First limb of a multi-limb range: only bits from START up matter.
   Logical shift right suffices for the zero check; otherwise shift
   arithmetically on the signed type so the sign bits replicate.  */
3522 if ((start % limb_prec) == 0)
3523 return l;
3524 if (!check_zero)
3525 l = add_cast (signed_type_for (m_limb_type), l);
3526 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3527 RSHIFT_EXPR, l,
3528 build_int_cst (unsigned_type_node,
3529 start % limb_prec));
3530 insert_before (g);
3531 l = gimple_assign_lhs (g);
3532 if (!check_zero)
3533 l = add_cast (m_limb_type, l);
3534 return l;
3536 else if (limb == endlimb)
/* Last limb of a multi-limb range: only bits below END matter.  */
3538 if ((end % limb_prec) == 0)
3539 return l;
3540 if (check_zero)
/* Mask the low END % limb_prec bits for the zero check.  */
3542 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3543 g = gimple_build_assign (make_ssa_name (m_limb_type),
3544 BIT_AND_EXPR, l,
3545 wide_int_to_tree (m_limb_type, w));
3546 insert_before (g);
3547 return gimple_assign_lhs (g);
/* Sign extend from bit END % limb_prec: shift left by the unused
   high bit count, then arithmetic-shift right by the same amount.  */
3549 unsigned int shift = (-end) % limb_prec;
3550 g = gimple_build_assign (make_ssa_name (m_limb_type),
3551 LSHIFT_EXPR, l,
3552 build_int_cst (unsigned_type_node, shift));
3553 insert_before (g);
3554 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3555 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3556 RSHIFT_EXPR, l,
3557 build_int_cst (unsigned_type_node, shift));
3558 insert_before (g);
3559 return add_cast (m_limb_type, gimple_assign_lhs (g));
/* Middle limb: all of its bits are inside [START, END).  */
3561 return l;
3564 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3565 result including overflow flag into the right locations. */
3567 void
3568 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3569 tree ovf, tree lhs, tree orig_obj,
3570 gimple *stmt, tree_code code)
3572 gimple *g;
3574 if (obj == NULL_TREE
3575 && (TREE_CODE (type) != BITINT_TYPE
3576 || bitint_precision_kind (type) < bitint_prec_large))
/* Small or middle precision result: reassemble it from (up to two)
   limbs of VAR into a scalar and replace STMT with
   lhs = COMPLEX_EXPR <result, ovf>.  */
3578 /* Add support for 3 or more limbs filled in from normal integral
3579 type if this assert fails. If no target chooses limb mode smaller
3580 than half of largest supported normal integral type, this will not
3581 be needed. */
3582 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3583 tree lhs_type = type;
3584 if (TREE_CODE (type) == BITINT_TYPE
3585 && bitint_precision_kind (type) == bitint_prec_middle)
3586 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3587 TYPE_UNSIGNED (type));
/* Load the least significant limb.  */
3588 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3589 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3590 insert_before (g);
3591 r1 = gimple_assign_lhs (g);
3592 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3593 r1 = add_cast (lhs_type, r1);
3594 if (TYPE_PRECISION (lhs_type) > limb_prec)
/* Two-limb result: r1 |= (lhs_type) var[1] << limb_prec.  */
3596 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3597 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3598 insert_before (g);
3599 r2 = gimple_assign_lhs (g);
3600 r2 = add_cast (lhs_type, r2);
3601 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3602 build_int_cst (unsigned_type_node,
3603 limb_prec));
3604 insert_before (g);
3605 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3606 gimple_assign_lhs (g));
3607 insert_before (g);
3608 r1 = gimple_assign_lhs (g);
3610 if (lhs_type != type)
3611 r1 = add_cast (type, r1);
3612 ovf = add_cast (lhs_type, ovf);
3613 if (lhs_type != type)
3614 ovf = add_cast (type, ovf);
3615 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3616 m_gsi = gsi_for_stmt (stmt);
3617 gsi_replace (&m_gsi, g, true);
3619 else
3621 unsigned HOST_WIDE_INT nelts = 0;
3622 tree atype = NULL_TREE;
3623 if (obj)
/* Number of result limbs in OBJ.  When ORIG_OBJ is NULL the object
   is twice the size: the limb right after the result holds the
   overflow flag (stored below).  */
3625 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3626 if (orig_obj == NULL_TREE)
3627 nelts >>= 1;
3628 atype = build_array_type_nelts (m_limb_type, nelts);
3630 if (var && obj)
/* Copy the computed limbs from temporary VAR into OBJ with a single
   aggregate array assignment.  */
3632 tree v1, v2;
3633 tree zero;
3634 if (orig_obj == NULL_TREE)
/* OBJ's own type need not match ATYPE; go through a MEM_REF.  */
3636 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3637 v1 = build2 (MEM_REF, atype,
3638 build_fold_addr_expr (unshare_expr (obj)), zero);
3640 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3641 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3642 else
3643 v1 = unshare_expr (obj);
3644 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3645 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3646 g = gimple_build_assign (v1, v2);
3647 insert_before (g);
3649 if (orig_obj == NULL_TREE && obj)
/* Store the overflow flag into the limb right after the result and
   zero out any remaining limbs of OBJ.  */
3651 ovf = add_cast (m_limb_type, ovf);
3652 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3653 g = gimple_build_assign (l, ovf);
3654 insert_before (g);
3655 if (nelts > 1)
3657 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3658 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3659 (nelts + 1) * m_limb_size);
3660 tree v1 = build2 (MEM_REF, atype,
3661 build_fold_addr_expr (unshare_expr (obj)),
3662 off);
3663 g = gimple_build_assign (v1, build_zero_cst (atype));
3664 insert_before (g);
3667 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
/* LHS is _Complex: find the IMAGPART_EXPR use of LHS and rewrite its
   single consumer to be assigned OVF directly (with a conversion if
   the types differ).  */
3669 imm_use_iterator ui;
3670 use_operand_p use_p;
3671 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3673 g = USE_STMT (use_p);
3674 if (!is_gimple_assign (g)
3675 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3676 continue;
3677 tree lhs2 = gimple_assign_lhs (g);
3678 gimple *use_stmt;
3679 single_imm_use (lhs2, &use_p, &use_stmt);
3680 lhs2 = gimple_assign_lhs (use_stmt);
3681 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3682 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3683 g = gimple_build_assign (lhs2, ovf);
3684 else
3685 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3686 gsi_replace (&gsi, g, true);
/* Keep m_gsi valid if it pointed at the replaced statement.  */
3687 if (gsi_stmt (m_gsi) == use_stmt)
3688 m_gsi = gsi_for_stmt (g);
3689 break;
3692 else if (ovf != boolean_false_node)
/* Sanitizer path: if OVF is set at run time, call the ubsan overflow
   diagnostic builtin on a very-unlikely branch.  */
3694 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3695 NULL_TREE, NULL_TREE);
3696 edge edge_true, edge_false;
3697 if_then (g, profile_probability::very_unlikely (),
3698 edge_true, edge_false);
3699 tree zero = build_zero_cst (TREE_TYPE (lhs));
3700 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3701 TREE_TYPE (lhs),
3702 zero, zero, NULL);
3703 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3704 true, GSI_SAME_STMT);
3705 m_gsi = gsi_after_labels (edge_true->dest);
3708 if (var)
/* The temporary limb array is dead past this point; clobber it so
   later passes can reuse its stack slot.  */
3710 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_EOL);
3711 g = gimple_build_assign (var, clobber);
3712 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3716 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3717 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3718 argument 1 precision PREC1 and minimum precision for the result
3719 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3721 static tree
3722 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3723 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3725 *start = 0;
3726 *end = 0;
3727 *check_zero = true;
3728 /* Ignore this special rule for subtraction, even if both
3729 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3730 in infinite precision. */
3731 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3733 /* Result in [0, prec2) is unsigned, if prec > prec2,
3734 all bits above it will be zero. */
3735 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3736 return boolean_false_node;
3737 else
3739 /* ovf if any of bits in [start, end) is non-zero. */
3740 *start = prec - !TYPE_UNSIGNED (type);
3741 *end = prec2;
3744 else if (TYPE_UNSIGNED (type))
3746 /* If result in [0, prec2) is signed and if prec > prec2,
3747 all bits above it will be sign bit copies. */
3748 if (prec >= prec2)
3750 /* ovf if bit prec - 1 is non-zero. */
3751 *start = prec - 1;
3752 *end = prec;
3754 else
3756 /* ovf if any of bits in [start, end) is non-zero. */
3757 *start = prec;
3758 *end = prec2;
3761 else if (prec >= prec2)
3762 return boolean_false_node;
3763 else
3765 /* ovf if [start, end) bits aren't all zeros or all ones. */
3766 *start = prec - 1;
3767 *end = prec2;
3768 *check_zero = false;
3770 return NULL_TREE;
3773 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3774 argument or return type _Complex large/huge _BitInt. */
3776 void
3777 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3779 tree arg0 = gimple_call_arg (stmt, 0);
3780 tree arg1 = gimple_call_arg (stmt, 1);
3781 tree lhs = gimple_call_lhs (stmt);
3782 gimple *g;
3784 if (!lhs)
3786 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3787 gsi_remove (&gsi, true);
3788 return;
3790 gimple *final_stmt = gsi_stmt (m_gsi);
3791 tree type = TREE_TYPE (lhs);
3792 if (TREE_CODE (type) == COMPLEX_TYPE)
3793 type = TREE_TYPE (type);
3794 int prec = TYPE_PRECISION (type);
3795 int prec0 = range_to_prec (arg0, stmt);
3796 int prec1 = range_to_prec (arg1, stmt);
3797 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3798 the be minimum unsigned precision of any possible operation's
3799 result, otherwise it is minimum signed precision.
3800 Some examples:
3801 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3802 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3803 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3804 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3805 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3806 8 + 8 [0, 0x1fe] 9 UNSIGNED
3807 8 + 10 [0, 0x4fe] 11 UNSIGNED
3808 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3809 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3810 8 + -8 [-0x80, 0x17e] 10 SIGNED
3811 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3812 10 + -8 [-0x80, 0x47e] 12 SIGNED
3813 8 - 8 [-0xff, 0xff] 9 SIGNED
3814 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3815 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3816 -8 - -8 [-0xff, 0xff] 9 SIGNED
3817 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3818 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3819 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3820 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3821 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3822 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3823 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3824 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3825 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3826 prec1 < 0 ? -prec1 : prec1);
3827 /* If operands are either both signed or both unsigned,
3828 we need just one additional bit. */
3829 prec2 = (((prec0 < 0) == (prec1 < 0)
3830 /* If one operand is signed and one unsigned and
3831 the signed one has larger precision, we need
3832 just one extra bit, otherwise two. */
3833 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3834 : (prec2 == -prec1 && prec2 != prec0)))
3835 ? prec2 + 1 : prec2 + 2);
3836 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3837 prec1 < 0 ? -prec1 : prec1);
3838 prec3 = MAX (prec3, prec);
3839 tree var = NULL_TREE;
3840 tree orig_obj = obj;
3841 if (obj == NULL_TREE
3842 && TREE_CODE (type) == BITINT_TYPE
3843 && bitint_precision_kind (type) >= bitint_prec_large
3844 && m_names
3845 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3847 int part = var_to_partition (m_map, lhs);
3848 gcc_assert (m_vars[part] != NULL_TREE);
3849 obj = m_vars[part];
3850 if (TREE_TYPE (lhs) == type)
3851 orig_obj = obj;
3853 if (TREE_CODE (type) != BITINT_TYPE
3854 || bitint_precision_kind (type) < bitint_prec_large)
3856 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3857 tree atype = build_array_type_nelts (m_limb_type, nelts);
3858 var = create_tmp_var (atype);
3861 enum tree_code code;
3862 switch (gimple_call_internal_fn (stmt))
3864 case IFN_ADD_OVERFLOW:
3865 case IFN_UBSAN_CHECK_ADD:
3866 code = PLUS_EXPR;
3867 break;
3868 case IFN_SUB_OVERFLOW:
3869 case IFN_UBSAN_CHECK_SUB:
3870 code = MINUS_EXPR;
3871 break;
3872 default:
3873 gcc_unreachable ();
3875 unsigned start, end;
3876 bool check_zero;
3877 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3878 &start, &end, &check_zero);
3880 unsigned startlimb, endlimb;
3881 if (ovf)
3883 startlimb = ~0U;
3884 endlimb = ~0U;
3886 else
3888 startlimb = start / limb_prec;
3889 endlimb = (end - 1) / limb_prec;
3892 int prec4 = ovf != NULL_TREE ? prec : prec3;
3893 bitint_prec_kind kind = bitint_precision_kind (prec4);
3894 unsigned cnt, rem = 0, fin = 0;
3895 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3896 bool last_ovf = (ovf == NULL_TREE
3897 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3898 if (kind != bitint_prec_huge)
3899 cnt = CEIL (prec4, limb_prec) + last_ovf;
3900 else
3902 rem = (prec4 % (2 * limb_prec));
3903 fin = (prec4 - rem) / limb_prec;
3904 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3905 idx = idx_first = create_loop (size_zero_node, &idx_next);
3908 if (kind == bitint_prec_huge)
3909 m_upwards_2limb = fin;
3910 m_upwards = true;
3912 tree type0 = TREE_TYPE (arg0);
3913 tree type1 = TREE_TYPE (arg1);
3914 int prec5 = prec3;
3915 if (bitint_precision_kind (prec5) < bitint_prec_large)
3916 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3917 if (TYPE_PRECISION (type0) < prec5)
3919 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3920 if (TREE_CODE (arg0) == INTEGER_CST)
3921 arg0 = fold_convert (type0, arg0);
3923 if (TYPE_PRECISION (type1) < prec5)
3925 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3926 if (TREE_CODE (arg1) == INTEGER_CST)
3927 arg1 = fold_convert (type1, arg1);
3929 unsigned int data_cnt = 0;
3930 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3931 tree cmp = build_zero_cst (m_limb_type);
3932 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3933 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3934 for (unsigned i = 0; i < cnt; i++)
3936 m_data_cnt = 0;
3937 tree rhs1, rhs2;
3938 if (kind != bitint_prec_huge)
3939 idx = size_int (i);
3940 else if (i >= 2)
3941 idx = size_int (fin + (i > 2));
3942 if (!last_ovf || i < cnt - 1)
3944 if (type0 != TREE_TYPE (arg0))
3945 rhs1 = handle_cast (type0, arg0, idx);
3946 else
3947 rhs1 = handle_operand (arg0, idx);
3948 if (type1 != TREE_TYPE (arg1))
3949 rhs2 = handle_cast (type1, arg1, idx);
3950 else
3951 rhs2 = handle_operand (arg1, idx);
3952 if (i == 0)
3953 data_cnt = m_data_cnt;
3954 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3955 rhs1 = add_cast (m_limb_type, rhs1);
3956 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3957 rhs2 = add_cast (m_limb_type, rhs2);
3958 last_rhs1 = rhs1;
3959 last_rhs2 = rhs2;
3961 else
3963 m_data_cnt = data_cnt;
3964 if (TYPE_UNSIGNED (type0))
3965 rhs1 = build_zero_cst (m_limb_type);
3966 else
3968 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3969 if (TREE_CODE (rhs1) == INTEGER_CST)
3970 rhs1 = build_int_cst (m_limb_type,
3971 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3972 else
3974 tree lpm1 = build_int_cst (unsigned_type_node,
3975 limb_prec - 1);
3976 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
3977 RSHIFT_EXPR, rhs1, lpm1);
3978 insert_before (g);
3979 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
3982 if (TYPE_UNSIGNED (type1))
3983 rhs2 = build_zero_cst (m_limb_type);
3984 else
3986 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
3987 if (TREE_CODE (rhs2) == INTEGER_CST)
3988 rhs2 = build_int_cst (m_limb_type,
3989 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
3990 else
3992 tree lpm1 = build_int_cst (unsigned_type_node,
3993 limb_prec - 1);
3994 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
3995 RSHIFT_EXPR, rhs2, lpm1);
3996 insert_before (g);
3997 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4001 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4002 if (ovf != boolean_false_node)
4004 if (tree_fits_uhwi_p (idx))
4006 unsigned limb = tree_to_uhwi (idx);
4007 if (limb >= startlimb && limb <= endlimb)
4009 tree l = arith_overflow_extract_bits (start, end, rhs,
4010 limb, check_zero);
4011 tree this_ovf = make_ssa_name (boolean_type_node);
4012 if (ovf == NULL_TREE && !check_zero)
4014 cmp = l;
4015 g = gimple_build_assign (make_ssa_name (m_limb_type),
4016 PLUS_EXPR, l,
4017 build_int_cst (m_limb_type, 1));
4018 insert_before (g);
4019 g = gimple_build_assign (this_ovf, GT_EXPR,
4020 gimple_assign_lhs (g),
4021 build_int_cst (m_limb_type, 1));
4023 else
4024 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4025 insert_before (g);
4026 if (ovf == NULL_TREE)
4027 ovf = this_ovf;
4028 else
4030 tree b = make_ssa_name (boolean_type_node);
4031 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4032 insert_before (g);
4033 ovf = b;
4037 else if (startlimb < fin)
4039 if (m_first && startlimb + 2 < fin)
4041 tree data_out;
4042 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4043 ovf_out = m_data.pop ();
4044 m_data.pop ();
4045 if (!check_zero)
4047 cmp = prepare_data_in_out (cmp, idx, &data_out);
4048 cmp_out = m_data.pop ();
4049 m_data.pop ();
4052 if (i != 0 || startlimb != fin - 1)
4054 tree_code cmp_code;
4055 bool single_comparison
4056 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4057 if (!single_comparison)
4059 cmp_code = GE_EXPR;
4060 if (!check_zero && (start % limb_prec) == 0)
4061 single_comparison = true;
4063 else if ((startlimb & 1) == (i & 1))
4064 cmp_code = EQ_EXPR;
4065 else
4066 cmp_code = GT_EXPR;
4067 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4068 NULL_TREE, NULL_TREE);
4069 edge edge_true_true, edge_true_false, edge_false;
4070 gimple *g2 = NULL;
4071 if (!single_comparison)
4072 g2 = gimple_build_cond (NE_EXPR, idx,
4073 size_int (startlimb), NULL_TREE,
4074 NULL_TREE);
4075 if_then_if_then_else (g, g2, profile_probability::likely (),
4076 profile_probability::likely (),
4077 edge_true_true, edge_true_false,
4078 edge_false);
4079 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4080 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4081 check_zero);
4082 tree this_ovf = make_ssa_name (boolean_type_node);
4083 if (cmp_code != GT_EXPR && !check_zero)
4085 g = gimple_build_assign (make_ssa_name (m_limb_type),
4086 PLUS_EXPR, l,
4087 build_int_cst (m_limb_type, 1));
4088 insert_before (g);
4089 g = gimple_build_assign (this_ovf, GT_EXPR,
4090 gimple_assign_lhs (g),
4091 build_int_cst (m_limb_type, 1));
4093 else
4094 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4095 insert_before (g);
4096 if (cmp_code == GT_EXPR)
4098 tree t = make_ssa_name (boolean_type_node);
4099 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4100 insert_before (g);
4101 this_ovf = t;
4103 tree this_ovf2 = NULL_TREE;
4104 if (!single_comparison)
4106 m_gsi = gsi_after_labels (edge_true_true->src);
4107 tree t = make_ssa_name (boolean_type_node);
4108 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4109 insert_before (g);
4110 this_ovf2 = make_ssa_name (boolean_type_node);
4111 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4112 ovf, t);
4113 insert_before (g);
4115 m_gsi = gsi_after_labels (edge_true_false->dest);
4116 tree t;
4117 if (i == 1 && ovf_out)
4118 t = ovf_out;
4119 else
4120 t = make_ssa_name (boolean_type_node);
4121 gphi *phi = create_phi_node (t, edge_true_false->dest);
4122 add_phi_arg (phi, this_ovf, edge_true_false,
4123 UNKNOWN_LOCATION);
4124 add_phi_arg (phi, ovf ? ovf
4125 : boolean_false_node, edge_false,
4126 UNKNOWN_LOCATION);
4127 if (edge_true_true)
4128 add_phi_arg (phi, this_ovf2, edge_true_true,
4129 UNKNOWN_LOCATION);
4130 ovf = t;
4131 if (!check_zero && cmp_code != GT_EXPR)
4133 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4134 phi = create_phi_node (t, edge_true_false->dest);
4135 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4136 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4137 if (edge_true_true)
4138 add_phi_arg (phi, cmp, edge_true_true,
4139 UNKNOWN_LOCATION);
4140 cmp = t;
4146 if (var || obj)
4148 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4150 else if (!tree_fits_uhwi_p (idx)
4151 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4153 bool single_comparison
4154 = (((unsigned) prec % limb_prec) == 0
4155 || prec_limbs + 1 >= fin
4156 || (prec_limbs & 1) == (i & 1));
4157 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4158 NULL_TREE, NULL_TREE);
4159 gimple *g2 = NULL;
4160 if (!single_comparison)
4161 g2 = gimple_build_cond (LT_EXPR, idx,
4162 size_int (prec_limbs - 1),
4163 NULL_TREE, NULL_TREE);
4164 edge edge_true_true, edge_true_false, edge_false;
4165 if_then_if_then_else (g, g2, profile_probability::likely (),
4166 profile_probability::likely (),
4167 edge_true_true, edge_true_false,
4168 edge_false);
4169 tree l = limb_access (type, var ? var : obj, idx, true);
4170 g = gimple_build_assign (l, rhs);
4171 insert_before (g);
4172 if (!single_comparison)
4174 m_gsi = gsi_after_labels (edge_true_true->src);
4175 l = limb_access (type, var ? var : obj,
4176 size_int (prec_limbs - 1), true);
4177 if (!useless_type_conversion_p (TREE_TYPE (l),
4178 TREE_TYPE (rhs)))
4179 rhs = add_cast (TREE_TYPE (l), rhs);
4180 g = gimple_build_assign (l, rhs);
4181 insert_before (g);
4183 m_gsi = gsi_after_labels (edge_true_false->dest);
4185 else
4187 tree l = limb_access (type, var ? var : obj, idx, true);
4188 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4189 rhs = add_cast (TREE_TYPE (l), rhs);
4190 g = gimple_build_assign (l, rhs);
4191 insert_before (g);
4194 m_first = false;
4195 if (kind == bitint_prec_huge && i <= 1)
4197 if (i == 0)
4199 idx = make_ssa_name (sizetype);
4200 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4201 size_one_node);
4202 insert_before (g);
4204 else
4206 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4207 size_int (2));
4208 insert_before (g);
4209 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4210 NULL_TREE, NULL_TREE);
4211 insert_before (g);
4212 m_gsi = gsi_for_stmt (final_stmt);
4217 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
/* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */

void
bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    {
      /* Result is unused; the call can simply be removed.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type), prec0, prec1;
  arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
  arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
  /* PREC2 is a precision large enough for any possible product; a
     negative precN apparently denotes a signed operand of -precN bits
     (TODO confirm against handle_operand_addr).  */
  int prec2 = ((prec0 < 0 ? -prec0 : prec0)
	       + (prec1 < 0 ? -prec1 : prec1));
  if (prec0 == 1 || prec1 == 1)
    --prec2;
  tree var = NULL_TREE;
  tree orig_obj = obj;
  bool force_var = false;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      /* No destination supplied: reuse the variable backing the lhs
	 partition.  */
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  else if (obj != NULL_TREE && DECL_P (obj))
    {
      /* Writing the product into OBJ while one of the arguments
	 overlaps it would clobber an input; force a temporary.  */
      for (int i = 0; i < 2; ++i)
	{
	  tree arg = i ? arg1 : arg0;
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    arg = TREE_OPERAND (arg, 0);
	  if (get_base_address (arg) == obj)
	    {
	      force_var = true;
	      break;
	    }
	}
    }
  if (obj == NULL_TREE
      || force_var
      || TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large
      || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
    {
      /* OBJ is unusable or too small to hold PREC2 bits: multiply into
	 a temporary limb array instead.  */
      unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  tree addr = build_fold_addr_expr (var ? var : obj);
  addr = force_gimple_operand_gsi (&m_gsi, addr, true,
				   NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  /* Emit the library-style multiplication into the destination.  */
  gimple *g
    = gimple_build_call_internal (IFN_MULBITINT, 6,
				  addr, build_int_cst (sitype,
						       MAX (prec2, prec)),
				  arg0, build_int_cst (sitype, prec0),
				  arg1, build_int_cst (sitype, prec1));
  insert_before (g);

  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);
  if (ovf == NULL_TREE)
    {
      /* Overflow not known at compile time: inspect bits [START, END)
	 of the product.  Overflow happened iff they aren't all zero
	 (CHECK_ZERO) resp. aren't all copies of the sign bit.  */
      unsigned startlimb = start / limb_prec;
      unsigned endlimb = (end - 1) / limb_prec;
      unsigned cnt;
      bool use_loop = false;
      if (startlimb == endlimb)
	cnt = 1;
      else if (startlimb + 1 == endlimb)
	cnt = 2;
      else if ((end % limb_prec) == 0)
	{
	  cnt = 2;
	  use_loop = true;
	}
      else
	{
	  cnt = 3;
	  use_loop = startlimb + 2 < endlimb;
	}
      if (cnt == 1)
	{
	  /* All interesting bits live in a single limb.  */
	  tree l = limb_access (NULL_TREE, var ? var : obj,
				size_int (startlimb), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	  insert_before (g);
	  l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
					   startlimb, check_zero);
	  ovf = make_ssa_name (boolean_type_node);
	  if (check_zero)
	    g = gimple_build_assign (ovf, NE_EXPR, l,
				     build_zero_cst (m_limb_type));
	  else
	    {
	      /* The extracted bits must be all zeros or all ones;
		 L + 1 > 1 (unsigned) is true for anything else.  */
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       PLUS_EXPR, l,
				       build_int_cst (m_limb_type, 1));
	      insert_before (g);
	      g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
				       build_int_cst (m_limb_type, 1));
	    }
	  insert_before (g);
	}
      else
	{
	  basic_block edge_bb = NULL;
	  gimple_stmt_iterator gsi = m_gsi;
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);

	  tree cmp = build_zero_cst (m_limb_type);
	  for (unsigned i = 0; i < cnt; i++)
	    {
	      /* i == 0 handles the partial limb at START, i == 1 the
		 limbs in between (possibly as a loop), i == 2 the
		 partial limb at END.  */
	      tree idx, idx_next = NULL_TREE;
	      if (i == 0)
		idx = size_int (startlimb);
	      else if (i == 2)
		idx = size_int (endlimb);
	      else if (use_loop)
		idx = create_loop (size_int (startlimb + 1), &idx_next);
	      else
		idx = size_int (startlimb + 1);
	      tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
	      g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	      insert_before (g);
	      l = gimple_assign_lhs (g);
	      if (i == 0 || i == 2)
		l = arith_overflow_extract_bits (start, end, l,
						 tree_to_uhwi (idx),
						 check_zero);
	      if (i == 0 && !check_zero)
		{
		  /* Remember the first limb's extracted bits; the
		     following limbs must equal its sign extension.  */
		  cmp = l;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, l,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
	      insert_before (g);
	      /* On mismatch jump straight to the final block, where the
		 PHI below will yield OVF = true.  */
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::likely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      if (i == 1 && use_loop)
		{
		  /* Close the loop iterating over the middle limbs.  */
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
					   size_one_node);
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next,
					 size_int (endlimb + (cnt == 1)),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge true_edge, false_edge;
		  extract_true_false_edges_from_block (gsi_bb (m_gsi),
						       &true_edge,
						       &false_edge);
		  m_gsi = gsi_after_labels (false_edge->dest);
		}
	    }

	  /* OVF is false when falling through from the last check and
	     true when arriving over any of the mismatch edges.  */
	  ovf = make_ssa_name (boolean_type_node);
	  basic_block bb = gimple_bb (final_stmt);
	  gphi *phi = create_phi_node (ovf, bb);
	  edge e1 = find_edge (gsi_bb (m_gsi), bb);
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      tree val = e == e1 ? boolean_false_node : boolean_true_node;
	      add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
	    }
	  m_gsi = gsi_for_stmt (final_stmt);
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
}
4430 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4431 .{ADD,SUB,MUL}_OVERFLOW call. */
4433 void
4434 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4436 tree rhs1 = gimple_assign_rhs1 (stmt);
4437 rhs1 = TREE_OPERAND (rhs1, 0);
4438 if (obj == NULL_TREE)
4440 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4441 gcc_assert (m_vars[part] != NULL_TREE);
4442 obj = m_vars[part];
4444 if (TREE_CODE (rhs1) == SSA_NAME
4445 && (m_names == NULL
4446 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4448 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4449 return;
4451 int part = var_to_partition (m_map, rhs1);
4452 gcc_assert (m_vars[part] != NULL_TREE);
4453 tree var = m_vars[part];
4454 unsigned HOST_WIDE_INT nelts
4455 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4456 tree atype = build_array_type_nelts (m_limb_type, nelts);
4457 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4458 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4459 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4460 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4461 ? 0 : nelts * m_limb_size);
4462 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4463 gimple *g = gimple_build_assign (obj, v2);
4464 insert_before (g);
4467 /* Lower COMPLEX_EXPR stmt. */
4469 void
4470 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4472 tree lhs = gimple_assign_lhs (stmt);
4473 tree rhs1 = gimple_assign_rhs1 (stmt);
4474 tree rhs2 = gimple_assign_rhs2 (stmt);
4475 int part = var_to_partition (m_map, lhs);
4476 gcc_assert (m_vars[part] != NULL_TREE);
4477 lhs = m_vars[part];
4478 unsigned HOST_WIDE_INT nelts
4479 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4480 tree atype = build_array_type_nelts (m_limb_type, nelts);
4481 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4482 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4483 tree v2;
4484 if (TREE_CODE (rhs1) == SSA_NAME)
4486 part = var_to_partition (m_map, rhs1);
4487 gcc_assert (m_vars[part] != NULL_TREE);
4488 v2 = m_vars[part];
4490 else if (integer_zerop (rhs1))
4491 v2 = build_zero_cst (atype);
4492 else
4493 v2 = tree_output_constant_def (rhs1);
4494 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4495 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4496 gimple *g = gimple_build_assign (v1, v2);
4497 insert_before (g);
4498 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4499 TYPE_SIZE_UNIT (atype));
4500 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4501 if (TREE_CODE (rhs2) == SSA_NAME)
4503 part = var_to_partition (m_map, rhs2);
4504 gcc_assert (m_vars[part] != NULL_TREE);
4505 v2 = m_vars[part];
4507 else if (integer_zerop (rhs2))
4508 v2 = build_zero_cst (atype);
4509 else
4510 v2 = tree_output_constant_def (rhs2);
4511 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4512 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4513 g = gimple_build_assign (v1, v2);
4514 insert_before (g);
/* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
   argument.  */

void
bitint_large_huge::lower_bit_query (gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = (gimple_call_num_args (stmt) == 2
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      /* Result unused, just remove the call.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  tree type = TREE_TYPE (arg0);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  enum internal_fn ifn = gimple_call_internal_fn (stmt);
  enum built_in_function fcode = END_BUILTINS;
  /* Pick the builtin whose argument precision matches the limb
     precision; the per-limb query uses that builtin.  */
  gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
  switch (ifn)
    {
    case IFN_CLZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZL;
      else
	fcode = BUILT_IN_CLZLL;
      break;
    case IFN_FFS:
      /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
	 we don't add the addend at the end.  */
      arg1 = integer_zero_node;
      /* FALLTHRU */
    case IFN_CTZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZL;
      else
	fcode = BUILT_IN_CTZLL;
      m_upwards = true;
      break;
    case IFN_CLRSB:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSB;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSBL;
      else
	fcode = BUILT_IN_CLRSBLL;
      break;
    case IFN_PARITY:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITY;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITYL;
      else
	fcode = BUILT_IN_PARITYLL;
      m_upwards = true;
      break;
    case IFN_POPCOUNT:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNT;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNTL;
      else
	fcode = BUILT_IN_POPCOUNTLL;
      m_upwards = true;
      break;
    default:
      gcc_unreachable ();
    }
  tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
  unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  /* Per-limb record: E edge to the merge block, VAL the limb value,
     ADDEND the bit offset contribution of that limb.  */
  struct bq_details { edge e; tree val, addend; } *bqp = NULL;
  basic_block edge_bb = NULL;
  if (m_upwards)
    {
      /* CTZ/FFS/PARITY/POPCOUNT walk the limbs from the least
	 significant one upwards.  */
      tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  /* Huge: loop over limb pairs, then handle the REM remaining
	     bits separately.  */
	  rem = (prec % (2 * limb_prec));
	  end = (prec - rem) / limb_prec;
	  cnt = 2 + CEIL (rem, limb_prec);
	  idx = idx_first = create_loop (size_zero_node, &idx_next);
	}

      if (ifn == IFN_CTZ || ifn == IFN_FFS)
	{
	  /* These exit early on the first non-zero limb.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  if (kind == bitint_prec_large)
	    m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	m_after_stmt = stmt;
      if (kind != bitint_prec_large)
	m_upwards_2limb = end;

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (i);
	  else if (i >= 2)
	    idx = size_int (end + (i > 2));

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  /* IN is the value accumulated over the previous iterations,
	     OUT the PHI result to feed the next one.  */
	  tree in, out, tem;
	  if (ifn == IFN_PARITY)
	    in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	  else if (ifn == IFN_FFS)
	    in = prepare_data_in_out (integer_one_node, idx, &out);
	  else
	    in = prepare_data_in_out (integer_zero_node, idx, &out);

	  switch (ifn)
	    {
	    case IFN_CTZ:
	    case IFN_FFS:
	      /* Branch to the final block on the first non-zero limb;
		 record its value and bit offset in BQP.  */
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1, e2;
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	      if (tree_fits_uhwi_p (idx))
		bqp[i].addend
		  = build_int_cst (integer_type_node,
				   tree_to_uhwi (idx) * limb_prec
				   + (ifn == IFN_FFS));
	      else
		{
		  /* Inside the loop the offset is accumulated at
		     runtime.  */
		  bqp[i].addend = in;
		  if (i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in,
					   build_int_cst (integer_type_node,
							  limb_prec));
		  insert_before (g);
		  m_data[m_data_cnt] = res;
		}
	      break;
	    case IFN_PARITY:
	      /* XOR all limbs together; call parity on the result once
		 at the end.  */
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
		  insert_before (g);
		}
	      else
		res = rhs1;
	      m_data[m_data_cnt] = res;
	      break;
	    case IFN_POPCOUNT:
	      /* Result is the sum of per-limb popcounts.  */
	      g = gimple_build_call (fndecl, 1, rhs1);
	      tem = make_ssa_name (integer_type_node);
	      gimple_call_set_lhs (g, tem);
	      insert_before (g);
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in, tem);
		  insert_before (g);
		}
	      else
		res = tem;
	      m_data[m_data_cnt] = res;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i <= 1)
	    {
	      if (i == 0)
		{
		  /* Second limb of the pair inside the loop body.  */
		  idx = make_ssa_name (sizetype);
		  g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
					   size_one_node);
		  insert_before (g);
		}
	      else
		{
		  /* Advance by two limbs and close the loop.  */
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
					   size_int (2));
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  if (ifn == IFN_CTZ || ifn == IFN_FFS)
		    m_gsi = gsi_after_labels (edge_bb);
		  else
		    m_gsi = gsi_for_stmt (stmt);
		}
	    }
	}
    }
  else
    {
      /* CLZ/CLRSB walk the limbs downwards from the most significant
	 one.  */
      tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
      int sub_one = 0;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = prec % limb_prec;
	  if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
	    rem = limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = 1 + (rem != 0);
	  if (ifn == IFN_CLRSB)
	    sub_one = 1;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      m_gsi = gsi_end_bb (edge_bb);

      if (ifn == IFN_CLZ)
	bqp = XALLOCAVEC (struct bq_details, cnt);
      else
	{
	  /* CLRSB needs two edges per limb: one for "sign bit of the
	     XOR with the top limb set" and one for "limb differs from
	     the top limb".  */
	  gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
	}

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (cnt - i - 1);
	  else if (i == cnt - 1)
	    idx = create_loop (size_int (end - 1), &idx_next);
	  else
	    idx = size_int (end);

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  if (ifn == IFN_CLZ)
	    {
	      /* Branch to the final block on the first non-zero limb.  */
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	    }
	  else
	    {
	      if (i == 0)
		{
		  /* Most significant limb: FIRST + 1 > 1 (unsigned) is
		     true unless it is 0 or all ones, i.e. unless all
		     its bits are redundant sign bits.  */
		  first = rhs1;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, rhs1,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      else
		{
		  /* (RHS1 ^ FIRST) < 0 (signed) means the limb's top
		     bit differs from the sign; the run of redundant
		     sign bits ends inside this limb.  */
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   BIT_XOR_EXPR, rhs1, first);
		  insert_before (g);
		  tree stype = signed_type_for (m_limb_type);
		  g = gimple_build_cond (LT_EXPR,
					 add_cast (stype,
						   gimple_assign_lhs (g)),
					 build_zero_cst (stype),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge e1 = split_block (gsi_bb (m_gsi), g);
		  e1->flags = EDGE_FALSE_VALUE;
		  edge e2 = make_edge (e1->src, gimple_bb (stmt),
				       EDGE_TRUE_VALUE);
		  e1->probability = profile_probability::unlikely ();
		  e2->probability = e1->probability.invert ();
		  if (i == 1)
		    set_immediate_dominator (CDI_DOMINATORS, e2->dest,
					     e2->src);
		  m_gsi = gsi_after_labels (e1->dest);
		  bqp[2 * i].e = e2;
		  g = gimple_build_cond (NE_EXPR, rhs1, first,
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[2 * i + 1].e = e2;
	      bqp[i].val = rhs1;
	    }
	  if (tree_fits_uhwi_p (idx))
	    bqp[i].addend
	      = build_int_cst (integer_type_node,
			       (int) prec
			       - (((int) tree_to_uhwi (idx) + 1)
				  * limb_prec) - sub_one);
	  else
	    {
	      tree in, out;
	      in = build_int_cst (integer_type_node, rem - sub_one);
	      m_first = true;
	      in = prepare_data_in_out (in, idx, &out);
	      out = m_data[m_data_cnt + 1];
	      bqp[i].addend = in;
	      g = gimple_build_assign (out, PLUS_EXPR, in,
				       build_int_cst (integer_type_node,
						      limb_prec));
	      insert_before (g);
	      m_data[m_data_cnt] = out;
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i == cnt - 1)
	    {
	      /* Close the downward loop over the lower limbs.  */
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_int (-1));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge true_edge, false_edge;
	      extract_true_false_edges_from_block (gsi_bb (m_gsi),
						   &true_edge, &false_edge);
	      m_gsi = gsi_after_labels (false_edge->dest);
	    }
	}
    }
  switch (ifn)
    {
    case IFN_CLZ:
    case IFN_CTZ:
    case IFN_FFS:
      /* Merge the per-limb exits: PHI1 is the decisive limb's value,
	 PHI2 the accumulated bit offset.  */
      gphi *phi1, *phi2, *phi3;
      basic_block bb;
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, gimple_bb (stmt)));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      gimple_bb (stmt));
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
	}
      if (arg1 == NULL_TREE)
	{
	  /* Without ARG1 the value on zero input is undefined.  */
	  g = gimple_build_builtin_unreachable (m_loc);
	  insert_before (g);
	}
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      if (arg1 == NULL_TREE)
	g = gimple_build_assign (lhs, PLUS_EXPR,
				 gimple_phi_result (phi2),
				 gimple_call_lhs (g));
      else
	{
	  /* With ARG1 present, the all-zeros path selects ARG1 through
	     PHI3 in a new block after the builtin call.  */
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_phi_result (phi2),
				   gimple_call_lhs (g));
	  insert_before (g);
	  edge e1 = split_block (gimple_bb (stmt), g);
	  edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
	  e2->probability = profile_probability::always ();
	  set_immediate_dominator (CDI_DOMINATORS, e1->dest,
				   get_immediate_dominator (CDI_DOMINATORS,
							    e1->src));
	  phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
	  add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
	  add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
	  m_gsi = gsi_for_stmt (stmt);
	  g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	}
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_CLRSB:
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, edge_bb));
      edge e;
      e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
      set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
			       get_immediate_dominator (CDI_DOMINATORS,
							edge_bb));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      edge_bb);
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      edge_bb);
      phi3 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
		       UNKNOWN_LOCATION);
	  tree a = bqp[i].addend;
	  if (i && kind == bitint_prec_large)
	    a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
	  if (i)
	    add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
	}
      add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
		   UNKNOWN_LOCATION);
      m_gsi = gsi_after_labels (edge_bb);
      g = gimple_build_call (fndecl, 1,
			     add_cast (signed_type_for (m_limb_type),
				       gimple_phi_result (phi1)));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (integer_type_node),
			       PLUS_EXPR, gimple_call_lhs (g),
			       gimple_phi_result (phi2));
      insert_before (g);
      if (kind != bitint_prec_large)
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_assign_lhs (g),
				   integer_one_node);
	  insert_before (g);
	}
      add_phi_arg (phi3, gimple_assign_lhs (g),
		   find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_assign (lhs, gimple_phi_result (phi3));
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_PARITY:
      /* RES is the XOR of all limbs; one builtin call finishes it.  */
      g = gimple_build_call (fndecl, 1, res);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_POPCOUNT:
      /* RES already is the accumulated popcount.  */
      g = gimple_build_assign (lhs, res);
      gsi_replace (&m_gsi, g, true);
      break;
    default:
      gcc_unreachable ();
    }
}
5029 /* Lower a call statement with one or more large/huge _BitInt
5030 arguments or large/huge _BitInt return value. */
5032 void
5033 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5035 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5036 unsigned int nargs = gimple_call_num_args (stmt);
5037 if (gimple_call_internal_p (stmt))
5038 switch (gimple_call_internal_fn (stmt))
5040 case IFN_ADD_OVERFLOW:
5041 case IFN_SUB_OVERFLOW:
5042 case IFN_UBSAN_CHECK_ADD:
5043 case IFN_UBSAN_CHECK_SUB:
5044 lower_addsub_overflow (obj, stmt);
5045 return;
5046 case IFN_MUL_OVERFLOW:
5047 case IFN_UBSAN_CHECK_MUL:
5048 lower_mul_overflow (obj, stmt);
5049 return;
5050 case IFN_CLZ:
5051 case IFN_CTZ:
5052 case IFN_CLRSB:
5053 case IFN_FFS:
5054 case IFN_PARITY:
5055 case IFN_POPCOUNT:
5056 lower_bit_query (stmt);
5057 return;
5058 default:
5059 break;
5061 for (unsigned int i = 0; i < nargs; ++i)
5063 tree arg = gimple_call_arg (stmt, i);
5064 if (TREE_CODE (arg) != SSA_NAME
5065 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5066 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5067 continue;
5068 int p = var_to_partition (m_map, arg);
5069 tree v = m_vars[p];
5070 gcc_assert (v != NULL_TREE);
5071 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5072 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5073 arg = make_ssa_name (TREE_TYPE (arg));
5074 gimple *g = gimple_build_assign (arg, v);
5075 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5076 gimple_call_set_arg (stmt, i, arg);
5077 if (m_preserved == NULL)
5078 m_preserved = BITMAP_ALLOC (NULL);
5079 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5081 tree lhs = gimple_call_lhs (stmt);
5082 if (lhs
5083 && TREE_CODE (lhs) == SSA_NAME
5084 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5085 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5087 int p = var_to_partition (m_map, lhs);
5088 tree v = m_vars[p];
5089 gcc_assert (v != NULL_TREE);
5090 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5091 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5092 gimple_call_set_lhs (stmt, v);
5093 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5095 update_stmt (stmt);
5098 /* Lower __asm STMT which involves large/huge _BitInt values. */
5100 void
5101 bitint_large_huge::lower_asm (gimple *stmt)
5103 gasm *g = as_a <gasm *> (stmt);
5104 unsigned noutputs = gimple_asm_noutputs (g);
5105 unsigned ninputs = gimple_asm_ninputs (g);
5107 for (unsigned i = 0; i < noutputs; ++i)
5109 tree t = gimple_asm_output_op (g, i);
5110 tree s = TREE_VALUE (t);
5111 if (TREE_CODE (s) == SSA_NAME
5112 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5113 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5115 int part = var_to_partition (m_map, s);
5116 gcc_assert (m_vars[part] != NULL_TREE);
5117 TREE_VALUE (t) = m_vars[part];
5120 for (unsigned i = 0; i < ninputs; ++i)
5122 tree t = gimple_asm_input_op (g, i);
5123 tree s = TREE_VALUE (t);
5124 if (TREE_CODE (s) == SSA_NAME
5125 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5126 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5128 int part = var_to_partition (m_map, s);
5129 gcc_assert (m_vars[part] != NULL_TREE);
5130 TREE_VALUE (t) = m_vars[part];
5133 update_stmt (stmt);
5136 /* Lower statement STMT which involves large/huge _BitInt values
5137 into code accessing individual limbs. */
5139 void
5140 bitint_large_huge::lower_stmt (gimple *stmt)
5142 m_first = true;
5143 m_lhs = NULL_TREE;
5144 m_data.truncate (0);
5145 m_data_cnt = 0;
5146 m_gsi = gsi_for_stmt (stmt);
5147 m_after_stmt = NULL;
5148 m_bb = NULL;
5149 m_init_gsi = m_gsi;
5150 gsi_prev (&m_init_gsi);
5151 m_preheader_bb = NULL;
5152 m_upwards_2limb = 0;
5153 m_upwards = false;
5154 m_var_msb = false;
5155 m_cast_conditional = false;
5156 m_bitfld_load = 0;
5157 m_loc = gimple_location (stmt);
5158 if (is_gimple_call (stmt))
5160 lower_call (NULL_TREE, stmt);
5161 return;
5163 if (gimple_code (stmt) == GIMPLE_ASM)
5165 lower_asm (stmt);
5166 return;
5168 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5169 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5170 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5171 bool mergeable_cast_p = false;
5172 bool final_cast_p = false;
5173 if (gimple_assign_cast_p (stmt))
5175 lhs = gimple_assign_lhs (stmt);
5176 tree rhs1 = gimple_assign_rhs1 (stmt);
5177 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5178 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5179 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5180 mergeable_cast_p = true;
5181 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5182 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5183 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5185 final_cast_p = true;
5186 if (TREE_CODE (rhs1) == SSA_NAME
5187 && (m_names == NULL
5188 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5190 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5191 if (is_gimple_assign (g)
5192 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5194 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5195 if (TREE_CODE (rhs2) == SSA_NAME
5196 && (m_names == NULL
5197 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5199 g = SSA_NAME_DEF_STMT (rhs2);
5200 int ovf = optimizable_arith_overflow (g);
5201 if (ovf == 2)
5202 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5203 and IMAGPART_EXPR uses, where the latter is cast to
5204 non-_BitInt, it will be optimized when handling
5205 the REALPART_EXPR. */
5206 return;
5207 if (ovf == 1)
5209 lower_call (NULL_TREE, g);
5210 return;
5217 if (gimple_store_p (stmt))
5219 tree rhs1 = gimple_assign_rhs1 (stmt);
5220 if (TREE_CODE (rhs1) == SSA_NAME
5221 && (m_names == NULL
5222 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5224 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5225 m_loc = gimple_location (g);
5226 lhs = gimple_assign_lhs (stmt);
5227 if (is_gimple_assign (g) && !mergeable_op (g))
5228 switch (gimple_assign_rhs_code (g))
5230 case LSHIFT_EXPR:
5231 case RSHIFT_EXPR:
5232 lower_shift_stmt (lhs, g);
5233 handled:
5234 m_gsi = gsi_for_stmt (stmt);
5235 unlink_stmt_vdef (stmt);
5236 release_ssa_name (gimple_vdef (stmt));
5237 gsi_remove (&m_gsi, true);
5238 return;
5239 case MULT_EXPR:
5240 case TRUNC_DIV_EXPR:
5241 case TRUNC_MOD_EXPR:
5242 lower_muldiv_stmt (lhs, g);
5243 goto handled;
5244 case FIX_TRUNC_EXPR:
5245 lower_float_conv_stmt (lhs, g);
5246 goto handled;
5247 case REALPART_EXPR:
5248 case IMAGPART_EXPR:
5249 lower_cplxpart_stmt (lhs, g);
5250 goto handled;
5251 default:
5252 break;
5254 else if (optimizable_arith_overflow (g) == 3)
5256 lower_call (lhs, g);
5257 goto handled;
5259 m_loc = gimple_location (stmt);
5262 if (mergeable_op (stmt)
5263 || gimple_store_p (stmt)
5264 || gimple_assign_load_p (stmt)
5265 || eq_p
5266 || mergeable_cast_p)
5268 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5269 if (!eq_p)
5270 return;
5272 else if (cmp_code != ERROR_MARK)
5273 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5274 if (cmp_code != ERROR_MARK)
5276 if (gimple_code (stmt) == GIMPLE_COND)
5278 gcond *cstmt = as_a <gcond *> (stmt);
5279 gimple_cond_set_lhs (cstmt, lhs);
5280 gimple_cond_set_rhs (cstmt, boolean_false_node);
5281 gimple_cond_set_code (cstmt, cmp_code);
5282 update_stmt (stmt);
5283 return;
5285 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5287 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5288 boolean_false_node);
5289 gimple_assign_set_rhs1 (stmt, cond);
5290 lhs = gimple_assign_lhs (stmt);
5291 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5292 || (bitint_precision_kind (TREE_TYPE (lhs))
5293 <= bitint_prec_middle));
5294 update_stmt (stmt);
5295 return;
5297 gimple_assign_set_rhs1 (stmt, lhs);
5298 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5299 gimple_assign_set_rhs_code (stmt, cmp_code);
5300 update_stmt (stmt);
5301 return;
5303 if (final_cast_p)
5305 tree lhs_type = TREE_TYPE (lhs);
5306 /* Add support for 3 or more limbs filled in from normal integral
5307 type if this assert fails. If no target chooses limb mode smaller
5308 than half of largest supported normal integral type, this will not
5309 be needed. */
5310 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5311 gimple *g;
5312 if (TREE_CODE (lhs_type) == BITINT_TYPE
5313 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5314 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5315 TYPE_UNSIGNED (lhs_type));
5316 m_data_cnt = 0;
5317 tree rhs1 = gimple_assign_rhs1 (stmt);
5318 tree r1 = handle_operand (rhs1, size_int (0));
5319 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5320 r1 = add_cast (lhs_type, r1);
5321 if (TYPE_PRECISION (lhs_type) > limb_prec)
5323 m_data_cnt = 0;
5324 m_first = false;
5325 tree r2 = handle_operand (rhs1, size_int (1));
5326 r2 = add_cast (lhs_type, r2);
5327 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5328 build_int_cst (unsigned_type_node,
5329 limb_prec));
5330 insert_before (g);
5331 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5332 gimple_assign_lhs (g));
5333 insert_before (g);
5334 r1 = gimple_assign_lhs (g);
5336 if (lhs_type != TREE_TYPE (lhs))
5337 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5338 else
5339 g = gimple_build_assign (lhs, r1);
5340 gsi_replace (&m_gsi, g, true);
5341 return;
5343 if (is_gimple_assign (stmt))
5344 switch (gimple_assign_rhs_code (stmt))
5346 case LSHIFT_EXPR:
5347 case RSHIFT_EXPR:
5348 lower_shift_stmt (NULL_TREE, stmt);
5349 return;
5350 case MULT_EXPR:
5351 case TRUNC_DIV_EXPR:
5352 case TRUNC_MOD_EXPR:
5353 lower_muldiv_stmt (NULL_TREE, stmt);
5354 return;
5355 case FIX_TRUNC_EXPR:
5356 case FLOAT_EXPR:
5357 lower_float_conv_stmt (NULL_TREE, stmt);
5358 return;
5359 case REALPART_EXPR:
5360 case IMAGPART_EXPR:
5361 lower_cplxpart_stmt (NULL_TREE, stmt);
5362 return;
5363 case COMPLEX_EXPR:
5364 lower_complexexpr_stmt (stmt);
5365 return;
5366 default:
5367 break;
5369 gcc_unreachable ();
5372 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5373 the desired memory state. */
5375 void *
5376 vuse_eq (ao_ref *, tree vuse1, void *data)
5378 tree vuse2 = (tree) data;
5379 if (vuse1 == vuse2)
5380 return data;
5382 return NULL;
5385 /* Return true if STMT uses a library function and needs to take
5386 address of its inputs. We need to avoid bit-fields in those
5387 cases. */
5389 bool
5390 stmt_needs_operand_addr (gimple *stmt)
5392 if (is_gimple_assign (stmt))
5393 switch (gimple_assign_rhs_code (stmt))
5395 case MULT_EXPR:
5396 case TRUNC_DIV_EXPR:
5397 case TRUNC_MOD_EXPR:
5398 case FLOAT_EXPR:
5399 return true;
5400 default:
5401 break;
5403 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5404 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5405 return true;
5406 return false;
5409 /* Dominator walker used to discover which large/huge _BitInt
5410 loads could be sunk into all their uses. */
5412 class bitint_dom_walker : public dom_walker
5414 public:
5415 bitint_dom_walker (bitmap names, bitmap loads)
5416 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5418 edge before_dom_children (basic_block) final override;
5420 private:
5421 bitmap m_names, m_loads;
5424 edge
5425 bitint_dom_walker::before_dom_children (basic_block bb)
5427 gphi *phi = get_virtual_phi (bb);
5428 tree vop;
5429 if (phi)
5430 vop = gimple_phi_result (phi);
5431 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5432 vop = NULL_TREE;
5433 else
5434 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5436 auto_vec<tree, 16> worklist;
5437 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5438 !gsi_end_p (gsi); gsi_next (&gsi))
5440 gimple *stmt = gsi_stmt (gsi);
5441 if (is_gimple_debug (stmt))
5442 continue;
5444 if (!vop && gimple_vuse (stmt))
5445 vop = gimple_vuse (stmt);
5447 tree cvop = vop;
5448 if (gimple_vdef (stmt))
5449 vop = gimple_vdef (stmt);
5451 tree lhs = gimple_get_lhs (stmt);
5452 if (lhs
5453 && TREE_CODE (lhs) == SSA_NAME
5454 && TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5455 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5456 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5457 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5458 it means it will be handled in a loop or straight line code
5459 at the location of its (ultimate) immediate use, so for
5460 vop checking purposes check these only at the ultimate
5461 immediate use. */
5462 continue;
5464 ssa_op_iter oi;
5465 use_operand_p use_p;
5466 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5468 tree s = USE_FROM_PTR (use_p);
5469 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5470 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5471 worklist.safe_push (s);
5474 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5475 while (worklist.length () > 0)
5477 tree s = worklist.pop ();
5479 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5481 gimple *g = SSA_NAME_DEF_STMT (s);
5482 needs_operand_addr |= stmt_needs_operand_addr (g);
5483 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5485 tree s2 = USE_FROM_PTR (use_p);
5486 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5487 && (bitint_precision_kind (TREE_TYPE (s2))
5488 >= bitint_prec_large))
5489 worklist.safe_push (s2);
5491 continue;
5493 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5494 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5496 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5497 if (TREE_CODE (rhs) == SSA_NAME
5498 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5499 s = rhs;
5500 else
5501 continue;
5503 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5504 continue;
5506 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5507 if (needs_operand_addr
5508 && TREE_CODE (rhs1) == COMPONENT_REF
5509 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5511 tree fld = TREE_OPERAND (rhs1, 1);
5512 /* For little-endian, we can allow as inputs bit-fields
5513 which start at a limb boundary. */
5514 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5515 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5516 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5517 % limb_prec) == 0)
5519 else
5521 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5522 continue;
5526 ao_ref ref;
5527 ao_ref_init (&ref, rhs1);
5528 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5529 unsigned limit = 64;
5530 tree vuse = cvop;
5531 if (vop != cvop
5532 && is_gimple_assign (stmt)
5533 && gimple_store_p (stmt)
5534 && !operand_equal_p (lhs,
5535 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5537 vuse = vop;
5538 if (vuse != lvop
5539 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5540 NULL, NULL, limit, lvop) == NULL)
5541 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5545 bb->aux = (void *) vop;
5546 return NULL;
5551 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5552 build_ssa_conflict_graph.
5553 The differences are:
5554 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5555 2) for large/huge _BitInt multiplication/division/modulo process def
5556 only after processing uses rather than before to make uses conflict
5557 with the definition
5558 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5559 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5560 the final statement. */
5562 void
5563 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5564 ssa_conflicts *graph, bitmap names,
5565 void (*def) (live_track *, tree,
5566 ssa_conflicts *),
5567 void (*use) (live_track *, tree))
5569 bool muldiv_p = false;
5570 tree lhs = NULL_TREE;
5571 if (is_gimple_assign (stmt))
5573 lhs = gimple_assign_lhs (stmt);
5574 if (TREE_CODE (lhs) == SSA_NAME
5575 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5576 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5578 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5579 return;
5580 switch (gimple_assign_rhs_code (stmt))
5582 case MULT_EXPR:
5583 case TRUNC_DIV_EXPR:
5584 case TRUNC_MOD_EXPR:
5585 muldiv_p = true;
5586 default:
5587 break;
5592 ssa_op_iter iter;
5593 tree var;
5594 if (!muldiv_p)
5596 /* For stmts with more than one SSA_NAME definition pretend all the
5597 SSA_NAME outputs but the first one are live at this point, so
5598 that conflicts are added in between all those even when they are
5599 actually not really live after the asm, because expansion might
5600 copy those into pseudos after the asm and if multiple outputs
5601 share the same partition, it might overwrite those that should
5602 be live. E.g.
5603 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5604 return a;
5605 See PR70593. */
5606 bool first = true;
5607 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5608 if (first)
5609 first = false;
5610 else
5611 use (live, var);
5613 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5614 def (live, var, graph);
5617 auto_vec<tree, 16> worklist;
5618 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5619 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5620 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5622 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5623 use (live, var);
5624 else
5625 worklist.safe_push (var);
5628 while (worklist.length () > 0)
5630 tree s = worklist.pop ();
5631 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5632 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5633 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5635 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5636 use (live, var);
5637 else
5638 worklist.safe_push (var);
5642 if (muldiv_p)
5643 def (live, lhs, graph);
5646 /* Entry point for _BitInt(N) operation lowering during optimization. */
5648 static unsigned int
5649 gimple_lower_bitint (void)
5651 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5652 limb_prec = 0;
5654 unsigned int i;
5655 for (i = 0; i < num_ssa_names; ++i)
5657 tree s = ssa_name (i);
5658 if (s == NULL)
5659 continue;
5660 tree type = TREE_TYPE (s);
5661 if (TREE_CODE (type) == COMPLEX_TYPE)
5662 type = TREE_TYPE (type);
5663 if (TREE_CODE (type) == BITINT_TYPE
5664 && bitint_precision_kind (type) != bitint_prec_small)
5665 break;
5666 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5667 into memory. Such functions could have no large/huge SSA_NAMEs. */
5668 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5670 gimple *g = SSA_NAME_DEF_STMT (s);
5671 if (is_gimple_assign (g) && gimple_store_p (g))
5673 tree t = gimple_assign_rhs1 (g);
5674 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5675 && (bitint_precision_kind (TREE_TYPE (t))
5676 >= bitint_prec_large))
5677 break;
5680 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5681 to floating point types need to be rewritten. */
5682 else if (SCALAR_FLOAT_TYPE_P (type))
5684 gimple *g = SSA_NAME_DEF_STMT (s);
5685 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5687 tree t = gimple_assign_rhs1 (g);
5688 if (TREE_CODE (t) == INTEGER_CST
5689 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5690 && (bitint_precision_kind (TREE_TYPE (t))
5691 != bitint_prec_small))
5692 break;
5696 if (i == num_ssa_names)
5697 return 0;
5699 basic_block bb;
5700 auto_vec<gimple *, 4> switch_statements;
5701 FOR_EACH_BB_FN (bb, cfun)
5703 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5705 tree idx = gimple_switch_index (swtch);
5706 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5707 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5708 continue;
5710 if (optimize)
5711 group_case_labels_stmt (swtch);
5712 switch_statements.safe_push (swtch);
5716 if (!switch_statements.is_empty ())
5718 bool expanded = false;
5719 gimple *stmt;
5720 unsigned int j;
5721 i = 0;
5722 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5724 gswitch *swtch = as_a<gswitch *> (stmt);
5725 tree_switch_conversion::switch_decision_tree dt (swtch);
5726 expanded |= dt.analyze_switch_statement ();
5729 if (expanded)
5731 free_dominance_info (CDI_DOMINATORS);
5732 free_dominance_info (CDI_POST_DOMINATORS);
5733 mark_virtual_operands_for_renaming (cfun);
5734 cleanup_tree_cfg (TODO_update_ssa);
5738 struct bitint_large_huge large_huge;
5739 bool has_large_huge_parm_result = false;
5740 bool has_large_huge = false;
5741 unsigned int ret = 0, first_large_huge = ~0U;
5742 bool edge_insertions = false;
5743 for (; i < num_ssa_names; ++i)
5745 tree s = ssa_name (i);
5746 if (s == NULL)
5747 continue;
5748 tree type = TREE_TYPE (s);
5749 if (TREE_CODE (type) == COMPLEX_TYPE)
5750 type = TREE_TYPE (type);
5751 if (TREE_CODE (type) == BITINT_TYPE
5752 && bitint_precision_kind (type) >= bitint_prec_large)
5754 if (first_large_huge == ~0U)
5755 first_large_huge = i;
5756 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5757 gimple_stmt_iterator gsi;
5758 tree_code rhs_code;
5759 /* Unoptimize certain constructs to simpler alternatives to
5760 avoid having to lower all of them. */
5761 if (is_gimple_assign (stmt))
5762 switch (rhs_code = gimple_assign_rhs_code (stmt))
5764 default:
5765 break;
5766 case LROTATE_EXPR:
5767 case RROTATE_EXPR:
5769 first_large_huge = 0;
5770 location_t loc = gimple_location (stmt);
5771 gsi = gsi_for_stmt (stmt);
5772 tree rhs1 = gimple_assign_rhs1 (stmt);
5773 tree type = TREE_TYPE (rhs1);
5774 tree n = gimple_assign_rhs2 (stmt), m;
5775 tree p = build_int_cst (TREE_TYPE (n),
5776 TYPE_PRECISION (type));
5777 if (TREE_CODE (n) == INTEGER_CST)
5778 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5779 else
5781 m = make_ssa_name (TREE_TYPE (n));
5782 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5783 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5784 gimple_set_location (g, loc);
5786 if (!TYPE_UNSIGNED (type))
5788 tree utype = build_bitint_type (TYPE_PRECISION (type),
5790 if (TREE_CODE (rhs1) == INTEGER_CST)
5791 rhs1 = fold_convert (utype, rhs1);
5792 else
5794 tree t = make_ssa_name (type);
5795 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5796 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5797 gimple_set_location (g, loc);
5800 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5801 rhs_code == LROTATE_EXPR
5802 ? LSHIFT_EXPR : RSHIFT_EXPR,
5803 rhs1, n);
5804 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5805 gimple_set_location (g, loc);
5806 tree op1 = gimple_assign_lhs (g);
5807 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5808 rhs_code == LROTATE_EXPR
5809 ? RSHIFT_EXPR : LSHIFT_EXPR,
5810 rhs1, m);
5811 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5812 gimple_set_location (g, loc);
5813 tree op2 = gimple_assign_lhs (g);
5814 tree lhs = gimple_assign_lhs (stmt);
5815 if (!TYPE_UNSIGNED (type))
5817 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5818 BIT_IOR_EXPR, op1, op2);
5819 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5820 gimple_set_location (g, loc);
5821 g = gimple_build_assign (lhs, NOP_EXPR,
5822 gimple_assign_lhs (g));
5824 else
5825 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5826 gsi_replace (&gsi, g, true);
5827 gimple_set_location (g, loc);
5829 break;
5830 case ABS_EXPR:
5831 case ABSU_EXPR:
5832 case MIN_EXPR:
5833 case MAX_EXPR:
5834 case COND_EXPR:
5835 first_large_huge = 0;
5836 gsi = gsi_for_stmt (stmt);
5837 tree lhs = gimple_assign_lhs (stmt);
5838 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5839 location_t loc = gimple_location (stmt);
5840 if (rhs_code == ABS_EXPR)
5841 g = gimple_build_cond (LT_EXPR, rhs1,
5842 build_zero_cst (TREE_TYPE (rhs1)),
5843 NULL_TREE, NULL_TREE);
5844 else if (rhs_code == ABSU_EXPR)
5846 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5847 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5848 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5849 gimple_set_location (g, loc);
5850 g = gimple_build_cond (LT_EXPR, rhs1,
5851 build_zero_cst (TREE_TYPE (rhs1)),
5852 NULL_TREE, NULL_TREE);
5853 rhs1 = rhs2;
5855 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5857 rhs2 = gimple_assign_rhs2 (stmt);
5858 if (TREE_CODE (rhs1) == INTEGER_CST)
5859 std::swap (rhs1, rhs2);
5860 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5861 NULL_TREE, NULL_TREE);
5862 if (rhs_code == MAX_EXPR)
5863 std::swap (rhs1, rhs2);
5865 else
5867 g = gimple_build_cond (NE_EXPR, rhs1,
5868 build_zero_cst (TREE_TYPE (rhs1)),
5869 NULL_TREE, NULL_TREE);
5870 rhs1 = gimple_assign_rhs2 (stmt);
5871 rhs2 = gimple_assign_rhs3 (stmt);
5873 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5874 gimple_set_location (g, loc);
5875 edge e1 = split_block (gsi_bb (gsi), g);
5876 edge e2 = split_block (e1->dest, (gimple *) NULL);
5877 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5878 e3->probability = profile_probability::even ();
5879 e1->flags = EDGE_TRUE_VALUE;
5880 e1->probability = e3->probability.invert ();
5881 if (dom_info_available_p (CDI_DOMINATORS))
5882 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5883 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5885 gsi = gsi_after_labels (e1->dest);
5886 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5887 NEGATE_EXPR, rhs1);
5888 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5889 gimple_set_location (g, loc);
5890 rhs2 = gimple_assign_lhs (g);
5891 std::swap (rhs1, rhs2);
5893 gsi = gsi_for_stmt (stmt);
5894 gsi_remove (&gsi, true);
5895 gphi *phi = create_phi_node (lhs, e2->dest);
5896 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5897 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5898 break;
5901 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5902 into memory. Such functions could have no large/huge SSA_NAMEs. */
5903 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5905 gimple *g = SSA_NAME_DEF_STMT (s);
5906 if (is_gimple_assign (g) && gimple_store_p (g))
5908 tree t = gimple_assign_rhs1 (g);
5909 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5910 && (bitint_precision_kind (TREE_TYPE (t))
5911 >= bitint_prec_large))
5912 has_large_huge = true;
5915 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5916 to floating point types need to be rewritten. */
5917 else if (SCALAR_FLOAT_TYPE_P (type))
5919 gimple *g = SSA_NAME_DEF_STMT (s);
5920 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5922 tree t = gimple_assign_rhs1 (g);
5923 if (TREE_CODE (t) == INTEGER_CST
5924 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5925 && (bitint_precision_kind (TREE_TYPE (t))
5926 >= bitint_prec_large))
5927 has_large_huge = true;
5931 for (i = first_large_huge; i < num_ssa_names; ++i)
5933 tree s = ssa_name (i);
5934 if (s == NULL)
5935 continue;
5936 tree type = TREE_TYPE (s);
5937 if (TREE_CODE (type) == COMPLEX_TYPE)
5938 type = TREE_TYPE (type);
5939 if (TREE_CODE (type) == BITINT_TYPE
5940 && bitint_precision_kind (type) >= bitint_prec_large)
5942 use_operand_p use_p;
5943 gimple *use_stmt;
5944 has_large_huge = true;
5945 if (optimize
5946 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
5947 continue;
5948 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5949 the same bb and could be handled in the same loop with the
5950 immediate use. */
5951 if (optimize
5952 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5953 && single_imm_use (s, &use_p, &use_stmt)
5954 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
5956 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
5958 if (mergeable_op (use_stmt))
5959 continue;
5960 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
5961 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
5962 continue;
5963 if (gimple_assign_cast_p (use_stmt))
5965 tree lhs = gimple_assign_lhs (use_stmt);
5966 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5967 continue;
5969 else if (gimple_store_p (use_stmt)
5970 && is_gimple_assign (use_stmt)
5971 && !gimple_has_volatile_ops (use_stmt)
5972 && !stmt_ends_bb_p (use_stmt))
5973 continue;
5975 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5977 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5978 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5979 && ((is_gimple_assign (use_stmt)
5980 && (gimple_assign_rhs_code (use_stmt)
5981 != COMPLEX_EXPR))
5982 || gimple_code (use_stmt) == GIMPLE_COND)
5983 && (!gimple_store_p (use_stmt)
5984 || (is_gimple_assign (use_stmt)
5985 && !gimple_has_volatile_ops (use_stmt)
5986 && !stmt_ends_bb_p (use_stmt)))
5987 && (TREE_CODE (rhs1) != SSA_NAME
5988 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5990 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
5991 || (bitint_precision_kind (TREE_TYPE (rhs1))
5992 < bitint_prec_large))
5993 continue;
5994 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
5995 >= TYPE_PRECISION (TREE_TYPE (s)))
5996 && mergeable_op (use_stmt))
5997 continue;
5998 /* Prevent merging a widening non-mergeable cast
5999 on result of some narrower mergeable op
6000 together with later mergeable operations. E.g.
6001 result of _BitInt(223) addition shouldn't be
6002 sign-extended to _BitInt(513) and have another
6003 _BitInt(513) added to it, as handle_plus_minus
6004 with its PHI node handling inside of handle_cast
6005 will not work correctly. An exception is if
6006 use_stmt is a store, this is handled directly
6007 in lower_mergeable_stmt. */
6008 if (TREE_CODE (rhs1) != SSA_NAME
6009 || !has_single_use (rhs1)
6010 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6011 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6012 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6013 || gimple_store_p (use_stmt))
6014 continue;
6015 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6016 < TYPE_PRECISION (TREE_TYPE (s)))
6017 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6019 /* Another exception is if the widening cast is
6020 from mergeable same precision cast from something
6021 not mergeable. */
6022 tree rhs2
6023 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6024 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6025 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6026 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6028 if (TREE_CODE (rhs2) != SSA_NAME
6029 || !has_single_use (rhs2)
6030 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6031 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6032 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6033 continue;
6038 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6039 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6041 case IMAGPART_EXPR:
6043 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6044 rhs1 = TREE_OPERAND (rhs1, 0);
6045 if (TREE_CODE (rhs1) == SSA_NAME)
6047 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6048 if (optimizable_arith_overflow (g))
6049 continue;
6052 /* FALLTHRU */
6053 case LSHIFT_EXPR:
6054 case RSHIFT_EXPR:
6055 case MULT_EXPR:
6056 case TRUNC_DIV_EXPR:
6057 case TRUNC_MOD_EXPR:
6058 case FIX_TRUNC_EXPR:
6059 case REALPART_EXPR:
6060 if (gimple_store_p (use_stmt)
6061 && is_gimple_assign (use_stmt)
6062 && !gimple_has_volatile_ops (use_stmt)
6063 && !stmt_ends_bb_p (use_stmt))
6065 tree lhs = gimple_assign_lhs (use_stmt);
6066 /* As multiply/division passes address of the lhs
6067 to library function and that assumes it can extend
6068 it to whole number of limbs, avoid merging those
6069 with bit-field stores. Don't allow it for
6070 shifts etc. either, so that the bit-field store
6071 handling doesn't have to be done everywhere. */
6072 if (TREE_CODE (lhs) == COMPONENT_REF
6073 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6074 break;
6075 continue;
6077 break;
6078 default:
6079 break;
6083 /* Also ignore uninitialized uses. */
6084 if (SSA_NAME_IS_DEFAULT_DEF (s)
6085 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6086 continue;
6088 if (!large_huge.m_names)
6089 large_huge.m_names = BITMAP_ALLOC (NULL);
6090 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6091 if (has_single_use (s))
6093 if (!large_huge.m_single_use_names)
6094 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6095 bitmap_set_bit (large_huge.m_single_use_names,
6096 SSA_NAME_VERSION (s));
6098 if (SSA_NAME_VAR (s)
6099 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6100 && SSA_NAME_IS_DEFAULT_DEF (s))
6101 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6102 has_large_huge_parm_result = true;
6103 if (optimize
6104 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6105 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6106 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6107 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6109 use_operand_p use_p;
6110 imm_use_iterator iter;
6111 bool optimizable_load = true;
6112 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6114 gimple *use_stmt = USE_STMT (use_p);
6115 if (is_gimple_debug (use_stmt))
6116 continue;
6117 if (gimple_code (use_stmt) == GIMPLE_PHI
6118 || is_gimple_call (use_stmt))
6120 optimizable_load = false;
6121 break;
6125 ssa_op_iter oi;
6126 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6127 oi, SSA_OP_USE)
6129 tree s2 = USE_FROM_PTR (use_p);
6130 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6132 optimizable_load = false;
6133 break;
6137 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6139 if (!large_huge.m_loads)
6140 large_huge.m_loads = BITMAP_ALLOC (NULL);
6141 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6145 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6146 into memory. Such functions could have no large/huge SSA_NAMEs. */
6147 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6149 gimple *g = SSA_NAME_DEF_STMT (s);
6150 if (is_gimple_assign (g) && gimple_store_p (g))
6152 tree t = gimple_assign_rhs1 (g);
6153 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6154 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6155 has_large_huge = true;
6160 if (large_huge.m_names || has_large_huge)
6162 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6163 calculate_dominance_info (CDI_DOMINATORS);
6164 if (optimize)
6165 enable_ranger (cfun);
6166 if (large_huge.m_loads)
6168 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6169 entry->aux = NULL;
6170 bitint_dom_walker (large_huge.m_names,
6171 large_huge.m_loads).walk (entry);
6172 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6173 clear_aux_for_blocks ();
6174 BITMAP_FREE (large_huge.m_loads);
6176 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6177 large_huge.m_limb_size
6178 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6180 if (large_huge.m_names)
6182 large_huge.m_map
6183 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6184 coalesce_ssa_name (large_huge.m_map);
6185 partition_view_normal (large_huge.m_map);
6186 if (dump_file && (dump_flags & TDF_DETAILS))
6188 fprintf (dump_file, "After Coalescing:\n");
6189 dump_var_map (dump_file, large_huge.m_map);
6191 large_huge.m_vars
6192 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6193 bitmap_iterator bi;
6194 if (has_large_huge_parm_result)
6195 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6197 tree s = ssa_name (i);
6198 if (SSA_NAME_VAR (s)
6199 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6200 && SSA_NAME_IS_DEFAULT_DEF (s))
6201 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6203 int p = var_to_partition (large_huge.m_map, s);
6204 if (large_huge.m_vars[p] == NULL_TREE)
6206 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6207 mark_addressable (SSA_NAME_VAR (s));
6211 tree atype = NULL_TREE;
6212 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6214 tree s = ssa_name (i);
6215 int p = var_to_partition (large_huge.m_map, s);
6216 if (large_huge.m_vars[p] != NULL_TREE)
6217 continue;
6218 if (atype == NULL_TREE
6219 || !tree_int_cst_equal (TYPE_SIZE (atype),
6220 TYPE_SIZE (TREE_TYPE (s))))
6222 unsigned HOST_WIDE_INT nelts
6223 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6224 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6226 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6227 mark_addressable (large_huge.m_vars[p]);
6231 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6233 gimple_stmt_iterator prev;
6234 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6235 gsi = prev)
6237 prev = gsi;
6238 gsi_prev (&prev);
6239 ssa_op_iter iter;
6240 gimple *stmt = gsi_stmt (gsi);
6241 if (is_gimple_debug (stmt))
6242 continue;
6243 bitint_prec_kind kind = bitint_prec_small;
6244 tree t;
6245 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6246 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6248 bitint_prec_kind this_kind
6249 = bitint_precision_kind (TREE_TYPE (t));
6250 if (this_kind > kind)
6251 kind = this_kind;
6253 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6255 t = gimple_assign_rhs1 (stmt);
6256 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6258 bitint_prec_kind this_kind
6259 = bitint_precision_kind (TREE_TYPE (t));
6260 if (this_kind > kind)
6261 kind = this_kind;
6264 if (is_gimple_assign (stmt)
6265 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6267 t = gimple_assign_rhs1 (stmt);
6268 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6269 && TREE_CODE (t) == INTEGER_CST)
6271 bitint_prec_kind this_kind
6272 = bitint_precision_kind (TREE_TYPE (t));
6273 if (this_kind > kind)
6274 kind = this_kind;
6277 if (is_gimple_call (stmt))
6279 t = gimple_call_lhs (stmt);
6280 if (t
6281 && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
6282 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6284 bitint_prec_kind this_kind
6285 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6286 if (this_kind > kind)
6287 kind = this_kind;
6290 if (kind == bitint_prec_small)
6291 continue;
6292 switch (gimple_code (stmt))
6294 case GIMPLE_CALL:
6295 /* For now. We'll need to handle some internal functions and
6296 perhaps some builtins. */
6297 if (kind == bitint_prec_middle)
6298 continue;
6299 break;
6300 case GIMPLE_ASM:
6301 if (kind == bitint_prec_middle)
6302 continue;
6303 break;
6304 case GIMPLE_RETURN:
6305 continue;
6306 case GIMPLE_ASSIGN:
6307 if (gimple_clobber_p (stmt))
6308 continue;
6309 if (kind >= bitint_prec_large)
6310 break;
6311 if (gimple_assign_single_p (stmt))
6312 /* No need to lower copies, loads or stores. */
6313 continue;
6314 if (gimple_assign_cast_p (stmt))
6316 tree lhs = gimple_assign_lhs (stmt);
6317 tree rhs = gimple_assign_rhs1 (stmt);
6318 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6319 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6320 && (TYPE_PRECISION (TREE_TYPE (lhs))
6321 == TYPE_PRECISION (TREE_TYPE (rhs))))
6322 /* No need to lower casts to same precision. */
6323 continue;
6325 break;
6326 default:
6327 break;
6330 if (kind == bitint_prec_middle)
6332 tree type = NULL_TREE;
6333 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6334 with the same precision and back. */
6335 unsigned int nops = gimple_num_ops (stmt);
6336 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6337 i < nops; ++i)
6338 if (tree op = gimple_op (stmt, i))
6340 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6341 if (nop != op)
6342 gimple_set_op (stmt, i, nop);
6343 else if (COMPARISON_CLASS_P (op))
6345 TREE_OPERAND (op, 0)
6346 = maybe_cast_middle_bitint (&gsi,
6347 TREE_OPERAND (op, 0),
6348 type);
6349 TREE_OPERAND (op, 1)
6350 = maybe_cast_middle_bitint (&gsi,
6351 TREE_OPERAND (op, 1),
6352 type);
6354 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6356 CASE_LOW (op)
6357 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6358 type);
6359 CASE_HIGH (op)
6360 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6361 type);
6364 if (tree lhs = gimple_get_lhs (stmt))
6365 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6366 && (bitint_precision_kind (TREE_TYPE (lhs))
6367 == bitint_prec_middle))
6369 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6370 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6371 type = build_nonstandard_integer_type (prec, uns);
6372 tree lhs2 = make_ssa_name (type);
6373 gimple_set_lhs (stmt, lhs2);
6374 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6375 if (stmt_ends_bb_p (stmt))
6377 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6378 gsi_insert_on_edge_immediate (e, g);
6380 else
6381 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6383 update_stmt (stmt);
6384 continue;
6387 if (tree lhs = gimple_get_lhs (stmt))
6388 if (TREE_CODE (lhs) == SSA_NAME)
6390 tree type = TREE_TYPE (lhs);
6391 if (TREE_CODE (type) == COMPLEX_TYPE)
6392 type = TREE_TYPE (type);
6393 if (TREE_CODE (type) == BITINT_TYPE
6394 && bitint_precision_kind (type) >= bitint_prec_large
6395 && (large_huge.m_names == NULL
6396 || !bitmap_bit_p (large_huge.m_names,
6397 SSA_NAME_VERSION (lhs))))
6398 continue;
6401 large_huge.lower_stmt (stmt);
6404 tree atype = NULL_TREE;
6405 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6406 gsi_next (&gsi))
6408 gphi *phi = gsi.phi ();
6409 tree lhs = gimple_phi_result (phi);
6410 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6411 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6412 continue;
6413 int p1 = var_to_partition (large_huge.m_map, lhs);
6414 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6415 tree v1 = large_huge.m_vars[p1];
6416 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6418 tree arg = gimple_phi_arg_def (phi, i);
6419 edge e = gimple_phi_arg_edge (phi, i);
6420 gimple *g;
6421 switch (TREE_CODE (arg))
6423 case INTEGER_CST:
6424 if (integer_zerop (arg) && VAR_P (v1))
6426 tree zero = build_zero_cst (TREE_TYPE (v1));
6427 g = gimple_build_assign (v1, zero);
6428 gsi_insert_on_edge (e, g);
6429 edge_insertions = true;
6430 break;
6432 int ext;
6433 unsigned int min_prec, prec, rem;
6434 tree c;
6435 prec = TYPE_PRECISION (TREE_TYPE (arg));
6436 rem = prec % (2 * limb_prec);
6437 min_prec = bitint_min_cst_precision (arg, ext);
6438 if (min_prec > prec - rem - 2 * limb_prec
6439 && min_prec > (unsigned) limb_prec)
6440 /* Constant which has enough significant bits that it
6441 isn't worth trying to save .rodata space by extending
6442 from smaller number. */
6443 min_prec = prec;
6444 else
6445 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6446 if (min_prec == 0)
6447 c = NULL_TREE;
6448 else if (min_prec == prec)
6449 c = tree_output_constant_def (arg);
6450 else if (min_prec == (unsigned) limb_prec)
6451 c = fold_convert (large_huge.m_limb_type, arg);
6452 else
6454 tree ctype = build_bitint_type (min_prec, 1);
6455 c = tree_output_constant_def (fold_convert (ctype, arg));
6457 if (c)
6459 if (VAR_P (v1) && min_prec == prec)
6461 tree v2 = build1 (VIEW_CONVERT_EXPR,
6462 TREE_TYPE (v1), c);
6463 g = gimple_build_assign (v1, v2);
6464 gsi_insert_on_edge (e, g);
6465 edge_insertions = true;
6466 break;
6468 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6469 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6470 TREE_TYPE (c), v1),
6472 else
6474 unsigned HOST_WIDE_INT nelts
6475 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6476 / limb_prec;
6477 tree vtype
6478 = build_array_type_nelts (large_huge.m_limb_type,
6479 nelts);
6480 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6481 vtype, v1),
6482 build1 (VIEW_CONVERT_EXPR,
6483 vtype, c));
6485 gsi_insert_on_edge (e, g);
6487 if (ext == 0)
6489 unsigned HOST_WIDE_INT nelts
6490 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6491 - min_prec) / limb_prec;
6492 tree vtype
6493 = build_array_type_nelts (large_huge.m_limb_type,
6494 nelts);
6495 tree ptype = build_pointer_type (TREE_TYPE (v1));
6496 tree off = fold_convert (ptype,
6497 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6498 tree vd = build2 (MEM_REF, vtype,
6499 build_fold_addr_expr (v1), off);
6500 g = gimple_build_assign (vd, build_zero_cst (vtype));
6502 else
6504 tree vd = v1;
6505 if (c)
6507 tree ptype = build_pointer_type (TREE_TYPE (v1));
6508 tree off
6509 = fold_convert (ptype,
6510 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6511 vd = build2 (MEM_REF, large_huge.m_limb_type,
6512 build_fold_addr_expr (v1), off);
6514 vd = build_fold_addr_expr (vd);
6515 unsigned HOST_WIDE_INT nbytes
6516 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6517 if (c)
6518 nbytes
6519 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6520 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6521 g = gimple_build_call (fn, 3, vd,
6522 integer_minus_one_node,
6523 build_int_cst (sizetype,
6524 nbytes));
6526 gsi_insert_on_edge (e, g);
6527 edge_insertions = true;
6528 break;
6529 default:
6530 gcc_unreachable ();
6531 case SSA_NAME:
6532 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6534 if (large_huge.m_names == NULL
6535 || !bitmap_bit_p (large_huge.m_names,
6536 SSA_NAME_VERSION (arg)))
6537 continue;
6539 int p2 = var_to_partition (large_huge.m_map, arg);
6540 if (p1 == p2)
6541 continue;
6542 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6543 tree v2 = large_huge.m_vars[p2];
6544 if (VAR_P (v1) && VAR_P (v2))
6545 g = gimple_build_assign (v1, v2);
6546 else if (VAR_P (v1))
6547 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6548 TREE_TYPE (v1), v2));
6549 else if (VAR_P (v2))
6550 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6551 TREE_TYPE (v2), v1), v2);
6552 else
6554 if (atype == NULL_TREE
6555 || !tree_int_cst_equal (TYPE_SIZE (atype),
6556 TYPE_SIZE (TREE_TYPE (lhs))))
6558 unsigned HOST_WIDE_INT nelts
6559 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6560 / limb_prec;
6561 atype
6562 = build_array_type_nelts (large_huge.m_limb_type,
6563 nelts);
6565 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6566 atype, v1),
6567 build1 (VIEW_CONVERT_EXPR,
6568 atype, v2));
6570 gsi_insert_on_edge (e, g);
6571 edge_insertions = true;
6572 break;
6578 if (large_huge.m_names || has_large_huge)
6580 gimple *nop = NULL;
6581 for (i = 0; i < num_ssa_names; ++i)
6583 tree s = ssa_name (i);
6584 if (s == NULL_TREE)
6585 continue;
6586 tree type = TREE_TYPE (s);
6587 if (TREE_CODE (type) == COMPLEX_TYPE)
6588 type = TREE_TYPE (type);
6589 if (TREE_CODE (type) == BITINT_TYPE
6590 && bitint_precision_kind (type) >= bitint_prec_large)
6592 if (large_huge.m_preserved
6593 && bitmap_bit_p (large_huge.m_preserved,
6594 SSA_NAME_VERSION (s)))
6595 continue;
6596 gimple *g = SSA_NAME_DEF_STMT (s);
6597 if (gimple_code (g) == GIMPLE_NOP)
6599 if (SSA_NAME_VAR (s))
6600 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6601 release_ssa_name (s);
6602 continue;
6604 if (gimple_code (g) != GIMPLE_ASM)
6606 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6607 bool save_vta = flag_var_tracking_assignments;
6608 flag_var_tracking_assignments = false;
6609 gsi_remove (&gsi, true);
6610 flag_var_tracking_assignments = save_vta;
6612 if (nop == NULL)
6613 nop = gimple_build_nop ();
6614 SSA_NAME_DEF_STMT (s) = nop;
6615 release_ssa_name (s);
6618 if (optimize)
6619 disable_ranger (cfun);
6622 if (edge_insertions)
6623 gsi_commit_edge_inserts ();
6625 return ret;
6628 namespace {
6630 const pass_data pass_data_lower_bitint =
6632 GIMPLE_PASS, /* type */
6633 "bitintlower", /* name */
6634 OPTGROUP_NONE, /* optinfo_flags */
6635 TV_NONE, /* tv_id */
6636 PROP_ssa, /* properties_required */
6637 PROP_gimple_lbitint, /* properties_provided */
6638 0, /* properties_destroyed */
6639 0, /* todo_flags_start */
6640 0, /* todo_flags_finish */
6643 class pass_lower_bitint : public gimple_opt_pass
6645 public:
6646 pass_lower_bitint (gcc::context *ctxt)
6647 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6650 /* opt_pass methods: */
6651 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6652 unsigned int execute (function *) final override
6654 return gimple_lower_bitint ();
6657 }; // class pass_lower_bitint
6659 } // anon namespace
6661 gimple_opt_pass *
6662 make_pass_lower_bitint (gcc::context *ctxt)
6664 return new pass_lower_bitint (ctxt);
6668 namespace {
6670 const pass_data pass_data_lower_bitint_O0 =
6672 GIMPLE_PASS, /* type */
6673 "bitintlower0", /* name */
6674 OPTGROUP_NONE, /* optinfo_flags */
6675 TV_NONE, /* tv_id */
6676 PROP_cfg, /* properties_required */
6677 PROP_gimple_lbitint, /* properties_provided */
6678 0, /* properties_destroyed */
6679 0, /* todo_flags_start */
6680 0, /* todo_flags_finish */
6683 class pass_lower_bitint_O0 : public gimple_opt_pass
6685 public:
6686 pass_lower_bitint_O0 (gcc::context *ctxt)
6687 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6690 /* opt_pass methods: */
6691 bool gate (function *fun) final override
6693 /* With errors, normal optimization passes are not run. If we don't
6694 lower bitint operations at all, rtl expansion will abort. */
6695 return !(fun->curr_properties & PROP_gimple_lbitint);
6698 unsigned int execute (function *) final override
6700 return gimple_lower_bitint ();
6703 }; // class pass_lower_bitint_O0
6705 } // anon namespace
6707 gimple_opt_pass *
6708 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6710 return new pass_lower_bitint_O0 (ctxt);