/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"

/* Split BITINT_TYPE precisions into 4 categories.  Small _BitInt, where
   the target hook says it is a single limb, middle _BitInt which per ABI
   is not, but there is some INTEGER_TYPE in which arithmetic can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 the limb is DImode, but
   the target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code and
   finally huge _BitInt which should be handled by loops over the limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
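
/* Illustrative mapping only; the exact boundaries are computed below in
   bitint_precision_kind from the target hook.  On x86_64, where the limb
   is 64-bit and MAX_FIXED_MODE_SIZE is 128 (TImode):
     _BitInt(63)  -> bitint_prec_small   (single limb)
     _BitInt(100) -> bitint_prec_middle  (TImode arithmetic, cast back)
     _BitInt(200) -> bitint_prec_large   (straight line code over limbs)
     _BitInt(512) -> bitint_prec_huge    (loops over the limbs)  */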

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}

/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  ext = -1;
	  return min_prec2;
	}
    }
  return min_prec;
}
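
/* Worked examples (illustrative only): for a signed _BitInt(256) CST of
   -1, EXT is set to -1 and the result is 0, i.e. every limb is just the
   sign extension.  For an unsigned _BitInt(256) CST of 5, EXT is 0 and
   the result is 3.  For an unsigned _BitInt(256) CST with all bits set,
   the signed min_precision path wins: EXT is -1 and the result is 0.  */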

namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
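
/* E.g. (illustrative) a middle _BitInt(100) SSA_NAME operand on x86_64 is
   rewritten as
     _2 = (<unnamed-signed:100>) op_1;
   so that subsequent arithmetic on it can be expanded via TImode.  */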

/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
      }
      break;
    default:
      break;
    }
  return false;
}
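
/* For instance (illustrative), a_1 = b_2 + c_3 or a_1 = b_2 << 3 on huge
   _BitInt operands can be merged with their dependencies into a single
   limb-wise loop, while multiplication, division or shifts by a full limb
   or more are lowered separately (see lower_muldiv_stmt and
   lower_shift_stmt below) and so are not mergeable.  */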

/* Return non-zero if STMT is a .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2, otherwise return 1.  Such cases (most common uses of those
   builtins) can be optimized by marking their lhs and lhs of IMAGPART_EXPR
   and maybe lhs of REALPART_EXPR as not needed to be backed up by a stack
   variable.  For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  return seen == 3 ? 2 : 1;
}
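
/* The shape being recognized looks like (illustrative GIMPLE):
     _1 = .ADD_OVERFLOW (a_2, b_3);   <- complex _BitInt(256) lhs
     res_4 = REALPART_EXPR <_1>;      <- optional, at most one
     ovf_5 = IMAGPART_EXPR <_1>;
     _6 = (int) ovf_5;                <- single non-_BitInt integral use  */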

/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}

/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is non-zero for this case, zero otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then a loop processing a
     single limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true); for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false.

     m_first should be set the first time handle_operand/handle_stmt
     is called and cleared when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to the m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     it just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     the m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
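
/* A minimal sketch of the m_first/m_data protocol described above
   (illustrative only, not an actual caller):

     // toplevel call for the first limb
     m_data.truncate (0);
     m_data_cnt = 0;
     m_first = true;
     tree l0 = handle_stmt (stmt, size_int (0));  // may push state
     // subsequent limbs reuse the recorded state
     m_first = false;
     m_data_cnt = 0;
     tree l1 = handle_stmt (stmt, size_int (1));  */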

bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}

/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}

/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
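
/* E.g. (illustrative, 64-bit limbs) accessing limb 2 of a _BitInt(192)
   variable V yields MEM_REF <&V, 16>, i.e. the 8 bytes at byte offset
   2 * m_limb_size; for the partial most significant limb of a
   _BitInt(200) a read is additionally narrowed by a cast to the
   8-bit access type.  */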

/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}

/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
   new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}

/* Emit a half diamond with a full diamond in it,
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     | /  \
     |/    \
     |new_bb1 new_bb2
     | |      /
     \ |     /
      \|    /
       \|  /
        \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}

/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % (2 * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out);
	      g = gimple_build_assign (m_data[m_data_cnt + 1],
				       build_int_cst (m_limb_type, ext));
	      insert_before (g);
	      m_data[m_data_cnt + 1] = gimple_assign_rhs1 (g);
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % (2 * limb_prec);
	  int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  g = gimple_build_cond (LT_EXPR, idx,
				 size_int (min_prec / limb_prec),
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      c = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
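
/* The PHI created above looks like (illustrative):
     # in_1 = PHI <val_2(preheader), out_3(latch)>
   where the caller is expected to define out_3 in the loop body as the
   value to be carried into the next iteration (see e.g. the carry
   handling in handle_plus_minus below).  */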

/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}

/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
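
/* When the target provides uaddc<mode>5, one limb of a huge _BitInt
   addition is thus lowered to (illustrative GIMPLE):
     _1 = .UADDC (a_limb_2, b_limb_3, carry_in_4);
     sum_5 = REALPART_EXPR <_1>;
     carry_out_6 = IMAGPART_EXPR <_1>;
   with carry_out_6 flowing into the next iteration through the PHI
   built by prepare_data_in_out.  */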

/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
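
/* In effect, for a shift count CNT below limb_prec, limb IDX of the
   result is computed as (illustrative)
     out[idx] = (in[idx] << cnt) | (in[idx - 1] >> (limb_prec - cnt));
   where the previous limb is carried between calls through the
   data_in/data_out pair.  */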

/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) is set, or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision, that is not the case.  */
	  || (!m_var_msb
	      && tree_int_cst_equal (TYPE_SIZE (rhs_type),
				     TYPE_SIZE (lhs_type))
	      && (!m_upwards_2limb
		  || (m_upwards_2limb * limb_prec
		      < TYPE_PRECISION (lhs_type)))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
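      /* E.g. (illustrative, 64-bit limbs) widening from a signed
	 _BitInt(135): low = (135 - 1) / 64 = 2 and high = CEIL (135, 64)
	 = 3, so limbs 0 and 1 are copied as is, limb 2 contains the sign
	 bit and needs the extension computed, and limbs 3 and up are pure
	 sign extension.  */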
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && (m_upwards_2limb * limb_prec
		  <= ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type))))
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idx, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_bitfld_load = 0;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
	      insert_before (g);
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!m_first)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      else
	{
	  if (tree_to_uhwi (idx) < low)
	    {
	      t = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else if (tree_to_uhwi (idx) < high)
	    {
	      t = handle_operand (rhs1, size_int (low));
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
		t = add_cast (m_limb_type, t);
	      tree ext = NULL_TREE;
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		  m_data[save_data_cnt + 1] = ext;
		}
	    }
	  else
	    {
	      if (TYPE_UNSIGNED (rhs_type) && m_first)
		{
		  handle_operand (rhs1, size_zero_node);
		  m_data[save_data_cnt + 2]
		    = build_int_cst (NULL_TREE, m_data_cnt);
		}
	      else
		m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	      if (TYPE_UNSIGNED (rhs_type))
		t = build_zero_cst (m_limb_type);
	      else
		t = m_data[save_data_cnt + 1];
	    }
	  tree type = limb_access_type (lhs_type, idx);
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_first = save_first;
	  return t;
	}
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1);
	      g = gimple_build_assign (m_data[m_data_cnt + 1], rext);
	      insert_before (g);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2);
		  g = gimple_build_assign (m_data[m_data_cnt + 3], rext);
		  insert_before (g);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}

/* Helper function for handle_stmt method, handle a load from memory.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
	  break;
    }

  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
	goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is only a multiple of
	 BITS_PER_UNIT, handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	goto normal_load;
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
	{
	  bitoffset = 0;
	  var_field_off = true;
	}
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
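      /* E.g. (illustrative, 64-bit limbs) a bit-field whose first bit sits
	 70 bits into its representative gives bo_idx = 1 and bo_bit = 6:
	 limb I of the field is assembled below from representative limbs
	 bo_idx + I and bo_idx + I + 1 as
	   (iv >> bo_bit) | (iv2 << (limb_prec - bo_bit)).  */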
1679 if (m_first)
1681 if (m_upwards)
1683 gimple_stmt_iterator save_gsi = m_gsi;
1684 m_gsi = m_init_gsi;
1685 if (gsi_end_p (m_gsi))
1686 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1687 else
1688 gsi_next (&m_gsi);
1689 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1690 tree iv = make_ssa_name (m_limb_type);
1691 g = gimple_build_assign (iv, t);
1692 insert_before (g);
1693 if (eh)
1695 maybe_duplicate_eh_stmt (g, stmt);
1696 if (eh_edge)
1698 edge e = split_block (gsi_bb (m_gsi), g);
1699 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1700 = profile_probability::very_unlikely ();
1701 m_gsi = gsi_after_labels (e->dest);
1702 if (gsi_bb (save_gsi) == e->src)
1704 if (gsi_end_p (save_gsi))
1705 save_gsi = gsi_end_bb (e->dest);
1706 else
1707 save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
1709 if (m_preheader_bb == e->src)
1710 m_preheader_bb = e->dest;
1713 m_init_gsi = m_gsi;
1714 if (gsi_end_p (m_init_gsi))
1715 m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1716 else
1717 gsi_prev (&m_init_gsi);
1718 m_gsi = save_gsi;
1719 tree out;
1720 prepare_data_in_out (iv, idx, &out);
1721 out = m_data[m_data_cnt];
1722 m_data.safe_push (out);
1724 else
1726 m_data.safe_push (NULL_TREE);
1727 m_data.safe_push (NULL_TREE);
1728 m_data.safe_push (NULL_TREE);
1732 tree nidx0 = NULL_TREE, nidx1;
1733 tree iv = m_data[m_data_cnt];
1734 if (m_cast_conditional && iv)
1736 gcc_assert (!m_bitfld_load);
1737 m_bitfld_load = m_data_cnt;
1739 if (tree_fits_uhwi_p (idx))
1741 unsigned prec = TYPE_PRECISION (rhs_type);
1742 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1743 gcc_assert (i * limb_prec < prec);
1744 nidx1 = size_int (i + bo_idx + 1);
1745 if ((i + 1) * limb_prec > prec)
1747 prec %= limb_prec;
1748 if (prec + bo_bit <= (unsigned) limb_prec)
1749 nidx1 = NULL_TREE;
1751 if (!iv)
1752 nidx0 = size_int (i + bo_idx);
1754 else
1756 if (!iv)
1758 if (bo_idx == 0)
1759 nidx0 = idx;
1760 else
1762 nidx0 = make_ssa_name (sizetype);
1763 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1764 size_int (bo_idx));
1765 insert_before (g);
1768 nidx1 = make_ssa_name (sizetype);
1769 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1770 size_int (bo_idx + 1));
1771 insert_before (g);
1774 tree iv2 = NULL_TREE;
1775 if (nidx0)
1777 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1778 iv = make_ssa_name (m_limb_type);
1779 g = gimple_build_assign (iv, t);
1780 insert_before (g);
1781 gcc_assert (!eh);
1783 if (nidx1)
1785 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1786 unsigned prec = TYPE_PRECISION (rhs_type);
1787 if (conditional)
1789 if ((prec % limb_prec) == 0
1790 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1791 conditional = false;
1793 edge edge_true = NULL, edge_false = NULL;
1794 if (conditional)
1796 g = gimple_build_cond (NE_EXPR, idx,
1797 size_int (prec / limb_prec),
1798 NULL_TREE, NULL_TREE);
1799 if_then (g, profile_probability::likely (),
1800 edge_true, edge_false);
1802 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1803 if (m_upwards_2limb
1804 && !m_first
1805 && !m_bitfld_load
1806 && !tree_fits_uhwi_p (idx))
1807 iv2 = m_data[m_data_cnt + 1];
1808 else
1809 iv2 = make_ssa_name (m_limb_type);
1810 g = gimple_build_assign (iv2, t);
1811 insert_before (g);
1812 if (eh)
1814 maybe_duplicate_eh_stmt (g, stmt);
1815 if (eh_edge)
1817 edge e = split_block (gsi_bb (m_gsi), g);
1818 m_gsi = gsi_after_labels (e->dest);
1819 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1820 = profile_probability::very_unlikely ();
1823 if (conditional)
1825 tree iv3 = make_ssa_name (m_limb_type);
1826 if (eh)
1827 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1828 gphi *phi = create_phi_node (iv3, edge_true->dest);
1829 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1830 add_phi_arg (phi, build_zero_cst (m_limb_type),
1831 edge_false, UNKNOWN_LOCATION);
1832 m_gsi = gsi_after_labels (edge_true->dest);
1835 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1836 iv, build_int_cst (unsigned_type_node, bo_bit));
1837 insert_before (g);
1838 iv = gimple_assign_lhs (g);
1839 if (iv2)
1841 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1842 iv2, build_int_cst (unsigned_type_node,
1843 limb_prec - bo_bit));
1844 insert_before (g);
1845 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1846 gimple_assign_lhs (g), iv);
1847 insert_before (g);
1848 iv = gimple_assign_lhs (g);
1849 if (m_data[m_data_cnt])
1850 m_data[m_data_cnt] = iv2;
1852 if (tree_fits_uhwi_p (idx))
1854 tree atype = limb_access_type (rhs_type, idx);
1855 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1856 iv = add_cast (atype, iv);
1858 m_data_cnt += 3;
1859 return iv;
1862 normal_load:
1863 /* Use write_p = true for loads with EH edges to make
1864 sure limb_access doesn't add a cast as a separate
1865 statement after it. */
1866 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1867 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1868 g = gimple_build_assign (ret, rhs1);
1869 insert_before (g);
1870 if (eh)
1872 maybe_duplicate_eh_stmt (g, stmt);
1873 if (eh_edge)
1875 edge e = split_block (gsi_bb (m_gsi), g);
1876 m_gsi = gsi_after_labels (e->dest);
1877 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1878 = profile_probability::very_unlikely ();
1880 if (tree_fits_uhwi_p (idx))
1882 tree atype = limb_access_type (rhs_type, idx);
1883 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1884 ret = add_cast (atype, ret);
1887 return ret;
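/* Illustrative note (a sketch, assuming 64-bit limbs): for a load from
   a bit-field whose value starts bo_bit = 3 bits into a limb of its
   representative, the code above recombines two adjacent limbs, so
   limb IDX of the loaded value is in effect
     (rep[bo_idx + idx] >> 3) | (rep[bo_idx + idx + 1] << 61);
   the second access is omitted, or emitted conditionally, when the
   remaining bits of the bit-field fit into the first limb.  */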
1890 /* Return limb IDX of the result of a mergeable statement STMT. */
1892 tree
1893 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1895 tree lhs, rhs1, rhs2 = NULL_TREE;
1896 gimple *g;
1897 switch (gimple_code (stmt))
1899 case GIMPLE_ASSIGN:
1900 if (gimple_assign_load_p (stmt))
1901 return handle_load (stmt, idx);
1902 switch (gimple_assign_rhs_code (stmt))
1904 case BIT_AND_EXPR:
1905 case BIT_IOR_EXPR:
1906 case BIT_XOR_EXPR:
1907 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1908 /* FALLTHRU */
1909 case BIT_NOT_EXPR:
1910 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1911 lhs = make_ssa_name (TREE_TYPE (rhs1));
1912 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1913 rhs1, rhs2);
1914 insert_before (g);
1915 return lhs;
1916 case PLUS_EXPR:
1917 case MINUS_EXPR:
1918 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1919 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1920 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1921 rhs1, rhs2, idx);
1922 case NEGATE_EXPR:
1923 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1924 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1925 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1926 case LSHIFT_EXPR:
1927 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1928 idx),
1929 gimple_assign_rhs2 (stmt), idx);
1930 case SSA_NAME:
1931 case INTEGER_CST:
1932 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1933 CASE_CONVERT:
1934 case VIEW_CONVERT_EXPR:
1935 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1936 gimple_assign_rhs1 (stmt), idx);
1937 default:
1938 break;
1940 break;
1941 default:
1942 break;
1944 gcc_unreachable ();
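/* Illustrative sketch of the dispatch above: for source-level
     d = (a & b) + c;
   the mergeable lowering emits for each limb IDX roughly
     t1 = a[idx] & b[idx];
     d[idx] = t1 + c[idx] + carry;
   where the carry is threaded between limbs via the m_data machinery
   in handle_plus_minus.  Bitwise ops, casts and copies are purely
   per-limb; PLUS/MINUS, NEGATE and LSHIFT also pass state between
   adjacent limbs.  */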
1947 /* Return the minimum precision of OP at STMT.
1948 A positive value is the minimum precision above which all bits
1949 are zero; a negative value means all bits above the negated
1950 value are copies of the sign bit. */
1952 static int
1953 range_to_prec (tree op, gimple *stmt)
1955 int_range_max r;
1956 wide_int w;
1957 tree type = TREE_TYPE (op);
1958 unsigned int prec = TYPE_PRECISION (type);
1960 if (!optimize
1961 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1962 || r.undefined_p ())
1964 if (TYPE_UNSIGNED (type))
1965 return prec;
1966 else
1967 return MIN ((int) -prec, -2);
1970 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1972 w = r.lower_bound ();
1973 if (wi::neg_p (w))
1975 int min_prec1 = wi::min_precision (w, SIGNED);
1976 w = r.upper_bound ();
1977 int min_prec2 = wi::min_precision (w, SIGNED);
1978 int min_prec = MAX (min_prec1, min_prec2);
1979 return MIN (-min_prec, -2);
1983 w = r.upper_bound ();
1984 int min_prec = wi::min_precision (w, UNSIGNED);
1985 return MAX (min_prec, 1);
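/* Example (illustrative): for an unsigned operand whose computed range
   is [0, 1000] this returns 10, because all bits above bit 9 are known
   to be zero; for a signed operand with range [-5, 5] it returns -4,
   meaning all bits above the low 4 are copies of the sign bit.  With
   no usable range information it falls back to the precision of the
   type, negated for signed types.  */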
1988 /* Return the address of the first limb of OP and write its precision
1989 into *PREC. If *PREC is positive, the operand is zero-extended
1990 from that precision; if it is negative, the operand is sign-extended
1991 from -*PREC. If PREC_STORED is NULL, this is the toplevel call;
1992 otherwise *PREC_STORED is the precision from the innermost call
1993 without range optimizations. */
1995 tree
1996 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
1997 int *prec_stored, int *prec)
1999 wide_int w;
2000 location_t loc_save = m_loc;
2001 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2002 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2003 && TREE_CODE (op) != INTEGER_CST)
2005 do_int:
2006 *prec = range_to_prec (op, stmt);
2007 bitint_prec_kind kind = bitint_prec_small;
2008 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2009 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2010 kind = bitint_precision_kind (TREE_TYPE (op));
2011 if (kind == bitint_prec_middle)
2013 tree type = NULL_TREE;
2014 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2016 tree op_type = TREE_TYPE (op);
2017 unsigned HOST_WIDE_INT nelts
2018 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2019 /* Add support for 3 or more limbs filled in from a normal
2020 integral type if this assert fails. If no target chooses a
2021 limb mode smaller than half of the largest supported normal
2022 integral type, this will not be needed. */
2023 gcc_assert (nelts <= 2);
2024 if (prec_stored)
2025 *prec_stored = (TYPE_UNSIGNED (op_type)
2026 ? TYPE_PRECISION (op_type)
2027 : -TYPE_PRECISION (op_type));
2028 if (*prec <= limb_prec && *prec >= -limb_prec)
2030 nelts = 1;
2031 if (prec_stored)
2033 if (TYPE_UNSIGNED (op_type))
2035 if (*prec_stored > limb_prec)
2036 *prec_stored = limb_prec;
2038 else if (*prec_stored < -limb_prec)
2039 *prec_stored = -limb_prec;
2042 tree atype = build_array_type_nelts (m_limb_type, nelts);
2043 tree var = create_tmp_var (atype);
2044 tree t1 = op;
2045 if (!useless_type_conversion_p (m_limb_type, op_type))
2046 t1 = add_cast (m_limb_type, t1);
2047 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2048 NULL_TREE, NULL_TREE);
2049 gimple *g = gimple_build_assign (v, t1);
2050 insert_before (g);
2051 if (nelts > 1)
2053 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2054 g = gimple_build_assign (make_ssa_name (op_type),
2055 RSHIFT_EXPR, op, lp);
2056 insert_before (g);
2057 tree t2 = gimple_assign_lhs (g);
2058 t2 = add_cast (m_limb_type, t2);
2059 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2060 NULL_TREE, NULL_TREE);
2061 g = gimple_build_assign (v, t2);
2062 insert_before (g);
2064 tree ret = build_fold_addr_expr (var);
2065 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2067 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2068 g = gimple_build_assign (var, clobber);
2069 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2071 m_loc = loc_save;
2072 return ret;
2074 switch (TREE_CODE (op))
2076 case SSA_NAME:
2077 if (m_names == NULL
2078 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2080 gimple *g = SSA_NAME_DEF_STMT (op);
2081 tree ret;
2082 m_loc = gimple_location (g);
2083 if (gimple_assign_load_p (g))
2085 *prec = range_to_prec (op, NULL);
2086 if (prec_stored)
2087 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2088 ? TYPE_PRECISION (TREE_TYPE (op))
2089 : -TYPE_PRECISION (TREE_TYPE (op)));
2090 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2091 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2092 NULL_TREE, true, GSI_SAME_STMT);
2094 else if (gimple_code (g) == GIMPLE_NOP)
2096 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2097 if (prec_stored)
2098 *prec_stored = *prec;
2099 tree var = create_tmp_var (m_limb_type);
2100 TREE_ADDRESSABLE (var) = 1;
2101 ret = build_fold_addr_expr (var);
2102 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2104 tree clobber = build_clobber (m_limb_type,
2105 CLOBBER_STORAGE_END);
2106 g = gimple_build_assign (var, clobber);
2107 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2110 else
2112 gcc_assert (gimple_assign_cast_p (g));
2113 tree rhs1 = gimple_assign_rhs1 (g);
2114 bitint_prec_kind kind = bitint_prec_small;
2115 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2116 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2117 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2118 if (kind >= bitint_prec_large)
2120 tree lhs_type = TREE_TYPE (op);
2121 tree rhs_type = TREE_TYPE (rhs1);
2122 int prec_stored_val = 0;
2123 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2124 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2126 if (TYPE_UNSIGNED (lhs_type)
2127 && !TYPE_UNSIGNED (rhs_type))
2128 gcc_assert (*prec >= 0 || prec_stored == NULL);
2130 else
2132 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2134 else if (TYPE_UNSIGNED (lhs_type))
2136 gcc_assert (*prec > 0
2137 || prec_stored_val > 0
2138 || (-prec_stored_val
2139 >= TYPE_PRECISION (lhs_type)));
2140 *prec = TYPE_PRECISION (lhs_type);
2142 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2144 else
2145 *prec = -TYPE_PRECISION (lhs_type);
2148 else
2150 op = rhs1;
2151 stmt = g;
2152 goto do_int;
2155 m_loc = loc_save;
2156 return ret;
2158 else
2160 int p = var_to_partition (m_map, op);
2161 gcc_assert (m_vars[p] != NULL_TREE);
2162 *prec = range_to_prec (op, stmt);
2163 if (prec_stored)
2164 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2165 ? TYPE_PRECISION (TREE_TYPE (op))
2166 : -TYPE_PRECISION (TREE_TYPE (op)));
2167 return build_fold_addr_expr (m_vars[p]);
2169 case INTEGER_CST:
2170 unsigned int min_prec, mp;
2171 tree type;
2172 w = wi::to_wide (op);
2173 if (tree_int_cst_sgn (op) >= 0)
2175 min_prec = wi::min_precision (w, UNSIGNED);
2176 *prec = MAX (min_prec, 1);
2178 else
2180 min_prec = wi::min_precision (w, SIGNED);
2181 *prec = MIN ((int) -min_prec, -2);
2183 mp = CEIL (min_prec, limb_prec) * limb_prec;
2184 if (mp == 0)
2185 mp = 1;
2186 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2187 type = TREE_TYPE (op);
2188 else
2189 type = build_bitint_type (mp, 1);
2190 if (TREE_CODE (type) != BITINT_TYPE
2191 || bitint_precision_kind (type) == bitint_prec_small)
2193 if (TYPE_PRECISION (type) <= limb_prec)
2194 type = m_limb_type;
2195 else
2196 /* This case is for targets which e.g. have a 64-bit
2197 limb but categorize _BitInts up to 128 bits as
2198 small. We could use a type of m_limb_type[2] or
2199 similar instead to save space. */
2200 type = build_bitint_type (mid_min_prec, 1);
2202 if (prec_stored)
2204 if (tree_int_cst_sgn (op) >= 0)
2205 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2206 else
2207 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2209 op = tree_output_constant_def (fold_convert (type, op));
2210 return build_fold_addr_expr (op);
2211 default:
2212 gcc_unreachable ();
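/* Worked example for the INTEGER_CST case above (a sketch, assuming
   64-bit limbs): for the constant 5 used as a huge _BitInt operand,
   min_prec is 3, mp rounds up to 64 and the type degrades to
   m_limb_type, so a single-limb constant is emitted via
   tree_output_constant_def, *PREC is set to 3 and the address of
   that constant pool object is returned.  */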
2216 /* Helper function: create a loop before the current location,
2217 starting with sizetype INIT value on the preheader edge. Return
2218 the PHI result and set *IDX_NEXT to the SSA_NAME the loop creates
2219 and uses from the latch edge. */
2221 tree
2222 bitint_large_huge::create_loop (tree init, tree *idx_next)
2224 if (!gsi_end_p (m_gsi))
2225 gsi_prev (&m_gsi);
2226 else
2227 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2228 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2229 edge e2 = split_block (e1->dest, (gimple *) NULL);
2230 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2231 e3->probability = profile_probability::very_unlikely ();
2232 e2->flags = EDGE_FALSE_VALUE;
2233 e2->probability = e3->probability.invert ();
2234 tree idx = make_ssa_name (sizetype);
2235 gphi *phi = create_phi_node (idx, e1->dest);
2236 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2237 *idx_next = make_ssa_name (sizetype);
2238 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2239 m_gsi = gsi_after_labels (e1->dest);
2240 m_bb = e1->dest;
2241 m_preheader_bb = e1->src;
2242 class loop *loop = alloc_loop ();
2243 loop->header = e1->dest;
2244 add_loop (loop, e1->src->loop_father);
2245 return idx;
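/* The CFG shape created above, for orientation (a sketch):

     preheader (e1->src)
       |
       v
     header (e1->dest):  idx = PHI <INIT (e1), *IDX_NEXT (e3)>
       |        ^
       |        |  e3 (EDGE_TRUE_VALUE, latch back edge)
       +--------+
       |
       v  e2 (EDGE_FALSE_VALUE)
     continuation

   m_gsi is left after the labels of the header, where the caller is
   expected to emit the loop body, the assignment to *IDX_NEXT and
   the exit condition.  */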
2248 /* Lower a large/huge _BitInt mergeable or similar statement STMT which can
2249 be lowered using iteration from the least significant limb up to the most
2250 significant limb. For large _BitInt it is emitted as straight line code
2251 before the current location, for huge _BitInt as a loop handling two limbs
2252 at once, followed by handling the remaining limbs in straight line code
2253 (at most one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2254 comparisons, in which case CMP_CODE should be the comparison code and
2255 CMP_OP1/CMP_OP2 the comparison operands. */
2257 tree
2258 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2259 tree cmp_op1, tree cmp_op2)
2261 bool eq_p = cmp_code != ERROR_MARK;
2262 tree type;
2263 if (eq_p)
2264 type = TREE_TYPE (cmp_op1);
2265 else
2266 type = TREE_TYPE (gimple_assign_lhs (stmt));
2267 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2268 bitint_prec_kind kind = bitint_precision_kind (type);
2269 gcc_assert (kind >= bitint_prec_large);
2270 gimple *g;
2271 tree lhs = gimple_get_lhs (stmt);
2272 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2273 if (lhs
2274 && TREE_CODE (lhs) == SSA_NAME
2275 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2276 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2278 int p = var_to_partition (m_map, lhs);
2279 gcc_assert (m_vars[p] != NULL_TREE);
2280 m_lhs = lhs = m_vars[p];
2282 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2283 bool sext = false;
2284 tree ext = NULL_TREE, store_operand = NULL_TREE;
2285 bool eh = false;
2286 basic_block eh_pad = NULL;
2287 tree nlhs = NULL_TREE;
2288 unsigned HOST_WIDE_INT bo_idx = 0;
2289 unsigned HOST_WIDE_INT bo_bit = 0;
2290 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2291 if (gimple_store_p (stmt))
2293 store_operand = gimple_assign_rhs1 (stmt);
2294 eh = stmt_ends_bb_p (stmt);
2295 if (eh)
2297 edge e;
2298 edge_iterator ei;
2299 basic_block bb = gimple_bb (stmt);
2301 FOR_EACH_EDGE (e, ei, bb->succs)
2302 if (e->flags & EDGE_EH)
2304 eh_pad = e->dest;
2305 break;
2308 if (TREE_CODE (lhs) == COMPONENT_REF
2309 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2311 tree fld = TREE_OPERAND (lhs, 1);
2312 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2313 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2314 poly_int64 bitoffset;
2315 poly_uint64 field_offset, repr_offset;
2316 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2317 nlhs = lhs;
2318 else
2320 bool var_field_off = false;
2321 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2322 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2323 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2324 else
2326 bitoffset = 0;
2327 var_field_off = true;
2329 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2330 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2331 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2332 TREE_OPERAND (lhs, 0), repr,
2333 var_field_off
2334 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2335 HOST_WIDE_INT bo = bitoffset.to_constant ();
2336 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2337 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2341 if ((store_operand
2342 && TREE_CODE (store_operand) == SSA_NAME
2343 && (m_names == NULL
2344 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2345 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2346 || gimple_assign_cast_p (stmt))
2348 rhs1 = gimple_assign_rhs1 (store_operand
2349 ? SSA_NAME_DEF_STMT (store_operand)
2350 : stmt);
2351 /* Optimize mergeable ops ending with a widening cast to _BitInt
2352 (or followed by a store). We can lower just the limbs of the
2353 cast operand and widen afterwards. */
2354 if (TREE_CODE (rhs1) == SSA_NAME
2355 && (m_names == NULL
2356 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2357 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2358 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2359 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2360 limb_prec) < CEIL (prec, limb_prec)
2361 || (kind == bitint_prec_huge
2362 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2364 store_operand = rhs1;
2365 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2366 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2367 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2368 sext = true;
2371 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2372 if (kind == bitint_prec_large)
2373 cnt = CEIL (prec, limb_prec);
2374 else
2376 rem = (prec % (2 * limb_prec));
2377 end = (prec - rem) / limb_prec;
2378 cnt = 2 + CEIL (rem, limb_prec);
2379 idx = idx_first = create_loop (size_zero_node, &idx_next);
2382 basic_block edge_bb = NULL;
2383 if (eq_p)
2385 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2386 gsi_prev (&gsi);
2387 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2388 edge_bb = e->src;
2389 if (kind == bitint_prec_large)
2390 m_gsi = gsi_end_bb (edge_bb);
2392 else
2393 m_after_stmt = stmt;
2394 if (kind != bitint_prec_large)
2395 m_upwards_2limb = end;
2396 m_upwards = true;
2398 bool separate_ext
2399 = (prec != (unsigned) TYPE_PRECISION (type)
2400 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2401 > CEIL (prec, limb_prec)));
2403 for (unsigned i = 0; i < cnt; i++)
2405 m_data_cnt = 0;
2406 if (kind == bitint_prec_large)
2407 idx = size_int (i);
2408 else if (i >= 2)
2409 idx = size_int (end + (i > 2));
2410 if (eq_p)
2412 rhs1 = handle_operand (cmp_op1, idx);
2413 tree rhs2 = handle_operand (cmp_op2, idx);
2414 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2415 insert_before (g);
2416 edge e1 = split_block (gsi_bb (m_gsi), g);
2417 e1->flags = EDGE_FALSE_VALUE;
2418 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2419 e1->probability = profile_probability::unlikely ();
2420 e2->probability = e1->probability.invert ();
2421 if (i == 0)
2422 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2423 m_gsi = gsi_after_labels (e1->dest);
2425 else
2427 if (store_operand)
2428 rhs1 = handle_operand (store_operand, idx);
2429 else
2430 rhs1 = handle_stmt (stmt, idx);
2431 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2432 rhs1 = add_cast (m_limb_type, rhs1);
2433 if (sext && i == cnt - 1)
2434 ext = rhs1;
2435 tree nidx = idx;
2436 if (bo_idx)
2438 if (tree_fits_uhwi_p (idx))
2439 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2440 else
2442 nidx = make_ssa_name (sizetype);
2443 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2444 size_int (bo_idx));
2445 insert_before (g);
2448 bool done = false;
2449 basic_block new_bb = NULL;
2450 /* Handle stores into bit-fields. */
2451 if (bo_bit)
2453 if (i == 0)
2455 edge e2 = NULL;
2456 if (kind != bitint_prec_large)
2458 prepare_data_in_out (build_zero_cst (m_limb_type),
2459 idx, &bf_next);
2460 bf_next = m_data.pop ();
2461 bf_cur = m_data.pop ();
2462 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2463 NULL_TREE, NULL_TREE);
2464 edge edge_true;
2465 if_then_else (g, profile_probability::unlikely (),
2466 edge_true, e2);
2467 new_bb = e2->dest;
2469 tree ftype
2470 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2471 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2472 bitsize_int (limb_prec - bo_bit),
2473 bitsize_int (bo_idx * limb_prec + bo_bit));
2474 tree t = add_cast (ftype, rhs1);
2475 g = gimple_build_assign (bfr, t);
2476 insert_before (g);
2477 if (eh)
2479 maybe_duplicate_eh_stmt (g, stmt);
2480 if (eh_pad)
2482 edge e = split_block (gsi_bb (m_gsi), g);
2483 m_gsi = gsi_after_labels (e->dest);
2484 make_edge (e->src, eh_pad, EDGE_EH)->probability
2485 = profile_probability::very_unlikely ();
2488 if (kind == bitint_prec_large)
2490 bf_cur = rhs1;
2491 done = true;
2493 else if (e2)
2494 m_gsi = gsi_after_labels (e2->src);
2496 if (!done)
2498 tree t1 = make_ssa_name (m_limb_type);
2499 tree t2 = make_ssa_name (m_limb_type);
2500 tree t3 = make_ssa_name (m_limb_type);
2501 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2502 build_int_cst (unsigned_type_node,
2503 limb_prec - bo_bit));
2504 insert_before (g);
2505 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2506 build_int_cst (unsigned_type_node,
2507 bo_bit));
2508 insert_before (g);
2509 bf_cur = rhs1;
2510 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2511 insert_before (g);
2512 rhs1 = t3;
2513 if (bf_next && i == 1)
2515 g = gimple_build_assign (bf_next, bf_cur);
2516 insert_before (g);
2520 if (!done)
2522 /* Handle bit-field access to partial last limb if needed. */
2523 if (nlhs
2524 && i == cnt - 1
2525 && !separate_ext
2526 && tree_fits_uhwi_p (idx))
2528 unsigned int tprec = TYPE_PRECISION (type);
2529 unsigned int rprec = tprec % limb_prec;
2530 if (rprec + bo_bit < (unsigned) limb_prec)
2532 tree ftype
2533 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2534 tree bfr = build3 (BIT_FIELD_REF, ftype,
2535 unshare_expr (nlhs),
2536 bitsize_int (rprec + bo_bit),
2537 bitsize_int ((bo_idx
2538 + tprec / limb_prec)
2539 * limb_prec));
2540 tree t = add_cast (ftype, rhs1);
2541 g = gimple_build_assign (bfr, t);
2542 done = true;
2543 bf_cur = NULL_TREE;
2545 else if (rprec + bo_bit == (unsigned) limb_prec)
2546 bf_cur = NULL_TREE;
2548 /* Otherwise, stores to any other lhs. */
2549 if (!done)
2551 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2552 nidx, true);
2553 g = gimple_build_assign (l, rhs1);
2555 insert_before (g);
2556 if (eh)
2558 maybe_duplicate_eh_stmt (g, stmt);
2559 if (eh_pad)
2561 edge e = split_block (gsi_bb (m_gsi), g);
2562 m_gsi = gsi_after_labels (e->dest);
2563 make_edge (e->src, eh_pad, EDGE_EH)->probability
2564 = profile_probability::very_unlikely ();
2567 if (new_bb)
2568 m_gsi = gsi_after_labels (new_bb);
2571 m_first = false;
2572 if (kind == bitint_prec_huge && i <= 1)
2574 if (i == 0)
2576 idx = make_ssa_name (sizetype);
2577 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2578 size_one_node);
2579 insert_before (g);
2581 else
2583 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2584 size_int (2));
2585 insert_before (g);
2586 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2587 NULL_TREE, NULL_TREE);
2588 insert_before (g);
2589 if (eq_p)
2590 m_gsi = gsi_after_labels (edge_bb);
2591 else
2592 m_gsi = gsi_for_stmt (stmt);
2597 if (separate_ext)
2599 if (sext)
2601 ext = add_cast (signed_type_for (m_limb_type), ext);
2602 tree lpm1 = build_int_cst (unsigned_type_node,
2603 limb_prec - 1);
2604 tree n = make_ssa_name (TREE_TYPE (ext));
2605 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2606 insert_before (g);
2607 ext = add_cast (m_limb_type, n);
2609 else
2610 ext = build_zero_cst (m_limb_type);
2611 kind = bitint_precision_kind (type);
2612 unsigned start = CEIL (prec, limb_prec);
2613 prec = TYPE_PRECISION (type);
2614 idx = idx_first = idx_next = NULL_TREE;
2615 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2616 kind = bitint_prec_large;
2617 if (kind == bitint_prec_large)
2618 cnt = CEIL (prec, limb_prec) - start;
2619 else
2621 rem = prec % limb_prec;
2622 end = (prec - rem) / limb_prec;
2623 cnt = (bo_bit != 0) + 1 + (rem != 0);
2625 for (unsigned i = 0; i < cnt; i++)
2627 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2628 idx = size_int (start + i);
2629 else if (i == cnt - 1 && (rem != 0))
2630 idx = size_int (end);
2631 else if (i == (bo_bit != 0))
2632 idx = create_loop (size_int (start + i), &idx_next);
2633 rhs1 = ext;
2634 if (bf_cur != NULL_TREE && bf_cur != ext)
2636 tree t1 = make_ssa_name (m_limb_type);
2637 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2638 build_int_cst (unsigned_type_node,
2639 limb_prec - bo_bit));
2640 insert_before (g);
2641 if (integer_zerop (ext))
2642 rhs1 = t1;
2643 else
2645 tree t2 = make_ssa_name (m_limb_type);
2646 rhs1 = make_ssa_name (m_limb_type);
2647 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2648 build_int_cst (unsigned_type_node,
2649 bo_bit));
2650 insert_before (g);
2651 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2652 insert_before (g);
2654 bf_cur = ext;
2656 tree nidx = idx;
2657 if (bo_idx)
2659 if (tree_fits_uhwi_p (idx))
2660 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2661 else
2663 nidx = make_ssa_name (sizetype);
2664 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2665 size_int (bo_idx));
2666 insert_before (g);
2669 bool done = false;
2670 /* Handle bit-field access to partial last limb if needed. */
2671 if (nlhs && i == cnt - 1)
2673 unsigned int tprec = TYPE_PRECISION (type);
2674 unsigned int rprec = tprec % limb_prec;
2675 if (rprec + bo_bit < (unsigned) limb_prec)
2677 tree ftype
2678 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2679 tree bfr = build3 (BIT_FIELD_REF, ftype,
2680 unshare_expr (nlhs),
2681 bitsize_int (rprec + bo_bit),
2682 bitsize_int ((bo_idx + tprec / limb_prec)
2683 * limb_prec));
2684 tree t = add_cast (ftype, rhs1);
2685 g = gimple_build_assign (bfr, t);
2686 done = true;
2687 bf_cur = NULL_TREE;
2689 else if (rprec + bo_bit == (unsigned) limb_prec)
2690 bf_cur = NULL_TREE;
2692 /* Otherwise, stores to any other lhs. */
2693 if (!done)
2695 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2696 g = gimple_build_assign (l, rhs1);
2698 insert_before (g);
2699 if (eh)
2701 maybe_duplicate_eh_stmt (g, stmt);
2702 if (eh_pad)
2704 edge e = split_block (gsi_bb (m_gsi), g);
2705 m_gsi = gsi_after_labels (e->dest);
2706 make_edge (e->src, eh_pad, EDGE_EH)->probability
2707 = profile_probability::very_unlikely ();
2710 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2712 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2713 size_one_node);
2714 insert_before (g);
2715 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2716 NULL_TREE, NULL_TREE);
2717 insert_before (g);
2718 m_gsi = gsi_for_stmt (stmt);
2722 if (bf_cur != NULL_TREE)
2724 unsigned int tprec = TYPE_PRECISION (type);
2725 unsigned int rprec = tprec % limb_prec;
2726 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2727 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2728 bitsize_int (rprec + bo_bit),
2729 bitsize_int ((bo_idx + tprec / limb_prec)
2730 * limb_prec));
2731 rhs1 = bf_cur;
2732 if (bf_cur != ext)
2734 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2735 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2736 build_int_cst (unsigned_type_node,
2737 limb_prec - bo_bit));
2738 insert_before (g);
2740 rhs1 = add_cast (ftype, rhs1);
2741 g = gimple_build_assign (bfr, rhs1);
2742 insert_before (g);
2743 if (eh)
2745 maybe_duplicate_eh_stmt (g, stmt);
2746 if (eh_pad)
2748 edge e = split_block (gsi_bb (m_gsi), g);
2749 m_gsi = gsi_after_labels (e->dest);
2750 make_edge (e->src, eh_pad, EDGE_EH)->probability
2751 = profile_probability::very_unlikely ();
2756 if (gimple_store_p (stmt))
2758 unlink_stmt_vdef (stmt);
2759 release_ssa_name (gimple_vdef (stmt));
2760 gsi_remove (&m_gsi, true);
2762 if (eq_p)
2764 lhs = make_ssa_name (boolean_type_node);
2765 basic_block bb = gimple_bb (stmt);
2766 gphi *phi = create_phi_node (lhs, bb);
2767 edge e = find_edge (gsi_bb (m_gsi), bb);
2768 unsigned int n = EDGE_COUNT (bb->preds);
2769 for (unsigned int i = 0; i < n; i++)
2771 edge e2 = EDGE_PRED (bb, i);
2772 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2773 e2, UNKNOWN_LOCATION);
2775 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2776 return lhs;
2778 else
2779 return NULL_TREE;
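/* Rough shape of the huge _BitInt lowering above as pseudo C
   (illustrative only; bit-field stores, EH and the widening-cast
   optimization are omitted):

     for (idx = 0; idx < end; idx += 2)
       {
	 lhs[idx] = <stmt lowered at limb idx>;
	 lhs[idx + 1] = <stmt lowered at limb idx + 1>;
       }
     <at most one full and one partial limb in straight line code>
     <optionally a separate zero/sign extension of the upper limbs
      when the lowered operand is narrower than the lhs>  */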
2782 /* Handle a large/huge _BitInt comparison statement STMT other than
2783 EQ_EXPR/NE_EXPR. The meaning of CMP_CODE, CMP_OP1 and CMP_OP2 is as in
2784 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2785 lowered by iteration from the most significant limb downwards to
2786 the least significant one, for large _BitInt in straight line code,
2787 otherwise with the most significant limb handled in
2788 straight line code followed by a loop handling one limb at a time.
2789 Comparisons with unsigned huge _BitInt with precisions which are
2790 multiples of limb precision can use just the loop and don't need to
2791 handle most significant limb before the loop. The loop or straight
2792 line code jumps to final basic block if a particular pair of limbs
2793 is not equal. */
2795 tree
2796 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2797 tree cmp_op1, tree cmp_op2)
2799 tree type = TREE_TYPE (cmp_op1);
2800 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2801 bitint_prec_kind kind = bitint_precision_kind (type);
2802 gcc_assert (kind >= bitint_prec_large);
2803 gimple *g;
2804 if (!TYPE_UNSIGNED (type)
2805 && integer_zerop (cmp_op2)
2806 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2808 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2809 tree idx = size_int (end);
2810 m_data_cnt = 0;
2811 tree rhs1 = handle_operand (cmp_op1, idx);
2812 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2814 tree stype = signed_type_for (TREE_TYPE (rhs1));
2815 rhs1 = add_cast (stype, rhs1);
2817 tree lhs = make_ssa_name (boolean_type_node);
2818 g = gimple_build_assign (lhs, cmp_code, rhs1,
2819 build_zero_cst (TREE_TYPE (rhs1)));
2820 insert_before (g);
2821 cmp_code = NE_EXPR;
2822 return lhs;
2825 unsigned cnt, rem = 0, end = 0;
2826 tree idx = NULL_TREE, idx_next = NULL_TREE;
2827 if (kind == bitint_prec_large)
2828 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2829 else
2831 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2832 if (rem == 0 && !TYPE_UNSIGNED (type))
2833 rem = limb_prec;
2834 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2835 cnt = 1 + (rem != 0);
2838 basic_block edge_bb = NULL;
2839 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2840 gsi_prev (&gsi);
2841 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2842 edge_bb = e->src;
2843 m_gsi = gsi_end_bb (edge_bb);
2845 edge *edges = XALLOCAVEC (edge, cnt * 2);
2846 for (unsigned i = 0; i < cnt; i++)
2848 m_data_cnt = 0;
2849 if (kind == bitint_prec_large)
2850 idx = size_int (cnt - i - 1);
2851 else if (i == cnt - 1)
2852 idx = create_loop (size_int (end - 1), &idx_next);
2853 else
2854 idx = size_int (end);
2855 tree rhs1 = handle_operand (cmp_op1, idx);
2856 tree rhs2 = handle_operand (cmp_op2, idx);
2857 if (i == 0
2858 && !TYPE_UNSIGNED (type)
2859 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2861 tree stype = signed_type_for (TREE_TYPE (rhs1));
2862 rhs1 = add_cast (stype, rhs1);
2863 rhs2 = add_cast (stype, rhs2);
2865 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2866 insert_before (g);
2867 edge e1 = split_block (gsi_bb (m_gsi), g);
2868 e1->flags = EDGE_FALSE_VALUE;
2869 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2870 e1->probability = profile_probability::likely ();
2871 e2->probability = e1->probability.invert ();
2872 if (i == 0)
2873 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2874 m_gsi = gsi_after_labels (e1->dest);
2875 edges[2 * i] = e2;
2876 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2877 insert_before (g);
2878 e1 = split_block (gsi_bb (m_gsi), g);
2879 e1->flags = EDGE_FALSE_VALUE;
2880 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2881 e1->probability = profile_probability::unlikely ();
2882 e2->probability = e1->probability.invert ();
2883 m_gsi = gsi_after_labels (e1->dest);
2884 edges[2 * i + 1] = e2;
2885 m_first = false;
2886 if (kind == bitint_prec_huge && i == cnt - 1)
2888 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2889 insert_before (g);
2890 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2891 NULL_TREE, NULL_TREE);
2892 insert_before (g);
2893 edge true_edge, false_edge;
2894 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2895 &true_edge, &false_edge);
2896 m_gsi = gsi_after_labels (false_edge->dest);
2900 tree lhs = make_ssa_name (boolean_type_node);
2901 basic_block bb = gimple_bb (stmt);
2902 gphi *phi = create_phi_node (lhs, bb);
2903 for (unsigned int i = 0; i < cnt * 2; i++)
2905 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2906 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2907 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2909 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2910 ? boolean_true_node : boolean_false_node,
2911 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2912 cmp_code = NE_EXPR;
2913 return lhs;
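/* The lowering above corresponds to this pseudo C (illustrative;
   the loop form is used only for huge _BitInt):

     for (idx = nlimbs - 1; ; --idx)   // most significant limb first
       {
	 if (op1[idx] > op2[idx]) goto done;  // result for GT/GE
	 if (op1[idx] < op2[idx]) goto done;  // result for LT/LE
	 if (idx == 0) break;
       }
     // all limbs equal: true for GE/LE, false for GT/LT

   with the most significant limb compared in a signed type for
   signed _BitInt and the final answer selected by the PHI built
   above.  */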
2916 /* Lower large/huge _BitInt left and right shifts except for a left
2917 shift by a constant smaller than limb_prec. */
2919 void
2920 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2922 tree rhs1 = gimple_assign_rhs1 (stmt);
2923 tree lhs = gimple_assign_lhs (stmt);
2924 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2925 tree type = TREE_TYPE (rhs1);
2926 gimple *final_stmt = gsi_stmt (m_gsi);
2927 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2928 && bitint_precision_kind (type) >= bitint_prec_large);
2929 int prec = TYPE_PRECISION (type);
2930 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2931 gimple *g;
2932 if (obj == NULL_TREE)
2934 int part = var_to_partition (m_map, lhs);
2935 gcc_assert (m_vars[part] != NULL_TREE);
2936 obj = m_vars[part];
2938 /* Preparation code common for both left and right shifts.
2939 unsigned n1 = n % limb_prec;
2940 size_t n2 = n / limb_prec;
2941 size_t n3 = n1 != 0;
2942 unsigned n4 = (limb_prec - n1) % limb_prec;
2943 (for power of 2 limb_prec, n4 can be computed as -n1 & (limb_prec - 1)). */
2944 if (TREE_CODE (n) == INTEGER_CST)
2946 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2947 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2948 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2949 n3 = size_int (!integer_zerop (n1));
2950 n4 = int_const_binop (TRUNC_MOD_EXPR,
2951 int_const_binop (MINUS_EXPR, lp, n1), lp);
2953 else
2955 n1 = make_ssa_name (TREE_TYPE (n));
2956 n2 = make_ssa_name (sizetype);
2957 n3 = make_ssa_name (sizetype);
2958 n4 = make_ssa_name (TREE_TYPE (n));
2959 if (pow2p_hwi (limb_prec))
2961 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2962 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2963 insert_before (g);
2964 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2965 TREE_TYPE (n))
2966 ? n2 : make_ssa_name (TREE_TYPE (n)),
2967 RSHIFT_EXPR, n,
2968 build_int_cst (TREE_TYPE (n),
2969 exact_log2 (limb_prec)));
2970 insert_before (g);
2971 if (gimple_assign_lhs (g) != n2)
2973 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2974 insert_before (g);
2976 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2977 NEGATE_EXPR, n1);
2978 insert_before (g);
2979 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
2980 lpm1);
2981 insert_before (g);
2983 else
2985 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2986 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
2987 insert_before (g);
2988 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2989 TREE_TYPE (n))
2990 ? n2 : make_ssa_name (TREE_TYPE (n)),
2991 TRUNC_DIV_EXPR, n, lp);
2992 insert_before (g);
2993 if (gimple_assign_lhs (g) != n2)
2995 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2996 insert_before (g);
2998 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2999 MINUS_EXPR, lp, n1);
3000 insert_before (g);
3001 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3002 lp);
3003 insert_before (g);
3005 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3006 build_zero_cst (TREE_TYPE (n)));
3007 insert_before (g);
3008 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3009 insert_before (g);
3011 tree p = build_int_cst (sizetype,
3012 prec / limb_prec - (prec % limb_prec == 0));
3013 if (rhs_code == RSHIFT_EXPR)
3015 /* Lower
3016 dst = src >> n;
3018 unsigned n1 = n % limb_prec;
3019 size_t n2 = n / limb_prec;
3020 size_t n3 = n1 != 0;
3021 unsigned n4 = (limb_prec - n1) % limb_prec;
3022 size_t idx;
3023 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3024 int signed_p = (typeof (src) -1) < 0;
3025 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3026 ? p : p - n3); ++idx)
3027 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3028 limb_type ext;
3029 if (prec % limb_prec == 0)
3030 ext = src[p];
3031 else if (signed_p)
3032 ext = ((signed limb_type) (src[p] << (limb_prec
3033 - (prec % limb_prec))))
3034 >> (limb_prec - (prec % limb_prec));
3035 else
3036 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3037 if (!signed_p && (prec % limb_prec == 0))
3039 else if (idx < prec / limb_prec)
3041 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3042 ++idx;
3044 idx -= n2;
3045 if (signed_p)
3047 dst[idx] = ((signed limb_type) ext) >> n1;
3048 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3050 else
3052 dst[idx] = ext >> n1;
3053 ext = 0;
3055 for (++idx; idx <= p; ++idx)
3056 dst[idx] = ext; */
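/* Concrete instance of the pseudo code above (a sketch, assuming
   64-bit limbs): for unsigned _BitInt(128) and n = 5 we get n1 = 5,
   n2 = 0, n3 = 1, n4 = 59 and p = 1, so the loop performs
     dst[0] = (src[0] >> 5) | (src[1] << 59);
   and the epilogue stores
     dst[1] = src[1] >> 5;
   with ext = 0, leaving no upper limbs to fill.  */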
3057 tree pmn3;
3058 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3059 pmn3 = p;
3060 else if (TREE_CODE (n3) == INTEGER_CST)
3061 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3062 else
3064 pmn3 = make_ssa_name (sizetype);
3065 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3066 insert_before (g);
3068 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3069 edge edge_true, edge_false;
3070 if_then (g, profile_probability::likely (), edge_true, edge_false);
3071 tree idx_next;
3072 tree idx = create_loop (n2, &idx_next);
3073 tree idxmn2 = make_ssa_name (sizetype);
3074 tree idxpn3 = make_ssa_name (sizetype);
3075 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3076 insert_before (g);
3077 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3078 insert_before (g);
3079 m_data_cnt = 0;
3080 tree t1 = handle_operand (rhs1, idx);
3081 m_first = false;
3082 g = gimple_build_assign (make_ssa_name (m_limb_type),
3083 RSHIFT_EXPR, t1, n1);
3084 insert_before (g);
3085 t1 = gimple_assign_lhs (g);
3086 if (!integer_zerop (n3))
3088 m_data_cnt = 0;
3089 tree t2 = handle_operand (rhs1, idxpn3);
3090 g = gimple_build_assign (make_ssa_name (m_limb_type),
3091 LSHIFT_EXPR, t2, n4);
3092 insert_before (g);
3093 t2 = gimple_assign_lhs (g);
3094 g = gimple_build_assign (make_ssa_name (m_limb_type),
3095 BIT_IOR_EXPR, t1, t2);
3096 insert_before (g);
3097 t1 = gimple_assign_lhs (g);
3099 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3100 g = gimple_build_assign (l, t1);
3101 insert_before (g);
3102 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3103 insert_before (g);
3104 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3105 insert_before (g);
3106 idx = make_ssa_name (sizetype);
3107 m_gsi = gsi_for_stmt (final_stmt);
3108 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3109 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3110 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3111 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3112 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3113 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3114 m_data_cnt = 0;
3115 tree ms = handle_operand (rhs1, p);
3116 tree ext = ms;
3117 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3118 ext = add_cast (m_limb_type, ms);
3119 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3120 && !integer_zerop (n3))
3122 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3123 if_then (g, profile_probability::likely (), edge_true, edge_false);
3124 m_data_cnt = 0;
3125 t1 = handle_operand (rhs1, idx);
3126 g = gimple_build_assign (make_ssa_name (m_limb_type),
3127 RSHIFT_EXPR, t1, n1);
3128 insert_before (g);
3129 t1 = gimple_assign_lhs (g);
3130 g = gimple_build_assign (make_ssa_name (m_limb_type),
3131 LSHIFT_EXPR, ext, n4);
3132 insert_before (g);
3133 tree t2 = gimple_assign_lhs (g);
3134 g = gimple_build_assign (make_ssa_name (m_limb_type),
3135 BIT_IOR_EXPR, t1, t2);
3136 insert_before (g);
3137 t1 = gimple_assign_lhs (g);
3138 idxmn2 = make_ssa_name (sizetype);
3139 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3140 insert_before (g);
3141 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3142 g = gimple_build_assign (l, t1);
3143 insert_before (g);
3144 idx_next = make_ssa_name (sizetype);
3145 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3146 insert_before (g);
3147 m_gsi = gsi_for_stmt (final_stmt);
3148 tree nidx = make_ssa_name (sizetype);
3149 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3150 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3151 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3152 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3153 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3154 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3155 idx = nidx;
3157 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3158 insert_before (g);
3159 idx = gimple_assign_lhs (g);
3160 tree sext = ext;
3161 if (!TYPE_UNSIGNED (type))
3162 sext = add_cast (signed_type_for (m_limb_type), ext);
3163 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3164 RSHIFT_EXPR, sext, n1);
3165 insert_before (g);
3166 t1 = gimple_assign_lhs (g);
3167 if (!TYPE_UNSIGNED (type))
3169 t1 = add_cast (m_limb_type, t1);
3170 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3171 RSHIFT_EXPR, sext,
3172 build_int_cst (TREE_TYPE (n),
3173 limb_prec - 1));
3174 insert_before (g);
3175 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3177 else
3178 ext = build_zero_cst (m_limb_type);
3179 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3180 g = gimple_build_assign (l, t1);
3181 insert_before (g);
3182 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3183 size_one_node);
3184 insert_before (g);
3185 idx = gimple_assign_lhs (g);
3186 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3187 if_then (g, profile_probability::likely (), edge_true, edge_false);
3188 idx = create_loop (idx, &idx_next);
3189 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3190 g = gimple_build_assign (l, ext);
3191 insert_before (g);
3192 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3193 insert_before (g);
3194 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3195 insert_before (g);
3197 else
3199 /* Lower
3200 dst = src << n;
3202 unsigned n1 = n % limb_prec;
3203 size_t n2 = n / limb_prec;
3204 size_t n3 = n1 != 0;
3205 unsigned n4 = (limb_prec - n1) % limb_prec;
3206 size_t idx;
3207 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3208 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3209 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3210 if (n1)
3212 dst[idx] = src[idx - n2] << n1;
3213 --idx;
3215 for (; (ssize_t) idx >= 0; --idx)
3216 dst[idx] = 0; */
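/* Concrete instance (a sketch, assuming 64-bit limbs): for
   _BitInt(128) and n = 70 we get n1 = 6, n2 = 1, n3 = 1, n4 = 58 and
   p = 1.  The main loop does not run (p < n2 + n3), the n1 != 0
   branch stores
     dst[1] = src[0] << 6;
   and the final loop clears the remaining low limbs:
     dst[0] = 0;  */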
3217 tree n2pn3;
3218 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3219 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3220 else
3222 n2pn3 = make_ssa_name (sizetype);
3223 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3224 insert_before (g);
3226 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3227 idx even to access the most significant partial limb. */
3228 m_var_msb = true;
3229 if (integer_zerop (n3))
3230 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3231 counts. Emit an if (true) condition that can be optimized away later. */
3232 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3233 NULL_TREE, NULL_TREE);
3234 else
3235 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3236 edge edge_true, edge_false;
3237 if_then (g, profile_probability::likely (), edge_true, edge_false);
3238 tree idx_next;
3239 tree idx = create_loop (p, &idx_next);
3240 tree idxmn2 = make_ssa_name (sizetype);
3241 tree idxmn2mn3 = make_ssa_name (sizetype);
3242 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3243 insert_before (g);
3244 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3245 insert_before (g);
3246 m_data_cnt = 0;
3247 tree t1 = handle_operand (rhs1, idxmn2);
3248 m_first = false;
3249 g = gimple_build_assign (make_ssa_name (m_limb_type),
3250 LSHIFT_EXPR, t1, n1);
3251 insert_before (g);
3252 t1 = gimple_assign_lhs (g);
3253 if (!integer_zerop (n3))
3255 m_data_cnt = 0;
3256 tree t2 = handle_operand (rhs1, idxmn2mn3);
3257 g = gimple_build_assign (make_ssa_name (m_limb_type),
3258 RSHIFT_EXPR, t2, n4);
3259 insert_before (g);
3260 t2 = gimple_assign_lhs (g);
3261 g = gimple_build_assign (make_ssa_name (m_limb_type),
3262 BIT_IOR_EXPR, t1, t2);
3263 insert_before (g);
3264 t1 = gimple_assign_lhs (g);
3266 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3267 g = gimple_build_assign (l, t1);
3268 insert_before (g);
3269 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3270 insert_before (g);
3271 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3272 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3273 NULL_TREE, NULL_TREE);
3274 insert_before (g);
3275 idx = make_ssa_name (sizetype);
3276 m_gsi = gsi_for_stmt (final_stmt);
3277 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3278 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3279 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3280 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3281 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3282 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3283 m_data_cnt = 0;
3284 if (!integer_zerop (n3))
3286 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3287 NULL_TREE, NULL_TREE);
3288 if_then (g, profile_probability::likely (), edge_true, edge_false);
3289 idxmn2 = make_ssa_name (sizetype);
3290 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3291 insert_before (g);
3292 m_data_cnt = 0;
3293 t1 = handle_operand (rhs1, idxmn2);
3294 g = gimple_build_assign (make_ssa_name (m_limb_type),
3295 LSHIFT_EXPR, t1, n1);
3296 insert_before (g);
3297 t1 = gimple_assign_lhs (g);
3298 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3299 g = gimple_build_assign (l, t1);
3300 insert_before (g);
3301 idx_next = make_ssa_name (sizetype);
3302 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3303 insert_before (g);
3304 m_gsi = gsi_for_stmt (final_stmt);
3305 tree nidx = make_ssa_name (sizetype);
3306 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3307 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3308 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3309 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3310 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3311 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3312 idx = nidx;
3314 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3315 ssize_int (0), NULL_TREE, NULL_TREE);
3316 if_then (g, profile_probability::likely (), edge_true, edge_false);
3317 idx = create_loop (idx, &idx_next);
3318 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3319 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3320 insert_before (g);
3321 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3322 insert_before (g);
3323 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3324 ssize_int (0), NULL_TREE, NULL_TREE);
3325 insert_before (g);
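/* Note: left shifts by a constant smaller than limb_prec do not reach
   this function; they remain mergeable and are lowered via
   handle_lshift from handle_stmt, because each result limb then
   depends on at most two adjacent source limbs.  */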
3329 /* Lower large/huge _BitInt multiplication or division. */
3331 void
3332 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3334 tree rhs1 = gimple_assign_rhs1 (stmt);
3335 tree rhs2 = gimple_assign_rhs2 (stmt);
3336 tree lhs = gimple_assign_lhs (stmt);
3337 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3338 tree type = TREE_TYPE (rhs1);
3339 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3340 && bitint_precision_kind (type) >= bitint_prec_large);
3341 int prec = TYPE_PRECISION (type), prec1, prec2;
3342 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3343 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3344 if (obj == NULL_TREE)
3346 int part = var_to_partition (m_map, lhs);
3347 gcc_assert (m_vars[part] != NULL_TREE);
3348 obj = m_vars[part];
3349 lhs = build_fold_addr_expr (obj);
3351 else
3353 lhs = build_fold_addr_expr (obj);
3354 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3355 NULL_TREE, true, GSI_SAME_STMT);
3357 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3358 gimple *g;
3359 switch (rhs_code)
3361 case MULT_EXPR:
3362 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3363 lhs, build_int_cst (sitype, prec),
3364 rhs1, build_int_cst (sitype, prec1),
3365 rhs2, build_int_cst (sitype, prec2));
3366 insert_before (g);
3367 break;
3368 case TRUNC_DIV_EXPR:
3369 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3370 lhs, build_int_cst (sitype, prec),
3371 null_pointer_node,
3372 build_int_cst (sitype, 0),
3373 rhs1, build_int_cst (sitype, prec1),
3374 rhs2, build_int_cst (sitype, prec2));
3375 if (!stmt_ends_bb_p (stmt))
3376 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3377 insert_before (g);
3378 break;
3379 case TRUNC_MOD_EXPR:
3380 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3381 build_int_cst (sitype, 0),
3382 lhs, build_int_cst (sitype, prec),
3383 rhs1, build_int_cst (sitype, prec1),
3384 rhs2, build_int_cst (sitype, prec2));
3385 if (!stmt_ends_bb_p (stmt))
3386 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3387 insert_before (g);
3388 break;
3389 default:
3390 gcc_unreachable ();
3392 if (stmt_ends_bb_p (stmt))
3394 maybe_duplicate_eh_stmt (g, stmt);
3395 edge e1;
3396 edge_iterator ei;
3397 basic_block bb = gimple_bb (stmt);
3399 FOR_EACH_EDGE (e1, ei, bb->succs)
3400 if (e1->flags & EDGE_EH)
3401 break;
3402 if (e1)
3404 edge e2 = split_block (gsi_bb (m_gsi), g);
3405 m_gsi = gsi_after_labels (e2->dest);
3406 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3407 = profile_probability::very_unlikely ();
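/* Note: IFN_MULBITINT and IFN_DIVMODBITINT expand to calls to the
   libgcc helpers (__mulbitint and __divmodbitint4).  The SImode
   arguments built above encode each operand's precision, negative
   for sign-extended operands, and a null pointer with precision 0
   requests that the quotient (for TRUNC_MOD_EXPR) or remainder
   (for TRUNC_DIV_EXPR) not be stored.  */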
3412 /* Lower large/huge _BitInt conversion to/from floating point. */
3414 void
3415 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3417 tree rhs1 = gimple_assign_rhs1 (stmt);
3418 tree lhs = gimple_assign_lhs (stmt);
3419 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3420 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3421 gimple *g;
3422 if (rhs_code == FIX_TRUNC_EXPR)
3424 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3425 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3426 prec = -prec;
3427 if (obj == NULL_TREE)
3429 int part = var_to_partition (m_map, lhs);
3430 gcc_assert (m_vars[part] != NULL_TREE);
3431 obj = m_vars[part];
3432 lhs = build_fold_addr_expr (obj);
3434 else
3436 lhs = build_fold_addr_expr (obj);
3437 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3438 NULL_TREE, true, GSI_SAME_STMT);
3440 scalar_mode from_mode
3441 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3442 #ifdef HAVE_SFmode
3443 /* IEEE single is a full superset of both the IEEE half and
3444 bfloat formats; convert to float first and then to _BitInt
3445 to avoid the need for another 2 library routines. */
3446 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3447 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3448 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3450 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3451 if (type)
3452 rhs1 = add_cast (type, rhs1);
3454 #endif
3455 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3456 lhs, build_int_cst (sitype, prec),
3457 rhs1);
3458 insert_before (g);
3460 else
3462 int prec;
3463 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3464 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3465 rhs1, build_int_cst (sitype, prec));
3466 gimple_call_set_lhs (g, lhs);
3467 if (!stmt_ends_bb_p (stmt))
3468 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3469 gsi_replace (&m_gsi, g, true);
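/* Note: the SImode precision argument passed to IFN_FLOATTOBITINT
   and IFN_BITINTTOFLOAT follows the same convention as elsewhere in
   this pass, e.g. -135 for a signed _BitInt(135) and 135 for an
   unsigned one, so a single library entry point per floating mode
   serves both signednesses.  */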
3473 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3474 If CHECK_ZERO is true, the caller wants to check whether all bits in
3475 [START, END) are zero; otherwise, whether the bits in [START, END)
3476 are either all zero or all ones. L is the limb with index LIMB;
3477 START and END are measured in bits. */
3479 tree
3480 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3481 unsigned int end, tree l,
3482 unsigned int limb,
3483 bool check_zero)
3485 unsigned startlimb = start / limb_prec;
3486 unsigned endlimb = (end - 1) / limb_prec;
3487 gimple *g;
3489 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3490 return l;
3491 if (startlimb == endlimb && limb == startlimb)
3493 if (check_zero)
3495 wide_int w = wi::shifted_mask (start % limb_prec,
3496 end - start, false, limb_prec);
3497 g = gimple_build_assign (make_ssa_name (m_limb_type),
3498 BIT_AND_EXPR, l,
3499 wide_int_to_tree (m_limb_type, w));
3500 insert_before (g);
3501 return gimple_assign_lhs (g);
3503 unsigned int shift = start % limb_prec;
3504 if ((end % limb_prec) != 0)
3506 unsigned int lshift = (-end) % limb_prec;
3507 shift += lshift;
3508 g = gimple_build_assign (make_ssa_name (m_limb_type),
3509 LSHIFT_EXPR, l,
3510 build_int_cst (unsigned_type_node,
3511 lshift));
3512 insert_before (g);
3513 l = gimple_assign_lhs (g);
3515 l = add_cast (signed_type_for (m_limb_type), l);
3516 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3517 RSHIFT_EXPR, l,
3518 build_int_cst (unsigned_type_node, shift));
3519 insert_before (g);
3520 return add_cast (m_limb_type, gimple_assign_lhs (g));
3522 else if (limb == startlimb)
3524 if ((start % limb_prec) == 0)
3525 return l;
3526 if (!check_zero)
3527 l = add_cast (signed_type_for (m_limb_type), l);
3528 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3529 RSHIFT_EXPR, l,
3530 build_int_cst (unsigned_type_node,
3531 start % limb_prec));
3532 insert_before (g);
3533 l = gimple_assign_lhs (g);
3534 if (!check_zero)
3535 l = add_cast (m_limb_type, l);
3536 return l;
3538 else if (limb == endlimb)
3540 if ((end % limb_prec) == 0)
3541 return l;
3542 if (check_zero)
3544 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3545 g = gimple_build_assign (make_ssa_name (m_limb_type),
3546 BIT_AND_EXPR, l,
3547 wide_int_to_tree (m_limb_type, w));
3548 insert_before (g);
3549 return gimple_assign_lhs (g);
3551 unsigned int shift = (-end) % limb_prec;
3552 g = gimple_build_assign (make_ssa_name (m_limb_type),
3553 LSHIFT_EXPR, l,
3554 build_int_cst (unsigned_type_node, shift));
3555 insert_before (g);
3556 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3557 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3558 RSHIFT_EXPR, l,
3559 build_int_cst (unsigned_type_node, shift));
3560 insert_before (g);
3561 return add_cast (m_limb_type, gimple_assign_lhs (g));
3563 return l;
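/* Example (a sketch, assuming 64-bit limbs): for START = 70 and
   END = 128 both bounds fall into limb 1, so for LIMB == 1 with
   CHECK_ZERO the bits are extracted as
     l & 0xffffffffffffffc0
   (bits 6..63), while for !CHECK_ZERO they are shifted down
   arithmetically so the caller can compare the result against all
   zeros or all ones.  */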
3566 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3567 the result, including the overflow flag, into the right locations. */
3569 void
3570 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3571 tree ovf, tree lhs, tree orig_obj,
3572 gimple *stmt, tree_code code)
3574 gimple *g;
3576 if (obj == NULL_TREE
3577 && (TREE_CODE (type) != BITINT_TYPE
3578 || bitint_precision_kind (type) < bitint_prec_large))
3580 /* Add support for 3 or more limbs filled in from a normal integral
3581 type if this assert fails. If no target chooses a limb mode smaller
3582 than half of the largest supported normal integral type, this will
3583 not be needed. */
3584 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3585 tree lhs_type = type;
3586 if (TREE_CODE (type) == BITINT_TYPE
3587 && bitint_precision_kind (type) == bitint_prec_middle)
3588 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3589 TYPE_UNSIGNED (type));
3590 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3591 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3592 insert_before (g);
3593 r1 = gimple_assign_lhs (g);
3594 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3595 r1 = add_cast (lhs_type, r1);
3596 if (TYPE_PRECISION (lhs_type) > limb_prec)
3598 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3599 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3600 insert_before (g);
3601 r2 = gimple_assign_lhs (g);
3602 r2 = add_cast (lhs_type, r2);
3603 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3604 build_int_cst (unsigned_type_node,
3605 limb_prec));
3606 insert_before (g);
3607 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3608 gimple_assign_lhs (g));
3609 insert_before (g);
3610 r1 = gimple_assign_lhs (g);
3612 if (lhs_type != type)
3613 r1 = add_cast (type, r1);
3614 ovf = add_cast (lhs_type, ovf);
3615 if (lhs_type != type)
3616 ovf = add_cast (type, ovf);
3617 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3618 m_gsi = gsi_for_stmt (stmt);
3619 gsi_replace (&m_gsi, g, true);
3621 else
3623 unsigned HOST_WIDE_INT nelts = 0;
3624 tree atype = NULL_TREE;
3625 if (obj)
3627 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3628 if (orig_obj == NULL_TREE)
3629 nelts >>= 1;
3630 atype = build_array_type_nelts (m_limb_type, nelts);
3632 if (var && obj)
3634 tree v1, v2;
3635 tree zero;
3636 if (orig_obj == NULL_TREE)
3638 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3639 v1 = build2 (MEM_REF, atype,
3640 build_fold_addr_expr (unshare_expr (obj)), zero);
3642 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3643 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3644 else
3645 v1 = unshare_expr (obj);
3646 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3647 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3648 g = gimple_build_assign (v1, v2);
3649 insert_before (g);
3651 if (orig_obj == NULL_TREE && obj)
3653 ovf = add_cast (m_limb_type, ovf);
3654 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3655 g = gimple_build_assign (l, ovf);
3656 insert_before (g);
3657 if (nelts > 1)
3659 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3660 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3661 (nelts + 1) * m_limb_size);
3662 tree v1 = build2 (MEM_REF, atype,
3663 build_fold_addr_expr (unshare_expr (obj)),
3664 off);
3665 g = gimple_build_assign (v1, build_zero_cst (atype));
3666 insert_before (g);
3669 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3671 imm_use_iterator ui;
3672 use_operand_p use_p;
3673 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3675 g = USE_STMT (use_p);
3676 if (!is_gimple_assign (g)
3677 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3678 continue;
3679 tree lhs2 = gimple_assign_lhs (g);
3680 gimple *use_stmt;
3681 single_imm_use (lhs2, &use_p, &use_stmt);
3682 lhs2 = gimple_assign_lhs (use_stmt);
3683 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3684 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3685 g = gimple_build_assign (lhs2, ovf);
3686 else
3687 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3688 gsi_replace (&gsi, g, true);
3689 if (gsi_stmt (m_gsi) == use_stmt)
3690 m_gsi = gsi_for_stmt (g);
3691 break;
3694 else if (ovf != boolean_false_node)
3696 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3697 NULL_TREE, NULL_TREE);
3698 edge edge_true, edge_false;
3699 if_then (g, profile_probability::very_unlikely (),
3700 edge_true, edge_false);
3701 tree zero = build_zero_cst (TREE_TYPE (lhs));
3702 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3703 TREE_TYPE (lhs),
3704 zero, zero, NULL);
3705 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3706 true, GSI_SAME_STMT);
3707 m_gsi = gsi_after_labels (edge_true->dest);
3710 if (var)
3712 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3713 g = gimple_build_assign (var, clobber);
3714 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
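/* Layout illustration (not from the sources; assumes 64-bit limbs):
   for a _Complex _BitInt(256) LHS computed into a fresh OBJ (ORIG_OBJ
   NULL), OBJ covers 8 limbs and NELTS is halved to 4: limbs 0-3
   receive the REALPART_EXPR value copied from VAR, limb 4 receives
   the zero-extended overflow flag and limbs 5-7 are cleared, so that
   a later IMAGPART_EXPR lowering can read the flag from the upper
   half.  */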
3718 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3719 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3720 argument 1 precision PREC1 and minimum precision for the result
3721 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3723 static tree
3724 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3725 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3727 *start = 0;
3728 *end = 0;
3729 *check_zero = true;
3730 /* Ignore this special rule for subtraction: even if both
3731 prec0 >= 0 and prec1 >= 0, their difference can be negative
3732 in infinite precision. */
3733 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3735 /* Result in [0, prec2) is unsigned; if prec > prec2,
3736 all bits above it will be zero. */
3737 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3738 return boolean_false_node;
3739 else
3741 /* ovf if any of the bits in [start, end) is non-zero. */
3742 *start = prec - !TYPE_UNSIGNED (type);
3743 *end = prec2;
3746 else if (TYPE_UNSIGNED (type))
3748 /* If the result in [0, prec2) is signed and prec > prec2,
3749 all bits above it will be sign bit copies. */
3750 if (prec >= prec2)
3752 /* ovf if bit prec - 1 is non-zero. */
3753 *start = prec - 1;
3754 *end = prec;
3756 else
3758 /* ovf if any of the bits in [start, end) is non-zero. */
3759 *start = prec;
3760 *end = prec2;
3763 else if (prec >= prec2)
3764 return boolean_false_node;
3765 else
3767 /* ovf if [start, end) bits aren't all zeros or all ones. */
3768 *start = prec - 1;
3769 *end = prec2;
3770 *check_zero = false;
3772 return NULL_TREE;
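/* Worked example (illustration only): for CODE PLUS_EXPR, PREC0 8 and
   PREC1 8 (both arguments in [0, 0xff]) the infinite precision result
   fits into PREC2 9 unsigned bits.  With unsigned _BitInt(16) TYPE,
   16 - 0 >= 9 holds and boolean_false_node is returned: overflow is
   impossible.  With signed _BitInt(8) TYPE, 8 - 1 < 9, so *START 7
   and *END 9: overflow happened iff any of bits [7, 9) of the
   infinite precision result is non-zero.  With signed _BitInt(4) TYPE
   and PREC0 -8, PREC1 -8 (PREC2 9), the final else is taken: *START 3,
   *END 9 and *CHECK_ZERO false, i.e. bits [3, 9) must be either all
   zeros or all ones.  */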
3775 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3776 argument or return type _Complex large/huge _BitInt. */
3778 void
3779 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3781 tree arg0 = gimple_call_arg (stmt, 0);
3782 tree arg1 = gimple_call_arg (stmt, 1);
3783 tree lhs = gimple_call_lhs (stmt);
3784 gimple *g;
3786 if (!lhs)
3788 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3789 gsi_remove (&gsi, true);
3790 return;
3792 gimple *final_stmt = gsi_stmt (m_gsi);
3793 tree type = TREE_TYPE (lhs);
3794 if (TREE_CODE (type) == COMPLEX_TYPE)
3795 type = TREE_TYPE (type);
3796 int prec = TYPE_PRECISION (type);
3797 int prec0 = range_to_prec (arg0, stmt);
3798 int prec1 = range_to_prec (arg1, stmt);
3799 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3800 the minimum unsigned precision of any possible operation's
3801 result, otherwise it is the minimum signed precision.
3802 Some examples:
3803 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3804 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3805 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3806 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3807 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3808 8 + 8 [0, 0x1fe] 9 UNSIGNED
3809 8 + 10 [0, 0x4fe] 11 UNSIGNED
3810 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3811 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3812 8 + -8 [-0x80, 0x17e] 10 SIGNED
3813 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3814 10 + -8 [-0x80, 0x47e] 12 SIGNED
3815 8 - 8 [-0xff, 0xff] 9 SIGNED
3816 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3817 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3818 -8 - -8 [-0xff, 0xff] 9 SIGNED
3819 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3820 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3821 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3822 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3823 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3824 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3825 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3826 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3827 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3828 prec1 < 0 ? -prec1 : prec1);
3829 /* If operands are either both signed or both unsigned,
3830 we need just one additional bit. */
3831 prec2 = (((prec0 < 0) == (prec1 < 0)
3832 /* If one operand is signed and one unsigned and
3833 the signed one has larger precision, we need
3834 just one extra bit, otherwise two. */
3835 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3836 : (prec2 == -prec1 && prec2 != prec0)))
3837 ? prec2 + 1 : prec2 + 2);
3838 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3839 prec1 < 0 ? -prec1 : prec1);
3840 prec3 = MAX (prec3, prec);
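/* E.g. (illustration): for PREC0 8 and PREC1 -10 the initial MAX is
   10 and the signed operand is the wider one, so one extra bit
   suffices and PREC2 becomes 11, matching the "8 + -10" row above.
   For PREC0 10 and PREC1 -8 the unsigned operand is the wider one,
   two extra bits are needed and PREC2 becomes 12, matching the
   "10 + -8" row.  */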
3841 tree var = NULL_TREE;
3842 tree orig_obj = obj;
3843 if (obj == NULL_TREE
3844 && TREE_CODE (type) == BITINT_TYPE
3845 && bitint_precision_kind (type) >= bitint_prec_large
3846 && m_names
3847 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3849 int part = var_to_partition (m_map, lhs);
3850 gcc_assert (m_vars[part] != NULL_TREE);
3851 obj = m_vars[part];
3852 if (TREE_TYPE (lhs) == type)
3853 orig_obj = obj;
3855 if (TREE_CODE (type) != BITINT_TYPE
3856 || bitint_precision_kind (type) < bitint_prec_large)
3858 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3859 tree atype = build_array_type_nelts (m_limb_type, nelts);
3860 var = create_tmp_var (atype);
3863 enum tree_code code;
3864 switch (gimple_call_internal_fn (stmt))
3866 case IFN_ADD_OVERFLOW:
3867 case IFN_UBSAN_CHECK_ADD:
3868 code = PLUS_EXPR;
3869 break;
3870 case IFN_SUB_OVERFLOW:
3871 case IFN_UBSAN_CHECK_SUB:
3872 code = MINUS_EXPR;
3873 break;
3874 default:
3875 gcc_unreachable ();
3877 unsigned start, end;
3878 bool check_zero;
3879 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3880 &start, &end, &check_zero);
3882 unsigned startlimb, endlimb;
3883 if (ovf)
3885 startlimb = ~0U;
3886 endlimb = ~0U;
3888 else
3890 startlimb = start / limb_prec;
3891 endlimb = (end - 1) / limb_prec;
3894 int prec4 = ovf != NULL_TREE ? prec : prec3;
3895 bitint_prec_kind kind = bitint_precision_kind (prec4);
3896 unsigned cnt, rem = 0, fin = 0;
3897 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3898 bool last_ovf = (ovf == NULL_TREE
3899 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3900 if (kind != bitint_prec_huge)
3901 cnt = CEIL (prec4, limb_prec) + last_ovf;
3902 else
3904 rem = (prec4 % (2 * limb_prec));
3905 fin = (prec4 - rem) / limb_prec;
3906 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3907 idx = idx_first = create_loop (size_zero_node, &idx_next);
3910 if (kind == bitint_prec_huge)
3911 m_upwards_2limb = fin;
3912 m_upwards = true;
3914 tree type0 = TREE_TYPE (arg0);
3915 tree type1 = TREE_TYPE (arg1);
3916 int prec5 = prec3;
3917 if (bitint_precision_kind (prec5) < bitint_prec_large)
3918 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3919 if (TYPE_PRECISION (type0) < prec5)
3921 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3922 if (TREE_CODE (arg0) == INTEGER_CST)
3923 arg0 = fold_convert (type0, arg0);
3925 if (TYPE_PRECISION (type1) < prec5)
3927 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3928 if (TREE_CODE (arg1) == INTEGER_CST)
3929 arg1 = fold_convert (type1, arg1);
3931 unsigned int data_cnt = 0;
3932 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3933 tree cmp = build_zero_cst (m_limb_type);
3934 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3935 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3936 for (unsigned i = 0; i < cnt; i++)
3938 m_data_cnt = 0;
3939 tree rhs1, rhs2;
3940 if (kind != bitint_prec_huge)
3941 idx = size_int (i);
3942 else if (i >= 2)
3943 idx = size_int (fin + (i > 2));
3944 if (!last_ovf || i < cnt - 1)
3946 if (type0 != TREE_TYPE (arg0))
3947 rhs1 = handle_cast (type0, arg0, idx);
3948 else
3949 rhs1 = handle_operand (arg0, idx);
3950 if (type1 != TREE_TYPE (arg1))
3951 rhs2 = handle_cast (type1, arg1, idx);
3952 else
3953 rhs2 = handle_operand (arg1, idx);
3954 if (i == 0)
3955 data_cnt = m_data_cnt;
3956 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3957 rhs1 = add_cast (m_limb_type, rhs1);
3958 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3959 rhs2 = add_cast (m_limb_type, rhs2);
3960 last_rhs1 = rhs1;
3961 last_rhs2 = rhs2;
3963 else
3965 m_data_cnt = data_cnt;
3966 if (TYPE_UNSIGNED (type0))
3967 rhs1 = build_zero_cst (m_limb_type);
3968 else
3970 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3971 if (TREE_CODE (rhs1) == INTEGER_CST)
3972 rhs1 = build_int_cst (m_limb_type,
3973 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3974 else
3976 tree lpm1 = build_int_cst (unsigned_type_node,
3977 limb_prec - 1);
3978 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
3979 RSHIFT_EXPR, rhs1, lpm1);
3980 insert_before (g);
3981 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
3984 if (TYPE_UNSIGNED (type1))
3985 rhs2 = build_zero_cst (m_limb_type);
3986 else
3988 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
3989 if (TREE_CODE (rhs2) == INTEGER_CST)
3990 rhs2 = build_int_cst (m_limb_type,
3991 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
3992 else
3994 tree lpm1 = build_int_cst (unsigned_type_node,
3995 limb_prec - 1);
3996 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
3997 RSHIFT_EXPR, rhs2, lpm1);
3998 insert_before (g);
3999 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4003 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4004 if (ovf != boolean_false_node)
4006 if (tree_fits_uhwi_p (idx))
4008 unsigned limb = tree_to_uhwi (idx);
4009 if (limb >= startlimb && limb <= endlimb)
4011 tree l = arith_overflow_extract_bits (start, end, rhs,
4012 limb, check_zero);
4013 tree this_ovf = make_ssa_name (boolean_type_node);
4014 if (ovf == NULL_TREE && !check_zero)
4016 cmp = l;
4017 g = gimple_build_assign (make_ssa_name (m_limb_type),
4018 PLUS_EXPR, l,
4019 build_int_cst (m_limb_type, 1));
4020 insert_before (g);
4021 g = gimple_build_assign (this_ovf, GT_EXPR,
4022 gimple_assign_lhs (g),
4023 build_int_cst (m_limb_type, 1));
4025 else
4026 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4027 insert_before (g);
4028 if (ovf == NULL_TREE)
4029 ovf = this_ovf;
4030 else
4032 tree b = make_ssa_name (boolean_type_node);
4033 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4034 insert_before (g);
4035 ovf = b;
4039 else if (startlimb < fin)
4041 if (m_first && startlimb + 2 < fin)
4043 tree data_out;
4044 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4045 ovf_out = m_data.pop ();
4046 m_data.pop ();
4047 if (!check_zero)
4049 cmp = prepare_data_in_out (cmp, idx, &data_out);
4050 cmp_out = m_data.pop ();
4051 m_data.pop ();
4054 if (i != 0 || startlimb != fin - 1)
4056 tree_code cmp_code;
4057 bool single_comparison
4058 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4059 if (!single_comparison)
4061 cmp_code = GE_EXPR;
4062 if (!check_zero && (start % limb_prec) == 0)
4063 single_comparison = true;
4065 else if ((startlimb & 1) == (i & 1))
4066 cmp_code = EQ_EXPR;
4067 else
4068 cmp_code = GT_EXPR;
4069 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4070 NULL_TREE, NULL_TREE);
4071 edge edge_true_true, edge_true_false, edge_false;
4072 gimple *g2 = NULL;
4073 if (!single_comparison)
4074 g2 = gimple_build_cond (NE_EXPR, idx,
4075 size_int (startlimb), NULL_TREE,
4076 NULL_TREE);
4077 if_then_if_then_else (g, g2, profile_probability::likely (),
4078 profile_probability::likely (),
4079 edge_true_true, edge_true_false,
4080 edge_false);
4081 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4082 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4083 check_zero);
4084 tree this_ovf = make_ssa_name (boolean_type_node);
4085 if (cmp_code != GT_EXPR && !check_zero)
4087 g = gimple_build_assign (make_ssa_name (m_limb_type),
4088 PLUS_EXPR, l,
4089 build_int_cst (m_limb_type, 1));
4090 insert_before (g);
4091 g = gimple_build_assign (this_ovf, GT_EXPR,
4092 gimple_assign_lhs (g),
4093 build_int_cst (m_limb_type, 1));
4095 else
4096 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4097 insert_before (g);
4098 if (cmp_code == GT_EXPR)
4100 tree t = make_ssa_name (boolean_type_node);
4101 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4102 insert_before (g);
4103 this_ovf = t;
4105 tree this_ovf2 = NULL_TREE;
4106 if (!single_comparison)
4108 m_gsi = gsi_after_labels (edge_true_true->src);
4109 tree t = make_ssa_name (boolean_type_node);
4110 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4111 insert_before (g);
4112 this_ovf2 = make_ssa_name (boolean_type_node);
4113 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4114 ovf, t);
4115 insert_before (g);
4117 m_gsi = gsi_after_labels (edge_true_false->dest);
4118 tree t;
4119 if (i == 1 && ovf_out)
4120 t = ovf_out;
4121 else
4122 t = make_ssa_name (boolean_type_node);
4123 gphi *phi = create_phi_node (t, edge_true_false->dest);
4124 add_phi_arg (phi, this_ovf, edge_true_false,
4125 UNKNOWN_LOCATION);
4126 add_phi_arg (phi, ovf ? ovf
4127 : boolean_false_node, edge_false,
4128 UNKNOWN_LOCATION);
4129 if (edge_true_true)
4130 add_phi_arg (phi, this_ovf2, edge_true_true,
4131 UNKNOWN_LOCATION);
4132 ovf = t;
4133 if (!check_zero && cmp_code != GT_EXPR)
4135 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4136 phi = create_phi_node (t, edge_true_false->dest);
4137 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4138 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4139 if (edge_true_true)
4140 add_phi_arg (phi, cmp, edge_true_true,
4141 UNKNOWN_LOCATION);
4142 cmp = t;
4148 if (var || obj)
4150 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4151 ;
4152 else if (!tree_fits_uhwi_p (idx)
4153 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4155 bool single_comparison
4156 = (((unsigned) prec % limb_prec) == 0
4157 || prec_limbs + 1 >= fin
4158 || (prec_limbs & 1) == (i & 1));
4159 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4160 NULL_TREE, NULL_TREE);
4161 gimple *g2 = NULL;
4162 if (!single_comparison)
4163 g2 = gimple_build_cond (LT_EXPR, idx,
4164 size_int (prec_limbs - 1),
4165 NULL_TREE, NULL_TREE);
4166 edge edge_true_true, edge_true_false, edge_false;
4167 if_then_if_then_else (g, g2, profile_probability::likely (),
4168 profile_probability::likely (),
4169 edge_true_true, edge_true_false,
4170 edge_false);
4171 tree l = limb_access (type, var ? var : obj, idx, true);
4172 g = gimple_build_assign (l, rhs);
4173 insert_before (g);
4174 if (!single_comparison)
4176 m_gsi = gsi_after_labels (edge_true_true->src);
4177 l = limb_access (type, var ? var : obj,
4178 size_int (prec_limbs - 1), true);
4179 if (!useless_type_conversion_p (TREE_TYPE (l),
4180 TREE_TYPE (rhs)))
4181 rhs = add_cast (TREE_TYPE (l), rhs);
4182 g = gimple_build_assign (l, rhs);
4183 insert_before (g);
4185 m_gsi = gsi_after_labels (edge_true_false->dest);
4187 else
4189 tree l = limb_access (type, var ? var : obj, idx, true);
4190 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4191 rhs = add_cast (TREE_TYPE (l), rhs);
4192 g = gimple_build_assign (l, rhs);
4193 insert_before (g);
4196 m_first = false;
4197 if (kind == bitint_prec_huge && i <= 1)
4199 if (i == 0)
4201 idx = make_ssa_name (sizetype);
4202 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4203 size_one_node);
4204 insert_before (g);
4206 else
4208 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4209 size_int (2));
4210 insert_before (g);
4211 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4212 NULL_TREE, NULL_TREE);
4213 insert_before (g);
4214 m_gsi = gsi_for_stmt (final_stmt);
4219 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
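/* For a straight line 4-limb case the code emitted above computes, in
   effect, the equivalent of the following plain C sketch (an
   illustration only, assuming 64-bit limbs and unsigned operands a[]
   and b[]; the real lowering emits GIMPLE, handles mixed signedness
   and the extra last_ovf iteration, and tests the [start, end) window
   computed by arith_overflow):

     unsigned long r[4], carry = 0;
     for (int i = 0; i < 4; i++)
       {
         unsigned long tmp;
         unsigned long c1 = __builtin_add_overflow (a[i], b[i], &tmp);
         unsigned long c2 = __builtin_add_overflow (tmp, carry, &r[i]);
         carry = c1 | c2;
       }
 */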
4222 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4223 argument or return type _Complex large/huge _BitInt. */
4225 void
4226 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4228 tree arg0 = gimple_call_arg (stmt, 0);
4229 tree arg1 = gimple_call_arg (stmt, 1);
4230 tree lhs = gimple_call_lhs (stmt);
4231 if (!lhs)
4233 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4234 gsi_remove (&gsi, true);
4235 return;
4237 gimple *final_stmt = gsi_stmt (m_gsi);
4238 tree type = TREE_TYPE (lhs);
4239 if (TREE_CODE (type) == COMPLEX_TYPE)
4240 type = TREE_TYPE (type);
4241 int prec = TYPE_PRECISION (type), prec0, prec1;
4242 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4243 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4244 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4245 + (prec1 < 0 ? -prec1 : prec1));
4246 if (prec0 == 1 || prec1 == 1)
4247 --prec2;
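/* E.g. (illustration): PREC0 8 and PREC1 8 give PREC2 16, since
   0xff * 0xff == 0xfe01 needs 16 bits; PREC0 1 (argument known to be
   0 or 1) and PREC1 8 give PREC2 8, because multiplying by at most 1
   cannot add a bit on top of the other operand's precision.  */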
4248 tree var = NULL_TREE;
4249 tree orig_obj = obj;
4250 bool force_var = false;
4251 if (obj == NULL_TREE
4252 && TREE_CODE (type) == BITINT_TYPE
4253 && bitint_precision_kind (type) >= bitint_prec_large
4254 && m_names
4255 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4257 int part = var_to_partition (m_map, lhs);
4258 gcc_assert (m_vars[part] != NULL_TREE);
4259 obj = m_vars[part];
4260 if (TREE_TYPE (lhs) == type)
4261 orig_obj = obj;
4263 else if (obj != NULL_TREE && DECL_P (obj))
4265 for (int i = 0; i < 2; ++i)
4267 tree arg = i ? arg1 : arg0;
4268 if (TREE_CODE (arg) == ADDR_EXPR)
4269 arg = TREE_OPERAND (arg, 0);
4270 if (get_base_address (arg) == obj)
4272 force_var = true;
4273 break;
4277 if (obj == NULL_TREE
4278 || force_var
4279 || TREE_CODE (type) != BITINT_TYPE
4280 || bitint_precision_kind (type) < bitint_prec_large
4281 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4283 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4284 tree atype = build_array_type_nelts (m_limb_type, nelts);
4285 var = create_tmp_var (atype);
4287 tree addr = build_fold_addr_expr (var ? var : obj);
4288 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4289 NULL_TREE, true, GSI_SAME_STMT);
4290 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4291 gimple *g
4292 = gimple_build_call_internal (IFN_MULBITINT, 6,
4293 addr, build_int_cst (sitype,
4294 MAX (prec2, prec)),
4295 arg0, build_int_cst (sitype, prec0),
4296 arg1, build_int_cst (sitype, prec1));
4297 insert_before (g);
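/* IFN_MULBITINT expands to a call to the libgcc routine __mulbitint3:
   a destination limb array address and its bit precision, followed by
   the two source operands, each as a limb array address plus a bit
   precision which is negative for sign-extended operands (see
   handle_operand_addr).  Summary only; see libgcc for the
   implementation.  */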
4299 unsigned start, end;
4300 bool check_zero;
4301 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4302 &start, &end, &check_zero);
4303 if (ovf == NULL_TREE)
4305 unsigned startlimb = start / limb_prec;
4306 unsigned endlimb = (end - 1) / limb_prec;
4307 unsigned cnt;
4308 bool use_loop = false;
4309 if (startlimb == endlimb)
4310 cnt = 1;
4311 else if (startlimb + 1 == endlimb)
4312 cnt = 2;
4313 else if ((end % limb_prec) == 0)
4315 cnt = 2;
4316 use_loop = true;
4318 else
4320 cnt = 3;
4321 use_loop = startlimb + 2 < endlimb;
4323 if (cnt == 1)
4325 tree l = limb_access (NULL_TREE, var ? var : obj,
4326 size_int (startlimb), true);
4327 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4328 insert_before (g);
4329 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4330 startlimb, check_zero);
4331 ovf = make_ssa_name (boolean_type_node);
4332 if (check_zero)
4333 g = gimple_build_assign (ovf, NE_EXPR, l,
4334 build_zero_cst (m_limb_type));
4335 else
4337 g = gimple_build_assign (make_ssa_name (m_limb_type),
4338 PLUS_EXPR, l,
4339 build_int_cst (m_limb_type, 1));
4340 insert_before (g);
4341 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4342 build_int_cst (m_limb_type, 1));
4344 insert_before (g);
4346 else
4348 basic_block edge_bb = NULL;
4349 gimple_stmt_iterator gsi = m_gsi;
4350 gsi_prev (&gsi);
4351 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4352 edge_bb = e->src;
4353 m_gsi = gsi_end_bb (edge_bb);
4355 tree cmp = build_zero_cst (m_limb_type);
4356 for (unsigned i = 0; i < cnt; i++)
4358 tree idx, idx_next = NULL_TREE;
4359 if (i == 0)
4360 idx = size_int (startlimb);
4361 else if (i == 2)
4362 idx = size_int (endlimb);
4363 else if (use_loop)
4364 idx = create_loop (size_int (startlimb + 1), &idx_next);
4365 else
4366 idx = size_int (startlimb + 1);
4367 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4368 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4369 insert_before (g);
4370 l = gimple_assign_lhs (g);
4371 if (i == 0 || i == 2)
4372 l = arith_overflow_extract_bits (start, end, l,
4373 tree_to_uhwi (idx),
4374 check_zero);
4375 if (i == 0 && !check_zero)
4377 cmp = l;
4378 g = gimple_build_assign (make_ssa_name (m_limb_type),
4379 PLUS_EXPR, l,
4380 build_int_cst (m_limb_type, 1));
4381 insert_before (g);
4382 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4383 build_int_cst (m_limb_type, 1),
4384 NULL_TREE, NULL_TREE);
4386 else
4387 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4388 insert_before (g);
4389 edge e1 = split_block (gsi_bb (m_gsi), g);
4390 e1->flags = EDGE_FALSE_VALUE;
4391 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4392 EDGE_TRUE_VALUE);
4393 e1->probability = profile_probability::likely ();
4394 e2->probability = e1->probability.invert ();
4395 if (i == 0)
4396 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4397 m_gsi = gsi_after_labels (e1->dest);
4398 if (i == 1 && use_loop)
4400 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4401 size_one_node);
4402 insert_before (g);
4403 g = gimple_build_cond (NE_EXPR, idx_next,
4404 size_int (endlimb + (cnt == 2)),
4405 NULL_TREE, NULL_TREE);
4406 insert_before (g);
4407 edge true_edge, false_edge;
4408 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4409 &true_edge,
4410 &false_edge);
4411 m_gsi = gsi_after_labels (false_edge->dest);
4415 ovf = make_ssa_name (boolean_type_node);
4416 basic_block bb = gimple_bb (final_stmt);
4417 gphi *phi = create_phi_node (ovf, bb);
4418 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4419 edge_iterator ei;
4420 FOR_EACH_EDGE (e, ei, bb->preds)
4422 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4423 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4425 m_gsi = gsi_for_stmt (final_stmt);
4429 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4432 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4433 .{ADD,SUB,MUL}_OVERFLOW call. */
4435 void
4436 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4438 tree rhs1 = gimple_assign_rhs1 (stmt);
4439 rhs1 = TREE_OPERAND (rhs1, 0);
4440 if (obj == NULL_TREE)
4442 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4443 gcc_assert (m_vars[part] != NULL_TREE);
4444 obj = m_vars[part];
4446 if (TREE_CODE (rhs1) == SSA_NAME
4447 && (m_names == NULL
4448 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4450 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4451 return;
4453 int part = var_to_partition (m_map, rhs1);
4454 gcc_assert (m_vars[part] != NULL_TREE);
4455 tree var = m_vars[part];
4456 unsigned HOST_WIDE_INT nelts
4457 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4458 tree atype = build_array_type_nelts (m_limb_type, nelts);
4459 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4460 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4461 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4462 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4463 ? 0 : nelts * m_limb_size);
4464 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4465 gimple *g = gimple_build_assign (obj, v2);
4466 insert_before (g);
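/* Layout illustration (not from the sources; assumes 64-bit limbs):
   for a _Complex _BitInt(256) the partition variable VAR holds 8
   limbs; REALPART_EXPR copies limbs 0-3 (byte offset 0) and
   IMAGPART_EXPR copies limbs 4-7 (byte offset nelts * m_limb_size
   == 32).  */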
4469 /* Lower COMPLEX_EXPR stmt. */
4471 void
4472 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4474 tree lhs = gimple_assign_lhs (stmt);
4475 tree rhs1 = gimple_assign_rhs1 (stmt);
4476 tree rhs2 = gimple_assign_rhs2 (stmt);
4477 int part = var_to_partition (m_map, lhs);
4478 gcc_assert (m_vars[part] != NULL_TREE);
4479 lhs = m_vars[part];
4480 unsigned HOST_WIDE_INT nelts
4481 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4482 tree atype = build_array_type_nelts (m_limb_type, nelts);
4483 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4484 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4485 tree v2;
4486 if (TREE_CODE (rhs1) == SSA_NAME)
4488 part = var_to_partition (m_map, rhs1);
4489 gcc_assert (m_vars[part] != NULL_TREE);
4490 v2 = m_vars[part];
4492 else if (integer_zerop (rhs1))
4493 v2 = build_zero_cst (atype);
4494 else
4495 v2 = tree_output_constant_def (rhs1);
4496 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4497 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4498 gimple *g = gimple_build_assign (v1, v2);
4499 insert_before (g);
4500 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4501 TYPE_SIZE_UNIT (atype));
4502 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4503 if (TREE_CODE (rhs2) == SSA_NAME)
4505 part = var_to_partition (m_map, rhs2);
4506 gcc_assert (m_vars[part] != NULL_TREE);
4507 v2 = m_vars[part];
4509 else if (integer_zerop (rhs2))
4510 v2 = build_zero_cst (atype);
4511 else
4512 v2 = tree_output_constant_def (rhs2);
4513 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4514 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4515 g = gimple_build_assign (v1, v2);
4516 insert_before (g);
4519 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4520 argument. */
4522 void
4523 bitint_large_huge::lower_bit_query (gimple *stmt)
4525 tree arg0 = gimple_call_arg (stmt, 0);
4526 tree arg1 = (gimple_call_num_args (stmt) == 2
4527 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4528 tree lhs = gimple_call_lhs (stmt);
4529 gimple *g;
4531 if (!lhs)
4533 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4534 gsi_remove (&gsi, true);
4535 return;
4537 tree type = TREE_TYPE (arg0);
4538 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4539 bitint_prec_kind kind = bitint_precision_kind (type);
4540 gcc_assert (kind >= bitint_prec_large);
4541 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4542 enum built_in_function fcode = END_BUILTINS;
4543 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4544 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4545 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4546 switch (ifn)
4548 case IFN_CLZ:
4549 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4550 fcode = BUILT_IN_CLZ;
4551 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4552 fcode = BUILT_IN_CLZL;
4553 else
4554 fcode = BUILT_IN_CLZLL;
4555 break;
4556 case IFN_FFS:
4557 /* .FFS (X) is .CTZ (X, -1) + 1 (e.g. .FFS (8) is 4 and .FFS (0)
4558 is 0), though under the hood we don't add the addend at the end. */
4559 arg1 = integer_zero_node;
4560 /* FALLTHRU */
4561 case IFN_CTZ:
4562 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4563 fcode = BUILT_IN_CTZ;
4564 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4565 fcode = BUILT_IN_CTZL;
4566 else
4567 fcode = BUILT_IN_CTZLL;
4568 m_upwards = true;
4569 break;
4570 case IFN_CLRSB:
4571 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4572 fcode = BUILT_IN_CLRSB;
4573 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4574 fcode = BUILT_IN_CLRSBL;
4575 else
4576 fcode = BUILT_IN_CLRSBLL;
4577 break;
4578 case IFN_PARITY:
4579 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4580 fcode = BUILT_IN_PARITY;
4581 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4582 fcode = BUILT_IN_PARITYL;
4583 else
4584 fcode = BUILT_IN_PARITYLL;
4585 m_upwards = true;
4586 break;
4587 case IFN_POPCOUNT:
4588 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4589 fcode = BUILT_IN_POPCOUNT;
4590 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4591 fcode = BUILT_IN_POPCOUNTL;
4592 else
4593 fcode = BUILT_IN_POPCOUNTLL;
4594 m_upwards = true;
4595 break;
4596 default:
4597 gcc_unreachable ();
4599 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4600 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4601 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4602 basic_block edge_bb = NULL;
4603 if (m_upwards)
4605 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4606 if (kind == bitint_prec_large)
4607 cnt = CEIL (prec, limb_prec);
4608 else
4610 rem = (prec % (2 * limb_prec));
4611 end = (prec - rem) / limb_prec;
4612 cnt = 2 + CEIL (rem, limb_prec);
4613 idx = idx_first = create_loop (size_zero_node, &idx_next);
4616 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4618 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4619 gsi_prev (&gsi);
4620 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4621 edge_bb = e->src;
4622 if (kind == bitint_prec_large)
4623 m_gsi = gsi_end_bb (edge_bb);
4624 bqp = XALLOCAVEC (struct bq_details, cnt);
4626 else
4627 m_after_stmt = stmt;
4628 if (kind != bitint_prec_large)
4629 m_upwards_2limb = end;
4631 for (unsigned i = 0; i < cnt; i++)
4633 m_data_cnt = 0;
4634 if (kind == bitint_prec_large)
4635 idx = size_int (i);
4636 else if (i >= 2)
4637 idx = size_int (end + (i > 2));
4639 tree rhs1 = handle_operand (arg0, idx);
4640 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4642 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4643 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4644 rhs1 = add_cast (m_limb_type, rhs1);
4647 tree in, out, tem;
4648 if (ifn == IFN_PARITY)
4649 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4650 else if (ifn == IFN_FFS)
4651 in = prepare_data_in_out (integer_one_node, idx, &out);
4652 else
4653 in = prepare_data_in_out (integer_zero_node, idx, &out);
4655 switch (ifn)
4657 case IFN_CTZ:
4658 case IFN_FFS:
4659 g = gimple_build_cond (NE_EXPR, rhs1,
4660 build_zero_cst (m_limb_type),
4661 NULL_TREE, NULL_TREE);
4662 insert_before (g);
4663 edge e1, e2;
4664 e1 = split_block (gsi_bb (m_gsi), g);
4665 e1->flags = EDGE_FALSE_VALUE;
4666 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4667 e1->probability = profile_probability::unlikely ();
4668 e2->probability = e1->probability.invert ();
4669 if (i == 0)
4670 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4671 m_gsi = gsi_after_labels (e1->dest);
4672 bqp[i].e = e2;
4673 bqp[i].val = rhs1;
4674 if (tree_fits_uhwi_p (idx))
4675 bqp[i].addend
4676 = build_int_cst (integer_type_node,
4677 tree_to_uhwi (idx) * limb_prec
4678 + (ifn == IFN_FFS));
4679 else
4681 bqp[i].addend = in;
4682 if (i == 1)
4683 res = out;
4684 else
4685 res = make_ssa_name (integer_type_node);
4686 g = gimple_build_assign (res, PLUS_EXPR, in,
4687 build_int_cst (integer_type_node,
4688 limb_prec));
4689 insert_before (g);
4690 m_data[m_data_cnt] = res;
4692 break;
4693 case IFN_PARITY:
4694 if (!integer_zerop (in))
4696 if (kind == bitint_prec_huge && i == 1)
4697 res = out;
4698 else
4699 res = make_ssa_name (m_limb_type);
4700 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4701 insert_before (g);
4703 else
4704 res = rhs1;
4705 m_data[m_data_cnt] = res;
4706 break;
4707 case IFN_POPCOUNT:
4708 g = gimple_build_call (fndecl, 1, rhs1);
4709 tem = make_ssa_name (integer_type_node);
4710 gimple_call_set_lhs (g, tem);
4711 insert_before (g);
4712 if (!integer_zerop (in))
4714 if (kind == bitint_prec_huge && i == 1)
4715 res = out;
4716 else
4717 res = make_ssa_name (integer_type_node);
4718 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4719 insert_before (g);
4721 else
4722 res = tem;
4723 m_data[m_data_cnt] = res;
4724 break;
4725 default:
4726 gcc_unreachable ();
4729 m_first = false;
4730 if (kind == bitint_prec_huge && i <= 1)
4732 if (i == 0)
4734 idx = make_ssa_name (sizetype);
4735 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4736 size_one_node);
4737 insert_before (g);
4739 else
4741 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4742 size_int (2));
4743 insert_before (g);
4744 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4745 NULL_TREE, NULL_TREE);
4746 insert_before (g);
4747 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4748 m_gsi = gsi_after_labels (edge_bb);
4749 else
4750 m_gsi = gsi_for_stmt (stmt);
4755 else
4757 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4758 int sub_one = 0;
4759 if (kind == bitint_prec_large)
4760 cnt = CEIL (prec, limb_prec);
4761 else
4763 rem = prec % limb_prec;
4764 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4765 rem = limb_prec;
4766 end = (prec - rem) / limb_prec;
4767 cnt = 1 + (rem != 0);
4768 if (ifn == IFN_CLRSB)
4769 sub_one = 1;
4772 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4773 gsi_prev (&gsi);
4774 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4775 edge_bb = e->src;
4776 m_gsi = gsi_end_bb (edge_bb);
4778 if (ifn == IFN_CLZ)
4779 bqp = XALLOCAVEC (struct bq_details, cnt);
4780 else
4782 gsi = gsi_for_stmt (stmt);
4783 gsi_prev (&gsi);
4784 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4785 edge_bb = e->src;
4786 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4789 for (unsigned i = 0; i < cnt; i++)
4791 m_data_cnt = 0;
4792 if (kind == bitint_prec_large)
4793 idx = size_int (cnt - i - 1);
4794 else if (i == cnt - 1)
4795 idx = create_loop (size_int (end - 1), &idx_next);
4796 else
4797 idx = size_int (end);
4799 tree rhs1 = handle_operand (arg0, idx);
4800 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4802 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4803 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4804 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4805 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4806 rhs1 = add_cast (m_limb_type, rhs1);
4809 if (ifn == IFN_CLZ)
4811 g = gimple_build_cond (NE_EXPR, rhs1,
4812 build_zero_cst (m_limb_type),
4813 NULL_TREE, NULL_TREE);
4814 insert_before (g);
4815 edge e1 = split_block (gsi_bb (m_gsi), g);
4816 e1->flags = EDGE_FALSE_VALUE;
4817 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4818 e1->probability = profile_probability::unlikely ();
4819 e2->probability = e1->probability.invert ();
4820 if (i == 0)
4821 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4822 m_gsi = gsi_after_labels (e1->dest);
4823 bqp[i].e = e2;
4824 bqp[i].val = rhs1;
4826 else
4828 if (i == 0)
4830 first = rhs1;
4831 g = gimple_build_assign (make_ssa_name (m_limb_type),
4832 PLUS_EXPR, rhs1,
4833 build_int_cst (m_limb_type, 1));
4834 insert_before (g);
4835 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4836 build_int_cst (m_limb_type, 1),
4837 NULL_TREE, NULL_TREE);
4838 insert_before (g);
4840 else
4842 g = gimple_build_assign (make_ssa_name (m_limb_type),
4843 BIT_XOR_EXPR, rhs1, first);
4844 insert_before (g);
4845 tree stype = signed_type_for (m_limb_type);
4846 g = gimple_build_cond (LT_EXPR,
4847 add_cast (stype,
4848 gimple_assign_lhs (g)),
4849 build_zero_cst (stype),
4850 NULL_TREE, NULL_TREE);
4851 insert_before (g);
4852 edge e1 = split_block (gsi_bb (m_gsi), g);
4853 e1->flags = EDGE_FALSE_VALUE;
4854 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4855 EDGE_TRUE_VALUE);
4856 e1->probability = profile_probability::unlikely ();
4857 e2->probability = e1->probability.invert ();
4858 if (i == 1)
4859 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4860 e2->src);
4861 m_gsi = gsi_after_labels (e1->dest);
4862 bqp[2 * i].e = e2;
4863 g = gimple_build_cond (NE_EXPR, rhs1, first,
4864 NULL_TREE, NULL_TREE);
4865 insert_before (g);
4867 edge e1 = split_block (gsi_bb (m_gsi), g);
4868 e1->flags = EDGE_FALSE_VALUE;
4869 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4870 e1->probability = profile_probability::unlikely ();
4871 e2->probability = e1->probability.invert ();
4872 if (i == 0)
4873 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4874 m_gsi = gsi_after_labels (e1->dest);
4875 bqp[2 * i + 1].e = e2;
4876 bqp[i].val = rhs1;
4878 if (tree_fits_uhwi_p (idx))
4879 bqp[i].addend
4880 = build_int_cst (integer_type_node,
4881 (int) prec
4882 - (((int) tree_to_uhwi (idx) + 1)
4883 * limb_prec) - sub_one);
4884 else
4886 tree in, out;
4887 in = build_int_cst (integer_type_node, rem - sub_one);
4888 m_first = true;
4889 in = prepare_data_in_out (in, idx, &out);
4890 out = m_data[m_data_cnt + 1];
4891 bqp[i].addend = in;
4892 g = gimple_build_assign (out, PLUS_EXPR, in,
4893 build_int_cst (integer_type_node,
4894 limb_prec));
4895 insert_before (g);
4896 m_data[m_data_cnt] = out;
4899 m_first = false;
4900 if (kind == bitint_prec_huge && i == cnt - 1)
4902 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4903 size_int (-1));
4904 insert_before (g);
4905 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4906 NULL_TREE, NULL_TREE);
4907 insert_before (g);
4908 edge true_edge, false_edge;
4909 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4910 &true_edge, &false_edge);
4911 m_gsi = gsi_after_labels (false_edge->dest);
4915 switch (ifn)
4917 case IFN_CLZ:
4918 case IFN_CTZ:
4919 case IFN_FFS:
4920 gphi *phi1, *phi2, *phi3;
4921 basic_block bb;
4922 bb = gsi_bb (m_gsi);
4923 remove_edge (find_edge (bb, gimple_bb (stmt)));
4924 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4925 gimple_bb (stmt));
4926 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4927 gimple_bb (stmt));
4928 for (unsigned i = 0; i < cnt; i++)
4930 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4931 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4933 if (arg1 == NULL_TREE)
4935 g = gimple_build_builtin_unreachable (m_loc);
4936 insert_before (g);
4938 m_gsi = gsi_for_stmt (stmt);
4939 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4940 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4941 insert_before (g);
4942 if (arg1 == NULL_TREE)
4943 g = gimple_build_assign (lhs, PLUS_EXPR,
4944 gimple_phi_result (phi2),
4945 gimple_call_lhs (g));
4946 else
4948 g = gimple_build_assign (make_ssa_name (integer_type_node),
4949 PLUS_EXPR, gimple_phi_result (phi2),
4950 gimple_call_lhs (g));
4951 insert_before (g);
4952 edge e1 = split_block (gimple_bb (stmt), g);
4953 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
4954 e2->probability = profile_probability::always ();
4955 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
4956 get_immediate_dominator (CDI_DOMINATORS,
4957 e1->src));
4958 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
4959 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
4960 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
4961 m_gsi = gsi_for_stmt (stmt);
4962 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4964 gsi_replace (&m_gsi, g, true);
4965 break;
4966 case IFN_CLRSB:
4967 bb = gsi_bb (m_gsi);
4968 remove_edge (find_edge (bb, edge_bb));
4969 edge e;
4970 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
4971 e->probability = profile_probability::always ();
4972 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
4973 get_immediate_dominator (CDI_DOMINATORS,
4974 edge_bb));
4975 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4976 edge_bb);
4977 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4978 edge_bb);
4979 phi3 = create_phi_node (make_ssa_name (integer_type_node),
4980 gimple_bb (stmt));
4981 for (unsigned i = 0; i < cnt; i++)
4983 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
4984 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
4985 UNKNOWN_LOCATION);
4986 tree a = bqp[i].addend;
4987 if (i && kind == bitint_prec_large)
4988 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
4989 if (i)
4990 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
4992 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
4993 UNKNOWN_LOCATION);
4994 m_gsi = gsi_after_labels (edge_bb);
4995 g = gimple_build_call (fndecl, 1,
4996 add_cast (signed_type_for (m_limb_type),
4997 gimple_phi_result (phi1)));
4998 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4999 insert_before (g);
5000 g = gimple_build_assign (make_ssa_name (integer_type_node),
5001 PLUS_EXPR, gimple_call_lhs (g),
5002 gimple_phi_result (phi2));
5003 insert_before (g);
5004 if (kind != bitint_prec_large)
5006 g = gimple_build_assign (make_ssa_name (integer_type_node),
5007 PLUS_EXPR, gimple_assign_lhs (g),
5008 integer_one_node);
5009 insert_before (g);
5011 add_phi_arg (phi3, gimple_assign_lhs (g),
5012 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5013 m_gsi = gsi_for_stmt (stmt);
5014 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5015 gsi_replace (&m_gsi, g, true);
5016 break;
5017 case IFN_PARITY:
5018 g = gimple_build_call (fndecl, 1, res);
5019 gimple_call_set_lhs (g, lhs);
5020 gsi_replace (&m_gsi, g, true);
5021 break;
5022 case IFN_POPCOUNT:
5023 g = gimple_build_assign (lhs, res);
5024 gsi_replace (&m_gsi, g, true);
5025 break;
5026 default:
5027 gcc_unreachable ();
5031 /* Lower a call statement with one or more large/huge _BitInt
5032 arguments or large/huge _BitInt return value. */
5034 void
5035 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5037 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5038 unsigned int nargs = gimple_call_num_args (stmt);
5039 if (gimple_call_internal_p (stmt))
5040 switch (gimple_call_internal_fn (stmt))
5042 case IFN_ADD_OVERFLOW:
5043 case IFN_SUB_OVERFLOW:
5044 case IFN_UBSAN_CHECK_ADD:
5045 case IFN_UBSAN_CHECK_SUB:
5046 lower_addsub_overflow (obj, stmt);
5047 return;
5048 case IFN_MUL_OVERFLOW:
5049 case IFN_UBSAN_CHECK_MUL:
5050 lower_mul_overflow (obj, stmt);
5051 return;
5052 case IFN_CLZ:
5053 case IFN_CTZ:
5054 case IFN_CLRSB:
5055 case IFN_FFS:
5056 case IFN_PARITY:
5057 case IFN_POPCOUNT:
5058 lower_bit_query (stmt);
5059 return;
5060 default:
5061 break;
5063 for (unsigned int i = 0; i < nargs; ++i)
5065 tree arg = gimple_call_arg (stmt, i);
5066 if (TREE_CODE (arg) != SSA_NAME
5067 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5068 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5069 continue;
5070 int p = var_to_partition (m_map, arg);
5071 tree v = m_vars[p];
5072 gcc_assert (v != NULL_TREE);
5073 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5074 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5075 arg = make_ssa_name (TREE_TYPE (arg));
5076 gimple *g = gimple_build_assign (arg, v);
5077 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5078 gimple_call_set_arg (stmt, i, arg);
5079 if (m_preserved == NULL)
5080 m_preserved = BITMAP_ALLOC (NULL);
5081 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5083 tree lhs = gimple_call_lhs (stmt);
5084 if (lhs
5085 && TREE_CODE (lhs) == SSA_NAME
5086 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5087 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5089 int p = var_to_partition (m_map, lhs);
5090 tree v = m_vars[p];
5091 gcc_assert (v != NULL_TREE);
5092 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5093 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5094 gimple_call_set_lhs (stmt, v);
5095 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5097 update_stmt (stmt);
5100 /* Lower __asm STMT which involves large/huge _BitInt values. */
5102 void
5103 bitint_large_huge::lower_asm (gimple *stmt)
5105 gasm *g = as_a <gasm *> (stmt);
5106 unsigned noutputs = gimple_asm_noutputs (g);
5107 unsigned ninputs = gimple_asm_ninputs (g);
5109 for (unsigned i = 0; i < noutputs; ++i)
5111 tree t = gimple_asm_output_op (g, i);
5112 tree s = TREE_VALUE (t);
5113 if (TREE_CODE (s) == SSA_NAME
5114 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5115 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5117 int part = var_to_partition (m_map, s);
5118 gcc_assert (m_vars[part] != NULL_TREE);
5119 TREE_VALUE (t) = m_vars[part];
5122 for (unsigned i = 0; i < ninputs; ++i)
5124 tree t = gimple_asm_input_op (g, i);
5125 tree s = TREE_VALUE (t);
5126 if (TREE_CODE (s) == SSA_NAME
5127 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5128 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5130 int part = var_to_partition (m_map, s);
5131 gcc_assert (m_vars[part] != NULL_TREE);
5132 TREE_VALUE (t) = m_vars[part];
5135 update_stmt (stmt);
5138 /* Lower statement STMT which involves large/huge _BitInt values
5139 into code accessing individual limbs. */
5141 void
5142 bitint_large_huge::lower_stmt (gimple *stmt)
5144 m_first = true;
5145 m_lhs = NULL_TREE;
5146 m_data.truncate (0);
5147 m_data_cnt = 0;
5148 m_gsi = gsi_for_stmt (stmt);
5149 m_after_stmt = NULL;
5150 m_bb = NULL;
5151 m_init_gsi = m_gsi;
5152 gsi_prev (&m_init_gsi);
5153 m_preheader_bb = NULL;
5154 m_upwards_2limb = 0;
5155 m_upwards = false;
5156 m_var_msb = false;
5157 m_cast_conditional = false;
5158 m_bitfld_load = 0;
5159 m_loc = gimple_location (stmt);
5160 if (is_gimple_call (stmt))
5162 lower_call (NULL_TREE, stmt);
5163 return;
5165 if (gimple_code (stmt) == GIMPLE_ASM)
5167 lower_asm (stmt);
5168 return;
5170 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5171 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5172 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5173 bool mergeable_cast_p = false;
5174 bool final_cast_p = false;
5175 if (gimple_assign_cast_p (stmt))
5177 lhs = gimple_assign_lhs (stmt);
5178 tree rhs1 = gimple_assign_rhs1 (stmt);
5179 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5180 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5181 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5182 mergeable_cast_p = true;
5183 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5184 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5185 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5187 final_cast_p = true;
5188 if (TREE_CODE (rhs1) == SSA_NAME
5189 && (m_names == NULL
5190 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5192 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5193 if (is_gimple_assign (g)
5194 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5196 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5197 if (TREE_CODE (rhs2) == SSA_NAME
5198 && (m_names == NULL
5199 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5201 g = SSA_NAME_DEF_STMT (rhs2);
5202 int ovf = optimizable_arith_overflow (g);
5203 if (ovf == 2)
5204 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5205 and IMAGPART_EXPR uses, where the latter is cast to
5206 non-_BitInt, it will be optimized when handling
5207 the REALPART_EXPR. */
5208 return;
5209 if (ovf == 1)
5211 lower_call (NULL_TREE, g);
5212 return;
5219 if (gimple_store_p (stmt))
5221 tree rhs1 = gimple_assign_rhs1 (stmt);
5222 if (TREE_CODE (rhs1) == SSA_NAME
5223 && (m_names == NULL
5224 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5226 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5227 m_loc = gimple_location (g);
5228 lhs = gimple_assign_lhs (stmt);
5229 if (is_gimple_assign (g) && !mergeable_op (g))
5230 switch (gimple_assign_rhs_code (g))
5232 case LSHIFT_EXPR:
5233 case RSHIFT_EXPR:
5234 lower_shift_stmt (lhs, g);
5235 handled:
5236 m_gsi = gsi_for_stmt (stmt);
5237 unlink_stmt_vdef (stmt);
5238 release_ssa_name (gimple_vdef (stmt));
5239 gsi_remove (&m_gsi, true);
5240 return;
5241 case MULT_EXPR:
5242 case TRUNC_DIV_EXPR:
5243 case TRUNC_MOD_EXPR:
5244 lower_muldiv_stmt (lhs, g);
5245 goto handled;
5246 case FIX_TRUNC_EXPR:
5247 lower_float_conv_stmt (lhs, g);
5248 goto handled;
5249 case REALPART_EXPR:
5250 case IMAGPART_EXPR:
5251 lower_cplxpart_stmt (lhs, g);
5252 goto handled;
5253 default:
5254 break;
5256 else if (optimizable_arith_overflow (g) == 3)
5258 lower_call (lhs, g);
5259 goto handled;
5261 m_loc = gimple_location (stmt);
5264 if (mergeable_op (stmt)
5265 || gimple_store_p (stmt)
5266 || gimple_assign_load_p (stmt)
5267 || eq_p
5268 || mergeable_cast_p)
5270 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5271 if (!eq_p)
5272 return;
5274 else if (cmp_code != ERROR_MARK)
5275 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5276 if (cmp_code != ERROR_MARK)
5278 if (gimple_code (stmt) == GIMPLE_COND)
5280 gcond *cstmt = as_a <gcond *> (stmt);
5281 gimple_cond_set_lhs (cstmt, lhs);
5282 gimple_cond_set_rhs (cstmt, boolean_false_node);
5283 gimple_cond_set_code (cstmt, cmp_code);
5284 update_stmt (stmt);
5285 return;
5287 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5289 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5290 boolean_false_node);
5291 gimple_assign_set_rhs1 (stmt, cond);
5292 lhs = gimple_assign_lhs (stmt);
5293 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5294 || (bitint_precision_kind (TREE_TYPE (lhs))
5295 <= bitint_prec_middle));
5296 update_stmt (stmt);
5297 return;
5299 gimple_assign_set_rhs1 (stmt, lhs);
5300 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5301 gimple_assign_set_rhs_code (stmt, cmp_code);
5302 update_stmt (stmt);
5303 return;
5305 if (final_cast_p)
5307 tree lhs_type = TREE_TYPE (lhs);
5308 /* Add support for 3 or more limbs filled in from normal integral
5309 type if this assert fails. If no target chooses limb mode smaller
5310 than half of largest supported normal integral type, this will not
5311 be needed. */
5312 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5313 gimple *g;
5314 if (TREE_CODE (lhs_type) == BITINT_TYPE
5315 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5316 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5317 TYPE_UNSIGNED (lhs_type));
5318 m_data_cnt = 0;
5319 tree rhs1 = gimple_assign_rhs1 (stmt);
5320 tree r1 = handle_operand (rhs1, size_int (0));
5321 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5322 r1 = add_cast (lhs_type, r1);
5323 if (TYPE_PRECISION (lhs_type) > limb_prec)
5325 m_data_cnt = 0;
5326 m_first = false;
5327 tree r2 = handle_operand (rhs1, size_int (1));
5328 r2 = add_cast (lhs_type, r2);
5329 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5330 build_int_cst (unsigned_type_node,
5331 limb_prec));
5332 insert_before (g);
5333 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5334 gimple_assign_lhs (g));
5335 insert_before (g);
5336 r1 = gimple_assign_lhs (g);
5338 if (lhs_type != TREE_TYPE (lhs))
5339 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5340 else
5341 g = gimple_build_assign (lhs, r1);
5342 gsi_replace (&m_gsi, g, true);
5343 return;
5345 if (is_gimple_assign (stmt))
5346 switch (gimple_assign_rhs_code (stmt))
5348 case LSHIFT_EXPR:
5349 case RSHIFT_EXPR:
5350 lower_shift_stmt (NULL_TREE, stmt);
5351 return;
5352 case MULT_EXPR:
5353 case TRUNC_DIV_EXPR:
5354 case TRUNC_MOD_EXPR:
5355 lower_muldiv_stmt (NULL_TREE, stmt);
5356 return;
5357 case FIX_TRUNC_EXPR:
5358 case FLOAT_EXPR:
5359 lower_float_conv_stmt (NULL_TREE, stmt);
5360 return;
5361 case REALPART_EXPR:
5362 case IMAGPART_EXPR:
5363 lower_cplxpart_stmt (NULL_TREE, stmt);
5364 return;
5365 case COMPLEX_EXPR:
5366 lower_complexexpr_stmt (stmt);
5367 return;
5368 default:
5369 break;
5371 gcc_unreachable ();
5374 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5375 the desired memory state. */
5377 void *
5378 vuse_eq (ao_ref *, tree vuse1, void *data)
5380 tree vuse2 = (tree) data;
5381 if (vuse1 == vuse2)
5382 return data;
5384 return NULL;
5387 /* Return true if STMT uses a library function and needs to take
5388 the address of its inputs. We need to avoid bit-fields in those
5389 cases. */
5391 bool
5392 stmt_needs_operand_addr (gimple *stmt)
5394 if (is_gimple_assign (stmt))
5395 switch (gimple_assign_rhs_code (stmt))
5397 case MULT_EXPR:
5398 case TRUNC_DIV_EXPR:
5399 case TRUNC_MOD_EXPR:
5400 case FLOAT_EXPR:
5401 return true;
5402 default:
5403 break;
5405 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5406 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5407 return true;
5408 return false;
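/* E.g. a large/huge _BitInt multiplication becomes an IFN_MULBITINT
   call (ultimately __mulbitint3 in libgcc) which reads its operands
   through pointers to limb arrays; a bit-field that does not start at
   a limb boundary has no addressable limb array, so such loads must
   not be sunk into these statements.  Illustrative rationale; see
   bitint_dom_walker below for the actual checks.  */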
5411 /* Dominator walker used to discover which large/huge _BitInt
5412 loads could be sunk into all their uses. */
5414 class bitint_dom_walker : public dom_walker
5416 public:
5417 bitint_dom_walker (bitmap names, bitmap loads)
5418 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5420 edge before_dom_children (basic_block) final override;
5422 private:
5423 bitmap m_names, m_loads;
5426 edge
5427 bitint_dom_walker::before_dom_children (basic_block bb)
5429 gphi *phi = get_virtual_phi (bb);
5430 tree vop;
5431 if (phi)
5432 vop = gimple_phi_result (phi);
5433 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5434 vop = NULL_TREE;
5435 else
5436 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5438 auto_vec<tree, 16> worklist;
5439 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5440 !gsi_end_p (gsi); gsi_next (&gsi))
5442 gimple *stmt = gsi_stmt (gsi);
5443 if (is_gimple_debug (stmt))
5444 continue;
5446 if (!vop && gimple_vuse (stmt))
5447 vop = gimple_vuse (stmt);
5449 tree cvop = vop;
5450 if (gimple_vdef (stmt))
5451 vop = gimple_vdef (stmt);
5453 tree lhs = gimple_get_lhs (stmt);
5454 if (lhs
5455 && TREE_CODE (lhs) == SSA_NAME
5456 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5457 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5458 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5459 /* If the lhs of stmt is a large/huge _BitInt SSA_NAME not in m_names,
5460 it means it will be handled in a loop or by straight line code
5461 at the location of its (ultimate) immediate use, so for
5462 vop checking purposes check these only at that ultimate
5463 immediate use. */
5464 continue;
5466 ssa_op_iter oi;
5467 use_operand_p use_p;
5468 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5470 tree s = USE_FROM_PTR (use_p);
5471 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5472 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5473 worklist.safe_push (s);
5476 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5477 while (worklist.length () > 0)
5479 tree s = worklist.pop ();
5481 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5483 gimple *g = SSA_NAME_DEF_STMT (s);
5484 needs_operand_addr |= stmt_needs_operand_addr (g);
5485 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5487 tree s2 = USE_FROM_PTR (use_p);
5488 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5489 && (bitint_precision_kind (TREE_TYPE (s2))
5490 >= bitint_prec_large))
5491 worklist.safe_push (s2);
5493 continue;
5495 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5496 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5498 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5499 if (TREE_CODE (rhs) == SSA_NAME
5500 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5501 s = rhs;
5502 else
5503 continue;
5505 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5506 continue;
5508 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5509 if (needs_operand_addr
5510 && TREE_CODE (rhs1) == COMPONENT_REF
5511 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5513 tree fld = TREE_OPERAND (rhs1, 1);
5514 /* For little-endian, we can allow as inputs bit-fields
5515 which start at a limb boundary. */
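/* E.g. with 64-bit limbs a bit-field at DECL_FIELD_BIT_OFFSET 128 can
   be read directly limb by limb, while one at bit offset 71 cannot
   (hypothetical offsets, for illustration only).  */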
5516 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5517 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5518 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5519 % limb_prec) == 0)
5520 ;
5521 else
5523 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5524 continue;
5528 ao_ref ref;
5529 ao_ref_init (&ref, rhs1);
5530 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5531 unsigned limit = 64;
5532 tree vuse = cvop;
5533 if (vop != cvop
5534 && is_gimple_assign (stmt)
5535 && gimple_store_p (stmt)
5536 && !operand_equal_p (lhs,
5537 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5538 0))
5539 vuse = vop;
5540 if (vuse != lvop
5541 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5542 NULL, NULL, limit, lvop) == NULL)
5543 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5547 bb->aux = (void *) vop;
5548 return NULL;
5553 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5554 build_ssa_conflict_graph.
5555 The differences are:
5556 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5557 2) for large/huge _BitInt multiplication/division/modulo process the def
5558 only after processing the uses, rather than before, so that the uses
5559 conflict with the definition
5560 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5561 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5562 the final statement. */
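/* As an example of 2), in a hypothetical
     _1 = _2 * _3;
   the multiplication is lowered to a library call which writes the
   result limbs while it may still be reading the operand limbs, so _1
   has to conflict with _2 and _3 and must not share their partition
   (and thus their backing array).  */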
5564 void
5565 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5566 ssa_conflicts *graph, bitmap names,
5567 void (*def) (live_track *, tree,
5568 ssa_conflicts *),
5569 void (*use) (live_track *, tree))
5571 bool muldiv_p = false;
5572 tree lhs = NULL_TREE;
5573 if (is_gimple_assign (stmt))
5575 lhs = gimple_assign_lhs (stmt);
5576 if (TREE_CODE (lhs) == SSA_NAME
5577 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5578 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5580 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5581 return;
5582 switch (gimple_assign_rhs_code (stmt))
5584 case MULT_EXPR:
5585 case TRUNC_DIV_EXPR:
5586 case TRUNC_MOD_EXPR:
5587 muldiv_p = true;
5588 default:
5589 break;
5594 ssa_op_iter iter;
5595 tree var;
5596 if (!muldiv_p)
5598 /* For stmts with more than one SSA_NAME definition pretend all the
5599 SSA_NAME outputs but the first one are live at this point, so
5600 that conflicts are added in between all those even when they are
5601 not really live after the asm, because expansion might
5602 copy those into pseudos after the asm and if multiple outputs
5603 share the same partition, it might overwrite those that should
5604 be live. E.g.
5605 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (a));
5606 return a;
5607 See PR70593. */
5608 bool first = true;
5609 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5610 if (first)
5611 first = false;
5612 else
5613 use (live, var);
5615 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5616 def (live, var, graph);
5619 auto_vec<tree, 16> worklist;
5620 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5621 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5622 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5624 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5625 use (live, var);
5626 else
5627 worklist.safe_push (var);
5630 while (worklist.length () > 0)
5632 tree s = worklist.pop ();
5633 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5634 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5635 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5637 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5638 use (live, var);
5639 else
5640 worklist.safe_push (var);
5644 if (muldiv_p)
5645 def (live, lhs, graph);
5648 /* Entry point for _BitInt(N) operation lowering during optimization. */
5650 static unsigned int
5651 gimple_lower_bitint (void)
5653 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5654 limb_prec = 0;
5656 unsigned int i;
5657 for (i = 0; i < num_ssa_names; ++i)
5659 tree s = ssa_name (i);
5660 if (s == NULL)
5661 continue;
5662 tree type = TREE_TYPE (s);
5663 if (TREE_CODE (type) == COMPLEX_TYPE)
5664 type = TREE_TYPE (type);
5665 if (TREE_CODE (type) == BITINT_TYPE
5666 && bitint_precision_kind (type) != bitint_prec_small)
5667 break;
5668 /* We also need to rewrite stores of large/huge _BitInt INTEGER_CSTs
5669 into memory. Such functions could have no large/huge SSA_NAMEs. */
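/* E.g. (a hypothetical testcase)
     struct S { _BitInt(512) a; } s;
     void foo (void) { s.a = 42wb; }
   contains no large/huge SSA_NAME at all, yet the store of the
   512-bit constant still has to be lowered into per-limb stores.  */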
5670 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5672 gimple *g = SSA_NAME_DEF_STMT (s);
5673 if (is_gimple_assign (g) && gimple_store_p (g))
5675 tree t = gimple_assign_rhs1 (g);
5676 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5677 && (bitint_precision_kind (TREE_TYPE (t))
5678 >= bitint_prec_large))
5679 break;
5682 /* Similarly, e.g. with -frounding-math, casts from _BitInt INTEGER_CSTs
5683 to floating point types need to be rewritten. */
5684 else if (SCALAR_FLOAT_TYPE_P (type))
5686 gimple *g = SSA_NAME_DEF_STMT (s);
5687 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5689 tree t = gimple_assign_rhs1 (g);
5690 if (TREE_CODE (t) == INTEGER_CST
5691 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5692 && (bitint_precision_kind (TREE_TYPE (t))
5693 != bitint_prec_small))
5694 break;
5698 if (i == num_ssa_names)
5699 return 0;
5701 basic_block bb;
5702 auto_vec<gimple *, 4> switch_statements;
5703 FOR_EACH_BB_FN (bb, cfun)
5705 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5707 tree idx = gimple_switch_index (swtch);
5708 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5709 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5710 continue;
5712 if (optimize)
5713 group_case_labels_stmt (swtch);
5714 switch_statements.safe_push (swtch);
5718 if (!switch_statements.is_empty ())
5720 bool expanded = false;
5721 gimple *stmt;
5722 unsigned int j;
5723 i = 0;
5724 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5726 gswitch *swtch = as_a<gswitch *> (stmt);
5727 tree_switch_conversion::switch_decision_tree dt (swtch);
5728 expanded |= dt.analyze_switch_statement ();
5731 if (expanded)
5733 free_dominance_info (CDI_DOMINATORS);
5734 free_dominance_info (CDI_POST_DOMINATORS);
5735 mark_virtual_operands_for_renaming (cfun);
5736 cleanup_tree_cfg (TODO_update_ssa);
5740 struct bitint_large_huge large_huge;
5741 bool has_large_huge_parm_result = false;
5742 bool has_large_huge = false;
5743 unsigned int ret = 0, first_large_huge = ~0U;
5744 bool edge_insertions = false;
5745 for (; i < num_ssa_names; ++i)
5747 tree s = ssa_name (i);
5748 if (s == NULL)
5749 continue;
5750 tree type = TREE_TYPE (s);
5751 if (TREE_CODE (type) == COMPLEX_TYPE)
5752 type = TREE_TYPE (type);
5753 if (TREE_CODE (type) == BITINT_TYPE
5754 && bitint_precision_kind (type) >= bitint_prec_large)
5756 if (first_large_huge == ~0U)
5757 first_large_huge = i;
5758 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5759 gimple_stmt_iterator gsi;
5760 tree_code rhs_code;
5761 /* Unoptimize certain constructs to simpler alternatives to
5762 avoid having to lower all of them. */
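/* E.g. a rotate is expanded back into shifts and an IOR; in source
   terms (a sketch, assuming an unsigned _BitInt(N) X and 0 < N2 < N):
     X r<< N2  ->  (X << N2) | (X >> (N - N2))
   and ABS/ABSU/MIN/MAX/COND_EXPR are expanded into explicit control
   flow with a PHI, so only shifts, IOR, negation, comparisons and
   PHIs need dedicated lowering.  */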
5763 if (is_gimple_assign (stmt))
5764 switch (rhs_code = gimple_assign_rhs_code (stmt))
5766 default:
5767 break;
5768 case LROTATE_EXPR:
5769 case RROTATE_EXPR:
5771 first_large_huge = 0;
5772 location_t loc = gimple_location (stmt);
5773 gsi = gsi_for_stmt (stmt);
5774 tree rhs1 = gimple_assign_rhs1 (stmt);
5775 tree type = TREE_TYPE (rhs1);
5776 tree n = gimple_assign_rhs2 (stmt), m;
5777 tree p = build_int_cst (TREE_TYPE (n),
5778 TYPE_PRECISION (type));
5779 if (TREE_CODE (n) == INTEGER_CST)
5780 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5781 else
5783 m = make_ssa_name (TREE_TYPE (n));
5784 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5785 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5786 gimple_set_location (g, loc);
5788 if (!TYPE_UNSIGNED (type))
5790 tree utype = build_bitint_type (TYPE_PRECISION (type),
5791 1);
5792 if (TREE_CODE (rhs1) == INTEGER_CST)
5793 rhs1 = fold_convert (utype, rhs1);
5794 else
5796 tree t = make_ssa_name (utype);
5797 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5798 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5799 gimple_set_location (g, loc);
5800 rhs1 = t;
5802 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5803 rhs_code == LROTATE_EXPR
5804 ? LSHIFT_EXPR : RSHIFT_EXPR,
5805 rhs1, n);
5806 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5807 gimple_set_location (g, loc);
5808 tree op1 = gimple_assign_lhs (g);
5809 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5810 rhs_code == LROTATE_EXPR
5811 ? RSHIFT_EXPR : LSHIFT_EXPR,
5812 rhs1, m);
5813 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5814 gimple_set_location (g, loc);
5815 tree op2 = gimple_assign_lhs (g);
5816 tree lhs = gimple_assign_lhs (stmt);
5817 if (!TYPE_UNSIGNED (type))
5819 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5820 BIT_IOR_EXPR, op1, op2);
5821 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5822 gimple_set_location (g, loc);
5823 g = gimple_build_assign (lhs, NOP_EXPR,
5824 gimple_assign_lhs (g));
5826 else
5827 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5828 gsi_replace (&gsi, g, true);
5829 gimple_set_location (g, loc);
5831 break;
5832 case ABS_EXPR:
5833 case ABSU_EXPR:
5834 case MIN_EXPR:
5835 case MAX_EXPR:
5836 case COND_EXPR:
5837 first_large_huge = 0;
5838 gsi = gsi_for_stmt (stmt);
5839 tree lhs = gimple_assign_lhs (stmt);
5840 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5841 location_t loc = gimple_location (stmt);
5842 if (rhs_code == ABS_EXPR)
5843 g = gimple_build_cond (LT_EXPR, rhs1,
5844 build_zero_cst (TREE_TYPE (rhs1)),
5845 NULL_TREE, NULL_TREE);
5846 else if (rhs_code == ABSU_EXPR)
5848 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5849 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5850 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5851 gimple_set_location (g, loc);
5852 g = gimple_build_cond (LT_EXPR, rhs1,
5853 build_zero_cst (TREE_TYPE (rhs1)),
5854 NULL_TREE, NULL_TREE);
5855 rhs1 = rhs2;
5857 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5859 rhs2 = gimple_assign_rhs2 (stmt);
5860 if (TREE_CODE (rhs1) == INTEGER_CST)
5861 std::swap (rhs1, rhs2);
5862 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5863 NULL_TREE, NULL_TREE);
5864 if (rhs_code == MAX_EXPR)
5865 std::swap (rhs1, rhs2);
5867 else
5869 g = gimple_build_cond (NE_EXPR, rhs1,
5870 build_zero_cst (TREE_TYPE (rhs1)),
5871 NULL_TREE, NULL_TREE);
5872 rhs1 = gimple_assign_rhs2 (stmt);
5873 rhs2 = gimple_assign_rhs3 (stmt);
5875 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5876 gimple_set_location (g, loc);
5877 edge e1 = split_block (gsi_bb (gsi), g);
5878 edge e2 = split_block (e1->dest, (gimple *) NULL);
5879 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5880 e3->probability = profile_probability::even ();
5881 e1->flags = EDGE_TRUE_VALUE;
5882 e1->probability = e3->probability.invert ();
5883 if (dom_info_available_p (CDI_DOMINATORS))
5884 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5885 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5887 gsi = gsi_after_labels (e1->dest);
5888 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5889 NEGATE_EXPR, rhs1);
5890 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5891 gimple_set_location (g, loc);
5892 rhs2 = gimple_assign_lhs (g);
5893 std::swap (rhs1, rhs2);
5895 gsi = gsi_for_stmt (stmt);
5896 gsi_remove (&gsi, true);
5897 gphi *phi = create_phi_node (lhs, e2->dest);
5898 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5899 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5900 break;
5903 /* We also need to rewrite stores of large/huge _BitInt INTEGER_CSTs
5904 into memory. Such functions could have no large/huge SSA_NAMEs. */
5905 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5907 gimple *g = SSA_NAME_DEF_STMT (s);
5908 if (is_gimple_assign (g) && gimple_store_p (g))
5910 tree t = gimple_assign_rhs1 (g);
5911 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5912 && (bitint_precision_kind (TREE_TYPE (t))
5913 >= bitint_prec_large))
5914 has_large_huge = true;
5917 /* Similarly, e.g. with -frounding-math, casts from _BitInt INTEGER_CSTs
5918 to floating point types need to be rewritten. */
5919 else if (SCALAR_FLOAT_TYPE_P (type))
5921 gimple *g = SSA_NAME_DEF_STMT (s);
5922 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5924 tree t = gimple_assign_rhs1 (g);
5925 if (TREE_CODE (t) == INTEGER_CST
5926 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5927 && (bitint_precision_kind (TREE_TYPE (t))
5928 >= bitint_prec_large))
5929 has_large_huge = true;
5933 for (i = first_large_huge; i < num_ssa_names; ++i)
5935 tree s = ssa_name (i);
5936 if (s == NULL)
5937 continue;
5938 tree type = TREE_TYPE (s);
5939 if (TREE_CODE (type) == COMPLEX_TYPE)
5940 type = TREE_TYPE (type);
5941 if (TREE_CODE (type) == BITINT_TYPE
5942 && bitint_precision_kind (type) >= bitint_prec_large)
5944 use_operand_p use_p;
5945 gimple *use_stmt;
5946 has_large_huge = true;
5947 if (optimize
5948 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
5949 continue;
5950 /* Ignore large/huge _BitInt SSA_NAMEs which have a single use in
5951 the same bb and so could be handled in the same loop as the
5952 immediate use. */
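/* For instance (a hypothetical GIMPLE sketch):
     _1 = a_2(D) + b_3(D);
     _4 = _1 - c_5(D);
   where the only use of _1 is the subtraction in the same bb; both
   mergeable operations can then be emitted inside a single loop over
   the limbs and _1 needs no backing array of its own.  */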
5953 if (optimize
5954 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5955 && single_imm_use (s, &use_p, &use_stmt)
5956 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
5958 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
5960 if (mergeable_op (use_stmt))
5961 continue;
5962 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
5963 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
5964 continue;
5965 if (gimple_assign_cast_p (use_stmt))
5967 tree lhs = gimple_assign_lhs (use_stmt);
5968 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5969 continue;
5971 else if (gimple_store_p (use_stmt)
5972 && is_gimple_assign (use_stmt)
5973 && !gimple_has_volatile_ops (use_stmt)
5974 && !stmt_ends_bb_p (use_stmt))
5975 continue;
5977 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5979 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5980 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5981 && ((is_gimple_assign (use_stmt)
5982 && (gimple_assign_rhs_code (use_stmt)
5983 != COMPLEX_EXPR))
5984 || gimple_code (use_stmt) == GIMPLE_COND)
5985 && (!gimple_store_p (use_stmt)
5986 || (is_gimple_assign (use_stmt)
5987 && !gimple_has_volatile_ops (use_stmt)
5988 && !stmt_ends_bb_p (use_stmt)))
5989 && (TREE_CODE (rhs1) != SSA_NAME
5990 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5992 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
5993 || (bitint_precision_kind (TREE_TYPE (rhs1))
5994 < bitint_prec_large))
5995 continue;
5996 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
5997 >= TYPE_PRECISION (TREE_TYPE (s)))
5998 && mergeable_op (use_stmt))
5999 continue;
6000 /* Prevent merging a widening non-mergeable cast
6001 on result of some narrower mergeable op
6002 together with later mergeable operations. E.g.
6003 result of _BitInt(223) addition shouldn't be
6004 sign-extended to _BitInt(513) and have another
6005 _BitInt(513) added to it, as handle_plus_minus
6006 with its PHI node handling inside of handle_cast
6007 will not work correctly. An exception is if
6008 use_stmt is a store; that case is handled directly
6009 in lower_mergeable_stmt. */
6010 if (TREE_CODE (rhs1) != SSA_NAME
6011 || !has_single_use (rhs1)
6012 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6013 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6014 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6015 || gimple_store_p (use_stmt))
6016 continue;
6017 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6018 < TYPE_PRECISION (TREE_TYPE (s)))
6019 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6021 /* Another exception is if the widening cast is
6022 of a mergeable same precision cast of something
6023 not mergeable. */
6024 tree rhs2
6025 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6026 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6027 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6028 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6030 if (TREE_CODE (rhs2) != SSA_NAME
6031 || !has_single_use (rhs2)
6032 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6033 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6034 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6035 continue;
6040 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6041 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6043 case IMAGPART_EXPR:
6045 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6046 rhs1 = TREE_OPERAND (rhs1, 0);
6047 if (TREE_CODE (rhs1) == SSA_NAME)
6049 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6050 if (optimizable_arith_overflow (g))
6051 continue;
6054 /* FALLTHRU */
6055 case LSHIFT_EXPR:
6056 case RSHIFT_EXPR:
6057 case MULT_EXPR:
6058 case TRUNC_DIV_EXPR:
6059 case TRUNC_MOD_EXPR:
6060 case FIX_TRUNC_EXPR:
6061 case REALPART_EXPR:
6062 if (gimple_store_p (use_stmt)
6063 && is_gimple_assign (use_stmt)
6064 && !gimple_has_volatile_ops (use_stmt)
6065 && !stmt_ends_bb_p (use_stmt))
6067 tree lhs = gimple_assign_lhs (use_stmt);
6068 /* As multiplication/division passes the address of the lhs
6069 to a library function which assumes it can extend
6070 it to a whole number of limbs, avoid merging those
6071 with bit-field stores. Don't allow it for
6072 shifts etc. either, so that the bit-field store
6073 handling doesn't have to be done everywhere. */
6074 if (TREE_CODE (lhs) == COMPONENT_REF
6075 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6076 break;
6077 continue;
6079 break;
6080 default:
6081 break;
6085 /* Also ignore uninitialized uses. */
6086 if (SSA_NAME_IS_DEFAULT_DEF (s)
6087 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6088 continue;
6090 if (!large_huge.m_names)
6091 large_huge.m_names = BITMAP_ALLOC (NULL);
6092 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6093 if (has_single_use (s))
6095 if (!large_huge.m_single_use_names)
6096 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6097 bitmap_set_bit (large_huge.m_single_use_names,
6098 SSA_NAME_VERSION (s));
6100 if (SSA_NAME_VAR (s)
6101 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6102 && SSA_NAME_IS_DEFAULT_DEF (s))
6103 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6104 has_large_huge_parm_result = true;
6105 if (optimize
6106 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6107 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6108 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6109 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6111 use_operand_p use_p;
6112 imm_use_iterator iter;
6113 bool optimizable_load = true;
6114 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6116 gimple *use_stmt = USE_STMT (use_p);
6117 if (is_gimple_debug (use_stmt))
6118 continue;
6119 if (gimple_code (use_stmt) == GIMPLE_PHI
6120 || is_gimple_call (use_stmt))
6122 optimizable_load = false;
6123 break;
6127 ssa_op_iter oi;
6128 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6129 oi, SSA_OP_USE)
6131 tree s2 = USE_FROM_PTR (use_p);
6132 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6134 optimizable_load = false;
6135 break;
6139 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6141 if (!large_huge.m_loads)
6142 large_huge.m_loads = BITMAP_ALLOC (NULL);
6143 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6147 /* We also need to rewrite stores of large/huge _BitInt INTEGER_CSTs
6148 into memory. Such functions could have no large/huge SSA_NAMEs. */
6149 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6151 gimple *g = SSA_NAME_DEF_STMT (s);
6152 if (is_gimple_assign (g) && gimple_store_p (g))
6154 tree t = gimple_assign_rhs1 (g);
6155 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6156 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6157 has_large_huge = true;
6162 if (large_huge.m_names || has_large_huge)
6164 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6165 calculate_dominance_info (CDI_DOMINATORS);
6166 if (optimize)
6167 enable_ranger (cfun);
6168 if (large_huge.m_loads)
6170 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6171 entry->aux = NULL;
6172 bitint_dom_walker (large_huge.m_names,
6173 large_huge.m_loads).walk (entry);
6174 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6175 clear_aux_for_blocks ();
6176 BITMAP_FREE (large_huge.m_loads);
6178 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6179 large_huge.m_limb_size
6180 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6182 if (large_huge.m_names)
6184 large_huge.m_map
6185 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6186 coalesce_ssa_name (large_huge.m_map);
6187 partition_view_normal (large_huge.m_map);
6188 if (dump_file && (dump_flags & TDF_DETAILS))
6190 fprintf (dump_file, "After Coalescing:\n");
6191 dump_var_map (dump_file, large_huge.m_map);
6193 large_huge.m_vars
6194 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6195 bitmap_iterator bi;
6196 if (has_large_huge_parm_result)
6197 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6199 tree s = ssa_name (i);
6200 if (SSA_NAME_VAR (s)
6201 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6202 && SSA_NAME_IS_DEFAULT_DEF (s))
6203 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6205 int p = var_to_partition (large_huge.m_map, s);
6206 if (large_huge.m_vars[p] == NULL_TREE)
6208 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6209 mark_addressable (SSA_NAME_VAR (s));
6213 tree atype = NULL_TREE;
6214 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6216 tree s = ssa_name (i);
6217 int p = var_to_partition (large_huge.m_map, s);
6218 if (large_huge.m_vars[p] != NULL_TREE)
6219 continue;
6220 if (atype == NULL_TREE
6221 || !tree_int_cst_equal (TYPE_SIZE (atype),
6222 TYPE_SIZE (TREE_TYPE (s))))
6224 unsigned HOST_WIDE_INT nelts
6225 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6226 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6228 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6229 mark_addressable (large_huge.m_vars[p]);
6233 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6235 gimple_stmt_iterator prev;
6236 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6237 gsi = prev)
6239 prev = gsi;
6240 gsi_prev (&prev);
6241 ssa_op_iter iter;
6242 gimple *stmt = gsi_stmt (gsi);
6243 if (is_gimple_debug (stmt))
6244 continue;
6245 bitint_prec_kind kind = bitint_prec_small;
6246 tree t;
6247 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6248 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6250 bitint_prec_kind this_kind
6251 = bitint_precision_kind (TREE_TYPE (t));
6252 if (this_kind > kind)
6253 kind = this_kind;
6255 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6257 t = gimple_assign_rhs1 (stmt);
6258 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6260 bitint_prec_kind this_kind
6261 = bitint_precision_kind (TREE_TYPE (t));
6262 if (this_kind > kind)
6263 kind = this_kind;
6266 if (is_gimple_assign (stmt)
6267 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6269 t = gimple_assign_rhs1 (stmt);
6270 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6271 && TREE_CODE (t) == INTEGER_CST)
6273 bitint_prec_kind this_kind
6274 = bitint_precision_kind (TREE_TYPE (t));
6275 if (this_kind > kind)
6276 kind = this_kind;
6279 if (is_gimple_call (stmt))
6281 t = gimple_call_lhs (stmt);
6282 if (t
6283 && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
6284 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6286 bitint_prec_kind this_kind
6287 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6288 if (this_kind > kind)
6289 kind = this_kind;
6292 if (kind == bitint_prec_small)
6293 continue;
6294 switch (gimple_code (stmt))
6296 case GIMPLE_CALL:
6297 /* For now. We'll need to handle some internal functions and
6298 perhaps some builtins. */
6299 if (kind == bitint_prec_middle)
6300 continue;
6301 break;
6302 case GIMPLE_ASM:
6303 if (kind == bitint_prec_middle)
6304 continue;
6305 break;
6306 case GIMPLE_RETURN:
6307 continue;
6308 case GIMPLE_ASSIGN:
6309 if (gimple_clobber_p (stmt))
6310 continue;
6311 if (kind >= bitint_prec_large)
6312 break;
6313 if (gimple_assign_single_p (stmt))
6314 /* No need to lower copies, loads or stores. */
6315 continue;
6316 if (gimple_assign_cast_p (stmt))
6318 tree lhs = gimple_assign_lhs (stmt);
6319 tree rhs = gimple_assign_rhs1 (stmt);
6320 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6321 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6322 && (TYPE_PRECISION (TREE_TYPE (lhs))
6323 == TYPE_PRECISION (TREE_TYPE (rhs))))
6324 /* No need to lower casts to same precision. */
6325 continue;
6327 break;
6328 default:
6329 break;
6332 if (kind == bitint_prec_middle)
6334 tree type = NULL_TREE;
6335 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6336 with the same precision and back. */
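/* A sketch for a middle _BitInt(65) addition on x86_64, where the
   limb is 64-bit but TImode arithmetic is available (hypothetical
   SSA names):
     _1 = (<unnamed-signed:65>) a_2(D);
     _3 = (<unnamed-signed:65>) b_4(D);
     _5 = _1 + _3;
     c_6 = (_BitInt(65)) _5;
   The INTEGER_TYPE addition can then be expanded via TImode by the
   normal expansion machinery.  */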
6337 unsigned int nops = gimple_num_ops (stmt);
6338 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6339 i < nops; ++i)
6340 if (tree op = gimple_op (stmt, i))
6342 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6343 if (nop != op)
6344 gimple_set_op (stmt, i, nop);
6345 else if (COMPARISON_CLASS_P (op))
6347 TREE_OPERAND (op, 0)
6348 = maybe_cast_middle_bitint (&gsi,
6349 TREE_OPERAND (op, 0),
6350 type);
6351 TREE_OPERAND (op, 1)
6352 = maybe_cast_middle_bitint (&gsi,
6353 TREE_OPERAND (op, 1),
6354 type);
6356 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6358 CASE_LOW (op)
6359 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6360 type);
6361 CASE_HIGH (op)
6362 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6363 type);
6366 if (tree lhs = gimple_get_lhs (stmt))
6367 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6368 && (bitint_precision_kind (TREE_TYPE (lhs))
6369 == bitint_prec_middle))
6371 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6372 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6373 type = build_nonstandard_integer_type (prec, uns);
6374 tree lhs2 = make_ssa_name (type);
6375 gimple_set_lhs (stmt, lhs2);
6376 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6377 if (stmt_ends_bb_p (stmt))
6379 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6380 gsi_insert_on_edge_immediate (e, g);
6382 else
6383 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6385 update_stmt (stmt);
6386 continue;
6389 if (tree lhs = gimple_get_lhs (stmt))
6390 if (TREE_CODE (lhs) == SSA_NAME)
6392 tree type = TREE_TYPE (lhs);
6393 if (TREE_CODE (type) == COMPLEX_TYPE)
6394 type = TREE_TYPE (type);
6395 if (TREE_CODE (type) == BITINT_TYPE
6396 && bitint_precision_kind (type) >= bitint_prec_large
6397 && (large_huge.m_names == NULL
6398 || !bitmap_bit_p (large_huge.m_names,
6399 SSA_NAME_VERSION (lhs))))
6400 continue;
6403 large_huge.lower_stmt (stmt);
6406 tree atype = NULL_TREE;
6407 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6408 gsi_next (&gsi))
6410 gphi *phi = gsi.phi ();
6411 tree lhs = gimple_phi_result (phi);
6412 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6413 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6414 continue;
6415 int p1 = var_to_partition (large_huge.m_map, lhs);
6416 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6417 tree v1 = large_huge.m_vars[p1];
6418 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6420 tree arg = gimple_phi_arg_def (phi, i);
6421 edge e = gimple_phi_arg_edge (phi, i);
6422 gimple *g;
6423 switch (TREE_CODE (arg))
6425 case INTEGER_CST:
6426 if (integer_zerop (arg) && VAR_P (v1))
6428 tree zero = build_zero_cst (TREE_TYPE (v1));
6429 g = gimple_build_assign (v1, zero);
6430 gsi_insert_on_edge (e, g);
6431 edge_insertions = true;
6432 break;
6434 int ext;
6435 unsigned int min_prec, prec, rem;
6436 tree c;
6437 prec = TYPE_PRECISION (TREE_TYPE (arg));
6438 rem = prec % (2 * limb_prec);
6439 min_prec = bitint_min_cst_precision (arg, ext);
6440 if (min_prec > prec - rem - 2 * limb_prec
6441 && min_prec > (unsigned) limb_prec)
6442 /* Constant which has enough significant bits that it
6443 isn't worth trying to save .rodata space by extending
6444 it from a smaller number. */
6445 min_prec = prec;
6446 else
6447 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6448 if (min_prec == 0)
6449 c = NULL_TREE;
6450 else if (min_prec == prec)
6451 c = tree_output_constant_def (arg);
6452 else if (min_prec == (unsigned) limb_prec)
6453 c = fold_convert (large_huge.m_limb_type, arg);
6454 else
6456 tree ctype = build_bitint_type (min_prec, 1);
6457 c = tree_output_constant_def (fold_convert (ctype, arg));
6459 if (c)
6461 if (VAR_P (v1) && min_prec == prec)
6463 tree v2 = build1 (VIEW_CONVERT_EXPR,
6464 TREE_TYPE (v1), c);
6465 g = gimple_build_assign (v1, v2);
6466 gsi_insert_on_edge (e, g);
6467 edge_insertions = true;
6468 break;
6470 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6471 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6472 TREE_TYPE (c), v1),
6473 c);
6474 else
6476 unsigned HOST_WIDE_INT nelts
6477 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6478 / limb_prec;
6479 tree vtype
6480 = build_array_type_nelts (large_huge.m_limb_type,
6481 nelts);
6482 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6483 vtype, v1),
6484 build1 (VIEW_CONVERT_EXPR,
6485 vtype, c));
6487 gsi_insert_on_edge (e, g);
6489 if (ext == 0)
6491 unsigned HOST_WIDE_INT nelts
6492 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6493 - min_prec) / limb_prec;
6494 tree vtype
6495 = build_array_type_nelts (large_huge.m_limb_type,
6496 nelts);
6497 tree ptype = build_pointer_type (TREE_TYPE (v1));
6498 tree off = fold_convert (ptype,
6499 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6500 tree vd = build2 (MEM_REF, vtype,
6501 build_fold_addr_expr (v1), off);
6502 g = gimple_build_assign (vd, build_zero_cst (vtype));
6504 else
6506 tree vd = v1;
6507 if (c)
6509 tree ptype = build_pointer_type (TREE_TYPE (v1));
6510 tree off
6511 = fold_convert (ptype,
6512 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6513 vd = build2 (MEM_REF, large_huge.m_limb_type,
6514 build_fold_addr_expr (v1), off);
6516 vd = build_fold_addr_expr (vd);
6517 unsigned HOST_WIDE_INT nbytes
6518 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6519 if (c)
6520 nbytes
6521 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6522 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6523 g = gimple_build_call (fn, 3, vd,
6524 integer_minus_one_node,
6525 build_int_cst (sizetype,
6526 nbytes));
6528 gsi_insert_on_edge (e, g);
6529 edge_insertions = true;
6530 break;
6531 default:
6532 gcc_unreachable ();
6533 case SSA_NAME:
6534 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6536 if (large_huge.m_names == NULL
6537 || !bitmap_bit_p (large_huge.m_names,
6538 SSA_NAME_VERSION (arg)))
6539 continue;
6541 int p2 = var_to_partition (large_huge.m_map, arg);
6542 if (p1 == p2)
6543 continue;
6544 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6545 tree v2 = large_huge.m_vars[p2];
6546 if (VAR_P (v1) && VAR_P (v2))
6547 g = gimple_build_assign (v1, v2);
6548 else if (VAR_P (v1))
6549 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6550 TREE_TYPE (v1), v2));
6551 else if (VAR_P (v2))
6552 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6553 TREE_TYPE (v2), v1), v2);
6554 else
6556 if (atype == NULL_TREE
6557 || !tree_int_cst_equal (TYPE_SIZE (atype),
6558 TYPE_SIZE (TREE_TYPE (lhs))))
6560 unsigned HOST_WIDE_INT nelts
6561 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6562 / limb_prec;
6563 atype
6564 = build_array_type_nelts (large_huge.m_limb_type,
6565 nelts);
6567 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6568 atype, v1),
6569 build1 (VIEW_CONVERT_EXPR,
6570 atype, v2));
6572 gsi_insert_on_edge (e, g);
6573 edge_insertions = true;
6574 break;
6580 if (large_huge.m_names || has_large_huge)
6582 gimple *nop = NULL;
6583 for (i = 0; i < num_ssa_names; ++i)
6585 tree s = ssa_name (i);
6586 if (s == NULL_TREE)
6587 continue;
6588 tree type = TREE_TYPE (s);
6589 if (TREE_CODE (type) == COMPLEX_TYPE)
6590 type = TREE_TYPE (type);
6591 if (TREE_CODE (type) == BITINT_TYPE
6592 && bitint_precision_kind (type) >= bitint_prec_large)
6594 if (large_huge.m_preserved
6595 && bitmap_bit_p (large_huge.m_preserved,
6596 SSA_NAME_VERSION (s)))
6597 continue;
6598 gimple *g = SSA_NAME_DEF_STMT (s);
6599 if (gimple_code (g) == GIMPLE_NOP)
6601 if (SSA_NAME_VAR (s))
6602 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6603 release_ssa_name (s);
6604 continue;
6606 if (gimple_code (g) != GIMPLE_ASM)
6608 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6609 bool save_vta = flag_var_tracking_assignments;
6610 flag_var_tracking_assignments = false;
6611 gsi_remove (&gsi, true);
6612 flag_var_tracking_assignments = save_vta;
6614 if (nop == NULL)
6615 nop = gimple_build_nop ();
6616 SSA_NAME_DEF_STMT (s) = nop;
6617 release_ssa_name (s);
6620 if (optimize)
6621 disable_ranger (cfun);
6624 if (edge_insertions)
6625 gsi_commit_edge_inserts ();
6627 return ret;
6630 namespace {
6632 const pass_data pass_data_lower_bitint =
6634 GIMPLE_PASS, /* type */
6635 "bitintlower", /* name */
6636 OPTGROUP_NONE, /* optinfo_flags */
6637 TV_NONE, /* tv_id */
6638 PROP_ssa, /* properties_required */
6639 PROP_gimple_lbitint, /* properties_provided */
6640 0, /* properties_destroyed */
6641 0, /* todo_flags_start */
6642 0, /* todo_flags_finish */
6645 class pass_lower_bitint : public gimple_opt_pass
6647 public:
6648 pass_lower_bitint (gcc::context *ctxt)
6649 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6652 /* opt_pass methods: */
6653 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6654 unsigned int execute (function *) final override
6656 return gimple_lower_bitint ();
6659 }; // class pass_lower_bitint
6661 } // anon namespace
6663 gimple_opt_pass *
6664 make_pass_lower_bitint (gcc::context *ctxt)
6666 return new pass_lower_bitint (ctxt);
6670 namespace {
6672 const pass_data pass_data_lower_bitint_O0 =
6674 GIMPLE_PASS, /* type */
6675 "bitintlower0", /* name */
6676 OPTGROUP_NONE, /* optinfo_flags */
6677 TV_NONE, /* tv_id */
6678 PROP_cfg, /* properties_required */
6679 PROP_gimple_lbitint, /* properties_provided */
6680 0, /* properties_destroyed */
6681 0, /* todo_flags_start */
6682 0, /* todo_flags_finish */
6685 class pass_lower_bitint_O0 : public gimple_opt_pass
6687 public:
6688 pass_lower_bitint_O0 (gcc::context *ctxt)
6689 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6692 /* opt_pass methods: */
6693 bool gate (function *fun) final override
6695 /* With errors, normal optimization passes are not run. If we don't
6696 lower bitint operations at all, rtl expansion will abort. */
6697 return !(fun->curr_properties & PROP_gimple_lbitint);
6700 unsigned int execute (function *) final override
6702 return gimple_lower_bitint ();
6705 }; // class pass_lower_bitint_O0
6707 } // anon namespace
6709 gimple_opt_pass *
6710 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6712 return new pass_lower_bitint_O0 (ctxt);