hppa: Export main in pr104869.C on hpux
[official-gcc.git] / gcc / gimple-lower-bitint.cc
blobc429cb245d3727fe816ad2d61648ff123d31dd51
1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "fold-const.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "cfgloop.h"
37 #include "cfganal.h"
38 #include "target.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
41 #include "domwalk.h"
42 #include "memmodel.h"
43 #include "optabs.h"
44 #include "varasm.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
50 #include "tree-eh.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
56 #include "ubsan.h"
57 #include "gimple-lower-bitint.h"
59 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
60 target hook says it is a single limb, middle _BitInt which per ABI
61 does not, but there is some INTEGER_TYPE in which arithmetics can be
62 performed (operations on such _BitInt are lowered to casts to that
63 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
64 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
65 ones), large _BitInt which should by straight line code and
66 finally huge _BitInt which should be handled by loops over the limbs. */
68 enum bitint_prec_kind {
69 bitint_prec_small,
70 bitint_prec_middle,
71 bitint_prec_large,
72 bitint_prec_huge
75 /* Caches to speed up bitint_precision_kind. */
77 static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
78 static int limb_prec;
80 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
82 static bitint_prec_kind
83 bitint_precision_kind (int prec)
85 if (prec <= small_max_prec)
86 return bitint_prec_small;
87 if (huge_min_prec && prec >= huge_min_prec)
88 return bitint_prec_huge;
89 if (large_min_prec && prec >= large_min_prec)
90 return bitint_prec_large;
91 if (mid_min_prec && prec >= mid_min_prec)
92 return bitint_prec_middle;
94 struct bitint_info info;
95 bool ok = targetm.c.bitint_type_info (prec, &info);
96 gcc_assert (ok);
97 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
98 if (prec <= GET_MODE_PRECISION (limb_mode))
100 small_max_prec = prec;
101 return bitint_prec_small;
103 if (!large_min_prec
104 && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
105 large_min_prec = MAX_FIXED_MODE_SIZE + 1;
106 if (!limb_prec)
107 limb_prec = GET_MODE_PRECISION (limb_mode);
108 if (!huge_min_prec)
110 if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
111 huge_min_prec = 4 * limb_prec;
112 else
113 huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
115 if (prec <= MAX_FIXED_MODE_SIZE)
117 if (!mid_min_prec || prec < mid_min_prec)
118 mid_min_prec = prec;
119 return bitint_prec_middle;
121 if (large_min_prec && prec <= large_min_prec)
122 return bitint_prec_large;
123 return bitint_prec_huge;
126 /* Same for a TYPE. */
128 static bitint_prec_kind
129 bitint_precision_kind (tree type)
131 return bitint_precision_kind (TYPE_PRECISION (type));
134 /* Return minimum precision needed to describe INTEGER_CST
135 CST. All bits above that precision up to precision of
136 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
137 if EXT is set to -1. */
139 static unsigned
140 bitint_min_cst_precision (tree cst, int &ext)
142 ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
143 wide_int w = wi::to_wide (cst);
144 unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
145 /* For signed values, we don't need to count the sign bit,
146 we'll use constant 0 or -1 for the upper bits. */
147 if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
148 --min_prec;
149 else
151 /* For unsigned values, also try signed min_precision
152 in case the constant has lots of most significant bits set. */
153 unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
154 if (min_prec2 < min_prec)
156 ext = -1;
157 return min_prec2;
160 return min_prec;
163 namespace {
165 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
166 cached in TYPE and return it. */
168 tree
169 maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
171 if (op == NULL_TREE
172 || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
173 || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
174 return op;
176 int prec = TYPE_PRECISION (TREE_TYPE (op));
177 int uns = TYPE_UNSIGNED (TREE_TYPE (op));
178 if (type == NULL_TREE
179 || TYPE_PRECISION (type) != prec
180 || TYPE_UNSIGNED (type) != uns)
181 type = build_nonstandard_integer_type (prec, uns);
183 if (TREE_CODE (op) != SSA_NAME)
185 tree nop = fold_convert (type, op);
186 if (is_gimple_val (nop))
187 return nop;
190 tree nop = make_ssa_name (type);
191 gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
192 gsi_insert_before (gsi, g, GSI_SAME_STMT);
193 return nop;
196 /* Return true if STMT can be handled in a loop from least to most
197 significant limb together with its dependencies. */
199 bool
200 mergeable_op (gimple *stmt)
202 if (!is_gimple_assign (stmt))
203 return false;
204 switch (gimple_assign_rhs_code (stmt))
206 case PLUS_EXPR:
207 case MINUS_EXPR:
208 case NEGATE_EXPR:
209 case BIT_AND_EXPR:
210 case BIT_IOR_EXPR:
211 case BIT_XOR_EXPR:
212 case BIT_NOT_EXPR:
213 case SSA_NAME:
214 case INTEGER_CST:
215 return true;
216 case LSHIFT_EXPR:
218 tree cnt = gimple_assign_rhs2 (stmt);
219 if (tree_fits_uhwi_p (cnt)
220 && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
221 return true;
223 break;
224 CASE_CONVERT:
225 case VIEW_CONVERT_EXPR:
227 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
228 tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
229 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
230 && TREE_CODE (lhs_type) == BITINT_TYPE
231 && TREE_CODE (rhs_type) == BITINT_TYPE
232 && bitint_precision_kind (lhs_type) >= bitint_prec_large
233 && bitint_precision_kind (rhs_type) >= bitint_prec_large
234 && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
236 if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
237 return true;
238 if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
239 return true;
240 if (bitint_precision_kind (lhs_type) == bitint_prec_large)
241 return true;
243 break;
245 default:
246 break;
248 return false;
251 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
252 _Complex large/huge _BitInt lhs which has at most two immediate uses,
253 at most one use in REALPART_EXPR stmt in the same bb and exactly one
254 IMAGPART_EXPR use in the same bb with a single use which casts it to
255 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
256 return 2. Such cases (most common uses of those builtins) can be
257 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
258 of REALPART_EXPR as not needed to be backed up by a stack variable.
259 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
262 optimizable_arith_overflow (gimple *stmt)
264 bool is_ubsan = false;
265 if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
266 return false;
267 switch (gimple_call_internal_fn (stmt))
269 case IFN_ADD_OVERFLOW:
270 case IFN_SUB_OVERFLOW:
271 case IFN_MUL_OVERFLOW:
272 break;
273 case IFN_UBSAN_CHECK_ADD:
274 case IFN_UBSAN_CHECK_SUB:
275 case IFN_UBSAN_CHECK_MUL:
276 is_ubsan = true;
277 break;
278 default:
279 return 0;
281 tree lhs = gimple_call_lhs (stmt);
282 if (!lhs)
283 return 0;
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
285 return 0;
286 tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
287 if (TREE_CODE (type) != BITINT_TYPE
288 || bitint_precision_kind (type) < bitint_prec_large)
289 return 0;
291 if (is_ubsan)
293 use_operand_p use_p;
294 gimple *use_stmt;
295 if (!single_imm_use (lhs, &use_p, &use_stmt)
296 || gimple_bb (use_stmt) != gimple_bb (stmt)
297 || !gimple_store_p (use_stmt)
298 || !is_gimple_assign (use_stmt)
299 || gimple_has_volatile_ops (use_stmt)
300 || stmt_ends_bb_p (use_stmt))
301 return 0;
302 return 3;
305 imm_use_iterator ui;
306 use_operand_p use_p;
307 int seen = 0;
308 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
310 gimple *g = USE_STMT (use_p);
311 if (is_gimple_debug (g))
312 continue;
313 if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
314 return 0;
315 if (gimple_assign_rhs_code (g) == REALPART_EXPR)
317 if ((seen & 1) != 0)
318 return 0;
319 seen |= 1;
321 else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
323 if ((seen & 2) != 0)
324 return 0;
325 seen |= 2;
327 use_operand_p use2_p;
328 gimple *use_stmt;
329 tree lhs2 = gimple_assign_lhs (g);
330 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
331 return 0;
332 if (!single_imm_use (lhs2, &use2_p, &use_stmt)
333 || gimple_bb (use_stmt) != gimple_bb (stmt)
334 || !gimple_assign_cast_p (use_stmt))
335 return 0;
337 lhs2 = gimple_assign_lhs (use_stmt);
338 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
339 || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
340 return 0;
342 else
343 return 0;
345 if ((seen & 2) == 0)
346 return 0;
347 return seen == 3 ? 2 : 1;
350 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
351 comparing large/huge _BitInt types, return the comparison code and if
352 non-NULL fill in the comparison operands to *POP1 and *POP2. */
354 tree_code
355 comparison_op (gimple *stmt, tree *pop1, tree *pop2)
357 tree op1 = NULL_TREE, op2 = NULL_TREE;
358 tree_code code = ERROR_MARK;
359 if (gimple_code (stmt) == GIMPLE_COND)
361 code = gimple_cond_code (stmt);
362 op1 = gimple_cond_lhs (stmt);
363 op2 = gimple_cond_rhs (stmt);
365 else if (is_gimple_assign (stmt))
367 code = gimple_assign_rhs_code (stmt);
368 op1 = gimple_assign_rhs1 (stmt);
369 if (TREE_CODE_CLASS (code) == tcc_comparison
370 || TREE_CODE_CLASS (code) == tcc_binary)
371 op2 = gimple_assign_rhs2 (stmt);
373 if (TREE_CODE_CLASS (code) != tcc_comparison)
374 return ERROR_MARK;
375 tree type = TREE_TYPE (op1);
376 if (TREE_CODE (type) != BITINT_TYPE
377 || bitint_precision_kind (type) < bitint_prec_large)
378 return ERROR_MARK;
379 if (pop1)
381 *pop1 = op1;
382 *pop2 = op2;
384 return code;
387 /* Class used during large/huge _BitInt lowering containing all the
388 state for the methods. */
390 struct bitint_large_huge
392 bitint_large_huge ()
393 : m_names (NULL), m_loads (NULL), m_preserved (NULL),
394 m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
395 m_limb_type (NULL_TREE), m_data (vNULL) {}
397 ~bitint_large_huge ();
399 void insert_before (gimple *);
400 tree limb_access_type (tree, tree);
401 tree limb_access (tree, tree, tree, bool);
402 void if_then (gimple *, profile_probability, edge &, edge &);
403 void if_then_else (gimple *, profile_probability, edge &, edge &);
404 void if_then_if_then_else (gimple *g, gimple *,
405 profile_probability, profile_probability,
406 edge &, edge &, edge &);
407 tree handle_operand (tree, tree);
408 tree prepare_data_in_out (tree, tree, tree *);
409 tree add_cast (tree, tree);
410 tree handle_plus_minus (tree_code, tree, tree, tree);
411 tree handle_lshift (tree, tree, tree);
412 tree handle_cast (tree, tree, tree);
413 tree handle_load (gimple *, tree);
414 tree handle_stmt (gimple *, tree);
415 tree handle_operand_addr (tree, gimple *, int *, int *);
416 tree create_loop (tree, tree *);
417 tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
418 tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
419 void lower_shift_stmt (tree, gimple *);
420 void lower_muldiv_stmt (tree, gimple *);
421 void lower_float_conv_stmt (tree, gimple *);
422 tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
423 unsigned int, bool);
424 void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
425 tree_code);
426 void lower_addsub_overflow (tree, gimple *);
427 void lower_mul_overflow (tree, gimple *);
428 void lower_cplxpart_stmt (tree, gimple *);
429 void lower_complexexpr_stmt (gimple *);
430 void lower_bit_query (gimple *);
431 void lower_call (tree, gimple *);
432 void lower_asm (gimple *);
433 void lower_stmt (gimple *);
435 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
436 merged with their uses. */
437 bitmap m_names;
438 /* Subset of those for lhs of load statements. These will be
439 cleared in m_names if the loads will be mergeable with all
440 their uses. */
441 bitmap m_loads;
442 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
443 to later passes (arguments or return values of calls). */
444 bitmap m_preserved;
445 /* Subset of m_names which have a single use. As the lowering
446 can replace various original statements with their lowered
447 form even before it is done iterating over all basic blocks,
448 testing has_single_use for the purpose of emitting clobbers
449 doesn't work properly. */
450 bitmap m_single_use_names;
451 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
452 set in m_names. */
453 var_map m_map;
454 /* Mapping of the partitions to corresponding decls. */
455 tree *m_vars;
456 /* Unsigned integer type with limb precision. */
457 tree m_limb_type;
458 /* Its TYPE_SIZE_UNIT. */
459 unsigned HOST_WIDE_INT m_limb_size;
460 /* Location of a gimple stmt which is being currently lowered. */
461 location_t m_loc;
462 /* Current stmt iterator where code is being lowered currently. */
463 gimple_stmt_iterator m_gsi;
464 /* Statement after which any clobbers should be added if non-NULL. */
465 gimple *m_after_stmt;
466 /* Set when creating loops to the loop header bb and its preheader. */
467 basic_block m_bb, m_preheader_bb;
468 /* Stmt iterator after which initialization statements should be emitted. */
469 gimple_stmt_iterator m_init_gsi;
470 /* Decl into which a mergeable statement stores result. */
471 tree m_lhs;
472 /* handle_operand/handle_stmt can be invoked in various ways.
474 lower_mergeable_stmt for large _BitInt calls those with constant
475 idx only, expanding to straight line code, for huge _BitInt
476 emits a loop from least significant limb upwards, where each loop
477 iteration handles 2 limbs, plus there can be up to one full limb
478 and one partial limb processed after the loop, where handle_operand
479 and/or handle_stmt are called with constant idx. m_upwards_2limb
480 is set for this case, false otherwise. m_upwards is true if it
481 is either large or huge _BitInt handled by lower_mergeable_stmt,
482 i.e. indexes always increase.
484 Another way is used by lower_comparison_stmt, which walks limbs
485 from most significant to least significant, partial limb if any
486 processed first with constant idx and then loop processing a single
487 limb per iteration with non-constant idx.
489 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
490 destination limbs are processed from most significant to least
491 significant or for RSHIFT_EXPR the other way around, in loops or
492 straight line code, but idx usually is non-constant (so from
493 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
494 handling there can access even partial limbs using non-constant
495 idx (then m_var_msb should be true, for all the other cases
496 including lower_mergeable_stmt/lower_comparison_stmt that is
497 not the case and so m_var_msb should be false.
499 m_first should be set the first time handle_operand/handle_stmt
500 is called and clear when it is called for some other limb with
501 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
502 or statement (e.g. +/-/<< with < limb_prec constant) needs some
503 state between the different calls, when m_first is true it should
504 push some trees to m_data vector and also make sure m_data_cnt is
505 incremented by how many trees were pushed, and when m_first is
506 false, it can use the m_data[m_data_cnt] etc. data or update them,
507 just needs to bump m_data_cnt by the same amount as when it was
508 called with m_first set. The toplevel calls to
509 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
510 m_data vector when setting m_first to true.
512 m_cast_conditional and m_bitfld_load are used when handling a
513 bit-field load inside of a widening cast. handle_cast sometimes
514 needs to do runtime comparisons and handle_operand only conditionally
515 or even in two separate conditional blocks for one idx (once with
516 constant index after comparing the runtime one for equality with the
517 constant). In these cases, m_cast_conditional is set to true and
518 the bit-field load then communicates its m_data_cnt to handle_cast
519 using m_bitfld_load. */
520 bool m_first;
521 bool m_var_msb;
522 unsigned m_upwards_2limb;
523 bool m_upwards;
524 bool m_cast_conditional;
525 unsigned m_bitfld_load;
526 vec<tree> m_data;
527 unsigned int m_data_cnt;
530 bitint_large_huge::~bitint_large_huge ()
532 BITMAP_FREE (m_names);
533 BITMAP_FREE (m_loads);
534 BITMAP_FREE (m_preserved);
535 BITMAP_FREE (m_single_use_names);
536 if (m_map)
537 delete_var_map (m_map);
538 XDELETEVEC (m_vars);
539 m_data.release ();
542 /* Insert gimple statement G before current location
543 and set its gimple_location. */
545 void
546 bitint_large_huge::insert_before (gimple *g)
548 gimple_set_location (g, m_loc);
549 gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
552 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
553 This is normally m_limb_type, except for a partial most
554 significant limb if any. */
556 tree
557 bitint_large_huge::limb_access_type (tree type, tree idx)
559 if (type == NULL_TREE)
560 return m_limb_type;
561 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
562 unsigned int prec = TYPE_PRECISION (type);
563 gcc_assert (i * limb_prec < prec);
564 if ((i + 1) * limb_prec <= prec)
565 return m_limb_type;
566 else
567 return build_nonstandard_integer_type (prec % limb_prec,
568 TYPE_UNSIGNED (type));
571 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
572 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
574 tree
575 bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
577 tree atype = (tree_fits_uhwi_p (idx)
578 ? limb_access_type (type, idx) : m_limb_type);
579 tree ret;
580 if (DECL_P (var) && tree_fits_uhwi_p (idx))
582 tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
583 unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
584 ret = build2 (MEM_REF, m_limb_type,
585 build_fold_addr_expr (var),
586 build_int_cst (ptype, off));
587 TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
588 TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
590 else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
593 = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
594 size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
595 build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
596 tree_to_uhwi (idx)
597 * m_limb_size)));
598 TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
599 TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
600 TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
602 else
604 var = unshare_expr (var);
605 if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
606 || !useless_type_conversion_p (m_limb_type,
607 TREE_TYPE (TREE_TYPE (var))))
609 unsigned HOST_WIDE_INT nelts
610 = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
611 tree atype = build_array_type_nelts (m_limb_type, nelts);
612 var = build1 (VIEW_CONVERT_EXPR, atype, var);
614 ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
616 if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
618 gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
619 insert_before (g);
620 ret = gimple_assign_lhs (g);
621 ret = build1 (NOP_EXPR, atype, ret);
623 return ret;
626 /* Emit a half diamond,
627 if (COND)
631 | new_bb1
635 or if (COND) new_bb1;
636 PROB is the probability that the condition is true.
637 Updates m_gsi to start of new_bb1.
638 Sets EDGE_TRUE to edge from new_bb1 to successor and
639 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
641 void
642 bitint_large_huge::if_then (gimple *cond, profile_probability prob,
643 edge &edge_true, edge &edge_false)
645 insert_before (cond);
646 edge e1 = split_block (gsi_bb (m_gsi), cond);
647 edge e2 = split_block (e1->dest, (gimple *) NULL);
648 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
649 e1->flags = EDGE_TRUE_VALUE;
650 e1->probability = prob;
651 e3->probability = prob.invert ();
652 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
653 edge_true = e2;
654 edge_false = e3;
655 m_gsi = gsi_after_labels (e1->dest);
658 /* Emit a full diamond,
659 if (COND)
663 new_bb1 new_bb2
667 or if (COND) new_bb2; else new_bb1;
668 PROB is the probability that the condition is true.
669 Updates m_gsi to start of new_bb2.
670 Sets EDGE_TRUE to edge from new_bb1 to successor and
671 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
673 void
674 bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
675 edge &edge_true, edge &edge_false)
677 insert_before (cond);
678 edge e1 = split_block (gsi_bb (m_gsi), cond);
679 edge e2 = split_block (e1->dest, (gimple *) NULL);
680 basic_block bb = create_empty_bb (e1->dest);
681 add_bb_to_loop (bb, e1->dest->loop_father);
682 edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
683 e1->flags = EDGE_FALSE_VALUE;
684 e3->probability = prob;
685 e1->probability = prob.invert ();
686 bb->count = e1->src->count.apply_probability (prob);
687 set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
688 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
689 edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
690 edge_false = e2;
691 m_gsi = gsi_after_labels (bb);
694 /* Emit a half diamond with full diamond in it
695 if (COND1)
699 | if (COND2)
700 | / \
701 | / \
702 |new_bb1 new_bb2
703 | | /
704 \ | /
705 \ | /
706 \ | /
708 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
709 PROB1 is the probability that the condition 1 is true.
710 PROB2 is the probability that the condition 2 is true.
711 Updates m_gsi to start of new_bb1.
712 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
713 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
714 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
715 If COND2 is NULL, this is equivalent to
716 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
717 EDGE_TRUE_TRUE = NULL; */
719 void
720 bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
721 profile_probability prob1,
722 profile_probability prob2,
723 edge &edge_true_true,
724 edge &edge_true_false,
725 edge &edge_false)
727 edge e2, e3, e4 = NULL;
728 if_then (cond1, prob1, e2, e3);
729 if (cond2 == NULL)
731 edge_true_true = NULL;
732 edge_true_false = e2;
733 edge_false = e3;
734 return;
736 insert_before (cond2);
737 e2 = split_block (gsi_bb (m_gsi), cond2);
738 basic_block bb = create_empty_bb (e2->dest);
739 add_bb_to_loop (bb, e2->dest->loop_father);
740 e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
741 set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
742 e4->probability = prob2;
743 e2->flags = EDGE_FALSE_VALUE;
744 e2->probability = prob2.invert ();
745 bb->count = e2->src->count.apply_probability (prob2);
746 e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
747 e2 = find_edge (e2->dest, e3->dest);
748 edge_true_true = e4;
749 edge_true_false = e2;
750 edge_false = e3;
751 m_gsi = gsi_after_labels (e2->src);
754 /* Emit code to access limb IDX from OP. */
756 tree
757 bitint_large_huge::handle_operand (tree op, tree idx)
759 switch (TREE_CODE (op))
761 case SSA_NAME:
762 if (m_names == NULL
763 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
765 if (SSA_NAME_IS_DEFAULT_DEF (op))
767 if (m_first)
769 tree v = create_tmp_reg (m_limb_type);
770 if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
772 DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
773 DECL_SOURCE_LOCATION (v)
774 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
776 v = get_or_create_ssa_default_def (cfun, v);
777 m_data.safe_push (v);
779 tree ret = m_data[m_data_cnt];
780 m_data_cnt++;
781 if (tree_fits_uhwi_p (idx))
783 tree type = limb_access_type (TREE_TYPE (op), idx);
784 ret = add_cast (type, ret);
786 return ret;
788 location_t loc_save = m_loc;
789 m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
790 tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
791 m_loc = loc_save;
792 return ret;
794 int p;
795 gimple *g;
796 tree t;
797 p = var_to_partition (m_map, op);
798 gcc_assert (m_vars[p] != NULL_TREE);
799 t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
800 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
801 insert_before (g);
802 t = gimple_assign_lhs (g);
803 if (m_first
804 && m_single_use_names
805 && m_vars[p] != m_lhs
806 && m_after_stmt
807 && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
809 tree clobber = build_clobber (TREE_TYPE (m_vars[p]), CLOBBER_EOL);
810 g = gimple_build_assign (m_vars[p], clobber);
811 gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
812 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
814 return t;
815 case INTEGER_CST:
816 if (tree_fits_uhwi_p (idx))
818 tree c, type = limb_access_type (TREE_TYPE (op), idx);
819 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
820 if (m_first)
822 m_data.safe_push (NULL_TREE);
823 m_data.safe_push (NULL_TREE);
825 if (limb_prec != HOST_BITS_PER_WIDE_INT)
827 wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
828 TYPE_SIGN (TREE_TYPE (op)));
829 c = wide_int_to_tree (type,
830 wide_int::from (w, TYPE_PRECISION (type),
831 UNSIGNED));
833 else if (i >= TREE_INT_CST_EXT_NUNITS (op))
834 c = build_int_cst (type,
835 tree_int_cst_sgn (op) < 0 ? -1 : 0);
836 else
837 c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
838 m_data_cnt += 2;
839 return c;
841 if (m_first
842 || (m_data[m_data_cnt] == NULL_TREE
843 && m_data[m_data_cnt + 1] == NULL_TREE))
845 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
846 unsigned int rem = prec % (2 * limb_prec);
847 int ext;
848 unsigned min_prec = bitint_min_cst_precision (op, ext);
849 if (m_first)
851 m_data.safe_push (NULL_TREE);
852 m_data.safe_push (NULL_TREE);
854 if (integer_zerop (op))
856 tree c = build_zero_cst (m_limb_type);
857 m_data[m_data_cnt] = c;
858 m_data[m_data_cnt + 1] = c;
860 else if (integer_all_onesp (op))
862 tree c = build_all_ones_cst (m_limb_type);
863 m_data[m_data_cnt] = c;
864 m_data[m_data_cnt + 1] = c;
866 else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
868 /* Single limb constant. Use a phi with that limb from
869 the preheader edge and 0 or -1 constant from the other edge
870 and for the second limb in the loop. */
871 tree out;
872 gcc_assert (m_first);
873 m_data.pop ();
874 m_data.pop ();
875 prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out);
876 g = gimple_build_assign (m_data[m_data_cnt + 1],
877 build_int_cst (m_limb_type, ext));
878 insert_before (g);
879 m_data[m_data_cnt + 1] = gimple_assign_rhs1 (g);
881 else if (min_prec > prec - rem - 2 * limb_prec)
883 /* Constant which has enough significant bits that it isn't
884 worth trying to save .rodata space by extending from smaller
885 number. */
886 tree type;
887 if (m_var_msb)
888 type = TREE_TYPE (op);
889 else
890 /* If we have a guarantee the most significant partial limb
891 (if any) will be only accessed through handle_operand
892 with INTEGER_CST idx, we don't need to include the partial
893 limb in .rodata. */
894 type = build_bitint_type (prec - rem, 1);
895 tree c = tree_output_constant_def (fold_convert (type, op));
896 m_data[m_data_cnt] = c;
897 m_data[m_data_cnt + 1] = NULL_TREE;
899 else if (m_upwards_2limb)
901 /* Constant with smaller number of bits. Trade conditional
902 code for .rodata space by extending from smaller number. */
903 min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
904 tree type = build_bitint_type (min_prec, 1);
905 tree c = tree_output_constant_def (fold_convert (type, op));
906 tree idx2 = make_ssa_name (sizetype);
907 g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
908 insert_before (g);
909 g = gimple_build_cond (LT_EXPR, idx,
910 size_int (min_prec / limb_prec),
911 NULL_TREE, NULL_TREE);
912 edge edge_true, edge_false;
913 if_then (g, (min_prec >= (prec - rem) / 2
914 ? profile_probability::likely ()
915 : profile_probability::unlikely ()),
916 edge_true, edge_false);
917 tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
918 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
919 insert_before (g);
920 c1 = gimple_assign_lhs (g);
921 tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
922 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
923 insert_before (g);
924 c2 = gimple_assign_lhs (g);
925 tree c3 = build_int_cst (m_limb_type, ext);
926 m_gsi = gsi_after_labels (edge_true->dest);
927 m_data[m_data_cnt] = make_ssa_name (m_limb_type);
928 m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
929 gphi *phi = create_phi_node (m_data[m_data_cnt],
930 edge_true->dest);
931 add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
932 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
933 phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
934 add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
935 add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
937 else
939 /* Constant with smaller number of bits. Trade conditional
940 code for .rodata space by extending from smaller number.
941 Version for loops with random access to the limbs or
942 downwards loops. */
943 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
944 tree c;
945 if (min_prec <= (unsigned) limb_prec)
946 c = fold_convert (m_limb_type, op);
947 else
949 tree type = build_bitint_type (min_prec, 1);
950 c = tree_output_constant_def (fold_convert (type, op));
952 m_data[m_data_cnt] = c;
953 m_data[m_data_cnt + 1] = integer_type_node;
955 t = m_data[m_data_cnt];
956 if (m_data[m_data_cnt + 1] == NULL_TREE)
958 t = limb_access (TREE_TYPE (op), t, idx, false);
959 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
960 insert_before (g);
961 t = gimple_assign_lhs (g);
964 else if (m_data[m_data_cnt + 1] == NULL_TREE)
966 t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
967 g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
968 insert_before (g);
969 t = gimple_assign_lhs (g);
971 else
972 t = m_data[m_data_cnt + 1];
973 if (m_data[m_data_cnt + 1] == integer_type_node)
975 unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
976 unsigned rem = prec % (2 * limb_prec);
977 int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
978 tree c = m_data[m_data_cnt];
979 unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
980 g = gimple_build_cond (LT_EXPR, idx,
981 size_int (min_prec / limb_prec),
982 NULL_TREE, NULL_TREE);
983 edge edge_true, edge_false;
984 if_then (g, (min_prec >= (prec - rem) / 2
985 ? profile_probability::likely ()
986 : profile_probability::unlikely ()),
987 edge_true, edge_false);
988 if (min_prec > (unsigned) limb_prec)
990 c = limb_access (TREE_TYPE (op), c, idx, false);
991 g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
992 insert_before (g);
993 c = gimple_assign_lhs (g);
995 tree c2 = build_int_cst (m_limb_type, ext);
996 m_gsi = gsi_after_labels (edge_true->dest);
997 t = make_ssa_name (m_limb_type);
998 gphi *phi = create_phi_node (t, edge_true->dest);
999 add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
1000 add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
1002 m_data_cnt += 2;
1003 return t;
1004 default:
1005 gcc_unreachable ();
1009 /* Helper method, add a PHI node with VAL from preheader edge if
1010 inside of a loop and m_first. Keep state in a pair of m_data
1011 elements. */
1013 tree
1014 bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out)
1016 if (!m_first)
1018 *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
1019 return m_data[m_data_cnt];
1022 *data_out = NULL_TREE;
1023 if (tree_fits_uhwi_p (idx))
1025 m_data.safe_push (val);
1026 m_data.safe_push (NULL_TREE);
1027 return val;
1030 tree in = make_ssa_name (TREE_TYPE (val));
1031 gphi *phi = create_phi_node (in, m_bb);
1032 edge e1 = find_edge (m_preheader_bb, m_bb);
1033 edge e2 = EDGE_PRED (m_bb, 0);
1034 if (e1 == e2)
1035 e2 = EDGE_PRED (m_bb, 1);
1036 add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
1037 tree out = make_ssa_name (TREE_TYPE (val));
1038 add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
1039 m_data.safe_push (in);
1040 m_data.safe_push (out);
1041 return in;
1044 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1045 convert it without emitting any code, otherwise emit
1046 the conversion statement before the current location. */
1048 tree
1049 bitint_large_huge::add_cast (tree type, tree val)
1051 if (TREE_CODE (val) == INTEGER_CST)
1052 return fold_convert (type, val);
1054 tree lhs = make_ssa_name (type);
1055 gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
1056 insert_before (g);
1057 return lhs;
1060 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1062 tree
1063 bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
1064 tree idx)
1066 tree lhs, data_out, ctype;
1067 tree rhs1_type = TREE_TYPE (rhs1);
1068 gimple *g;
1069 tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
1070 &data_out);
1072 if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
1073 TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
1075 ctype = build_complex_type (m_limb_type);
1076 if (!types_compatible_p (rhs1_type, m_limb_type))
1078 if (!TYPE_UNSIGNED (rhs1_type))
1080 tree type = unsigned_type_for (rhs1_type);
1081 rhs1 = add_cast (type, rhs1);
1082 rhs2 = add_cast (type, rhs2);
1084 rhs1 = add_cast (m_limb_type, rhs1);
1085 rhs2 = add_cast (m_limb_type, rhs2);
1087 lhs = make_ssa_name (ctype);
1088 g = gimple_build_call_internal (code == PLUS_EXPR
1089 ? IFN_UADDC : IFN_USUBC,
1090 3, rhs1, rhs2, data_in);
1091 gimple_call_set_lhs (g, lhs);
1092 insert_before (g);
1093 if (data_out == NULL_TREE)
1094 data_out = make_ssa_name (m_limb_type);
1095 g = gimple_build_assign (data_out, IMAGPART_EXPR,
1096 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1097 insert_before (g);
1099 else if (types_compatible_p (rhs1_type, m_limb_type))
1101 ctype = build_complex_type (m_limb_type);
1102 lhs = make_ssa_name (ctype);
1103 g = gimple_build_call_internal (code == PLUS_EXPR
1104 ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
1105 2, rhs1, rhs2);
1106 gimple_call_set_lhs (g, lhs);
1107 insert_before (g);
1108 if (data_out == NULL_TREE)
1109 data_out = make_ssa_name (m_limb_type);
1110 if (!integer_zerop (data_in))
1112 rhs1 = make_ssa_name (m_limb_type);
1113 g = gimple_build_assign (rhs1, REALPART_EXPR,
1114 build1 (REALPART_EXPR, m_limb_type, lhs));
1115 insert_before (g);
1116 rhs2 = make_ssa_name (m_limb_type);
1117 g = gimple_build_assign (rhs2, IMAGPART_EXPR,
1118 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1119 insert_before (g);
1120 lhs = make_ssa_name (ctype);
1121 g = gimple_build_call_internal (code == PLUS_EXPR
1122 ? IFN_ADD_OVERFLOW
1123 : IFN_SUB_OVERFLOW,
1124 2, rhs1, data_in);
1125 gimple_call_set_lhs (g, lhs);
1126 insert_before (g);
1127 data_in = make_ssa_name (m_limb_type);
1128 g = gimple_build_assign (data_in, IMAGPART_EXPR,
1129 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1130 insert_before (g);
1131 g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
1132 insert_before (g);
1134 else
1136 g = gimple_build_assign (data_out, IMAGPART_EXPR,
1137 build1 (IMAGPART_EXPR, m_limb_type, lhs));
1138 insert_before (g);
1141 else
1143 tree in = add_cast (rhs1_type, data_in);
1144 lhs = make_ssa_name (rhs1_type);
1145 g = gimple_build_assign (lhs, code, rhs1, rhs2);
1146 insert_before (g);
1147 rhs1 = make_ssa_name (rhs1_type);
1148 g = gimple_build_assign (rhs1, code, lhs, in);
1149 insert_before (g);
1150 m_data[m_data_cnt] = NULL_TREE;
1151 m_data_cnt += 2;
1152 return rhs1;
1154 rhs1 = make_ssa_name (m_limb_type);
1155 g = gimple_build_assign (rhs1, REALPART_EXPR,
1156 build1 (REALPART_EXPR, m_limb_type, lhs));
1157 insert_before (g);
1158 if (!types_compatible_p (rhs1_type, m_limb_type))
1159 rhs1 = add_cast (rhs1_type, rhs1);
1160 m_data[m_data_cnt] = data_out;
1161 m_data_cnt += 2;
1162 return rhs1;
1165 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1166 count in [0, limb_prec - 1] range. */
1168 tree
1169 bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
1171 unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
1172 gcc_checking_assert (cnt < (unsigned) limb_prec);
1173 if (cnt == 0)
1174 return rhs1;
1176 tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
1177 gimple *g;
1178 tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
1179 &data_out);
1181 if (!integer_zerop (data_in))
1183 lhs = make_ssa_name (m_limb_type);
1184 g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
1185 build_int_cst (unsigned_type_node,
1186 limb_prec - cnt));
1187 insert_before (g);
1188 if (!types_compatible_p (rhs1_type, m_limb_type))
1189 lhs = add_cast (rhs1_type, lhs);
1190 data_in = lhs;
1192 if (types_compatible_p (rhs1_type, m_limb_type))
1194 if (data_out == NULL_TREE)
1195 data_out = make_ssa_name (m_limb_type);
1196 g = gimple_build_assign (data_out, rhs1);
1197 insert_before (g);
1199 if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
1201 lhs = make_ssa_name (rhs1_type);
1202 g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
1203 insert_before (g);
1204 if (!integer_zerop (data_in))
1206 rhs1 = lhs;
1207 lhs = make_ssa_name (rhs1_type);
1208 g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
1209 insert_before (g);
1212 else
1213 lhs = data_in;
1214 m_data[m_data_cnt] = data_out;
1215 m_data_cnt += 2;
1216 return lhs;
1219 /* Helper function for handle_stmt method, handle an integral
1220 to integral conversion. */
1222 tree
1223 bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
1225 tree rhs_type = TREE_TYPE (rhs1);
1226 gimple *g;
1227 if (TREE_CODE (rhs1) == SSA_NAME
1228 && TREE_CODE (lhs_type) == BITINT_TYPE
1229 && TREE_CODE (rhs_type) == BITINT_TYPE
1230 && bitint_precision_kind (lhs_type) >= bitint_prec_large
1231 && bitint_precision_kind (rhs_type) >= bitint_prec_large)
1233 if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
1234 /* If lhs has bigger precision than rhs, we can use
1235 the simple case only if there is a guarantee that
1236 the most significant limb is handled in straight
1237 line code. If m_var_msb (on left shifts) or
1238 if m_upwards_2limb * limb_prec is equal to
1239 lhs precision that is not the case. */
1240 || (!m_var_msb
1241 && tree_int_cst_equal (TYPE_SIZE (rhs_type),
1242 TYPE_SIZE (lhs_type))
1243 && (!m_upwards_2limb
1244 || (m_upwards_2limb * limb_prec
1245 < TYPE_PRECISION (lhs_type)))))
1247 rhs1 = handle_operand (rhs1, idx);
1248 if (tree_fits_uhwi_p (idx))
1250 tree type = limb_access_type (lhs_type, idx);
1251 if (!types_compatible_p (type, TREE_TYPE (rhs1)))
1252 rhs1 = add_cast (type, rhs1);
1254 return rhs1;
1256 tree t;
1257 /* Indexes lower than this don't need any special processing. */
1258 unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
1259 - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
1260 /* Indexes >= than this always contain an extension. */
1261 unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
1262 bool save_first = m_first;
1263 if (m_first)
1265 m_data.safe_push (NULL_TREE);
1266 m_data.safe_push (NULL_TREE);
1267 m_data.safe_push (NULL_TREE);
1268 if (TYPE_UNSIGNED (rhs_type))
1269 /* No need to keep state between iterations. */
1271 else if (m_upwards && !m_upwards_2limb)
1272 /* We need to keep state between iterations, but
1273 not within any loop, everything is straight line
1274 code with only increasing indexes. */
1276 else if (!m_upwards_2limb)
1278 unsigned save_data_cnt = m_data_cnt;
1279 gimple_stmt_iterator save_gsi = m_gsi;
1280 m_gsi = m_init_gsi;
1281 if (gsi_end_p (m_gsi))
1282 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1283 else
1284 gsi_next (&m_gsi);
1285 m_data_cnt = save_data_cnt + 3;
1286 t = handle_operand (rhs1, size_int (low));
1287 m_first = false;
1288 m_data[save_data_cnt + 2]
1289 = build_int_cst (NULL_TREE, m_data_cnt);
1290 m_data_cnt = save_data_cnt;
1291 t = add_cast (signed_type_for (m_limb_type), t);
1292 tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
1293 tree n = make_ssa_name (TREE_TYPE (t));
1294 g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
1295 insert_before (g);
1296 m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
1297 m_gsi = save_gsi;
1299 else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
1300 /* We need to keep state between iterations, but
1301 fortunately not within the loop, only afterwards. */
1303 else
1305 tree out;
1306 m_data.truncate (m_data_cnt);
1307 prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
1308 m_data.safe_push (NULL_TREE);
1312 unsigned save_data_cnt = m_data_cnt;
1313 m_data_cnt += 3;
1314 if (!tree_fits_uhwi_p (idx))
1316 if (m_upwards_2limb
1317 && (m_upwards_2limb * limb_prec
1318 <= ((unsigned) TYPE_PRECISION (rhs_type)
1319 - !TYPE_UNSIGNED (rhs_type))))
1321 rhs1 = handle_operand (rhs1, idx);
1322 if (m_first)
1323 m_data[save_data_cnt + 2]
1324 = build_int_cst (NULL_TREE, m_data_cnt);
1325 m_first = save_first;
1326 return rhs1;
1328 bool single_comparison
1329 = low == high || (m_upwards_2limb && (low & 1) == m_first);
1330 g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
1331 idx, size_int (low), NULL_TREE, NULL_TREE);
1332 edge edge_true_true, edge_true_false, edge_false;
1333 if_then_if_then_else (g, (single_comparison ? NULL
1334 : gimple_build_cond (EQ_EXPR, idx,
1335 size_int (low),
1336 NULL_TREE,
1337 NULL_TREE)),
1338 profile_probability::likely (),
1339 profile_probability::unlikely (),
1340 edge_true_true, edge_true_false, edge_false);
1341 bool save_cast_conditional = m_cast_conditional;
1342 m_cast_conditional = true;
1343 m_bitfld_load = 0;
1344 tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
1345 if (m_first)
1346 m_data[save_data_cnt + 2]
1347 = build_int_cst (NULL_TREE, m_data_cnt);
1348 tree ext = NULL_TREE;
1349 tree bitfld = NULL_TREE;
1350 if (!single_comparison)
1352 m_gsi = gsi_after_labels (edge_true_true->src);
1353 m_first = false;
1354 m_data_cnt = save_data_cnt + 3;
1355 if (m_bitfld_load)
1357 bitfld = m_data[m_bitfld_load];
1358 m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
1359 m_bitfld_load = 0;
1361 t2 = handle_operand (rhs1, size_int (low));
1362 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
1363 t2 = add_cast (m_limb_type, t2);
1364 if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
1366 ext = add_cast (signed_type_for (m_limb_type), t2);
1367 tree lpm1 = build_int_cst (unsigned_type_node,
1368 limb_prec - 1);
1369 tree n = make_ssa_name (TREE_TYPE (ext));
1370 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1371 insert_before (g);
1372 ext = add_cast (m_limb_type, n);
1375 tree t3;
1376 if (TYPE_UNSIGNED (rhs_type))
1377 t3 = build_zero_cst (m_limb_type);
1378 else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
1379 t3 = m_data[save_data_cnt];
1380 else
1381 t3 = m_data[save_data_cnt + 1];
1382 m_gsi = gsi_after_labels (edge_true_false->dest);
1383 t = make_ssa_name (m_limb_type);
1384 gphi *phi = create_phi_node (t, edge_true_false->dest);
1385 add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
1386 add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
1387 if (edge_true_true)
1388 add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
1389 if (ext)
1391 tree t4 = make_ssa_name (m_limb_type);
1392 phi = create_phi_node (t4, edge_true_false->dest);
1393 add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
1394 UNKNOWN_LOCATION);
1395 add_phi_arg (phi, m_data[save_data_cnt], edge_false,
1396 UNKNOWN_LOCATION);
1397 add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
1398 g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
1399 insert_before (g);
1401 if (m_bitfld_load)
1403 tree t4;
1404 if (!m_first)
1405 t4 = m_data[m_bitfld_load + 1];
1406 else
1407 t4 = make_ssa_name (m_limb_type);
1408 phi = create_phi_node (t4, edge_true_false->dest);
1409 add_phi_arg (phi,
1410 edge_true_true ? bitfld : m_data[m_bitfld_load],
1411 edge_true_false, UNKNOWN_LOCATION);
1412 add_phi_arg (phi, m_data[m_bitfld_load + 2],
1413 edge_false, UNKNOWN_LOCATION);
1414 if (edge_true_true)
1415 add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
1416 UNKNOWN_LOCATION);
1417 m_data[m_bitfld_load] = t4;
1418 m_data[m_bitfld_load + 2] = t4;
1419 m_bitfld_load = 0;
1421 m_cast_conditional = save_cast_conditional;
1422 m_first = save_first;
1423 return t;
1425 else
1427 if (tree_to_uhwi (idx) < low)
1429 t = handle_operand (rhs1, idx);
1430 if (m_first)
1431 m_data[save_data_cnt + 2]
1432 = build_int_cst (NULL_TREE, m_data_cnt);
1434 else if (tree_to_uhwi (idx) < high)
1436 t = handle_operand (rhs1, size_int (low));
1437 if (m_first)
1438 m_data[save_data_cnt + 2]
1439 = build_int_cst (NULL_TREE, m_data_cnt);
1440 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
1441 t = add_cast (m_limb_type, t);
1442 tree ext = NULL_TREE;
1443 if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
1445 ext = add_cast (signed_type_for (m_limb_type), t);
1446 tree lpm1 = build_int_cst (unsigned_type_node,
1447 limb_prec - 1);
1448 tree n = make_ssa_name (TREE_TYPE (ext));
1449 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1450 insert_before (g);
1451 ext = add_cast (m_limb_type, n);
1452 m_data[save_data_cnt + 1] = ext;
1455 else
1457 if (TYPE_UNSIGNED (rhs_type) && m_first)
1459 handle_operand (rhs1, size_zero_node);
1460 m_data[save_data_cnt + 2]
1461 = build_int_cst (NULL_TREE, m_data_cnt);
1463 else
1464 m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
1465 if (TYPE_UNSIGNED (rhs_type))
1466 t = build_zero_cst (m_limb_type);
1467 else
1468 t = m_data[save_data_cnt + 1];
1470 tree type = limb_access_type (lhs_type, idx);
1471 if (!useless_type_conversion_p (type, m_limb_type))
1472 t = add_cast (type, t);
1473 m_first = save_first;
1474 return t;
1477 else if (TREE_CODE (lhs_type) == BITINT_TYPE
1478 && bitint_precision_kind (lhs_type) >= bitint_prec_large
1479 && INTEGRAL_TYPE_P (rhs_type))
1481 /* Add support for 3 or more limbs filled in from normal integral
1482 type if this assert fails. If no target chooses limb mode smaller
1483 than half of largest supported normal integral type, this will not
1484 be needed. */
1485 gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
1486 tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
1487 if (m_first)
1489 gimple_stmt_iterator save_gsi = m_gsi;
1490 m_gsi = m_init_gsi;
1491 if (gsi_end_p (m_gsi))
1492 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1493 else
1494 gsi_next (&m_gsi);
1495 if (TREE_CODE (rhs_type) == BITINT_TYPE
1496 && bitint_precision_kind (rhs_type) == bitint_prec_middle)
1498 tree type = NULL_TREE;
1499 rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
1500 rhs_type = TREE_TYPE (rhs1);
1502 r1 = rhs1;
1503 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
1504 r1 = add_cast (m_limb_type, rhs1);
1505 if (TYPE_PRECISION (rhs_type) > limb_prec)
1507 g = gimple_build_assign (make_ssa_name (rhs_type),
1508 RSHIFT_EXPR, rhs1,
1509 build_int_cst (unsigned_type_node,
1510 limb_prec));
1511 insert_before (g);
1512 r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
1514 if (TYPE_UNSIGNED (rhs_type))
1515 rext = build_zero_cst (m_limb_type);
1516 else
1518 rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
1519 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
1520 RSHIFT_EXPR, rext,
1521 build_int_cst (unsigned_type_node,
1522 limb_prec - 1));
1523 insert_before (g);
1524 rext = add_cast (m_limb_type, gimple_assign_lhs (g));
1526 m_gsi = save_gsi;
1528 tree t;
1529 if (m_upwards_2limb)
1531 if (m_first)
1533 tree out1, out2;
1534 prepare_data_in_out (r1, idx, &out1);
1535 g = gimple_build_assign (m_data[m_data_cnt + 1], rext);
1536 insert_before (g);
1537 if (TYPE_PRECISION (rhs_type) > limb_prec)
1539 prepare_data_in_out (r2, idx, &out2);
1540 g = gimple_build_assign (m_data[m_data_cnt + 3], rext);
1541 insert_before (g);
1542 m_data.pop ();
1543 t = m_data.pop ();
1544 m_data[m_data_cnt + 1] = t;
1546 else
1547 m_data[m_data_cnt + 1] = rext;
1548 m_data.safe_push (rext);
1549 t = m_data[m_data_cnt];
1551 else if (!tree_fits_uhwi_p (idx))
1552 t = m_data[m_data_cnt + 1];
1553 else
1555 tree type = limb_access_type (lhs_type, idx);
1556 t = m_data[m_data_cnt + 2];
1557 if (!useless_type_conversion_p (type, m_limb_type))
1558 t = add_cast (type, t);
1560 m_data_cnt += 3;
1561 return t;
1563 else if (m_first)
1565 m_data.safe_push (r1);
1566 m_data.safe_push (r2);
1567 m_data.safe_push (rext);
1569 if (tree_fits_uhwi_p (idx))
1571 tree type = limb_access_type (lhs_type, idx);
1572 if (integer_zerop (idx))
1573 t = m_data[m_data_cnt];
1574 else if (TYPE_PRECISION (rhs_type) > limb_prec
1575 && integer_onep (idx))
1576 t = m_data[m_data_cnt + 1];
1577 else
1578 t = m_data[m_data_cnt + 2];
1579 if (!useless_type_conversion_p (type, m_limb_type))
1580 t = add_cast (type, t);
1581 m_data_cnt += 3;
1582 return t;
1584 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
1585 NULL_TREE, NULL_TREE);
1586 edge e2, e3, e4 = NULL;
1587 if_then (g, profile_probability::likely (), e2, e3);
1588 if (m_data[m_data_cnt + 1])
1590 g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
1591 NULL_TREE, NULL_TREE);
1592 insert_before (g);
1593 edge e5 = split_block (gsi_bb (m_gsi), g);
1594 e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
1595 e2 = find_edge (e5->dest, e2->dest);
1596 e4->probability = profile_probability::unlikely ();
1597 e5->flags = EDGE_FALSE_VALUE;
1598 e5->probability = e4->probability.invert ();
1600 m_gsi = gsi_after_labels (e2->dest);
1601 t = make_ssa_name (m_limb_type);
1602 gphi *phi = create_phi_node (t, e2->dest);
1603 add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
1604 add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
1605 if (e4)
1606 add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
1607 m_data_cnt += 3;
1608 return t;
1610 return NULL_TREE;
1613 /* Helper function for handle_stmt method, handle a load from memory. */
1615 tree
1616 bitint_large_huge::handle_load (gimple *stmt, tree idx)
1618 tree rhs1 = gimple_assign_rhs1 (stmt);
1619 tree rhs_type = TREE_TYPE (rhs1);
1620 bool eh = stmt_ends_bb_p (stmt);
1621 edge eh_edge = NULL;
1622 gimple *g;
1624 if (eh)
1626 edge_iterator ei;
1627 basic_block bb = gimple_bb (stmt);
1629 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1630 if (eh_edge->flags & EDGE_EH)
1631 break;
1634 if (TREE_CODE (rhs1) == COMPONENT_REF
1635 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
1637 tree fld = TREE_OPERAND (rhs1, 1);
1638 /* For little-endian, we can allow as inputs bit-fields
1639 which start at a limb boundary. */
1640 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
1641 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
1642 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
1643 goto normal_load;
1644 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1645 handle it normally for now. */
1646 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
1647 goto normal_load;
1648 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
1649 poly_int64 bitoffset;
1650 poly_uint64 field_offset, repr_offset;
1651 bool var_field_off = false;
1652 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
1653 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
1654 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
1655 else
1657 bitoffset = 0;
1658 var_field_off = true;
1660 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
1661 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1662 tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
1663 TREE_OPERAND (rhs1, 0), repr,
1664 var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
1665 HOST_WIDE_INT bo = bitoffset.to_constant ();
1666 unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
1667 unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
1668 if (m_first)
1670 if (m_upwards)
1672 gimple_stmt_iterator save_gsi = m_gsi;
1673 m_gsi = m_init_gsi;
1674 if (gsi_end_p (m_gsi))
1675 m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1676 else
1677 gsi_next (&m_gsi);
1678 tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
1679 tree iv = make_ssa_name (m_limb_type);
1680 g = gimple_build_assign (iv, t);
1681 insert_before (g);
1682 if (eh)
1684 maybe_duplicate_eh_stmt (g, stmt);
1685 if (eh_edge)
1687 edge e = split_block (gsi_bb (m_gsi), g);
1688 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1689 = profile_probability::very_unlikely ();
1690 m_init_gsi.bb = e->dest;
1693 m_gsi = save_gsi;
1694 tree out;
1695 prepare_data_in_out (iv, idx, &out);
1696 out = m_data[m_data_cnt];
1697 m_data.safe_push (out);
1699 else
1701 m_data.safe_push (NULL_TREE);
1702 m_data.safe_push (NULL_TREE);
1703 m_data.safe_push (NULL_TREE);
1707 tree nidx0 = NULL_TREE, nidx1;
1708 tree iv = m_data[m_data_cnt];
1709 if (m_cast_conditional && iv)
1711 gcc_assert (!m_bitfld_load);
1712 m_bitfld_load = m_data_cnt;
1714 if (tree_fits_uhwi_p (idx))
1716 unsigned prec = TYPE_PRECISION (rhs_type);
1717 unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
1718 gcc_assert (i * limb_prec < prec);
1719 nidx1 = size_int (i + bo_idx + 1);
1720 if ((i + 1) * limb_prec > prec)
1722 prec %= limb_prec;
1723 if (prec + bo_bit <= (unsigned) limb_prec)
1724 nidx1 = NULL_TREE;
1726 if (!iv)
1727 nidx0 = size_int (i + bo_idx);
1729 else
1731 if (!iv)
1733 if (bo_idx == 0)
1734 nidx0 = idx;
1735 else
1737 nidx0 = make_ssa_name (sizetype);
1738 g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
1739 size_int (bo_idx));
1740 insert_before (g);
1743 nidx1 = make_ssa_name (sizetype);
1744 g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
1745 size_int (bo_idx + 1));
1746 insert_before (g);
1749 tree iv2 = NULL_TREE;
1750 if (nidx0)
1752 tree t = limb_access (rhs_type, nrhs1, nidx0, true);
1753 iv = make_ssa_name (m_limb_type);
1754 g = gimple_build_assign (iv, t);
1755 insert_before (g);
1756 gcc_assert (!eh);
1758 if (nidx1)
1760 bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
1761 unsigned prec = TYPE_PRECISION (rhs_type);
1762 if (conditional)
1764 if ((prec % limb_prec) == 0
1765 || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
1766 conditional = false;
1768 edge edge_true = NULL, edge_false = NULL;
1769 if (conditional)
1771 g = gimple_build_cond (NE_EXPR, idx,
1772 size_int (prec / limb_prec),
1773 NULL_TREE, NULL_TREE);
1774 if_then (g, profile_probability::likely (),
1775 edge_true, edge_false);
1777 tree t = limb_access (rhs_type, nrhs1, nidx1, true);
1778 if (m_upwards_2limb
1779 && !m_first
1780 && !m_bitfld_load
1781 && !tree_fits_uhwi_p (idx))
1782 iv2 = m_data[m_data_cnt + 1];
1783 else
1784 iv2 = make_ssa_name (m_limb_type);
1785 g = gimple_build_assign (iv2, t);
1786 insert_before (g);
1787 if (eh)
1789 maybe_duplicate_eh_stmt (g, stmt);
1790 if (eh_edge)
1792 edge e = split_block (gsi_bb (m_gsi), g);
1793 m_gsi = gsi_after_labels (e->dest);
1794 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1795 = profile_probability::very_unlikely ();
1798 if (conditional)
1800 tree iv3 = make_ssa_name (m_limb_type);
1801 if (eh)
1802 edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
1803 gphi *phi = create_phi_node (iv3, edge_true->dest);
1804 add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1805 add_phi_arg (phi, build_zero_cst (m_limb_type),
1806 edge_false, UNKNOWN_LOCATION);
1807 m_gsi = gsi_after_labels (edge_true->dest);
1810 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1811 iv, build_int_cst (unsigned_type_node, bo_bit));
1812 insert_before (g);
1813 iv = gimple_assign_lhs (g);
1814 if (iv2)
1816 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1817 iv2, build_int_cst (unsigned_type_node,
1818 limb_prec - bo_bit));
1819 insert_before (g);
1820 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1821 gimple_assign_lhs (g), iv);
1822 insert_before (g);
1823 iv = gimple_assign_lhs (g);
1824 if (m_data[m_data_cnt])
1825 m_data[m_data_cnt] = iv2;
1827 if (tree_fits_uhwi_p (idx))
1829 tree atype = limb_access_type (rhs_type, idx);
1830 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1831 iv = add_cast (atype, iv);
1833 m_data_cnt += 3;
1834 return iv;
1837 normal_load:
1838 /* Use write_p = true for loads with EH edges to make
1839 sure limb_access doesn't add a cast as separate
1840 statement after it. */
1841 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1842 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1843 g = gimple_build_assign (ret, rhs1);
1844 insert_before (g);
1845 if (eh)
1847 maybe_duplicate_eh_stmt (g, stmt);
1848 if (eh_edge)
1850 edge e = split_block (gsi_bb (m_gsi), g);
1851 m_gsi = gsi_after_labels (e->dest);
1852 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1853 = profile_probability::very_unlikely ();
1855 if (tree_fits_uhwi_p (idx))
1857 tree atype = limb_access_type (rhs_type, idx);
1858 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1859 ret = add_cast (atype, ret);
1862 return ret;
1865 /* Return a limb IDX from a mergeable statement STMT. */
1867 tree
1868 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1870 tree lhs, rhs1, rhs2 = NULL_TREE;
1871 gimple *g;
1872 switch (gimple_code (stmt))
1874 case GIMPLE_ASSIGN:
1875 if (gimple_assign_load_p (stmt))
1876 return handle_load (stmt, idx);
1877 switch (gimple_assign_rhs_code (stmt))
1879 case BIT_AND_EXPR:
1880 case BIT_IOR_EXPR:
1881 case BIT_XOR_EXPR:
1882 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1883 /* FALLTHRU */
1884 case BIT_NOT_EXPR:
1885 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1886 lhs = make_ssa_name (TREE_TYPE (rhs1));
1887 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1888 rhs1, rhs2);
1889 insert_before (g);
1890 return lhs;
1891 case PLUS_EXPR:
1892 case MINUS_EXPR:
1893 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1894 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1895 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1896 rhs1, rhs2, idx);
1897 case NEGATE_EXPR:
1898 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1899 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1900 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1901 case LSHIFT_EXPR:
1902 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1903 idx),
1904 gimple_assign_rhs2 (stmt), idx);
1905 case SSA_NAME:
1906 case INTEGER_CST:
1907 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1908 CASE_CONVERT:
1909 case VIEW_CONVERT_EXPR:
1910 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1911 gimple_assign_rhs1 (stmt), idx);
1912 default:
1913 break;
1915 break;
1916 default:
1917 break;
1919 gcc_unreachable ();
1922 /* Return minimum precision of OP at STMT.
1923 Positive value is minimum precision above which all bits
1924 are zero, negative means all bits above negation of the
1925 value are copies of the sign bit. */
1927 static int
1928 range_to_prec (tree op, gimple *stmt)
1930 int_range_max r;
1931 wide_int w;
1932 tree type = TREE_TYPE (op);
1933 unsigned int prec = TYPE_PRECISION (type);
1935 if (!optimize
1936 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1937 || r.undefined_p ())
1939 if (TYPE_UNSIGNED (type))
1940 return prec;
1941 else
1942 return -prec;
1945 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1947 w = r.lower_bound ();
1948 if (wi::neg_p (w))
1950 int min_prec1 = wi::min_precision (w, SIGNED);
1951 w = r.upper_bound ();
1952 int min_prec2 = wi::min_precision (w, SIGNED);
1953 int min_prec = MAX (min_prec1, min_prec2);
1954 return MIN (-min_prec, -2);
1958 w = r.upper_bound ();
1959 int min_prec = wi::min_precision (w, UNSIGNED);
1960 return MAX (min_prec, 1);
1963 /* Return address of the first limb of OP and write into *PREC
1964 its precision. If positive, the operand is zero extended
1965 from that precision, if it is negative, the operand is sign-extended
1966 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
1967 otherwise *PREC_STORED is prec from the innermost call without
1968 range optimizations. */
1970 tree
1971 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
1972 int *prec_stored, int *prec)
1974 wide_int w;
1975 location_t loc_save = m_loc;
1976 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
1977 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
1978 && TREE_CODE (op) != INTEGER_CST)
1980 do_int:
1981 *prec = range_to_prec (op, stmt);
1982 bitint_prec_kind kind = bitint_prec_small;
1983 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
1984 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
1985 kind = bitint_precision_kind (TREE_TYPE (op));
1986 if (kind == bitint_prec_middle)
1988 tree type = NULL_TREE;
1989 op = maybe_cast_middle_bitint (&m_gsi, op, type);
1991 tree op_type = TREE_TYPE (op);
1992 unsigned HOST_WIDE_INT nelts
1993 = CEIL (TYPE_PRECISION (op_type), limb_prec);
1994 /* Add support for 3 or more limbs filled in from normal
1995 integral type if this assert fails. If no target chooses
1996 limb mode smaller than half of largest supported normal
1997 integral type, this will not be needed. */
1998 gcc_assert (nelts <= 2);
1999 if (prec_stored)
2000 *prec_stored = (TYPE_UNSIGNED (op_type)
2001 ? TYPE_PRECISION (op_type)
2002 : -TYPE_PRECISION (op_type));
2003 if (*prec <= limb_prec && *prec >= -limb_prec)
2005 nelts = 1;
2006 if (prec_stored)
2008 if (TYPE_UNSIGNED (op_type))
2010 if (*prec_stored > limb_prec)
2011 *prec_stored = limb_prec;
2013 else if (*prec_stored < -limb_prec)
2014 *prec_stored = -limb_prec;
2017 tree atype = build_array_type_nelts (m_limb_type, nelts);
2018 tree var = create_tmp_var (atype);
2019 tree t1 = op;
2020 if (!useless_type_conversion_p (m_limb_type, op_type))
2021 t1 = add_cast (m_limb_type, t1);
2022 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2023 NULL_TREE, NULL_TREE);
2024 gimple *g = gimple_build_assign (v, t1);
2025 insert_before (g);
2026 if (nelts > 1)
2028 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2029 g = gimple_build_assign (make_ssa_name (op_type),
2030 RSHIFT_EXPR, op, lp);
2031 insert_before (g);
2032 tree t2 = gimple_assign_lhs (g);
2033 t2 = add_cast (m_limb_type, t2);
2034 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2035 NULL_TREE, NULL_TREE);
2036 g = gimple_build_assign (v, t2);
2037 insert_before (g);
2039 tree ret = build_fold_addr_expr (var);
2040 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2042 tree clobber = build_clobber (atype, CLOBBER_EOL);
2043 g = gimple_build_assign (var, clobber);
2044 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2046 m_loc = loc_save;
2047 return ret;
2049 switch (TREE_CODE (op))
2051 case SSA_NAME:
2052 if (m_names == NULL
2053 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2055 gimple *g = SSA_NAME_DEF_STMT (op);
2056 tree ret;
2057 m_loc = gimple_location (g);
2058 if (gimple_assign_load_p (g))
2060 *prec = range_to_prec (op, NULL);
2061 if (prec_stored)
2062 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2063 ? TYPE_PRECISION (TREE_TYPE (op))
2064 : -TYPE_PRECISION (TREE_TYPE (op)));
2065 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2066 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2067 NULL_TREE, true, GSI_SAME_STMT);
2069 else if (gimple_code (g) == GIMPLE_NOP)
2071 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2072 if (prec_stored)
2073 *prec_stored = *prec;
2074 tree var = create_tmp_var (m_limb_type);
2075 TREE_ADDRESSABLE (var) = 1;
2076 ret = build_fold_addr_expr (var);
2077 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2079 tree clobber = build_clobber (m_limb_type, CLOBBER_EOL);
2080 g = gimple_build_assign (var, clobber);
2081 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2084 else
2086 gcc_assert (gimple_assign_cast_p (g));
2087 tree rhs1 = gimple_assign_rhs1 (g);
2088 bitint_prec_kind kind = bitint_prec_small;
2089 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2090 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2091 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2092 if (kind >= bitint_prec_large)
2094 tree lhs_type = TREE_TYPE (op);
2095 tree rhs_type = TREE_TYPE (rhs1);
2096 int prec_stored_val = 0;
2097 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2098 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2100 if (TYPE_UNSIGNED (lhs_type)
2101 && !TYPE_UNSIGNED (rhs_type))
2102 gcc_assert (*prec >= 0 || prec_stored == NULL);
2104 else
2106 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2108 else if (TYPE_UNSIGNED (lhs_type))
2110 gcc_assert (*prec > 0
2111 || prec_stored_val > 0
2112 || (-prec_stored_val
2113 >= TYPE_PRECISION (lhs_type)));
2114 *prec = TYPE_PRECISION (lhs_type);
2116 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2118 else
2119 *prec = -TYPE_PRECISION (lhs_type);
2122 else
2124 op = rhs1;
2125 stmt = g;
2126 goto do_int;
2129 m_loc = loc_save;
2130 return ret;
2132 else
2134 int p = var_to_partition (m_map, op);
2135 gcc_assert (m_vars[p] != NULL_TREE);
2136 *prec = range_to_prec (op, stmt);
2137 if (prec_stored)
2138 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2139 ? TYPE_PRECISION (TREE_TYPE (op))
2140 : -TYPE_PRECISION (TREE_TYPE (op)));
2141 return build_fold_addr_expr (m_vars[p]);
2143 case INTEGER_CST:
2144 unsigned int min_prec, mp;
2145 tree type;
2146 w = wi::to_wide (op);
2147 if (tree_int_cst_sgn (op) >= 0)
2149 min_prec = wi::min_precision (w, UNSIGNED);
2150 *prec = MAX (min_prec, 1);
2152 else
2154 min_prec = wi::min_precision (w, SIGNED);
2155 *prec = MIN ((int) -min_prec, -2);
2157 mp = CEIL (min_prec, limb_prec) * limb_prec;
2158 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2159 type = TREE_TYPE (op);
2160 else
2161 type = build_bitint_type (mp, 1);
2162 if (TREE_CODE (type) != BITINT_TYPE
2163 || bitint_precision_kind (type) == bitint_prec_small)
2165 if (TYPE_PRECISION (type) <= limb_prec)
2166 type = m_limb_type;
2167 else
2168 /* This case is for targets which e.g. have 64-bit
2169 limb but categorize up to 128-bits _BitInts as
2170 small. We could use type of m_limb_type[2] and
2171 similar instead to save space. */
2172 type = build_bitint_type (mid_min_prec, 1);
2174 if (prec_stored)
2176 if (tree_int_cst_sgn (op) >= 0)
2177 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2178 else
2179 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2181 op = tree_output_constant_def (fold_convert (type, op));
2182 return build_fold_addr_expr (op);
2183 default:
2184 gcc_unreachable ();
2188 /* Helper function, create a loop before the current location,
2189 start with sizetype INIT value from the preheader edge. Return
2190 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2191 from the latch edge. */
2193 tree
2194 bitint_large_huge::create_loop (tree init, tree *idx_next)
2196 if (!gsi_end_p (m_gsi))
2197 gsi_prev (&m_gsi);
2198 else
2199 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2200 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2201 edge e2 = split_block (e1->dest, (gimple *) NULL);
2202 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2203 e3->probability = profile_probability::very_unlikely ();
2204 e2->flags = EDGE_FALSE_VALUE;
2205 e2->probability = e3->probability.invert ();
2206 tree idx = make_ssa_name (sizetype);
2207 gphi *phi = create_phi_node (idx, e1->dest);
2208 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2209 *idx_next = make_ssa_name (sizetype);
2210 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2211 m_gsi = gsi_after_labels (e1->dest);
2212 m_bb = e1->dest;
2213 m_preheader_bb = e1->src;
2214 class loop *loop = alloc_loop ();
2215 loop->header = e1->dest;
2216 add_loop (loop, e1->src->loop_father);
2217 return idx;
2220 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2221 lowered using iteration from the least significant limb up to the most
2222 significant limb. For large _BitInt it is emitted as straight line code
2223 before current location, for huge _BitInt as a loop handling two limbs
2224 at once, followed by handling up to limbs in straight line code (at most
2225 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2226 comparisons, in that case CMP_CODE should be the comparison code and
2227 CMP_OP1/CMP_OP2 the comparison operands. */
2229 tree
2230 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2231 tree cmp_op1, tree cmp_op2)
2233 bool eq_p = cmp_code != ERROR_MARK;
2234 tree type;
2235 if (eq_p)
2236 type = TREE_TYPE (cmp_op1);
2237 else
2238 type = TREE_TYPE (gimple_assign_lhs (stmt));
2239 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2240 bitint_prec_kind kind = bitint_precision_kind (type);
2241 gcc_assert (kind >= bitint_prec_large);
2242 gimple *g;
2243 tree lhs = gimple_get_lhs (stmt);
2244 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2245 if (lhs
2246 && TREE_CODE (lhs) == SSA_NAME
2247 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2248 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2250 int p = var_to_partition (m_map, lhs);
2251 gcc_assert (m_vars[p] != NULL_TREE);
2252 m_lhs = lhs = m_vars[p];
2254 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2255 bool sext = false;
2256 tree ext = NULL_TREE, store_operand = NULL_TREE;
2257 bool eh = false;
2258 basic_block eh_pad = NULL;
2259 tree nlhs = NULL_TREE;
2260 unsigned HOST_WIDE_INT bo_idx = 0;
2261 unsigned HOST_WIDE_INT bo_bit = 0;
2262 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2263 if (gimple_store_p (stmt))
2265 store_operand = gimple_assign_rhs1 (stmt);
2266 eh = stmt_ends_bb_p (stmt);
2267 if (eh)
2269 edge e;
2270 edge_iterator ei;
2271 basic_block bb = gimple_bb (stmt);
2273 FOR_EACH_EDGE (e, ei, bb->succs)
2274 if (e->flags & EDGE_EH)
2276 eh_pad = e->dest;
2277 break;
2280 if (TREE_CODE (lhs) == COMPONENT_REF
2281 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2283 tree fld = TREE_OPERAND (lhs, 1);
2284 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2285 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2286 poly_int64 bitoffset;
2287 poly_uint64 field_offset, repr_offset;
2288 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2289 nlhs = lhs;
2290 else
2292 bool var_field_off = false;
2293 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2294 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2295 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2296 else
2298 bitoffset = 0;
2299 var_field_off = true;
2301 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2302 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2303 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2304 TREE_OPERAND (lhs, 0), repr,
2305 var_field_off
2306 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2307 HOST_WIDE_INT bo = bitoffset.to_constant ();
2308 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2309 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2313 if ((store_operand
2314 && TREE_CODE (store_operand) == SSA_NAME
2315 && (m_names == NULL
2316 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2317 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2318 || gimple_assign_cast_p (stmt))
2320 rhs1 = gimple_assign_rhs1 (store_operand
2321 ? SSA_NAME_DEF_STMT (store_operand)
2322 : stmt);
2323 /* Optimize mergeable ops ending with widening cast to _BitInt
2324 (or followed by store). We can lower just the limbs of the
2325 cast operand and widen afterwards. */
2326 if (TREE_CODE (rhs1) == SSA_NAME
2327 && (m_names == NULL
2328 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2329 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2330 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2331 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2332 limb_prec) < CEIL (prec, limb_prec)
2333 || (kind == bitint_prec_huge
2334 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2336 store_operand = rhs1;
2337 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2338 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2339 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2340 sext = true;
2343 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2344 if (kind == bitint_prec_large)
2345 cnt = CEIL (prec, limb_prec);
2346 else
2348 rem = (prec % (2 * limb_prec));
2349 end = (prec - rem) / limb_prec;
2350 cnt = 2 + CEIL (rem, limb_prec);
2351 idx = idx_first = create_loop (size_zero_node, &idx_next);
2354 basic_block edge_bb = NULL;
2355 if (eq_p)
2357 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2358 gsi_prev (&gsi);
2359 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2360 edge_bb = e->src;
2361 if (kind == bitint_prec_large)
2363 m_gsi = gsi_last_bb (edge_bb);
2364 if (!gsi_end_p (m_gsi))
2365 gsi_next (&m_gsi);
2368 else
2369 m_after_stmt = stmt;
2370 if (kind != bitint_prec_large)
2371 m_upwards_2limb = end;
2372 m_upwards = true;
2374 bool separate_ext
2375 = (prec != (unsigned) TYPE_PRECISION (type)
2376 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2377 > CEIL (prec, limb_prec)));
2379 for (unsigned i = 0; i < cnt; i++)
2381 m_data_cnt = 0;
2382 if (kind == bitint_prec_large)
2383 idx = size_int (i);
2384 else if (i >= 2)
2385 idx = size_int (end + (i > 2));
2386 if (eq_p)
2388 rhs1 = handle_operand (cmp_op1, idx);
2389 tree rhs2 = handle_operand (cmp_op2, idx);
2390 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2391 insert_before (g);
2392 edge e1 = split_block (gsi_bb (m_gsi), g);
2393 e1->flags = EDGE_FALSE_VALUE;
2394 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2395 e1->probability = profile_probability::unlikely ();
2396 e2->probability = e1->probability.invert ();
2397 if (i == 0)
2398 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2399 m_gsi = gsi_after_labels (e1->dest);
2401 else
2403 if (store_operand)
2404 rhs1 = handle_operand (store_operand, idx);
2405 else
2406 rhs1 = handle_stmt (stmt, idx);
2407 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2408 rhs1 = add_cast (m_limb_type, rhs1);
2409 if (sext && i == cnt - 1)
2410 ext = rhs1;
2411 tree nidx = idx;
2412 if (bo_idx)
2414 if (tree_fits_uhwi_p (idx))
2415 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2416 else
2418 nidx = make_ssa_name (sizetype);
2419 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2420 size_int (bo_idx));
2421 insert_before (g);
2424 bool done = false;
2425 basic_block new_bb = NULL;
2426 /* Handle stores into bit-fields. */
2427 if (bo_bit)
2429 if (i == 0)
2431 edge e2 = NULL;
2432 if (kind != bitint_prec_large)
2434 prepare_data_in_out (build_zero_cst (m_limb_type),
2435 idx, &bf_next);
2436 bf_next = m_data.pop ();
2437 bf_cur = m_data.pop ();
2438 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2439 NULL_TREE, NULL_TREE);
2440 edge edge_true;
2441 if_then_else (g, profile_probability::unlikely (),
2442 edge_true, e2);
2443 new_bb = e2->dest;
2445 tree ftype
2446 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2447 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2448 bitsize_int (limb_prec - bo_bit),
2449 bitsize_int (bo_idx * limb_prec + bo_bit));
2450 tree t = add_cast (ftype, rhs1);
2451 g = gimple_build_assign (bfr, t);
2452 insert_before (g);
2453 if (eh)
2455 maybe_duplicate_eh_stmt (g, stmt);
2456 if (eh_pad)
2458 edge e = split_block (gsi_bb (m_gsi), g);
2459 m_gsi = gsi_after_labels (e->dest);
2460 make_edge (e->src, eh_pad, EDGE_EH)->probability
2461 = profile_probability::very_unlikely ();
2464 if (kind == bitint_prec_large)
2466 bf_cur = rhs1;
2467 done = true;
2469 else if (e2)
2470 m_gsi = gsi_after_labels (e2->src);
2472 if (!done)
2474 tree t1 = make_ssa_name (m_limb_type);
2475 tree t2 = make_ssa_name (m_limb_type);
2476 tree t3 = make_ssa_name (m_limb_type);
2477 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2478 build_int_cst (unsigned_type_node,
2479 limb_prec - bo_bit));
2480 insert_before (g);
2481 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2482 build_int_cst (unsigned_type_node,
2483 bo_bit));
2484 insert_before (g);
2485 bf_cur = rhs1;
2486 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2487 insert_before (g);
2488 rhs1 = t3;
2489 if (bf_next && i == 1)
2491 g = gimple_build_assign (bf_next, bf_cur);
2492 insert_before (g);
2496 if (!done)
2498 /* Handle bit-field access to partial last limb if needed. */
2499 if (nlhs
2500 && i == cnt - 1
2501 && !separate_ext
2502 && tree_fits_uhwi_p (idx))
2504 unsigned int tprec = TYPE_PRECISION (type);
2505 unsigned int rprec = tprec % limb_prec;
2506 if (rprec + bo_bit < (unsigned) limb_prec)
2508 tree ftype
2509 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2510 tree bfr = build3 (BIT_FIELD_REF, ftype,
2511 unshare_expr (nlhs),
2512 bitsize_int (rprec + bo_bit),
2513 bitsize_int ((bo_idx
2514 + tprec / limb_prec)
2515 * limb_prec));
2516 tree t = add_cast (ftype, rhs1);
2517 g = gimple_build_assign (bfr, t);
2518 done = true;
2519 bf_cur = NULL_TREE;
2521 else if (rprec + bo_bit == (unsigned) limb_prec)
2522 bf_cur = NULL_TREE;
2524 /* Otherwise, stores to any other lhs. */
2525 if (!done)
2527 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2528 nidx, true);
2529 g = gimple_build_assign (l, rhs1);
2531 insert_before (g);
2532 if (eh)
2534 maybe_duplicate_eh_stmt (g, stmt);
2535 if (eh_pad)
2537 edge e = split_block (gsi_bb (m_gsi), g);
2538 m_gsi = gsi_after_labels (e->dest);
2539 make_edge (e->src, eh_pad, EDGE_EH)->probability
2540 = profile_probability::very_unlikely ();
2543 if (new_bb)
2544 m_gsi = gsi_after_labels (new_bb);
2547 m_first = false;
2548 if (kind == bitint_prec_huge && i <= 1)
2550 if (i == 0)
2552 idx = make_ssa_name (sizetype);
2553 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2554 size_one_node);
2555 insert_before (g);
2557 else
2559 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2560 size_int (2));
2561 insert_before (g);
2562 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2563 NULL_TREE, NULL_TREE);
2564 insert_before (g);
2565 if (eq_p)
2566 m_gsi = gsi_after_labels (edge_bb);
2567 else
2568 m_gsi = gsi_for_stmt (stmt);
2573 if (separate_ext)
2575 if (sext)
2577 ext = add_cast (signed_type_for (m_limb_type), ext);
2578 tree lpm1 = build_int_cst (unsigned_type_node,
2579 limb_prec - 1);
2580 tree n = make_ssa_name (TREE_TYPE (ext));
2581 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2582 insert_before (g);
2583 ext = add_cast (m_limb_type, n);
2585 else
2586 ext = build_zero_cst (m_limb_type);
2587 kind = bitint_precision_kind (type);
2588 unsigned start = CEIL (prec, limb_prec);
2589 prec = TYPE_PRECISION (type);
2590 idx = idx_first = idx_next = NULL_TREE;
2591 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2592 kind = bitint_prec_large;
2593 if (kind == bitint_prec_large)
2594 cnt = CEIL (prec, limb_prec) - start;
2595 else
2597 rem = prec % limb_prec;
2598 end = (prec - rem) / limb_prec;
2599 cnt = (bo_bit != 0) + 1 + (rem != 0);
2601 for (unsigned i = 0; i < cnt; i++)
2603 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2604 idx = size_int (start + i);
2605 else if (i == cnt - 1)
2606 idx = size_int (end);
2607 else if (i == (bo_bit != 0))
2608 idx = create_loop (size_int (start + i), &idx_next);
2609 rhs1 = ext;
2610 if (bf_cur != NULL_TREE && bf_cur != ext)
2612 tree t1 = make_ssa_name (m_limb_type);
2613 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2614 build_int_cst (unsigned_type_node,
2615 limb_prec - bo_bit));
2616 insert_before (g);
2617 if (integer_zerop (ext))
2618 rhs1 = t1;
2619 else
2621 tree t2 = make_ssa_name (m_limb_type);
2622 rhs1 = make_ssa_name (m_limb_type);
2623 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2624 build_int_cst (unsigned_type_node,
2625 bo_bit));
2626 insert_before (g);
2627 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2628 insert_before (g);
2630 bf_cur = ext;
2632 tree nidx = idx;
2633 if (bo_idx)
2635 if (tree_fits_uhwi_p (idx))
2636 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2637 else
2639 nidx = make_ssa_name (sizetype);
2640 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2641 size_int (bo_idx));
2642 insert_before (g);
2645 bool done = false;
2646 /* Handle bit-field access to partial last limb if needed. */
2647 if (nlhs && i == cnt - 1)
2649 unsigned int tprec = TYPE_PRECISION (type);
2650 unsigned int rprec = tprec % limb_prec;
2651 if (rprec + bo_bit < (unsigned) limb_prec)
2653 tree ftype
2654 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2655 tree bfr = build3 (BIT_FIELD_REF, ftype,
2656 unshare_expr (nlhs),
2657 bitsize_int (rprec + bo_bit),
2658 bitsize_int ((bo_idx + tprec / limb_prec)
2659 * limb_prec));
2660 tree t = add_cast (ftype, rhs1);
2661 g = gimple_build_assign (bfr, t);
2662 done = true;
2663 bf_cur = NULL_TREE;
2665 else if (rprec + bo_bit == (unsigned) limb_prec)
2666 bf_cur = NULL_TREE;
2668 /* Otherwise, stores to any other lhs. */
2669 if (!done)
2671 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2672 g = gimple_build_assign (l, rhs1);
2674 insert_before (g);
2675 if (eh)
2677 maybe_duplicate_eh_stmt (g, stmt);
2678 if (eh_pad)
2680 edge e = split_block (gsi_bb (m_gsi), g);
2681 m_gsi = gsi_after_labels (e->dest);
2682 make_edge (e->src, eh_pad, EDGE_EH)->probability
2683 = profile_probability::very_unlikely ();
2686 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2688 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2689 size_one_node);
2690 insert_before (g);
2691 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2692 NULL_TREE, NULL_TREE);
2693 insert_before (g);
2694 m_gsi = gsi_for_stmt (stmt);
2698 if (bf_cur != NULL_TREE)
2700 unsigned int tprec = TYPE_PRECISION (type);
2701 unsigned int rprec = tprec % limb_prec;
2702 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2703 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2704 bitsize_int (rprec + bo_bit),
2705 bitsize_int ((bo_idx + tprec / limb_prec)
2706 * limb_prec));
2707 rhs1 = bf_cur;
2708 if (bf_cur != ext)
2710 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2711 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2712 build_int_cst (unsigned_type_node,
2713 limb_prec - bo_bit));
2714 insert_before (g);
2716 rhs1 = add_cast (ftype, rhs1);
2717 g = gimple_build_assign (bfr, rhs1);
2718 insert_before (g);
2719 if (eh)
2721 maybe_duplicate_eh_stmt (g, stmt);
2722 if (eh_pad)
2724 edge e = split_block (gsi_bb (m_gsi), g);
2725 m_gsi = gsi_after_labels (e->dest);
2726 make_edge (e->src, eh_pad, EDGE_EH)->probability
2727 = profile_probability::very_unlikely ();
2732 if (gimple_store_p (stmt))
2734 unlink_stmt_vdef (stmt);
2735 release_ssa_name (gimple_vdef (stmt));
2736 gsi_remove (&m_gsi, true);
2738 if (eq_p)
2740 lhs = make_ssa_name (boolean_type_node);
2741 basic_block bb = gimple_bb (stmt);
2742 gphi *phi = create_phi_node (lhs, bb);
2743 edge e = find_edge (gsi_bb (m_gsi), bb);
2744 unsigned int n = EDGE_COUNT (bb->preds);
2745 for (unsigned int i = 0; i < n; i++)
2747 edge e2 = EDGE_PRED (bb, i);
2748 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2749 e2, UNKNOWN_LOCATION);
2751 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2752 return lhs;
2754 else
2755 return NULL_TREE;
2758 /* Handle a large/huge _BitInt comparison statement STMT other than
2759 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2760 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2761 lowered by iteration from the most significant limb downwards to
2762 the least significant one, for large _BitInt in straight line code,
2763 otherwise with most significant limb handled in
2764 straight line code followed by a loop handling one limb at a time.
2765 Comparisons with unsigned huge _BitInt with precisions which are
2766 multiples of limb precision can use just the loop and don't need to
2767 handle most significant limb before the loop. The loop or straight
2768 line code jumps to final basic block if a particular pair of limbs
2769 is not equal. */
2771 tree
2772 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2773 tree cmp_op1, tree cmp_op2)
2775 tree type = TREE_TYPE (cmp_op1);
2776 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2777 bitint_prec_kind kind = bitint_precision_kind (type);
2778 gcc_assert (kind >= bitint_prec_large);
2779 gimple *g;
2780 if (!TYPE_UNSIGNED (type)
2781 && integer_zerop (cmp_op2)
2782 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2784 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2785 tree idx = size_int (end);
2786 m_data_cnt = 0;
2787 tree rhs1 = handle_operand (cmp_op1, idx);
2788 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2790 tree stype = signed_type_for (TREE_TYPE (rhs1));
2791 rhs1 = add_cast (stype, rhs1);
2793 tree lhs = make_ssa_name (boolean_type_node);
2794 g = gimple_build_assign (lhs, cmp_code, rhs1,
2795 build_zero_cst (TREE_TYPE (rhs1)));
2796 insert_before (g);
2797 cmp_code = NE_EXPR;
2798 return lhs;
2801 unsigned cnt, rem = 0, end = 0;
2802 tree idx = NULL_TREE, idx_next = NULL_TREE;
2803 if (kind == bitint_prec_large)
2804 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2805 else
2807 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2808 if (rem == 0 && !TYPE_UNSIGNED (type))
2809 rem = limb_prec;
2810 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2811 cnt = 1 + (rem != 0);
2814 basic_block edge_bb = NULL;
2815 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2816 gsi_prev (&gsi);
2817 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2818 edge_bb = e->src;
2819 m_gsi = gsi_last_bb (edge_bb);
2820 if (!gsi_end_p (m_gsi))
2821 gsi_next (&m_gsi);
2823 edge *edges = XALLOCAVEC (edge, cnt * 2);
2824 for (unsigned i = 0; i < cnt; i++)
2826 m_data_cnt = 0;
2827 if (kind == bitint_prec_large)
2828 idx = size_int (cnt - i - 1);
2829 else if (i == cnt - 1)
2830 idx = create_loop (size_int (end - 1), &idx_next);
2831 else
2832 idx = size_int (end);
2833 tree rhs1 = handle_operand (cmp_op1, idx);
2834 tree rhs2 = handle_operand (cmp_op2, idx);
2835 if (i == 0
2836 && !TYPE_UNSIGNED (type)
2837 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2839 tree stype = signed_type_for (TREE_TYPE (rhs1));
2840 rhs1 = add_cast (stype, rhs1);
2841 rhs2 = add_cast (stype, rhs2);
2843 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2844 insert_before (g);
2845 edge e1 = split_block (gsi_bb (m_gsi), g);
2846 e1->flags = EDGE_FALSE_VALUE;
2847 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2848 e1->probability = profile_probability::likely ();
2849 e2->probability = e1->probability.invert ();
2850 if (i == 0)
2851 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2852 m_gsi = gsi_after_labels (e1->dest);
2853 edges[2 * i] = e2;
2854 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2855 insert_before (g);
2856 e1 = split_block (gsi_bb (m_gsi), g);
2857 e1->flags = EDGE_FALSE_VALUE;
2858 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2859 e1->probability = profile_probability::unlikely ();
2860 e2->probability = e1->probability.invert ();
2861 m_gsi = gsi_after_labels (e1->dest);
2862 edges[2 * i + 1] = e2;
2863 m_first = false;
2864 if (kind == bitint_prec_huge && i == cnt - 1)
2866 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2867 insert_before (g);
2868 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2869 NULL_TREE, NULL_TREE);
2870 insert_before (g);
2871 edge true_edge, false_edge;
2872 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2873 &true_edge, &false_edge);
2874 m_gsi = gsi_after_labels (false_edge->dest);
2878 tree lhs = make_ssa_name (boolean_type_node);
2879 basic_block bb = gimple_bb (stmt);
2880 gphi *phi = create_phi_node (lhs, bb);
2881 for (unsigned int i = 0; i < cnt * 2; i++)
2883 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2884 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2885 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2887 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2888 ? boolean_true_node : boolean_false_node,
2889 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2890 cmp_code = NE_EXPR;
2891 return lhs;
2894 /* Lower large/huge _BitInt left and right shift except for left
2895 shift by < limb_prec constant. */
2897 void
2898 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2900 tree rhs1 = gimple_assign_rhs1 (stmt);
2901 tree lhs = gimple_assign_lhs (stmt);
2902 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2903 tree type = TREE_TYPE (rhs1);
2904 gimple *final_stmt = gsi_stmt (m_gsi);
2905 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2906 && bitint_precision_kind (type) >= bitint_prec_large);
2907 int prec = TYPE_PRECISION (type);
2908 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2909 gimple *g;
2910 if (obj == NULL_TREE)
2912 int part = var_to_partition (m_map, lhs);
2913 gcc_assert (m_vars[part] != NULL_TREE);
2914 obj = m_vars[part];
2916 /* Preparation code common for both left and right shifts.
2917 unsigned n1 = n % limb_prec;
2918 size_t n2 = n / limb_prec;
2919 size_t n3 = n1 != 0;
2920 unsigned n4 = (limb_prec - n1) % limb_prec;
2921 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
2922 if (TREE_CODE (n) == INTEGER_CST)
2924 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2925 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2926 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2927 n3 = size_int (!integer_zerop (n1));
2928 n4 = int_const_binop (TRUNC_MOD_EXPR,
2929 int_const_binop (MINUS_EXPR, lp, n1), lp);
2931 else
2933 n1 = make_ssa_name (TREE_TYPE (n));
2934 n2 = make_ssa_name (sizetype);
2935 n3 = make_ssa_name (sizetype);
2936 n4 = make_ssa_name (TREE_TYPE (n));
2937 if (pow2p_hwi (limb_prec))
2939 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2940 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2941 insert_before (g);
2942 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2943 TREE_TYPE (n))
2944 ? n2 : make_ssa_name (TREE_TYPE (n)),
2945 RSHIFT_EXPR, n,
2946 build_int_cst (TREE_TYPE (n),
2947 exact_log2 (limb_prec)));
2948 insert_before (g);
2949 if (gimple_assign_lhs (g) != n2)
2951 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2952 insert_before (g);
2954 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2955 NEGATE_EXPR, n1);
2956 insert_before (g);
2957 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
2958 lpm1);
2959 insert_before (g);
2961 else
2963 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2964 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
2965 insert_before (g);
2966 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2967 TREE_TYPE (n))
2968 ? n2 : make_ssa_name (TREE_TYPE (n)),
2969 TRUNC_DIV_EXPR, n, lp);
2970 insert_before (g);
2971 if (gimple_assign_lhs (g) != n2)
2973 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2974 insert_before (g);
2976 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2977 MINUS_EXPR, lp, n1);
2978 insert_before (g);
2979 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
2980 lp);
2981 insert_before (g);
2983 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
2984 build_zero_cst (TREE_TYPE (n)));
2985 insert_before (g);
2986 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
2987 insert_before (g);
2989 tree p = build_int_cst (sizetype,
2990 prec / limb_prec - (prec % limb_prec == 0));
2991 if (rhs_code == RSHIFT_EXPR)
2993 /* Lower
2994 dst = src >> n;
2996 unsigned n1 = n % limb_prec;
2997 size_t n2 = n / limb_prec;
2998 size_t n3 = n1 != 0;
2999 unsigned n4 = (limb_prec - n1) % limb_prec;
3000 size_t idx;
3001 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3002 int signed_p = (typeof (src) -1) < 0;
3003 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3004 ? p : p - n3); ++idx)
3005 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3006 limb_type ext;
3007 if (prec % limb_prec == 0)
3008 ext = src[p];
3009 else if (signed_p)
3010 ext = ((signed limb_type) (src[p] << (limb_prec
3011 - (prec % limb_prec))))
3012 >> (limb_prec - (prec % limb_prec));
3013 else
3014 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3015 if (!signed_p && (prec % limb_prec == 0))
3017 else if (idx < prec / 64)
3019 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3020 ++idx;
3022 idx -= n2;
3023 if (signed_p)
3025 dst[idx] = ((signed limb_type) ext) >> n1;
3026 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3028 else
3030 dst[idx] = ext >> n1;
3031 ext = 0;
3033 for (++idx; idx <= p; ++idx)
3034 dst[idx] = ext; */
3035 tree pmn3;
3036 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3037 pmn3 = p;
3038 else if (TREE_CODE (n3) == INTEGER_CST)
3039 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3040 else
3042 pmn3 = make_ssa_name (sizetype);
3043 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3044 insert_before (g);
3046 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3047 edge edge_true, edge_false;
3048 if_then (g, profile_probability::likely (), edge_true, edge_false);
3049 tree idx_next;
3050 tree idx = create_loop (n2, &idx_next);
3051 tree idxmn2 = make_ssa_name (sizetype);
3052 tree idxpn3 = make_ssa_name (sizetype);
3053 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3054 insert_before (g);
3055 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3056 insert_before (g);
3057 m_data_cnt = 0;
3058 tree t1 = handle_operand (rhs1, idx);
3059 m_first = false;
3060 g = gimple_build_assign (make_ssa_name (m_limb_type),
3061 RSHIFT_EXPR, t1, n1);
3062 insert_before (g);
3063 t1 = gimple_assign_lhs (g);
3064 if (!integer_zerop (n3))
3066 m_data_cnt = 0;
3067 tree t2 = handle_operand (rhs1, idxpn3);
3068 g = gimple_build_assign (make_ssa_name (m_limb_type),
3069 LSHIFT_EXPR, t2, n4);
3070 insert_before (g);
3071 t2 = gimple_assign_lhs (g);
3072 g = gimple_build_assign (make_ssa_name (m_limb_type),
3073 BIT_IOR_EXPR, t1, t2);
3074 insert_before (g);
3075 t1 = gimple_assign_lhs (g);
3077 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3078 g = gimple_build_assign (l, t1);
3079 insert_before (g);
3080 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3081 insert_before (g);
3082 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3083 insert_before (g);
3084 idx = make_ssa_name (sizetype);
3085 m_gsi = gsi_for_stmt (final_stmt);
3086 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3087 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3088 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3089 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3090 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3091 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3092 m_data_cnt = 0;
3093 tree ms = handle_operand (rhs1, p);
3094 tree ext = ms;
3095 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3096 ext = add_cast (m_limb_type, ms);
3097 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3098 && !integer_zerop (n3))
3100 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3101 if_then (g, profile_probability::likely (), edge_true, edge_false);
3102 m_data_cnt = 0;
3103 t1 = handle_operand (rhs1, idx);
3104 g = gimple_build_assign (make_ssa_name (m_limb_type),
3105 RSHIFT_EXPR, t1, n1);
3106 insert_before (g);
3107 t1 = gimple_assign_lhs (g);
3108 g = gimple_build_assign (make_ssa_name (m_limb_type),
3109 LSHIFT_EXPR, ext, n4);
3110 insert_before (g);
3111 tree t2 = gimple_assign_lhs (g);
3112 g = gimple_build_assign (make_ssa_name (m_limb_type),
3113 BIT_IOR_EXPR, t1, t2);
3114 insert_before (g);
3115 t1 = gimple_assign_lhs (g);
3116 idxmn2 = make_ssa_name (sizetype);
3117 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3118 insert_before (g);
3119 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3120 g = gimple_build_assign (l, t1);
3121 insert_before (g);
3122 idx_next = make_ssa_name (sizetype);
3123 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3124 insert_before (g);
3125 m_gsi = gsi_for_stmt (final_stmt);
3126 tree nidx = make_ssa_name (sizetype);
3127 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3128 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3129 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3130 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3131 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3132 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3133 idx = nidx;
3135 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3136 insert_before (g);
3137 idx = gimple_assign_lhs (g);
3138 tree sext = ext;
3139 if (!TYPE_UNSIGNED (type))
3140 sext = add_cast (signed_type_for (m_limb_type), ext);
3141 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3142 RSHIFT_EXPR, sext, n1);
3143 insert_before (g);
3144 t1 = gimple_assign_lhs (g);
3145 if (!TYPE_UNSIGNED (type))
3147 t1 = add_cast (m_limb_type, t1);
3148 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3149 RSHIFT_EXPR, sext,
3150 build_int_cst (TREE_TYPE (n),
3151 limb_prec - 1));
3152 insert_before (g);
3153 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3155 else
3156 ext = build_zero_cst (m_limb_type);
3157 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3158 g = gimple_build_assign (l, t1);
3159 insert_before (g);
3160 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3161 size_one_node);
3162 insert_before (g);
3163 idx = gimple_assign_lhs (g);
3164 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3165 if_then (g, profile_probability::likely (), edge_true, edge_false);
3166 idx = create_loop (idx, &idx_next);
3167 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3168 g = gimple_build_assign (l, ext);
3169 insert_before (g);
3170 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3171 insert_before (g);
3172 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3173 insert_before (g);
3175 else
3177 /* Lower
3178 dst = src << n;
3180 unsigned n1 = n % limb_prec;
3181 size_t n2 = n / limb_prec;
3182 size_t n3 = n1 != 0;
3183 unsigned n4 = (limb_prec - n1) % limb_prec;
3184 size_t idx;
3185 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3186 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3187 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3188 if (n1)
3190 dst[idx] = src[idx - n2] << n1;
3191 --idx;
3193 for (; (ssize_t) idx >= 0; --idx)
3194 dst[idx] = 0; */
3195 tree n2pn3;
3196 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3197 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3198 else
3200 n2pn3 = make_ssa_name (sizetype);
3201 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3202 insert_before (g);
3204 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3205 idx even to access the most significant partial limb. */
3206 m_var_msb = true;
3207 if (integer_zerop (n3))
3208 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3209 counts. Emit if (true) condition that can be optimized later. */
3210 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3211 NULL_TREE, NULL_TREE);
3212 else
3213 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3214 edge edge_true, edge_false;
3215 if_then (g, profile_probability::likely (), edge_true, edge_false);
3216 tree idx_next;
3217 tree idx = create_loop (p, &idx_next);
3218 tree idxmn2 = make_ssa_name (sizetype);
3219 tree idxmn2mn3 = make_ssa_name (sizetype);
3220 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3221 insert_before (g);
3222 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3223 insert_before (g);
3224 m_data_cnt = 0;
3225 tree t1 = handle_operand (rhs1, idxmn2);
3226 m_first = false;
3227 g = gimple_build_assign (make_ssa_name (m_limb_type),
3228 LSHIFT_EXPR, t1, n1);
3229 insert_before (g);
3230 t1 = gimple_assign_lhs (g);
3231 if (!integer_zerop (n3))
3233 m_data_cnt = 0;
3234 tree t2 = handle_operand (rhs1, idxmn2mn3);
3235 g = gimple_build_assign (make_ssa_name (m_limb_type),
3236 RSHIFT_EXPR, t2, n4);
3237 insert_before (g);
3238 t2 = gimple_assign_lhs (g);
3239 g = gimple_build_assign (make_ssa_name (m_limb_type),
3240 BIT_IOR_EXPR, t1, t2);
3241 insert_before (g);
3242 t1 = gimple_assign_lhs (g);
3244 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3245 g = gimple_build_assign (l, t1);
3246 insert_before (g);
3247 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3248 insert_before (g);
3249 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3250 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3251 NULL_TREE, NULL_TREE);
3252 insert_before (g);
3253 idx = make_ssa_name (sizetype);
3254 m_gsi = gsi_for_stmt (final_stmt);
3255 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3256 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3257 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3258 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3259 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3260 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3261 m_data_cnt = 0;
3262 if (!integer_zerop (n3))
3264 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3265 NULL_TREE, NULL_TREE);
3266 if_then (g, profile_probability::likely (), edge_true, edge_false);
3267 idxmn2 = make_ssa_name (sizetype);
3268 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3269 insert_before (g);
3270 m_data_cnt = 0;
3271 t1 = handle_operand (rhs1, idxmn2);
3272 g = gimple_build_assign (make_ssa_name (m_limb_type),
3273 LSHIFT_EXPR, t1, n1);
3274 insert_before (g);
3275 t1 = gimple_assign_lhs (g);
3276 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3277 g = gimple_build_assign (l, t1);
3278 insert_before (g);
3279 idx_next = make_ssa_name (sizetype);
3280 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3281 insert_before (g);
3282 m_gsi = gsi_for_stmt (final_stmt);
3283 tree nidx = make_ssa_name (sizetype);
3284 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3285 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3286 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3287 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3288 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3289 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3290 idx = nidx;
3292 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3293 ssize_int (0), NULL_TREE, NULL_TREE);
3294 if_then (g, profile_probability::likely (), edge_true, edge_false);
3295 idx = create_loop (idx, &idx_next);
3296 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3297 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3298 insert_before (g);
3299 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3300 insert_before (g);
3301 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3302 ssize_int (0), NULL_TREE, NULL_TREE);
3303 insert_before (g);
3307 /* Lower large/huge _BitInt multiplication or division. */
3309 void
3310 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3312 tree rhs1 = gimple_assign_rhs1 (stmt);
3313 tree rhs2 = gimple_assign_rhs2 (stmt);
3314 tree lhs = gimple_assign_lhs (stmt);
3315 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3316 tree type = TREE_TYPE (rhs1);
3317 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3318 && bitint_precision_kind (type) >= bitint_prec_large);
3319 int prec = TYPE_PRECISION (type), prec1, prec2;
3320 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3321 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3322 if (obj == NULL_TREE)
3324 int part = var_to_partition (m_map, lhs);
3325 gcc_assert (m_vars[part] != NULL_TREE);
3326 obj = m_vars[part];
3327 lhs = build_fold_addr_expr (obj);
3329 else
3331 lhs = build_fold_addr_expr (obj);
3332 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3333 NULL_TREE, true, GSI_SAME_STMT);
3335 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3336 gimple *g;
3337 switch (rhs_code)
3339 case MULT_EXPR:
3340 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3341 lhs, build_int_cst (sitype, prec),
3342 rhs1, build_int_cst (sitype, prec1),
3343 rhs2, build_int_cst (sitype, prec2));
3344 insert_before (g);
3345 break;
3346 case TRUNC_DIV_EXPR:
3347 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3348 lhs, build_int_cst (sitype, prec),
3349 null_pointer_node,
3350 build_int_cst (sitype, 0),
3351 rhs1, build_int_cst (sitype, prec1),
3352 rhs2, build_int_cst (sitype, prec2));
3353 if (!stmt_ends_bb_p (stmt))
3354 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3355 insert_before (g);
3356 break;
3357 case TRUNC_MOD_EXPR:
3358 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3359 build_int_cst (sitype, 0),
3360 lhs, build_int_cst (sitype, prec),
3361 rhs1, build_int_cst (sitype, prec1),
3362 rhs2, build_int_cst (sitype, prec2));
3363 if (!stmt_ends_bb_p (stmt))
3364 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3365 insert_before (g);
3366 break;
3367 default:
3368 gcc_unreachable ();
3370 if (stmt_ends_bb_p (stmt))
3372 maybe_duplicate_eh_stmt (g, stmt);
3373 edge e1;
3374 edge_iterator ei;
3375 basic_block bb = gimple_bb (stmt);
3377 FOR_EACH_EDGE (e1, ei, bb->succs)
3378 if (e1->flags & EDGE_EH)
3379 break;
3380 if (e1)
3382 edge e2 = split_block (gsi_bb (m_gsi), g);
3383 m_gsi = gsi_after_labels (e2->dest);
3384 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3385 = profile_probability::very_unlikely ();
3390 /* Lower large/huge _BitInt conversion to/from floating point. */
3392 void
3393 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3395 tree rhs1 = gimple_assign_rhs1 (stmt);
3396 tree lhs = gimple_assign_lhs (stmt);
3397 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3398 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3399 gimple *g;
3400 if (rhs_code == FIX_TRUNC_EXPR)
3402 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3403 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3404 prec = -prec;
3405 if (obj == NULL_TREE)
3407 int part = var_to_partition (m_map, lhs);
3408 gcc_assert (m_vars[part] != NULL_TREE);
3409 obj = m_vars[part];
3410 lhs = build_fold_addr_expr (obj);
3412 else
3414 lhs = build_fold_addr_expr (obj);
3415 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3416 NULL_TREE, true, GSI_SAME_STMT);
3418 scalar_mode from_mode
3419 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3420 #ifdef HAVE_SFmode
3421 /* IEEE single is a full superset of both IEEE half and
3422 bfloat formats, convert to float first and then to _BitInt
3423 to avoid the need of another 2 library routines. */
3424 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3425 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3426 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3428 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3429 if (type)
3430 rhs1 = add_cast (type, rhs1);
3432 #endif
3433 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3434 lhs, build_int_cst (sitype, prec),
3435 rhs1);
3436 insert_before (g);
3438 else
3440 int prec;
3441 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3442 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3443 rhs1, build_int_cst (sitype, prec));
3444 gimple_call_set_lhs (g, lhs);
3445 if (!stmt_ends_bb_p (stmt))
3446 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3447 gsi_replace (&m_gsi, g, true);
3451 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3452 If check_zero is true, caller wants to check if all bits in [start, end)
3453 are zero, otherwise if bits in [start, end) are either all zero or
3454 all ones. L is the limb with index LIMB, START and END are measured
3455 in bits. */
3457 tree
3458 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3459 unsigned int end, tree l,
3460 unsigned int limb,
3461 bool check_zero)
3463 unsigned startlimb = start / limb_prec;
3464 unsigned endlimb = (end - 1) / limb_prec;
3465 gimple *g;
3467 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3468 return l;
3469 if (startlimb == endlimb && limb == startlimb)
3471 if (check_zero)
3473 wide_int w = wi::shifted_mask (start % limb_prec,
3474 end - start, false, limb_prec);
3475 g = gimple_build_assign (make_ssa_name (m_limb_type),
3476 BIT_AND_EXPR, l,
3477 wide_int_to_tree (m_limb_type, w));
3478 insert_before (g);
3479 return gimple_assign_lhs (g);
3481 unsigned int shift = start % limb_prec;
3482 if ((end % limb_prec) != 0)
3484 unsigned int lshift = (-end) % limb_prec;
3485 shift += lshift;
3486 g = gimple_build_assign (make_ssa_name (m_limb_type),
3487 LSHIFT_EXPR, l,
3488 build_int_cst (unsigned_type_node,
3489 lshift));
3490 insert_before (g);
3491 l = gimple_assign_lhs (g);
3493 l = add_cast (signed_type_for (m_limb_type), l);
3494 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3495 RSHIFT_EXPR, l,
3496 build_int_cst (unsigned_type_node, shift));
3497 insert_before (g);
3498 return add_cast (m_limb_type, gimple_assign_lhs (g));
3500 else if (limb == startlimb)
3502 if ((start % limb_prec) == 0)
3503 return l;
3504 if (!check_zero)
3505 l = add_cast (signed_type_for (m_limb_type), l);
3506 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3507 RSHIFT_EXPR, l,
3508 build_int_cst (unsigned_type_node,
3509 start % limb_prec));
3510 insert_before (g);
3511 l = gimple_assign_lhs (g);
3512 if (!check_zero)
3513 l = add_cast (m_limb_type, l);
3514 return l;
3516 else if (limb == endlimb)
3518 if ((end % limb_prec) == 0)
3519 return l;
3520 if (check_zero)
3522 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3523 g = gimple_build_assign (make_ssa_name (m_limb_type),
3524 BIT_AND_EXPR, l,
3525 wide_int_to_tree (m_limb_type, w));
3526 insert_before (g);
3527 return gimple_assign_lhs (g);
3529 unsigned int shift = (-end) % limb_prec;
3530 g = gimple_build_assign (make_ssa_name (m_limb_type),
3531 LSHIFT_EXPR, l,
3532 build_int_cst (unsigned_type_node, shift));
3533 insert_before (g);
3534 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3535 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3536 RSHIFT_EXPR, l,
3537 build_int_cst (unsigned_type_node, shift));
3538 insert_before (g);
3539 return add_cast (m_limb_type, gimple_assign_lhs (g));
3541 return l;
3544 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3545 result including overflow flag into the right locations. */
3547 void
3548 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3549 tree ovf, tree lhs, tree orig_obj,
3550 gimple *stmt, tree_code code)
3552 gimple *g;
3554 if (obj == NULL_TREE
3555 && (TREE_CODE (type) != BITINT_TYPE
3556 || bitint_precision_kind (type) < bitint_prec_large))
3558 /* Add support for 3 or more limbs filled in from normal integral
3559 type if this assert fails. If no target chooses limb mode smaller
3560 than half of largest supported normal integral type, this will not
3561 be needed. */
3562 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3563 tree lhs_type = type;
3564 if (TREE_CODE (type) == BITINT_TYPE
3565 && bitint_precision_kind (type) == bitint_prec_middle)
3566 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3567 TYPE_UNSIGNED (type));
3568 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3569 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3570 insert_before (g);
3571 r1 = gimple_assign_lhs (g);
3572 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3573 r1 = add_cast (lhs_type, r1);
3574 if (TYPE_PRECISION (lhs_type) > limb_prec)
3576 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3577 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3578 insert_before (g);
3579 r2 = gimple_assign_lhs (g);
3580 r2 = add_cast (lhs_type, r2);
3581 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3582 build_int_cst (unsigned_type_node,
3583 limb_prec));
3584 insert_before (g);
3585 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3586 gimple_assign_lhs (g));
3587 insert_before (g);
3588 r1 = gimple_assign_lhs (g);
3590 if (lhs_type != type)
3591 r1 = add_cast (type, r1);
3592 ovf = add_cast (lhs_type, ovf);
3593 if (lhs_type != type)
3594 ovf = add_cast (type, ovf);
3595 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3596 m_gsi = gsi_for_stmt (stmt);
3597 gsi_replace (&m_gsi, g, true);
3599 else
3601 unsigned HOST_WIDE_INT nelts = 0;
3602 tree atype = NULL_TREE;
3603 if (obj)
3605 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3606 if (orig_obj == NULL_TREE)
3607 nelts >>= 1;
3608 atype = build_array_type_nelts (m_limb_type, nelts);
3610 if (var && obj)
3612 tree v1, v2;
3613 tree zero;
3614 if (orig_obj == NULL_TREE)
3616 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3617 v1 = build2 (MEM_REF, atype,
3618 build_fold_addr_expr (unshare_expr (obj)), zero);
3620 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3621 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3622 else
3623 v1 = unshare_expr (obj);
3624 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3625 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3626 g = gimple_build_assign (v1, v2);
3627 insert_before (g);
3629 if (orig_obj == NULL_TREE && obj)
3631 ovf = add_cast (m_limb_type, ovf);
3632 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3633 g = gimple_build_assign (l, ovf);
3634 insert_before (g);
3635 if (nelts > 1)
3637 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3638 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3639 (nelts + 1) * m_limb_size);
3640 tree v1 = build2 (MEM_REF, atype,
3641 build_fold_addr_expr (unshare_expr (obj)),
3642 off);
3643 g = gimple_build_assign (v1, build_zero_cst (atype));
3644 insert_before (g);
3647 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3649 imm_use_iterator ui;
3650 use_operand_p use_p;
3651 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3653 g = USE_STMT (use_p);
3654 if (!is_gimple_assign (g)
3655 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3656 continue;
3657 tree lhs2 = gimple_assign_lhs (g);
3658 gimple *use_stmt;
3659 single_imm_use (lhs2, &use_p, &use_stmt);
3660 lhs2 = gimple_assign_lhs (use_stmt);
3661 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3662 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3663 g = gimple_build_assign (lhs2, ovf);
3664 else
3665 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3666 gsi_replace (&gsi, g, true);
3667 break;
3670 else if (ovf != boolean_false_node)
3672 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3673 NULL_TREE, NULL_TREE);
3674 edge edge_true, edge_false;
3675 if_then (g, profile_probability::very_unlikely (),
3676 edge_true, edge_false);
3677 tree zero = build_zero_cst (TREE_TYPE (lhs));
3678 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3679 TREE_TYPE (lhs),
3680 zero, zero, NULL);
3681 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3682 true, GSI_SAME_STMT);
3683 m_gsi = gsi_after_labels (edge_true->dest);
3686 if (var)
3688 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_EOL);
3689 g = gimple_build_assign (var, clobber);
3690 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3694 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3695 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3696 argument 1 precision PREC1 and minimum precision for the result
3697 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3699 static tree
3700 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3701 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3703 *start = 0;
3704 *end = 0;
3705 *check_zero = true;
3706 /* Ignore this special rule for subtraction, even if both
3707 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3708 in infinite precision. */
3709 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3711 /* Result in [0, prec2) is unsigned, if prec > prec2,
3712 all bits above it will be zero. */
3713 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3714 return boolean_false_node;
3715 else
3717 /* ovf if any of bits in [start, end) is non-zero. */
3718 *start = prec - !TYPE_UNSIGNED (type);
3719 *end = prec2;
3722 else if (TYPE_UNSIGNED (type))
3724 /* If result in [0, prec2) is signed and if prec > prec2,
3725 all bits above it will be sign bit copies. */
3726 if (prec >= prec2)
3728 /* ovf if bit prec - 1 is non-zero. */
3729 *start = prec - 1;
3730 *end = prec;
3732 else
3734 /* ovf if any of bits in [start, end) is non-zero. */
3735 *start = prec;
3736 *end = prec2;
3739 else if (prec >= prec2)
3740 return boolean_false_node;
3741 else
3743 /* ovf if [start, end) bits aren't all zeros or all ones. */
3744 *start = prec - 1;
3745 *end = prec2;
3746 *check_zero = false;
3748 return NULL_TREE;
3751 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3752 argument or return type _Complex large/huge _BitInt. */
3754 void
3755 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3757 tree arg0 = gimple_call_arg (stmt, 0);
3758 tree arg1 = gimple_call_arg (stmt, 1);
3759 tree lhs = gimple_call_lhs (stmt);
3760 gimple *g;
3762 if (!lhs)
3764 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3765 gsi_remove (&gsi, true);
3766 return;
3768 gimple *final_stmt = gsi_stmt (m_gsi);
3769 tree type = TREE_TYPE (lhs);
3770 if (TREE_CODE (type) == COMPLEX_TYPE)
3771 type = TREE_TYPE (type);
3772 int prec = TYPE_PRECISION (type);
3773 int prec0 = range_to_prec (arg0, stmt);
3774 int prec1 = range_to_prec (arg1, stmt);
3775 int prec2 = ((prec0 < 0) == (prec1 < 0)
3776 ? MAX (prec0 < 0 ? -prec0 : prec0,
3777 prec1 < 0 ? -prec1 : prec1) + 1
3778 : MAX (prec0 < 0 ? -prec0 : prec0 + 1,
3779 prec1 < 0 ? -prec1 : prec1 + 1) + 1);
3780 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3781 prec1 < 0 ? -prec1 : prec1);
3782 prec3 = MAX (prec3, prec);
3783 tree var = NULL_TREE;
3784 tree orig_obj = obj;
3785 if (obj == NULL_TREE
3786 && TREE_CODE (type) == BITINT_TYPE
3787 && bitint_precision_kind (type) >= bitint_prec_large
3788 && m_names
3789 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3791 int part = var_to_partition (m_map, lhs);
3792 gcc_assert (m_vars[part] != NULL_TREE);
3793 obj = m_vars[part];
3794 if (TREE_TYPE (lhs) == type)
3795 orig_obj = obj;
3797 if (TREE_CODE (type) != BITINT_TYPE
3798 || bitint_precision_kind (type) < bitint_prec_large)
3800 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3801 tree atype = build_array_type_nelts (m_limb_type, nelts);
3802 var = create_tmp_var (atype);
3805 enum tree_code code;
3806 switch (gimple_call_internal_fn (stmt))
3808 case IFN_ADD_OVERFLOW:
3809 case IFN_UBSAN_CHECK_ADD:
3810 code = PLUS_EXPR;
3811 break;
3812 case IFN_SUB_OVERFLOW:
3813 case IFN_UBSAN_CHECK_SUB:
3814 code = MINUS_EXPR;
3815 break;
3816 default:
3817 gcc_unreachable ();
3819 unsigned start, end;
3820 bool check_zero;
3821 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3822 &start, &end, &check_zero);
3824 unsigned startlimb, endlimb;
3825 if (ovf)
3827 startlimb = ~0U;
3828 endlimb = ~0U;
3830 else
3832 startlimb = start / limb_prec;
3833 endlimb = (end - 1) / limb_prec;
3836 int prec4 = ovf != NULL_TREE ? prec : prec3;
3837 bitint_prec_kind kind = bitint_precision_kind (prec4);
3838 unsigned cnt, rem = 0, fin = 0;
3839 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3840 bool last_ovf = (ovf == NULL_TREE
3841 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3842 if (kind != bitint_prec_huge)
3843 cnt = CEIL (prec4, limb_prec) + last_ovf;
3844 else
3846 rem = (prec4 % (2 * limb_prec));
3847 fin = (prec4 - rem) / limb_prec;
3848 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3849 idx = idx_first = create_loop (size_zero_node, &idx_next);
3852 if (kind == bitint_prec_huge)
3853 m_upwards_2limb = fin;
3854 m_upwards = true;
3856 tree type0 = TREE_TYPE (arg0);
3857 tree type1 = TREE_TYPE (arg1);
3858 if (TYPE_PRECISION (type0) < prec3)
3860 type0 = build_bitint_type (prec3, TYPE_UNSIGNED (type0));
3861 if (TREE_CODE (arg0) == INTEGER_CST)
3862 arg0 = fold_convert (type0, arg0);
3864 if (TYPE_PRECISION (type1) < prec3)
3866 type1 = build_bitint_type (prec3, TYPE_UNSIGNED (type1));
3867 if (TREE_CODE (arg1) == INTEGER_CST)
3868 arg1 = fold_convert (type1, arg1);
3870 unsigned int data_cnt = 0;
3871 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3872 tree cmp = build_zero_cst (m_limb_type);
3873 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3874 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3875 for (unsigned i = 0; i < cnt; i++)
3877 m_data_cnt = 0;
3878 tree rhs1, rhs2;
3879 if (kind != bitint_prec_huge)
3880 idx = size_int (i);
3881 else if (i >= 2)
3882 idx = size_int (fin + (i > 2));
3883 if (!last_ovf || i < cnt - 1)
3885 if (type0 != TREE_TYPE (arg0))
3886 rhs1 = handle_cast (type0, arg0, idx);
3887 else
3888 rhs1 = handle_operand (arg0, idx);
3889 if (type1 != TREE_TYPE (arg1))
3890 rhs2 = handle_cast (type1, arg1, idx);
3891 else
3892 rhs2 = handle_operand (arg1, idx);
3893 if (i == 0)
3894 data_cnt = m_data_cnt;
3895 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3896 rhs1 = add_cast (m_limb_type, rhs1);
3897 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3898 rhs2 = add_cast (m_limb_type, rhs2);
3899 last_rhs1 = rhs1;
3900 last_rhs2 = rhs2;
3902 else
3904 m_data_cnt = data_cnt;
3905 if (TYPE_UNSIGNED (type0))
3906 rhs1 = build_zero_cst (m_limb_type);
3907 else
3909 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3910 if (TREE_CODE (rhs1) == INTEGER_CST)
3911 rhs1 = build_int_cst (m_limb_type,
3912 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3913 else
3915 tree lpm1 = build_int_cst (unsigned_type_node,
3916 limb_prec - 1);
3917 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
3918 RSHIFT_EXPR, rhs1, lpm1);
3919 insert_before (g);
3920 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
3923 if (TYPE_UNSIGNED (type1))
3924 rhs2 = build_zero_cst (m_limb_type);
3925 else
3927 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
3928 if (TREE_CODE (rhs2) == INTEGER_CST)
3929 rhs2 = build_int_cst (m_limb_type,
3930 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
3931 else
3933 tree lpm1 = build_int_cst (unsigned_type_node,
3934 limb_prec - 1);
3935 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
3936 RSHIFT_EXPR, rhs2, lpm1);
3937 insert_before (g);
3938 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
3942 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
3943 if (ovf != boolean_false_node)
3945 if (tree_fits_uhwi_p (idx))
3947 unsigned limb = tree_to_uhwi (idx);
3948 if (limb >= startlimb && limb <= endlimb)
3950 tree l = arith_overflow_extract_bits (start, end, rhs,
3951 limb, check_zero);
3952 tree this_ovf = make_ssa_name (boolean_type_node);
3953 if (ovf == NULL_TREE && !check_zero)
3955 cmp = l;
3956 g = gimple_build_assign (make_ssa_name (m_limb_type),
3957 PLUS_EXPR, l,
3958 build_int_cst (m_limb_type, 1));
3959 insert_before (g);
3960 g = gimple_build_assign (this_ovf, GT_EXPR,
3961 gimple_assign_lhs (g),
3962 build_int_cst (m_limb_type, 1));
3964 else
3965 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
3966 insert_before (g);
3967 if (ovf == NULL_TREE)
3968 ovf = this_ovf;
3969 else
3971 tree b = make_ssa_name (boolean_type_node);
3972 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
3973 insert_before (g);
3974 ovf = b;
3978 else if (startlimb < fin)
3980 if (m_first && startlimb + 2 < fin)
3982 tree data_out;
3983 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
3984 ovf_out = m_data.pop ();
3985 m_data.pop ();
3986 if (!check_zero)
3988 cmp = prepare_data_in_out (cmp, idx, &data_out);
3989 cmp_out = m_data.pop ();
3990 m_data.pop ();
3993 if (i != 0 || startlimb != fin - 1)
3995 tree_code cmp_code;
3996 bool single_comparison
3997 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
3998 if (!single_comparison)
4000 cmp_code = GE_EXPR;
4001 if (!check_zero && (start % limb_prec) == 0)
4002 single_comparison = true;
4004 else if ((startlimb & 1) == (i & 1))
4005 cmp_code = EQ_EXPR;
4006 else
4007 cmp_code = GT_EXPR;
4008 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4009 NULL_TREE, NULL_TREE);
4010 edge edge_true_true, edge_true_false, edge_false;
4011 gimple *g2 = NULL;
4012 if (!single_comparison)
4013 g2 = gimple_build_cond (EQ_EXPR, idx,
4014 size_int (startlimb), NULL_TREE,
4015 NULL_TREE);
4016 if_then_if_then_else (g, g2, profile_probability::likely (),
4017 profile_probability::unlikely (),
4018 edge_true_true, edge_true_false,
4019 edge_false);
4020 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4021 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4022 check_zero);
4023 tree this_ovf = make_ssa_name (boolean_type_node);
4024 if (cmp_code != GT_EXPR && !check_zero)
4026 g = gimple_build_assign (make_ssa_name (m_limb_type),
4027 PLUS_EXPR, l,
4028 build_int_cst (m_limb_type, 1));
4029 insert_before (g);
4030 g = gimple_build_assign (this_ovf, GT_EXPR,
4031 gimple_assign_lhs (g),
4032 build_int_cst (m_limb_type, 1));
4034 else
4035 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4036 insert_before (g);
4037 if (cmp_code == GT_EXPR)
4039 tree t = make_ssa_name (boolean_type_node);
4040 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4041 insert_before (g);
4042 this_ovf = t;
4044 tree this_ovf2 = NULL_TREE;
4045 if (!single_comparison)
4047 m_gsi = gsi_after_labels (edge_true_true->src);
4048 tree t = make_ssa_name (boolean_type_node);
4049 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4050 insert_before (g);
4051 this_ovf2 = make_ssa_name (boolean_type_node);
4052 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4053 ovf, t);
4054 insert_before (g);
4056 m_gsi = gsi_after_labels (edge_true_false->dest);
4057 tree t;
4058 if (i == 1 && ovf_out)
4059 t = ovf_out;
4060 else
4061 t = make_ssa_name (boolean_type_node);
4062 gphi *phi = create_phi_node (t, edge_true_false->dest);
4063 add_phi_arg (phi, this_ovf, edge_true_false,
4064 UNKNOWN_LOCATION);
4065 add_phi_arg (phi, ovf ? ovf
4066 : boolean_false_node, edge_false,
4067 UNKNOWN_LOCATION);
4068 if (edge_true_true)
4069 add_phi_arg (phi, this_ovf2, edge_true_true,
4070 UNKNOWN_LOCATION);
4071 ovf = t;
4072 if (!check_zero && cmp_code != GT_EXPR)
4074 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4075 phi = create_phi_node (t, edge_true_false->dest);
4076 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4077 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4078 if (edge_true_true)
4079 add_phi_arg (phi, cmp, edge_true_true,
4080 UNKNOWN_LOCATION);
4081 cmp = t;
4087 if (var || obj)
4089 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4091 else if (!tree_fits_uhwi_p (idx)
4092 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4094 bool single_comparison
4095 = (((unsigned) prec % limb_prec) == 0
4096 || prec_limbs + 1 >= fin
4097 || (prec_limbs & 1) == (i & 1));
4098 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4099 NULL_TREE, NULL_TREE);
4100 gimple *g2 = NULL;
4101 if (!single_comparison)
4102 g2 = gimple_build_cond (LT_EXPR, idx,
4103 size_int (prec_limbs - 1),
4104 NULL_TREE, NULL_TREE);
4105 edge edge_true_true, edge_true_false, edge_false;
4106 if_then_if_then_else (g, g2, profile_probability::likely (),
4107 profile_probability::likely (),
4108 edge_true_true, edge_true_false,
4109 edge_false);
4110 tree l = limb_access (type, var ? var : obj, idx, true);
4111 g = gimple_build_assign (l, rhs);
4112 insert_before (g);
4113 if (!single_comparison)
4115 m_gsi = gsi_after_labels (edge_true_true->src);
4116 l = limb_access (type, var ? var : obj,
4117 size_int (prec_limbs - 1), true);
4118 if (!useless_type_conversion_p (TREE_TYPE (l),
4119 TREE_TYPE (rhs)))
4120 rhs = add_cast (TREE_TYPE (l), rhs);
4121 g = gimple_build_assign (l, rhs);
4122 insert_before (g);
4124 m_gsi = gsi_after_labels (edge_true_false->dest);
4126 else
4128 tree l = limb_access (type, var ? var : obj, idx, true);
4129 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4130 rhs = add_cast (TREE_TYPE (l), rhs);
4131 g = gimple_build_assign (l, rhs);
4132 insert_before (g);
4135 m_first = false;
4136 if (kind == bitint_prec_huge && i <= 1)
4138 if (i == 0)
4140 idx = make_ssa_name (sizetype);
4141 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4142 size_one_node);
4143 insert_before (g);
4145 else
4147 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4148 size_int (2));
4149 insert_before (g);
4150 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4151 NULL_TREE, NULL_TREE);
4152 insert_before (g);
4153 m_gsi = gsi_for_stmt (final_stmt);
4158 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4161 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4162 argument or return type _Complex large/huge _BitInt. */
4164 void
4165 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4167 tree arg0 = gimple_call_arg (stmt, 0);
4168 tree arg1 = gimple_call_arg (stmt, 1);
4169 tree lhs = gimple_call_lhs (stmt);
4170 if (!lhs)
4172 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4173 gsi_remove (&gsi, true);
4174 return;
4176 gimple *final_stmt = gsi_stmt (m_gsi);
4177 tree type = TREE_TYPE (lhs);
4178 if (TREE_CODE (type) == COMPLEX_TYPE)
4179 type = TREE_TYPE (type);
4180 int prec = TYPE_PRECISION (type), prec0, prec1;
4181 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4182 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4183 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4184 + (prec1 < 0 ? -prec1 : prec1)
4185 + ((prec0 < 0) != (prec1 < 0)));
4186 tree var = NULL_TREE;
4187 tree orig_obj = obj;
4188 bool force_var = false;
4189 if (obj == NULL_TREE
4190 && TREE_CODE (type) == BITINT_TYPE
4191 && bitint_precision_kind (type) >= bitint_prec_large
4192 && m_names
4193 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4195 int part = var_to_partition (m_map, lhs);
4196 gcc_assert (m_vars[part] != NULL_TREE);
4197 obj = m_vars[part];
4198 if (TREE_TYPE (lhs) == type)
4199 orig_obj = obj;
4201 else if (obj != NULL_TREE && DECL_P (obj))
4203 for (int i = 0; i < 2; ++i)
4205 tree arg = i ? arg1 : arg0;
4206 if (TREE_CODE (arg) == ADDR_EXPR)
4207 arg = TREE_OPERAND (arg, 0);
4208 if (get_base_address (arg) == obj)
4210 force_var = true;
4211 break;
4215 if (obj == NULL_TREE
4216 || force_var
4217 || TREE_CODE (type) != BITINT_TYPE
4218 || bitint_precision_kind (type) < bitint_prec_large
4219 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4221 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4222 tree atype = build_array_type_nelts (m_limb_type, nelts);
4223 var = create_tmp_var (atype);
4225 tree addr = build_fold_addr_expr (var ? var : obj);
4226 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4227 NULL_TREE, true, GSI_SAME_STMT);
4228 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4229 gimple *g
4230 = gimple_build_call_internal (IFN_MULBITINT, 6,
4231 addr, build_int_cst (sitype,
4232 MAX (prec2, prec)),
4233 arg0, build_int_cst (sitype, prec0),
4234 arg1, build_int_cst (sitype, prec1));
4235 insert_before (g);
4237 unsigned start, end;
4238 bool check_zero;
4239 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4240 &start, &end, &check_zero);
4241 if (ovf == NULL_TREE)
4243 unsigned startlimb = start / limb_prec;
4244 unsigned endlimb = (end - 1) / limb_prec;
4245 unsigned cnt;
4246 bool use_loop = false;
4247 if (startlimb == endlimb)
4248 cnt = 1;
4249 else if (startlimb + 1 == endlimb)
4250 cnt = 2;
4251 else if ((end % limb_prec) == 0)
4253 cnt = 2;
4254 use_loop = true;
4256 else
4258 cnt = 3;
4259 use_loop = startlimb + 2 < endlimb;
4261 if (cnt == 1)
4263 tree l = limb_access (NULL_TREE, var ? var : obj,
4264 size_int (startlimb), true);
4265 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4266 insert_before (g);
4267 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4268 startlimb, check_zero);
4269 ovf = make_ssa_name (boolean_type_node);
4270 if (check_zero)
4271 g = gimple_build_assign (ovf, NE_EXPR, l,
4272 build_zero_cst (m_limb_type));
4273 else
4275 g = gimple_build_assign (make_ssa_name (m_limb_type),
4276 PLUS_EXPR, l,
4277 build_int_cst (m_limb_type, 1));
4278 insert_before (g);
4279 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4280 build_int_cst (m_limb_type, 1));
4282 insert_before (g);
4284 else
4286 basic_block edge_bb = NULL;
4287 gimple_stmt_iterator gsi = m_gsi;
4288 gsi_prev (&gsi);
4289 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4290 edge_bb = e->src;
4291 m_gsi = gsi_last_bb (edge_bb);
4292 if (!gsi_end_p (m_gsi))
4293 gsi_next (&m_gsi);
4295 tree cmp = build_zero_cst (m_limb_type);
4296 for (unsigned i = 0; i < cnt; i++)
4298 tree idx, idx_next = NULL_TREE;
4299 if (i == 0)
4300 idx = size_int (startlimb);
4301 else if (i == 2)
4302 idx = size_int (endlimb);
4303 else if (use_loop)
4304 idx = create_loop (size_int (startlimb + 1), &idx_next);
4305 else
4306 idx = size_int (startlimb + 1);
4307 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4308 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4309 insert_before (g);
4310 l = gimple_assign_lhs (g);
4311 if (i == 0 || i == 2)
4312 l = arith_overflow_extract_bits (start, end, l,
4313 tree_to_uhwi (idx),
4314 check_zero);
4315 if (i == 0 && !check_zero)
4317 cmp = l;
4318 g = gimple_build_assign (make_ssa_name (m_limb_type),
4319 PLUS_EXPR, l,
4320 build_int_cst (m_limb_type, 1));
4321 insert_before (g);
4322 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4323 build_int_cst (m_limb_type, 1),
4324 NULL_TREE, NULL_TREE);
4326 else
4327 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4328 insert_before (g);
4329 edge e1 = split_block (gsi_bb (m_gsi), g);
4330 e1->flags = EDGE_FALSE_VALUE;
4331 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4332 EDGE_TRUE_VALUE);
4333 e1->probability = profile_probability::likely ();
4334 e2->probability = e1->probability.invert ();
4335 if (i == 0)
4336 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4337 m_gsi = gsi_after_labels (e1->dest);
4338 if (i == 1 && use_loop)
4340 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4341 size_one_node);
4342 insert_before (g);
4343 g = gimple_build_cond (NE_EXPR, idx_next,
4344 size_int (endlimb + (cnt == 1)),
4345 NULL_TREE, NULL_TREE);
4346 insert_before (g);
4347 edge true_edge, false_edge;
4348 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4349 &true_edge,
4350 &false_edge);
4351 m_gsi = gsi_after_labels (false_edge->dest);
4355 ovf = make_ssa_name (boolean_type_node);
4356 basic_block bb = gimple_bb (final_stmt);
4357 gphi *phi = create_phi_node (ovf, bb);
4358 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4359 edge_iterator ei;
4360 FOR_EACH_EDGE (e, ei, bb->preds)
4362 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4363 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4365 m_gsi = gsi_for_stmt (final_stmt);
4369 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4372 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4373 .{ADD,SUB,MUL}_OVERFLOW call. */
4375 void
4376 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4378 tree rhs1 = gimple_assign_rhs1 (stmt);
4379 rhs1 = TREE_OPERAND (rhs1, 0);
4380 if (obj == NULL_TREE)
4382 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4383 gcc_assert (m_vars[part] != NULL_TREE);
4384 obj = m_vars[part];
4386 if (TREE_CODE (rhs1) == SSA_NAME
4387 && (m_names == NULL
4388 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4390 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4391 return;
4393 int part = var_to_partition (m_map, rhs1);
4394 gcc_assert (m_vars[part] != NULL_TREE);
4395 tree var = m_vars[part];
4396 unsigned HOST_WIDE_INT nelts
4397 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4398 tree atype = build_array_type_nelts (m_limb_type, nelts);
4399 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4400 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4401 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4402 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4403 ? 0 : nelts * m_limb_size);
4404 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4405 gimple *g = gimple_build_assign (obj, v2);
4406 insert_before (g);
4409 /* Lower COMPLEX_EXPR stmt. */
4411 void
4412 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4414 tree lhs = gimple_assign_lhs (stmt);
4415 tree rhs1 = gimple_assign_rhs1 (stmt);
4416 tree rhs2 = gimple_assign_rhs2 (stmt);
4417 int part = var_to_partition (m_map, lhs);
4418 gcc_assert (m_vars[part] != NULL_TREE);
4419 lhs = m_vars[part];
4420 unsigned HOST_WIDE_INT nelts
4421 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4422 tree atype = build_array_type_nelts (m_limb_type, nelts);
4423 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4424 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4425 tree v2;
4426 if (TREE_CODE (rhs1) == SSA_NAME)
4428 part = var_to_partition (m_map, rhs1);
4429 gcc_assert (m_vars[part] != NULL_TREE);
4430 v2 = m_vars[part];
4432 else if (integer_zerop (rhs1))
4433 v2 = build_zero_cst (atype);
4434 else
4435 v2 = tree_output_constant_def (rhs1);
4436 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4437 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4438 gimple *g = gimple_build_assign (v1, v2);
4439 insert_before (g);
4440 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4441 TYPE_SIZE_UNIT (atype));
4442 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4443 if (TREE_CODE (rhs2) == SSA_NAME)
4445 part = var_to_partition (m_map, rhs2);
4446 gcc_assert (m_vars[part] != NULL_TREE);
4447 v2 = m_vars[part];
4449 else if (integer_zerop (rhs2))
4450 v2 = build_zero_cst (atype);
4451 else
4452 v2 = tree_output_constant_def (rhs2);
4453 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4454 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4455 g = gimple_build_assign (v1, v2);
4456 insert_before (g);
4459 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4460 argument. */
4462 void
4463 bitint_large_huge::lower_bit_query (gimple *stmt)
4465 tree arg0 = gimple_call_arg (stmt, 0);
4466 tree arg1 = (gimple_call_num_args (stmt) == 2
4467 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4468 tree lhs = gimple_call_lhs (stmt);
4469 gimple *g;
4471 if (!lhs)
4473 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4474 gsi_remove (&gsi, true);
4475 return;
4477 tree type = TREE_TYPE (arg0);
4478 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4479 bitint_prec_kind kind = bitint_precision_kind (type);
4480 gcc_assert (kind >= bitint_prec_large);
4481 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4482 enum built_in_function fcode = END_BUILTINS;
4483 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4484 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4485 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4486 switch (ifn)
4488 case IFN_CLZ:
4489 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4490 fcode = BUILT_IN_CLZ;
4491 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4492 fcode = BUILT_IN_CLZL;
4493 else
4494 fcode = BUILT_IN_CLZLL;
4495 break;
4496 case IFN_FFS:
4497 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4498 we don't add the addend at the end. */
4499 arg1 = integer_zero_node;
4500 /* FALLTHRU */
4501 case IFN_CTZ:
4502 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4503 fcode = BUILT_IN_CTZ;
4504 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4505 fcode = BUILT_IN_CTZL;
4506 else
4507 fcode = BUILT_IN_CTZLL;
4508 m_upwards = true;
4509 break;
4510 case IFN_CLRSB:
4511 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4512 fcode = BUILT_IN_CLRSB;
4513 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4514 fcode = BUILT_IN_CLRSBL;
4515 else
4516 fcode = BUILT_IN_CLRSBLL;
4517 break;
4518 case IFN_PARITY:
4519 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4520 fcode = BUILT_IN_PARITY;
4521 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4522 fcode = BUILT_IN_PARITYL;
4523 else
4524 fcode = BUILT_IN_PARITYLL;
4525 m_upwards = true;
4526 break;
4527 case IFN_POPCOUNT:
4528 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4529 fcode = BUILT_IN_POPCOUNT;
4530 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4531 fcode = BUILT_IN_POPCOUNTL;
4532 else
4533 fcode = BUILT_IN_POPCOUNTLL;
4534 m_upwards = true;
4535 break;
4536 default:
4537 gcc_unreachable ();
4539 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4540 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4541 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4542 basic_block edge_bb = NULL;
4543 if (m_upwards)
4545 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4546 if (kind == bitint_prec_large)
4547 cnt = CEIL (prec, limb_prec);
4548 else
4550 rem = (prec % (2 * limb_prec));
4551 end = (prec - rem) / limb_prec;
4552 cnt = 2 + CEIL (rem, limb_prec);
4553 idx = idx_first = create_loop (size_zero_node, &idx_next);
4556 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4558 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4559 gsi_prev (&gsi);
4560 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4561 edge_bb = e->src;
4562 if (kind == bitint_prec_large)
4564 m_gsi = gsi_last_bb (edge_bb);
4565 if (!gsi_end_p (m_gsi))
4566 gsi_next (&m_gsi);
4568 bqp = XALLOCAVEC (struct bq_details, cnt);
4570 else
4571 m_after_stmt = stmt;
4572 if (kind != bitint_prec_large)
4573 m_upwards_2limb = end;
4575 for (unsigned i = 0; i < cnt; i++)
4577 m_data_cnt = 0;
4578 if (kind == bitint_prec_large)
4579 idx = size_int (i);
4580 else if (i >= 2)
4581 idx = size_int (end + (i > 2));
4583 tree rhs1 = handle_operand (arg0, idx);
4584 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4586 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4587 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4588 rhs1 = add_cast (m_limb_type, rhs1);
4591 tree in, out, tem;
4592 if (ifn == IFN_PARITY)
4593 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4594 else if (ifn == IFN_FFS)
4595 in = prepare_data_in_out (integer_one_node, idx, &out);
4596 else
4597 in = prepare_data_in_out (integer_zero_node, idx, &out);
4599 switch (ifn)
4601 case IFN_CTZ:
4602 case IFN_FFS:
4603 g = gimple_build_cond (NE_EXPR, rhs1,
4604 build_zero_cst (m_limb_type),
4605 NULL_TREE, NULL_TREE);
4606 insert_before (g);
4607 edge e1, e2;
4608 e1 = split_block (gsi_bb (m_gsi), g);
4609 e1->flags = EDGE_FALSE_VALUE;
4610 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4611 e1->probability = profile_probability::unlikely ();
4612 e2->probability = e1->probability.invert ();
4613 if (i == 0)
4614 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4615 m_gsi = gsi_after_labels (e1->dest);
4616 bqp[i].e = e2;
4617 bqp[i].val = rhs1;
4618 if (tree_fits_uhwi_p (idx))
4619 bqp[i].addend
4620 = build_int_cst (integer_type_node,
4621 tree_to_uhwi (idx) * limb_prec
4622 + (ifn == IFN_FFS));
4623 else
4625 bqp[i].addend = in;
4626 if (i == 1)
4627 res = out;
4628 else
4629 res = make_ssa_name (integer_type_node);
4630 g = gimple_build_assign (res, PLUS_EXPR, in,
4631 build_int_cst (integer_type_node,
4632 limb_prec));
4633 insert_before (g);
4634 m_data[m_data_cnt] = res;
4636 break;
4637 case IFN_PARITY:
4638 if (!integer_zerop (in))
4640 if (kind == bitint_prec_huge && i == 1)
4641 res = out;
4642 else
4643 res = make_ssa_name (m_limb_type);
4644 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4645 insert_before (g);
4647 else
4648 res = rhs1;
4649 m_data[m_data_cnt] = res;
4650 break;
4651 case IFN_POPCOUNT:
4652 g = gimple_build_call (fndecl, 1, rhs1);
4653 tem = make_ssa_name (integer_type_node);
4654 gimple_call_set_lhs (g, tem);
4655 insert_before (g);
4656 if (!integer_zerop (in))
4658 if (kind == bitint_prec_huge && i == 1)
4659 res = out;
4660 else
4661 res = make_ssa_name (integer_type_node);
4662 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4663 insert_before (g);
4665 else
4666 res = tem;
4667 m_data[m_data_cnt] = res;
4668 break;
4669 default:
4670 gcc_unreachable ();
4673 m_first = false;
4674 if (kind == bitint_prec_huge && i <= 1)
4676 if (i == 0)
4678 idx = make_ssa_name (sizetype);
4679 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4680 size_one_node);
4681 insert_before (g);
4683 else
4685 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4686 size_int (2));
4687 insert_before (g);
4688 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4689 NULL_TREE, NULL_TREE);
4690 insert_before (g);
4691 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4692 m_gsi = gsi_after_labels (edge_bb);
4693 else
4694 m_gsi = gsi_for_stmt (stmt);
4699 else
4701 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4702 int sub_one = 0;
4703 if (kind == bitint_prec_large)
4704 cnt = CEIL (prec, limb_prec);
4705 else
4707 rem = prec % limb_prec;
4708 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4709 rem = limb_prec;
4710 end = (prec - rem) / limb_prec;
4711 cnt = 1 + (rem != 0);
4712 if (ifn == IFN_CLRSB)
4713 sub_one = 1;
4716 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4717 gsi_prev (&gsi);
4718 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4719 edge_bb = e->src;
4720 m_gsi = gsi_last_bb (edge_bb);
4721 if (!gsi_end_p (m_gsi))
4722 gsi_next (&m_gsi);
4724 if (ifn == IFN_CLZ)
4725 bqp = XALLOCAVEC (struct bq_details, cnt);
4726 else
4728 gsi = gsi_for_stmt (stmt);
4729 gsi_prev (&gsi);
4730 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4731 edge_bb = e->src;
4732 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4735 for (unsigned i = 0; i < cnt; i++)
4737 m_data_cnt = 0;
4738 if (kind == bitint_prec_large)
4739 idx = size_int (cnt - i - 1);
4740 else if (i == cnt - 1)
4741 idx = create_loop (size_int (end - 1), &idx_next);
4742 else
4743 idx = size_int (end);
4745 tree rhs1 = handle_operand (arg0, idx);
4746 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4748 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4749 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4750 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4751 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4752 rhs1 = add_cast (m_limb_type, rhs1);
4755 if (ifn == IFN_CLZ)
4757 g = gimple_build_cond (NE_EXPR, rhs1,
4758 build_zero_cst (m_limb_type),
4759 NULL_TREE, NULL_TREE);
4760 insert_before (g);
4761 edge e1 = split_block (gsi_bb (m_gsi), g);
4762 e1->flags = EDGE_FALSE_VALUE;
4763 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4764 e1->probability = profile_probability::unlikely ();
4765 e2->probability = e1->probability.invert ();
4766 if (i == 0)
4767 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4768 m_gsi = gsi_after_labels (e1->dest);
4769 bqp[i].e = e2;
4770 bqp[i].val = rhs1;
4772 else
4774 if (i == 0)
4776 first = rhs1;
4777 g = gimple_build_assign (make_ssa_name (m_limb_type),
4778 PLUS_EXPR, rhs1,
4779 build_int_cst (m_limb_type, 1));
4780 insert_before (g);
4781 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4782 build_int_cst (m_limb_type, 1),
4783 NULL_TREE, NULL_TREE);
4784 insert_before (g);
4786 else
4788 g = gimple_build_assign (make_ssa_name (m_limb_type),
4789 BIT_XOR_EXPR, rhs1, first);
4790 insert_before (g);
4791 tree stype = signed_type_for (m_limb_type);
4792 g = gimple_build_cond (LT_EXPR,
4793 add_cast (stype,
4794 gimple_assign_lhs (g)),
4795 build_zero_cst (stype),
4796 NULL_TREE, NULL_TREE);
4797 insert_before (g);
4798 edge e1 = split_block (gsi_bb (m_gsi), g);
4799 e1->flags = EDGE_FALSE_VALUE;
4800 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4801 EDGE_TRUE_VALUE);
4802 e1->probability = profile_probability::unlikely ();
4803 e2->probability = e1->probability.invert ();
4804 if (i == 1)
4805 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4806 e2->src);
4807 m_gsi = gsi_after_labels (e1->dest);
4808 bqp[2 * i].e = e2;
4809 g = gimple_build_cond (NE_EXPR, rhs1, first,
4810 NULL_TREE, NULL_TREE);
4811 insert_before (g);
4813 edge e1 = split_block (gsi_bb (m_gsi), g);
4814 e1->flags = EDGE_FALSE_VALUE;
4815 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4816 e1->probability = profile_probability::unlikely ();
4817 e2->probability = e1->probability.invert ();
4818 if (i == 0)
4819 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4820 m_gsi = gsi_after_labels (e1->dest);
4821 bqp[2 * i + 1].e = e2;
4822 bqp[i].val = rhs1;
4824 if (tree_fits_uhwi_p (idx))
4825 bqp[i].addend
4826 = build_int_cst (integer_type_node,
4827 (int) prec
4828 - (((int) tree_to_uhwi (idx) + 1)
4829 * limb_prec) - sub_one);
4830 else
4832 tree in, out;
4833 in = build_int_cst (integer_type_node, rem - sub_one);
4834 m_first = true;
4835 in = prepare_data_in_out (in, idx, &out);
4836 out = m_data[m_data_cnt + 1];
4837 bqp[i].addend = in;
4838 g = gimple_build_assign (out, PLUS_EXPR, in,
4839 build_int_cst (integer_type_node,
4840 limb_prec));
4841 insert_before (g);
4842 m_data[m_data_cnt] = out;
4845 m_first = false;
4846 if (kind == bitint_prec_huge && i == cnt - 1)
4848 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4849 size_int (-1));
4850 insert_before (g);
4851 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4852 NULL_TREE, NULL_TREE);
4853 insert_before (g);
4854 edge true_edge, false_edge;
4855 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4856 &true_edge, &false_edge);
4857 m_gsi = gsi_after_labels (false_edge->dest);
4861 switch (ifn)
4863 case IFN_CLZ:
4864 case IFN_CTZ:
4865 case IFN_FFS:
4866 gphi *phi1, *phi2, *phi3;
4867 basic_block bb;
4868 bb = gsi_bb (m_gsi);
4869 remove_edge (find_edge (bb, gimple_bb (stmt)));
4870 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4871 gimple_bb (stmt));
4872 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4873 gimple_bb (stmt));
4874 for (unsigned i = 0; i < cnt; i++)
4876 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4877 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4879 if (arg1 == NULL_TREE)
4881 g = gimple_build_builtin_unreachable (m_loc);
4882 insert_before (g);
4884 m_gsi = gsi_for_stmt (stmt);
4885 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4886 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4887 insert_before (g);
4888 if (arg1 == NULL_TREE)
4889 g = gimple_build_assign (lhs, PLUS_EXPR,
4890 gimple_phi_result (phi2),
4891 gimple_call_lhs (g));
4892 else
4894 g = gimple_build_assign (make_ssa_name (integer_type_node),
4895 PLUS_EXPR, gimple_phi_result (phi2),
4896 gimple_call_lhs (g));
4897 insert_before (g);
4898 edge e1 = split_block (gimple_bb (stmt), g);
4899 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
4900 e2->probability = profile_probability::always ();
4901 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
4902 get_immediate_dominator (CDI_DOMINATORS,
4903 e1->src));
4904 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
4905 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
4906 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
4907 m_gsi = gsi_for_stmt (stmt);
4908 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4910 gsi_replace (&m_gsi, g, true);
4911 break;
4912 case IFN_CLRSB:
4913 bb = gsi_bb (m_gsi);
4914 remove_edge (find_edge (bb, edge_bb));
4915 edge e;
4916 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
4917 e->probability = profile_probability::always ();
4918 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
4919 get_immediate_dominator (CDI_DOMINATORS,
4920 edge_bb));
4921 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4922 edge_bb);
4923 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4924 edge_bb);
4925 phi3 = create_phi_node (make_ssa_name (integer_type_node),
4926 gimple_bb (stmt));
4927 for (unsigned i = 0; i < cnt; i++)
4929 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
4930 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
4931 UNKNOWN_LOCATION);
4932 tree a = bqp[i].addend;
4933 if (i && kind == bitint_prec_large)
4934 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
4935 if (i)
4936 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
4938 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
4939 UNKNOWN_LOCATION);
4940 m_gsi = gsi_after_labels (edge_bb);
4941 g = gimple_build_call (fndecl, 1,
4942 add_cast (signed_type_for (m_limb_type),
4943 gimple_phi_result (phi1)));
4944 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4945 insert_before (g);
4946 g = gimple_build_assign (make_ssa_name (integer_type_node),
4947 PLUS_EXPR, gimple_call_lhs (g),
4948 gimple_phi_result (phi2));
4949 insert_before (g);
4950 if (kind != bitint_prec_large)
4952 g = gimple_build_assign (make_ssa_name (integer_type_node),
4953 PLUS_EXPR, gimple_assign_lhs (g),
4954 integer_one_node);
4955 insert_before (g);
4957 add_phi_arg (phi3, gimple_assign_lhs (g),
4958 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
4959 m_gsi = gsi_for_stmt (stmt);
4960 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4961 gsi_replace (&m_gsi, g, true);
4962 break;
4963 case IFN_PARITY:
4964 g = gimple_build_call (fndecl, 1, res);
4965 gimple_call_set_lhs (g, lhs);
4966 gsi_replace (&m_gsi, g, true);
4967 break;
4968 case IFN_POPCOUNT:
4969 g = gimple_build_assign (lhs, res);
4970 gsi_replace (&m_gsi, g, true);
4971 break;
4972 default:
4973 gcc_unreachable ();
4977 /* Lower a call statement with one or more large/huge _BitInt
4978 arguments or large/huge _BitInt return value. */
4980 void
4981 bitint_large_huge::lower_call (tree obj, gimple *stmt)
4983 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4984 unsigned int nargs = gimple_call_num_args (stmt);
4985 if (gimple_call_internal_p (stmt))
4986 switch (gimple_call_internal_fn (stmt))
4988 case IFN_ADD_OVERFLOW:
4989 case IFN_SUB_OVERFLOW:
4990 case IFN_UBSAN_CHECK_ADD:
4991 case IFN_UBSAN_CHECK_SUB:
4992 lower_addsub_overflow (obj, stmt);
4993 return;
4994 case IFN_MUL_OVERFLOW:
4995 case IFN_UBSAN_CHECK_MUL:
4996 lower_mul_overflow (obj, stmt);
4997 return;
4998 case IFN_CLZ:
4999 case IFN_CTZ:
5000 case IFN_CLRSB:
5001 case IFN_FFS:
5002 case IFN_PARITY:
5003 case IFN_POPCOUNT:
5004 lower_bit_query (stmt);
5005 return;
5006 default:
5007 break;
5009 for (unsigned int i = 0; i < nargs; ++i)
5011 tree arg = gimple_call_arg (stmt, i);
5012 if (TREE_CODE (arg) != SSA_NAME
5013 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5014 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5015 continue;
5016 int p = var_to_partition (m_map, arg);
5017 tree v = m_vars[p];
5018 gcc_assert (v != NULL_TREE);
5019 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5020 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5021 arg = make_ssa_name (TREE_TYPE (arg));
5022 gimple *g = gimple_build_assign (arg, v);
5023 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5024 gimple_call_set_arg (stmt, i, arg);
5025 if (m_preserved == NULL)
5026 m_preserved = BITMAP_ALLOC (NULL);
5027 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5029 tree lhs = gimple_call_lhs (stmt);
5030 if (lhs
5031 && TREE_CODE (lhs) == SSA_NAME
5032 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5033 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5035 int p = var_to_partition (m_map, lhs);
5036 tree v = m_vars[p];
5037 gcc_assert (v != NULL_TREE);
5038 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5039 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5040 gimple_call_set_lhs (stmt, v);
5041 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5043 update_stmt (stmt);
5046 /* Lower __asm STMT which involves large/huge _BitInt values. */
5048 void
5049 bitint_large_huge::lower_asm (gimple *stmt)
5051 gasm *g = as_a <gasm *> (stmt);
5052 unsigned noutputs = gimple_asm_noutputs (g);
5053 unsigned ninputs = gimple_asm_ninputs (g);
5055 for (unsigned i = 0; i < noutputs; ++i)
5057 tree t = gimple_asm_output_op (g, i);
5058 tree s = TREE_VALUE (t);
5059 if (TREE_CODE (s) == SSA_NAME
5060 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5061 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5063 int part = var_to_partition (m_map, s);
5064 gcc_assert (m_vars[part] != NULL_TREE);
5065 TREE_VALUE (t) = m_vars[part];
5068 for (unsigned i = 0; i < ninputs; ++i)
5070 tree t = gimple_asm_input_op (g, i);
5071 tree s = TREE_VALUE (t);
5072 if (TREE_CODE (s) == SSA_NAME
5073 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5074 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5076 int part = var_to_partition (m_map, s);
5077 gcc_assert (m_vars[part] != NULL_TREE);
5078 TREE_VALUE (t) = m_vars[part];
5081 update_stmt (stmt);
5084 /* Lower statement STMT which involves large/huge _BitInt values
5085 into code accessing individual limbs. */
5087 void
5088 bitint_large_huge::lower_stmt (gimple *stmt)
5090 m_first = true;
5091 m_lhs = NULL_TREE;
5092 m_data.truncate (0);
5093 m_data_cnt = 0;
5094 m_gsi = gsi_for_stmt (stmt);
5095 m_after_stmt = NULL;
5096 m_bb = NULL;
5097 m_init_gsi = m_gsi;
5098 gsi_prev (&m_init_gsi);
5099 m_preheader_bb = NULL;
5100 m_upwards_2limb = 0;
5101 m_upwards = false;
5102 m_var_msb = false;
5103 m_cast_conditional = false;
5104 m_bitfld_load = 0;
5105 m_loc = gimple_location (stmt);
5106 if (is_gimple_call (stmt))
5108 lower_call (NULL_TREE, stmt);
5109 return;
5111 if (gimple_code (stmt) == GIMPLE_ASM)
5113 lower_asm (stmt);
5114 return;
5116 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5117 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5118 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5119 bool mergeable_cast_p = false;
5120 bool final_cast_p = false;
5121 if (gimple_assign_cast_p (stmt))
5123 lhs = gimple_assign_lhs (stmt);
5124 tree rhs1 = gimple_assign_rhs1 (stmt);
5125 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5126 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5127 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5128 mergeable_cast_p = true;
5129 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5130 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5131 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5133 final_cast_p = true;
5134 if (TREE_CODE (rhs1) == SSA_NAME
5135 && (m_names == NULL
5136 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5138 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5139 if (is_gimple_assign (g)
5140 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5142 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5143 if (TREE_CODE (rhs2) == SSA_NAME
5144 && (m_names == NULL
5145 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5147 g = SSA_NAME_DEF_STMT (rhs2);
5148 int ovf = optimizable_arith_overflow (g);
5149 if (ovf == 2)
5150 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5151 and IMAGPART_EXPR uses, where the latter is cast to
5152 non-_BitInt, it will be optimized when handling
5153 the REALPART_EXPR. */
5154 return;
5155 if (ovf == 1)
5157 lower_call (NULL_TREE, g);
5158 return;
5165 if (gimple_store_p (stmt))
5167 tree rhs1 = gimple_assign_rhs1 (stmt);
5168 if (TREE_CODE (rhs1) == SSA_NAME
5169 && (m_names == NULL
5170 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5172 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5173 m_loc = gimple_location (g);
5174 lhs = gimple_assign_lhs (stmt);
5175 if (is_gimple_assign (g) && !mergeable_op (g))
5176 switch (gimple_assign_rhs_code (g))
5178 case LSHIFT_EXPR:
5179 case RSHIFT_EXPR:
5180 lower_shift_stmt (lhs, g);
5181 handled:
5182 m_gsi = gsi_for_stmt (stmt);
5183 unlink_stmt_vdef (stmt);
5184 release_ssa_name (gimple_vdef (stmt));
5185 gsi_remove (&m_gsi, true);
5186 return;
5187 case MULT_EXPR:
5188 case TRUNC_DIV_EXPR:
5189 case TRUNC_MOD_EXPR:
5190 lower_muldiv_stmt (lhs, g);
5191 goto handled;
5192 case FIX_TRUNC_EXPR:
5193 lower_float_conv_stmt (lhs, g);
5194 goto handled;
5195 case REALPART_EXPR:
5196 case IMAGPART_EXPR:
5197 lower_cplxpart_stmt (lhs, g);
5198 goto handled;
5199 default:
5200 break;
5202 else if (optimizable_arith_overflow (g) == 3)
5204 lower_call (lhs, g);
5205 goto handled;
5207 m_loc = gimple_location (stmt);
5210 if (mergeable_op (stmt)
5211 || gimple_store_p (stmt)
5212 || gimple_assign_load_p (stmt)
5213 || eq_p
5214 || mergeable_cast_p)
5216 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5217 if (!eq_p)
5218 return;
5220 else if (cmp_code != ERROR_MARK)
5221 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5222 if (cmp_code != ERROR_MARK)
5224 if (gimple_code (stmt) == GIMPLE_COND)
5226 gcond *cstmt = as_a <gcond *> (stmt);
5227 gimple_cond_set_lhs (cstmt, lhs);
5228 gimple_cond_set_rhs (cstmt, boolean_false_node);
5229 gimple_cond_set_code (cstmt, cmp_code);
5230 update_stmt (stmt);
5231 return;
5233 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5235 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5236 boolean_false_node);
5237 gimple_assign_set_rhs1 (stmt, cond);
5238 lhs = gimple_assign_lhs (stmt);
5239 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5240 || (bitint_precision_kind (TREE_TYPE (lhs))
5241 <= bitint_prec_middle));
5242 update_stmt (stmt);
5243 return;
5245 gimple_assign_set_rhs1 (stmt, lhs);
5246 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5247 gimple_assign_set_rhs_code (stmt, cmp_code);
5248 update_stmt (stmt);
5249 return;
5251 if (final_cast_p)
5253 tree lhs_type = TREE_TYPE (lhs);
5254 /* Add support for 3 or more limbs filled in from normal integral
5255 type if this assert fails. If no target chooses limb mode smaller
5256 than half of largest supported normal integral type, this will not
5257 be needed. */
5258 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5259 gimple *g;
5260 if (TREE_CODE (lhs_type) == BITINT_TYPE
5261 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5262 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5263 TYPE_UNSIGNED (lhs_type));
5264 m_data_cnt = 0;
5265 tree rhs1 = gimple_assign_rhs1 (stmt);
5266 tree r1 = handle_operand (rhs1, size_int (0));
5267 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5268 r1 = add_cast (lhs_type, r1);
5269 if (TYPE_PRECISION (lhs_type) > limb_prec)
5271 m_data_cnt = 0;
5272 m_first = false;
5273 tree r2 = handle_operand (rhs1, size_int (1));
5274 r2 = add_cast (lhs_type, r2);
5275 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5276 build_int_cst (unsigned_type_node,
5277 limb_prec));
5278 insert_before (g);
5279 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5280 gimple_assign_lhs (g));
5281 insert_before (g);
5282 r1 = gimple_assign_lhs (g);
5284 if (lhs_type != TREE_TYPE (lhs))
5285 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5286 else
5287 g = gimple_build_assign (lhs, r1);
5288 gsi_replace (&m_gsi, g, true);
5289 return;
5291 if (is_gimple_assign (stmt))
5292 switch (gimple_assign_rhs_code (stmt))
5294 case LSHIFT_EXPR:
5295 case RSHIFT_EXPR:
5296 lower_shift_stmt (NULL_TREE, stmt);
5297 return;
5298 case MULT_EXPR:
5299 case TRUNC_DIV_EXPR:
5300 case TRUNC_MOD_EXPR:
5301 lower_muldiv_stmt (NULL_TREE, stmt);
5302 return;
5303 case FIX_TRUNC_EXPR:
5304 case FLOAT_EXPR:
5305 lower_float_conv_stmt (NULL_TREE, stmt);
5306 return;
5307 case REALPART_EXPR:
5308 case IMAGPART_EXPR:
5309 lower_cplxpart_stmt (NULL_TREE, stmt);
5310 return;
5311 case COMPLEX_EXPR:
5312 lower_complexexpr_stmt (stmt);
5313 return;
5314 default:
5315 break;
5317 gcc_unreachable ();
5320 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5321 the desired memory state. */
5323 void *
5324 vuse_eq (ao_ref *, tree vuse1, void *data)
5326 tree vuse2 = (tree) data;
5327 if (vuse1 == vuse2)
5328 return data;
5330 return NULL;
5333 /* Return true if STMT uses a library function and needs to take
5334 address of its inputs. We need to avoid bit-fields in those
5335 cases. */
5337 bool
5338 stmt_needs_operand_addr (gimple *stmt)
5340 if (is_gimple_assign (stmt))
5341 switch (gimple_assign_rhs_code (stmt))
5343 case MULT_EXPR:
5344 case TRUNC_DIV_EXPR:
5345 case TRUNC_MOD_EXPR:
5346 case FLOAT_EXPR:
5347 return true;
5348 default:
5349 break;
5351 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5352 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5353 return true;
5354 return false;
5357 /* Dominator walker used to discover which large/huge _BitInt
5358 loads could be sunk into all their uses. */
5360 class bitint_dom_walker : public dom_walker
5362 public:
5363 bitint_dom_walker (bitmap names, bitmap loads)
5364 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5366 edge before_dom_children (basic_block) final override;
5368 private:
5369 bitmap m_names, m_loads;
5372 edge
5373 bitint_dom_walker::before_dom_children (basic_block bb)
5375 gphi *phi = get_virtual_phi (bb);
5376 tree vop;
5377 if (phi)
5378 vop = gimple_phi_result (phi);
5379 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5380 vop = NULL_TREE;
5381 else
5382 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5384 auto_vec<tree, 16> worklist;
5385 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5386 !gsi_end_p (gsi); gsi_next (&gsi))
5388 gimple *stmt = gsi_stmt (gsi);
5389 if (is_gimple_debug (stmt))
5390 continue;
5392 if (!vop && gimple_vuse (stmt))
5393 vop = gimple_vuse (stmt);
5395 tree cvop = vop;
5396 if (gimple_vdef (stmt))
5397 vop = gimple_vdef (stmt);
5399 tree lhs = gimple_get_lhs (stmt);
5400 if (lhs
5401 && TREE_CODE (lhs) == SSA_NAME
5402 && TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5403 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5404 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5405 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5406 it means it will be handled in a loop or straight line code
5407 at the location of its (ultimate) immediate use, so for
5408 vop checking purposes check these only at the ultimate
5409 immediate use. */
5410 continue;
5412 ssa_op_iter oi;
5413 use_operand_p use_p;
5414 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5416 tree s = USE_FROM_PTR (use_p);
5417 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5418 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5419 worklist.safe_push (s);
5422 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5423 while (worklist.length () > 0)
5425 tree s = worklist.pop ();
5427 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5429 gimple *g = SSA_NAME_DEF_STMT (s);
5430 needs_operand_addr |= stmt_needs_operand_addr (g);
5431 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5433 tree s2 = USE_FROM_PTR (use_p);
5434 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5435 && (bitint_precision_kind (TREE_TYPE (s2))
5436 >= bitint_prec_large))
5437 worklist.safe_push (s2);
5439 continue;
5441 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5442 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5444 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5445 if (TREE_CODE (rhs) == SSA_NAME
5446 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5447 s = rhs;
5448 else
5449 continue;
5451 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5452 continue;
5454 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5455 if (needs_operand_addr
5456 && TREE_CODE (rhs1) == COMPONENT_REF
5457 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5459 tree fld = TREE_OPERAND (rhs1, 1);
5460 /* For little-endian, we can allow as inputs bit-fields
5461 which start at a limb boundary. */
5462 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5463 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5464 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5465 % limb_prec) == 0)
5467 else
5469 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5470 continue;
5474 ao_ref ref;
5475 ao_ref_init (&ref, rhs1);
5476 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5477 unsigned limit = 64;
5478 tree vuse = cvop;
5479 if (vop != cvop
5480 && is_gimple_assign (stmt)
5481 && gimple_store_p (stmt)
5482 && !operand_equal_p (lhs,
5483 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5485 vuse = vop;
5486 if (vuse != lvop
5487 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5488 NULL, NULL, limit, lvop) == NULL)
5489 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5493 bb->aux = (void *) vop;
5494 return NULL;
5499 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5500 build_ssa_conflict_graph.
5501 The differences are:
5502 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5503 2) for large/huge _BitInt multiplication/division/modulo process def
5504 only after processing uses rather than before to make uses conflict
5505 with the definition
5506 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5507 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5508 the final statement. */
5510 void
5511 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5512 ssa_conflicts *graph, bitmap names,
5513 void (*def) (live_track *, tree,
5514 ssa_conflicts *),
5515 void (*use) (live_track *, tree))
5517 bool muldiv_p = false;
5518 tree lhs = NULL_TREE;
5519 if (is_gimple_assign (stmt))
5521 lhs = gimple_assign_lhs (stmt);
5522 if (TREE_CODE (lhs) == SSA_NAME
5523 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5524 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5526 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5527 return;
5528 switch (gimple_assign_rhs_code (stmt))
5530 case MULT_EXPR:
5531 case TRUNC_DIV_EXPR:
5532 case TRUNC_MOD_EXPR:
5533 muldiv_p = true;
5534 default:
5535 break;
5540 ssa_op_iter iter;
5541 tree var;
5542 if (!muldiv_p)
5544 /* For stmts with more than one SSA_NAME definition pretend all the
5545 SSA_NAME outputs but the first one are live at this point, so
5546 that conflicts are added in between all those even when they are
5547 actually not really live after the asm, because expansion might
5548 copy those into pseudos after the asm and if multiple outputs
5549 share the same partition, it might overwrite those that should
5550 be live. E.g.
5551 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5552 return a;
5553 See PR70593. */
5554 bool first = true;
5555 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5556 if (first)
5557 first = false;
5558 else
5559 use (live, var);
5561 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5562 def (live, var, graph);
5565 auto_vec<tree, 16> worklist;
5566 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5567 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5568 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5570 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5571 use (live, var);
5572 else
5573 worklist.safe_push (var);
5576 while (worklist.length () > 0)
5578 tree s = worklist.pop ();
5579 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5580 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5581 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5583 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5584 use (live, var);
5585 else
5586 worklist.safe_push (var);
5590 if (muldiv_p)
5591 def (live, lhs, graph);
5594 /* Entry point for _BitInt(N) operation lowering during optimization. */
5596 static unsigned int
5597 gimple_lower_bitint (void)
5599 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5600 limb_prec = 0;
5602 unsigned int i;
5603 for (i = 0; i < num_ssa_names; ++i)
5605 tree s = ssa_name (i);
5606 if (s == NULL)
5607 continue;
5608 tree type = TREE_TYPE (s);
5609 if (TREE_CODE (type) == COMPLEX_TYPE)
5610 type = TREE_TYPE (type);
5611 if (TREE_CODE (type) == BITINT_TYPE
5612 && bitint_precision_kind (type) != bitint_prec_small)
5613 break;
5614 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5615 into memory. Such functions could have no large/huge SSA_NAMEs. */
5616 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5618 gimple *g = SSA_NAME_DEF_STMT (s);
5619 if (is_gimple_assign (g) && gimple_store_p (g))
5621 tree t = gimple_assign_rhs1 (g);
5622 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5623 && (bitint_precision_kind (TREE_TYPE (t))
5624 >= bitint_prec_large))
5625 break;
5629 if (i == num_ssa_names)
5630 return 0;
5632 basic_block bb;
5633 auto_vec<gimple *, 4> switch_statements;
5634 FOR_EACH_BB_FN (bb, cfun)
5636 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5638 tree idx = gimple_switch_index (swtch);
5639 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5640 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5641 continue;
5643 if (optimize)
5644 group_case_labels_stmt (swtch);
5645 switch_statements.safe_push (swtch);
5649 if (!switch_statements.is_empty ())
5651 bool expanded = false;
5652 gimple *stmt;
5653 unsigned int j;
5654 i = 0;
5655 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5657 gswitch *swtch = as_a<gswitch *> (stmt);
5658 tree_switch_conversion::switch_decision_tree dt (swtch);
5659 expanded |= dt.analyze_switch_statement ();
5662 if (expanded)
5664 free_dominance_info (CDI_DOMINATORS);
5665 free_dominance_info (CDI_POST_DOMINATORS);
5666 mark_virtual_operands_for_renaming (cfun);
5667 cleanup_tree_cfg (TODO_update_ssa);
5671 struct bitint_large_huge large_huge;
5672 bool has_large_huge_parm_result = false;
5673 bool has_large_huge = false;
5674 unsigned int ret = 0, first_large_huge = ~0U;
5675 bool edge_insertions = false;
5676 for (; i < num_ssa_names; ++i)
5678 tree s = ssa_name (i);
5679 if (s == NULL)
5680 continue;
5681 tree type = TREE_TYPE (s);
5682 if (TREE_CODE (type) == COMPLEX_TYPE)
5683 type = TREE_TYPE (type);
5684 if (TREE_CODE (type) == BITINT_TYPE
5685 && bitint_precision_kind (type) >= bitint_prec_large)
5687 if (first_large_huge == ~0U)
5688 first_large_huge = i;
5689 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5690 gimple_stmt_iterator gsi;
5691 tree_code rhs_code;
5692 /* Unoptimize certain constructs to simpler alternatives to
5693 avoid having to lower all of them. */
5694 if (is_gimple_assign (stmt))
5695 switch (rhs_code = gimple_assign_rhs_code (stmt))
5697 default:
5698 break;
5699 case LROTATE_EXPR:
5700 case RROTATE_EXPR:
5702 first_large_huge = 0;
5703 location_t loc = gimple_location (stmt);
5704 gsi = gsi_for_stmt (stmt);
5705 tree rhs1 = gimple_assign_rhs1 (stmt);
5706 tree type = TREE_TYPE (rhs1);
5707 tree n = gimple_assign_rhs2 (stmt), m;
5708 tree p = build_int_cst (TREE_TYPE (n),
5709 TYPE_PRECISION (type));
5710 if (TREE_CODE (n) == INTEGER_CST)
5711 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5712 else
5714 m = make_ssa_name (TREE_TYPE (n));
5715 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5716 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5717 gimple_set_location (g, loc);
5719 if (!TYPE_UNSIGNED (type))
5721 tree utype = build_bitint_type (TYPE_PRECISION (type),
5723 if (TREE_CODE (rhs1) == INTEGER_CST)
5724 rhs1 = fold_convert (utype, rhs1);
5725 else
5727 tree t = make_ssa_name (type);
5728 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5729 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5730 gimple_set_location (g, loc);
5733 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5734 rhs_code == LROTATE_EXPR
5735 ? LSHIFT_EXPR : RSHIFT_EXPR,
5736 rhs1, n);
5737 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5738 gimple_set_location (g, loc);
5739 tree op1 = gimple_assign_lhs (g);
5740 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5741 rhs_code == LROTATE_EXPR
5742 ? RSHIFT_EXPR : LSHIFT_EXPR,
5743 rhs1, m);
5744 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5745 gimple_set_location (g, loc);
5746 tree op2 = gimple_assign_lhs (g);
5747 tree lhs = gimple_assign_lhs (stmt);
5748 if (!TYPE_UNSIGNED (type))
5750 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5751 BIT_IOR_EXPR, op1, op2);
5752 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5753 gimple_set_location (g, loc);
5754 g = gimple_build_assign (lhs, NOP_EXPR,
5755 gimple_assign_lhs (g));
5757 else
5758 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5759 gsi_replace (&gsi, g, true);
5760 gimple_set_location (g, loc);
5762 break;
5763 case ABS_EXPR:
5764 case ABSU_EXPR:
5765 case MIN_EXPR:
5766 case MAX_EXPR:
5767 case COND_EXPR:
5768 first_large_huge = 0;
5769 gsi = gsi_for_stmt (stmt);
5770 tree lhs = gimple_assign_lhs (stmt);
5771 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5772 location_t loc = gimple_location (stmt);
5773 if (rhs_code == ABS_EXPR)
5774 g = gimple_build_cond (LT_EXPR, rhs1,
5775 build_zero_cst (TREE_TYPE (rhs1)),
5776 NULL_TREE, NULL_TREE);
5777 else if (rhs_code == ABSU_EXPR)
5779 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5780 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5781 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5782 gimple_set_location (g, loc);
5783 g = gimple_build_cond (LT_EXPR, rhs1,
5784 build_zero_cst (TREE_TYPE (rhs1)),
5785 NULL_TREE, NULL_TREE);
5786 rhs1 = rhs2;
5788 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5790 rhs2 = gimple_assign_rhs2 (stmt);
5791 if (TREE_CODE (rhs1) == INTEGER_CST)
5792 std::swap (rhs1, rhs2);
5793 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5794 NULL_TREE, NULL_TREE);
5795 if (rhs_code == MAX_EXPR)
5796 std::swap (rhs1, rhs2);
5798 else
5800 g = gimple_build_cond (NE_EXPR, rhs1,
5801 build_zero_cst (TREE_TYPE (rhs1)),
5802 NULL_TREE, NULL_TREE);
5803 rhs1 = gimple_assign_rhs2 (stmt);
5804 rhs2 = gimple_assign_rhs3 (stmt);
5806 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5807 gimple_set_location (g, loc);
5808 edge e1 = split_block (gsi_bb (gsi), g);
5809 edge e2 = split_block (e1->dest, (gimple *) NULL);
5810 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5811 e3->probability = profile_probability::even ();
5812 e1->flags = EDGE_TRUE_VALUE;
5813 e1->probability = e3->probability.invert ();
5814 if (dom_info_available_p (CDI_DOMINATORS))
5815 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5816 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5818 gsi = gsi_after_labels (e1->dest);
5819 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5820 NEGATE_EXPR, rhs1);
5821 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5822 gimple_set_location (g, loc);
5823 rhs2 = gimple_assign_lhs (g);
5824 std::swap (rhs1, rhs2);
5826 gsi = gsi_for_stmt (stmt);
5827 gsi_remove (&gsi, true);
5828 gphi *phi = create_phi_node (lhs, e2->dest);
5829 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5830 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5831 break;
5834 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5835 into memory. Such functions could have no large/huge SSA_NAMEs. */
5836 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5838 gimple *g = SSA_NAME_DEF_STMT (s);
5839 if (is_gimple_assign (g) && gimple_store_p (g))
5841 tree t = gimple_assign_rhs1 (g);
5842 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5843 && (bitint_precision_kind (TREE_TYPE (t))
5844 >= bitint_prec_large))
5845 has_large_huge = true;
5849 for (i = first_large_huge; i < num_ssa_names; ++i)
5851 tree s = ssa_name (i);
5852 if (s == NULL)
5853 continue;
5854 tree type = TREE_TYPE (s);
5855 if (TREE_CODE (type) == COMPLEX_TYPE)
5856 type = TREE_TYPE (type);
5857 if (TREE_CODE (type) == BITINT_TYPE
5858 && bitint_precision_kind (type) >= bitint_prec_large)
5860 use_operand_p use_p;
5861 gimple *use_stmt;
5862 has_large_huge = true;
5863 if (optimize
5864 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
5865 continue;
5866 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
5867 the same bb and could be handled in the same loop with the
5868 immediate use. */
5869 if (optimize
5870 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5871 && single_imm_use (s, &use_p, &use_stmt)
5872 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
5874 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
5876 if (mergeable_op (use_stmt))
5877 continue;
5878 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
5879 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
5880 continue;
5881 if (gimple_assign_cast_p (use_stmt))
5883 tree lhs = gimple_assign_lhs (use_stmt);
5884 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5885 continue;
5887 else if (gimple_store_p (use_stmt)
5888 && is_gimple_assign (use_stmt)
5889 && !gimple_has_volatile_ops (use_stmt)
5890 && !stmt_ends_bb_p (use_stmt))
5891 continue;
5893 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5895 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5896 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5897 && ((is_gimple_assign (use_stmt)
5898 && (gimple_assign_rhs_code (use_stmt)
5899 != COMPLEX_EXPR))
5900 || gimple_code (use_stmt) == GIMPLE_COND)
5901 && (!gimple_store_p (use_stmt)
5902 || (is_gimple_assign (use_stmt)
5903 && !gimple_has_volatile_ops (use_stmt)
5904 && !stmt_ends_bb_p (use_stmt)))
5905 && (TREE_CODE (rhs1) != SSA_NAME
5906 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5908 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
5909 || (bitint_precision_kind (TREE_TYPE (rhs1))
5910 < bitint_prec_large)
5911 || (TYPE_PRECISION (TREE_TYPE (rhs1))
5912 >= TYPE_PRECISION (TREE_TYPE (s)))
5913 || mergeable_op (SSA_NAME_DEF_STMT (s)))
5914 continue;
5915 /* Prevent merging a widening non-mergeable cast
5916 on result of some narrower mergeable op
5917 together with later mergeable operations. E.g.
5918 result of _BitInt(223) addition shouldn't be
5919 sign-extended to _BitInt(513) and have another
5920 _BitInt(513) added to it, as handle_plus_minus
5921 with its PHI node handling inside of handle_cast
5922 will not work correctly. An exception is if
5923 use_stmt is a store, this is handled directly
5924 in lower_mergeable_stmt. */
5925 if (TREE_CODE (rhs1) != SSA_NAME
5926 || !has_single_use (rhs1)
5927 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
5928 != gimple_bb (SSA_NAME_DEF_STMT (s)))
5929 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
5930 || gimple_store_p (use_stmt))
5931 continue;
5932 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
5934 /* Another exception is if the widening cast is
5935 from mergeable same precision cast from something
5936 not mergeable. */
5937 tree rhs2
5938 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
5939 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
5940 && (TYPE_PRECISION (TREE_TYPE (rhs1))
5941 == TYPE_PRECISION (TREE_TYPE (rhs2))))
5943 if (TREE_CODE (rhs2) != SSA_NAME
5944 || !has_single_use (rhs2)
5945 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
5946 != gimple_bb (SSA_NAME_DEF_STMT (s)))
5947 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
5948 continue;
5953 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
5954 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
5956 case IMAGPART_EXPR:
5958 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5959 rhs1 = TREE_OPERAND (rhs1, 0);
5960 if (TREE_CODE (rhs1) == SSA_NAME)
5962 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5963 if (optimizable_arith_overflow (g))
5964 continue;
5967 /* FALLTHRU */
5968 case LSHIFT_EXPR:
5969 case RSHIFT_EXPR:
5970 case MULT_EXPR:
5971 case TRUNC_DIV_EXPR:
5972 case TRUNC_MOD_EXPR:
5973 case FIX_TRUNC_EXPR:
5974 case REALPART_EXPR:
5975 if (gimple_store_p (use_stmt)
5976 && is_gimple_assign (use_stmt)
5977 && !gimple_has_volatile_ops (use_stmt)
5978 && !stmt_ends_bb_p (use_stmt))
5980 tree lhs = gimple_assign_lhs (use_stmt);
5981 /* As multiply/division passes address of the lhs
5982 to library function and that assumes it can extend
5983 it to whole number of limbs, avoid merging those
5984 with bit-field stores. Don't allow it for
5985 shifts etc. either, so that the bit-field store
5986 handling doesn't have to be done everywhere. */
5987 if (TREE_CODE (lhs) == COMPONENT_REF
5988 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
5989 break;
5990 continue;
5992 break;
5993 default:
5994 break;
5998 /* Also ignore uninitialized uses. */
5999 if (SSA_NAME_IS_DEFAULT_DEF (s)
6000 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6001 continue;
6003 if (!large_huge.m_names)
6004 large_huge.m_names = BITMAP_ALLOC (NULL);
6005 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6006 if (has_single_use (s))
6008 if (!large_huge.m_single_use_names)
6009 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6010 bitmap_set_bit (large_huge.m_single_use_names,
6011 SSA_NAME_VERSION (s));
6013 if (SSA_NAME_VAR (s)
6014 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6015 && SSA_NAME_IS_DEFAULT_DEF (s))
6016 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6017 has_large_huge_parm_result = true;
6018 if (optimize
6019 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6020 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6021 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6022 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6024 use_operand_p use_p;
6025 imm_use_iterator iter;
6026 bool optimizable_load = true;
6027 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6029 gimple *use_stmt = USE_STMT (use_p);
6030 if (is_gimple_debug (use_stmt))
6031 continue;
6032 if (gimple_code (use_stmt) == GIMPLE_PHI
6033 || is_gimple_call (use_stmt))
6035 optimizable_load = false;
6036 break;
6040 ssa_op_iter oi;
6041 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6042 oi, SSA_OP_USE)
6044 tree s2 = USE_FROM_PTR (use_p);
6045 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6047 optimizable_load = false;
6048 break;
6052 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6054 if (!large_huge.m_loads)
6055 large_huge.m_loads = BITMAP_ALLOC (NULL);
6056 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6060 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6061 into memory. Such functions could have no large/huge SSA_NAMEs. */
6062 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6064 gimple *g = SSA_NAME_DEF_STMT (s);
6065 if (is_gimple_assign (g) && gimple_store_p (g))
6067 tree t = gimple_assign_rhs1 (g);
6068 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6069 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6070 has_large_huge = true;
6075 if (large_huge.m_names || has_large_huge)
6077 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6078 calculate_dominance_info (CDI_DOMINATORS);
6079 if (optimize)
6080 enable_ranger (cfun);
6081 if (large_huge.m_loads)
6083 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6084 entry->aux = NULL;
6085 bitint_dom_walker (large_huge.m_names,
6086 large_huge.m_loads).walk (entry);
6087 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6088 clear_aux_for_blocks ();
6089 BITMAP_FREE (large_huge.m_loads);
6091 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6092 large_huge.m_limb_size
6093 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6095 if (large_huge.m_names)
6097 large_huge.m_map
6098 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6099 coalesce_ssa_name (large_huge.m_map);
6100 partition_view_normal (large_huge.m_map);
6101 if (dump_file && (dump_flags & TDF_DETAILS))
6103 fprintf (dump_file, "After Coalescing:\n");
6104 dump_var_map (dump_file, large_huge.m_map);
6106 large_huge.m_vars
6107 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6108 bitmap_iterator bi;
6109 if (has_large_huge_parm_result)
6110 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6112 tree s = ssa_name (i);
6113 if (SSA_NAME_VAR (s)
6114 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6115 && SSA_NAME_IS_DEFAULT_DEF (s))
6116 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6118 int p = var_to_partition (large_huge.m_map, s);
6119 if (large_huge.m_vars[p] == NULL_TREE)
6121 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6122 mark_addressable (SSA_NAME_VAR (s));
6126 tree atype = NULL_TREE;
6127 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6129 tree s = ssa_name (i);
6130 int p = var_to_partition (large_huge.m_map, s);
6131 if (large_huge.m_vars[p] != NULL_TREE)
6132 continue;
6133 if (atype == NULL_TREE
6134 || !tree_int_cst_equal (TYPE_SIZE (atype),
6135 TYPE_SIZE (TREE_TYPE (s))))
6137 unsigned HOST_WIDE_INT nelts
6138 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6139 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6141 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6142 mark_addressable (large_huge.m_vars[p]);
6146 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6148 gimple_stmt_iterator prev;
6149 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6150 gsi = prev)
6152 prev = gsi;
6153 gsi_prev (&prev);
6154 ssa_op_iter iter;
6155 gimple *stmt = gsi_stmt (gsi);
6156 if (is_gimple_debug (stmt))
6157 continue;
6158 bitint_prec_kind kind = bitint_prec_small;
6159 tree t;
6160 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6161 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6163 bitint_prec_kind this_kind
6164 = bitint_precision_kind (TREE_TYPE (t));
6165 if (this_kind > kind)
6166 kind = this_kind;
6168 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6170 t = gimple_assign_rhs1 (stmt);
6171 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6173 bitint_prec_kind this_kind
6174 = bitint_precision_kind (TREE_TYPE (t));
6175 if (this_kind > kind)
6176 kind = this_kind;
6179 if (is_gimple_call (stmt))
6181 t = gimple_call_lhs (stmt);
6182 if (t
6183 && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
6184 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6186 bitint_prec_kind this_kind
6187 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6188 if (this_kind > kind)
6189 kind = this_kind;
6192 if (kind == bitint_prec_small)
6193 continue;
6194 switch (gimple_code (stmt))
6196 case GIMPLE_CALL:
6197 /* For now. We'll need to handle some internal functions and
6198 perhaps some builtins. */
6199 if (kind == bitint_prec_middle)
6200 continue;
6201 break;
6202 case GIMPLE_ASM:
6203 if (kind == bitint_prec_middle)
6204 continue;
6205 break;
6206 case GIMPLE_RETURN:
6207 continue;
6208 case GIMPLE_ASSIGN:
6209 if (gimple_clobber_p (stmt))
6210 continue;
6211 if (kind >= bitint_prec_large)
6212 break;
6213 if (gimple_assign_single_p (stmt))
6214 /* No need to lower copies, loads or stores. */
6215 continue;
6216 if (gimple_assign_cast_p (stmt))
6218 tree lhs = gimple_assign_lhs (stmt);
6219 tree rhs = gimple_assign_rhs1 (stmt);
6220 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6221 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6222 && (TYPE_PRECISION (TREE_TYPE (lhs))
6223 == TYPE_PRECISION (TREE_TYPE (rhs))))
6224 /* No need to lower casts to same precision. */
6225 continue;
6227 break;
6228 default:
6229 break;
6232 if (kind == bitint_prec_middle)
6234 tree type = NULL_TREE;
6235 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6236 with the same precision and back. */
6237 if (tree lhs = gimple_get_lhs (stmt))
6238 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6239 && (bitint_precision_kind (TREE_TYPE (lhs))
6240 == bitint_prec_middle))
6242 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6243 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6244 type = build_nonstandard_integer_type (prec, uns);
6245 tree lhs2 = make_ssa_name (type);
6246 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6247 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6248 gimple_set_lhs (stmt, lhs2);
6250 unsigned int nops = gimple_num_ops (stmt);
6251 for (unsigned int i = 0; i < nops; ++i)
6252 if (tree op = gimple_op (stmt, i))
6254 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6255 if (nop != op)
6256 gimple_set_op (stmt, i, nop);
6257 else if (COMPARISON_CLASS_P (op))
6259 TREE_OPERAND (op, 0)
6260 = maybe_cast_middle_bitint (&gsi,
6261 TREE_OPERAND (op, 0),
6262 type);
6263 TREE_OPERAND (op, 1)
6264 = maybe_cast_middle_bitint (&gsi,
6265 TREE_OPERAND (op, 1),
6266 type);
6268 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6270 CASE_LOW (op)
6271 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6272 type);
6273 CASE_HIGH (op)
6274 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6275 type);
6278 update_stmt (stmt);
6279 continue;
6282 if (tree lhs = gimple_get_lhs (stmt))
6283 if (TREE_CODE (lhs) == SSA_NAME)
6285 tree type = TREE_TYPE (lhs);
6286 if (TREE_CODE (type) == COMPLEX_TYPE)
6287 type = TREE_TYPE (type);
6288 if (TREE_CODE (type) == BITINT_TYPE
6289 && bitint_precision_kind (type) >= bitint_prec_large
6290 && (large_huge.m_names == NULL
6291 || !bitmap_bit_p (large_huge.m_names,
6292 SSA_NAME_VERSION (lhs))))
6293 continue;
6296 large_huge.lower_stmt (stmt);
6299 tree atype = NULL_TREE;
6300 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6301 gsi_next (&gsi))
6303 gphi *phi = gsi.phi ();
6304 tree lhs = gimple_phi_result (phi);
6305 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6306 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6307 continue;
6308 int p1 = var_to_partition (large_huge.m_map, lhs);
6309 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6310 tree v1 = large_huge.m_vars[p1];
6311 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6313 tree arg = gimple_phi_arg_def (phi, i);
6314 edge e = gimple_phi_arg_edge (phi, i);
6315 gimple *g;
6316 switch (TREE_CODE (arg))
6318 case INTEGER_CST:
6319 if (integer_zerop (arg) && VAR_P (v1))
6321 tree zero = build_zero_cst (TREE_TYPE (v1));
6322 g = gimple_build_assign (v1, zero);
6323 gsi_insert_on_edge (e, g);
6324 edge_insertions = true;
6325 break;
6327 int ext;
6328 unsigned int min_prec, prec, rem;
6329 tree c;
6330 prec = TYPE_PRECISION (TREE_TYPE (arg));
6331 rem = prec % (2 * limb_prec);
6332 min_prec = bitint_min_cst_precision (arg, ext);
6333 if (min_prec > prec - rem - 2 * limb_prec
6334 && min_prec > (unsigned) limb_prec)
6335 /* Constant which has enough significant bits that it
6336 isn't worth trying to save .rodata space by extending
6337 from smaller number. */
6338 min_prec = prec;
6339 else
6340 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6341 if (min_prec == 0)
6342 c = NULL_TREE;
6343 else if (min_prec == prec)
6344 c = tree_output_constant_def (arg);
6345 else if (min_prec == (unsigned) limb_prec)
6346 c = fold_convert (large_huge.m_limb_type, arg);
6347 else
6349 tree ctype = build_bitint_type (min_prec, 1);
6350 c = tree_output_constant_def (fold_convert (ctype, arg));
6352 if (c)
6354 if (VAR_P (v1) && min_prec == prec)
6356 tree v2 = build1 (VIEW_CONVERT_EXPR,
6357 TREE_TYPE (v1), c);
6358 g = gimple_build_assign (v1, v2);
6359 gsi_insert_on_edge (e, g);
6360 edge_insertions = true;
6361 break;
6363 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6364 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6365 TREE_TYPE (c), v1),
6367 else
6369 unsigned HOST_WIDE_INT nelts
6370 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6371 / limb_prec;
6372 tree vtype
6373 = build_array_type_nelts (large_huge.m_limb_type,
6374 nelts);
6375 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6376 vtype, v1),
6377 build1 (VIEW_CONVERT_EXPR,
6378 vtype, c));
6380 gsi_insert_on_edge (e, g);
6382 if (ext == 0)
6384 unsigned HOST_WIDE_INT nelts
6385 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6386 - min_prec) / limb_prec;
6387 tree vtype
6388 = build_array_type_nelts (large_huge.m_limb_type,
6389 nelts);
6390 tree ptype = build_pointer_type (TREE_TYPE (v1));
6391 tree off = fold_convert (ptype,
6392 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6393 tree vd = build2 (MEM_REF, vtype,
6394 build_fold_addr_expr (v1), off);
6395 g = gimple_build_assign (vd, build_zero_cst (vtype));
6397 else
6399 tree vd = v1;
6400 if (c)
6402 tree ptype = build_pointer_type (TREE_TYPE (v1));
6403 tree off
6404 = fold_convert (ptype,
6405 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6406 vd = build2 (MEM_REF, large_huge.m_limb_type,
6407 build_fold_addr_expr (v1), off);
6409 vd = build_fold_addr_expr (vd);
6410 unsigned HOST_WIDE_INT nbytes
6411 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6412 if (c)
6413 nbytes
6414 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6415 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6416 g = gimple_build_call (fn, 3, vd,
6417 integer_minus_one_node,
6418 build_int_cst (sizetype,
6419 nbytes));
6421 gsi_insert_on_edge (e, g);
6422 edge_insertions = true;
6423 break;
6424 default:
6425 gcc_unreachable ();
6426 case SSA_NAME:
6427 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6429 if (large_huge.m_names == NULL
6430 || !bitmap_bit_p (large_huge.m_names,
6431 SSA_NAME_VERSION (arg)))
6432 continue;
6434 int p2 = var_to_partition (large_huge.m_map, arg);
6435 if (p1 == p2)
6436 continue;
6437 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6438 tree v2 = large_huge.m_vars[p2];
6439 if (VAR_P (v1) && VAR_P (v2))
6440 g = gimple_build_assign (v1, v2);
6441 else if (VAR_P (v1))
6442 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6443 TREE_TYPE (v1), v2));
6444 else if (VAR_P (v2))
6445 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6446 TREE_TYPE (v2), v1), v2);
6447 else
6449 if (atype == NULL_TREE
6450 || !tree_int_cst_equal (TYPE_SIZE (atype),
6451 TYPE_SIZE (TREE_TYPE (lhs))))
6453 unsigned HOST_WIDE_INT nelts
6454 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6455 / limb_prec;
6456 atype
6457 = build_array_type_nelts (large_huge.m_limb_type,
6458 nelts);
6460 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6461 atype, v1),
6462 build1 (VIEW_CONVERT_EXPR,
6463 atype, v2));
6465 gsi_insert_on_edge (e, g);
6466 edge_insertions = true;
6467 break;
6473 if (large_huge.m_names || has_large_huge)
6475 gimple *nop = NULL;
6476 for (i = 0; i < num_ssa_names; ++i)
6478 tree s = ssa_name (i);
6479 if (s == NULL_TREE)
6480 continue;
6481 tree type = TREE_TYPE (s);
6482 if (TREE_CODE (type) == COMPLEX_TYPE)
6483 type = TREE_TYPE (type);
6484 if (TREE_CODE (type) == BITINT_TYPE
6485 && bitint_precision_kind (type) >= bitint_prec_large)
6487 if (large_huge.m_preserved
6488 && bitmap_bit_p (large_huge.m_preserved,
6489 SSA_NAME_VERSION (s)))
6490 continue;
6491 gimple *g = SSA_NAME_DEF_STMT (s);
6492 if (gimple_code (g) == GIMPLE_NOP)
6494 if (SSA_NAME_VAR (s))
6495 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6496 release_ssa_name (s);
6497 continue;
6499 if (gimple_code (g) != GIMPLE_ASM)
6501 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6502 bool save_vta = flag_var_tracking_assignments;
6503 flag_var_tracking_assignments = false;
6504 gsi_remove (&gsi, true);
6505 flag_var_tracking_assignments = save_vta;
6507 if (nop == NULL)
6508 nop = gimple_build_nop ();
6509 SSA_NAME_DEF_STMT (s) = nop;
6510 release_ssa_name (s);
6513 if (optimize)
6514 disable_ranger (cfun);
6517 if (edge_insertions)
6518 gsi_commit_edge_inserts ();
6520 return ret;
6523 namespace {
6525 const pass_data pass_data_lower_bitint =
6527 GIMPLE_PASS, /* type */
6528 "bitintlower", /* name */
6529 OPTGROUP_NONE, /* optinfo_flags */
6530 TV_NONE, /* tv_id */
6531 PROP_ssa, /* properties_required */
6532 PROP_gimple_lbitint, /* properties_provided */
6533 0, /* properties_destroyed */
6534 0, /* todo_flags_start */
6535 0, /* todo_flags_finish */
6538 class pass_lower_bitint : public gimple_opt_pass
6540 public:
6541 pass_lower_bitint (gcc::context *ctxt)
6542 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6545 /* opt_pass methods: */
6546 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6547 unsigned int execute (function *) final override
6549 return gimple_lower_bitint ();
6552 }; // class pass_lower_bitint
6554 } // anon namespace
6556 gimple_opt_pass *
6557 make_pass_lower_bitint (gcc::context *ctxt)
6559 return new pass_lower_bitint (ctxt);
6563 namespace {
6565 const pass_data pass_data_lower_bitint_O0 =
6567 GIMPLE_PASS, /* type */
6568 "bitintlower0", /* name */
6569 OPTGROUP_NONE, /* optinfo_flags */
6570 TV_NONE, /* tv_id */
6571 PROP_cfg, /* properties_required */
6572 PROP_gimple_lbitint, /* properties_provided */
6573 0, /* properties_destroyed */
6574 0, /* todo_flags_start */
6575 0, /* todo_flags_finish */
6578 class pass_lower_bitint_O0 : public gimple_opt_pass
6580 public:
6581 pass_lower_bitint_O0 (gcc::context *ctxt)
6582 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6585 /* opt_pass methods: */
6586 bool gate (function *fun) final override
6588 /* With errors, normal optimization passes are not run. If we don't
6589 lower bitint operations at all, rtl expansion will abort. */
6590 return !(fun->curr_properties & PROP_gimple_lbitint);
6593 unsigned int execute (function *) final override
6595 return gimple_lower_bitint ();
6598 }; // class pass_lower_bitint_O0
6600 } // anon namespace
6602 gimple_opt_pass *
6603 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6605 return new pass_lower_bitint_O0 (ctxt);