1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000-2021 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 3, or (at your option) any
11 later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
57 propagation engine to add all the SSA edges coming out the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main short cuts:
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
84 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
107 but this causes no loss of information since these variables will
108 never be extended.
110 References:
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
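/* Illustrative example (not part of the original sources): for

     int f (int b)
     {
       int x = 4, y;
       if (b)
         y = x + 1;
       else
         y = 5;
       return y;
     }

   the propagation marks x_1 CONSTANT 4, folds y = x_1 + 1 to CONSTANT 5,
   and the meet over the PHI arguments for y (5 and 5) is again CONSTANT 5,
   so the final substitution turns the return into "return 5".  */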
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "backend.h"
125 #include "target.h"
126 #include "tree.h"
127 #include "gimple.h"
128 #include "tree-pass.h"
129 #include "ssa.h"
130 #include "gimple-pretty-print.h"
131 #include "fold-const.h"
132 #include "gimple-fold.h"
133 #include "tree-eh.h"
134 #include "gimplify.h"
135 #include "gimple-iterator.h"
136 #include "tree-cfg.h"
137 #include "tree-ssa-propagate.h"
138 #include "dbgcnt.h"
139 #include "builtins.h"
140 #include "cfgloop.h"
141 #include "stor-layout.h"
142 #include "optabs-query.h"
143 #include "tree-ssa-ccp.h"
144 #include "tree-dfa.h"
145 #include "diagnostic-core.h"
146 #include "stringpool.h"
147 #include "attribs.h"
148 #include "tree-vector-builder.h"
149 #include "cgraph.h"
150 #include "alloc-pool.h"
151 #include "symbol-summary.h"
152 #include "ipa-utils.h"
153 #include "ipa-prop.h"
155 /* Possible lattice values. */
156 typedef enum
158 UNINITIALIZED,
159 UNDEFINED,
160 CONSTANT,
161 VARYING
162 } ccp_lattice_t;
164 class ccp_prop_value_t {
165 public:
166 /* Lattice value. */
167 ccp_lattice_t lattice_val;
169 /* Propagated value. */
170 tree value;
172 /* Mask that applies to the propagated value during CCP. For X
173 with a CONSTANT lattice value, X & ~mask == value & ~mask. The
174 zero bits in the mask cover constant values. The ones mean no
175 information. */
176 widest_int mask;
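  /* Illustrative example (not from the original comments): value == 0x10
     with mask == 0x3 describes any of 0x10, 0x11, 0x12 and 0x13; bits 0
     and 1 are unknown, every other bit is known to match VALUE.  */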
179 class ccp_propagate : public ssa_propagation_engine
181 public:
182 enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
183 enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
186 /* Array of propagated constant values. After propagation,
187 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
188 the constant is held in an SSA name representing a memory store
189 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
190 memory reference used to store (i.e., the LHS of the assignment
191 doing the store). */
192 static ccp_prop_value_t *const_val;
193 static unsigned n_const_val;
195 static void canonicalize_value (ccp_prop_value_t *);
196 static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
198 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
200 static void
201 dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
203 switch (val.lattice_val)
205 case UNINITIALIZED:
206 fprintf (outf, "%sUNINITIALIZED", prefix);
207 break;
208 case UNDEFINED:
209 fprintf (outf, "%sUNDEFINED", prefix);
210 break;
211 case VARYING:
212 fprintf (outf, "%sVARYING", prefix);
213 break;
214 case CONSTANT:
215 if (TREE_CODE (val.value) != INTEGER_CST
216 || val.mask == 0)
218 fprintf (outf, "%sCONSTANT ", prefix);
219 print_generic_expr (outf, val.value, dump_flags);
221 else
223 widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
224 val.mask);
225 fprintf (outf, "%sCONSTANT ", prefix);
226 print_hex (cval, outf);
227 fprintf (outf, " (");
228 print_hex (val.mask, outf);
229 fprintf (outf, ")");
231 break;
232 default:
233 gcc_unreachable ();
238 /* Print lattice value VAL to stderr. */
240 void debug_lattice_value (ccp_prop_value_t val);
242 DEBUG_FUNCTION void
243 debug_lattice_value (ccp_prop_value_t val)
245 dump_lattice_value (stderr, "", val);
246 fprintf (stderr, "\n");
249 /* Extend NONZERO_BITS to a full mask, based on sgn. */
251 static widest_int
252 extend_mask (const wide_int &nonzero_bits, signop sgn)
254 return widest_int::from (nonzero_bits, sgn);
257 /* Compute a default value for variable VAR and store it in the
258 CONST_VAL array. The following rules are used to get default
259 values:
261 1- Global and static variables that are declared constant are
262 considered CONSTANT.
264 2- Any other value is considered UNDEFINED. This is useful when
265 considering PHI nodes. PHI arguments that are undefined do not
266 change the constant value of the PHI node, which allows for more
267 constants to be propagated.
269 3- Variables defined by statements other than assignments and PHI
270 nodes are considered VARYING.
272 4- Initial values of variables that are not GIMPLE registers are
273 considered VARYING. */
275 static ccp_prop_value_t
276 get_default_value (tree var)
278 ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
279 gimple *stmt;
281 stmt = SSA_NAME_DEF_STMT (var);
283 if (gimple_nop_p (stmt))
285 /* Variables defined by an empty statement are those used
286 before being initialized. If VAR is a local variable, we
287 can assume initially that it is UNDEFINED, otherwise we must
288 consider it VARYING. */
289 if (!virtual_operand_p (var)
290 && SSA_NAME_VAR (var)
291 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
292 val.lattice_val = UNDEFINED;
293 else
295 val.lattice_val = VARYING;
296 val.mask = -1;
297 if (flag_tree_bit_ccp)
299 wide_int nonzero_bits = get_nonzero_bits (var);
300 tree value;
301 widest_int mask;
303 if (SSA_NAME_VAR (var)
304 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL
305 && ipcp_get_parm_bits (SSA_NAME_VAR (var), &value, &mask))
307 val.lattice_val = CONSTANT;
308 val.value = value;
309 widest_int ipa_value = wi::to_widest (value);
310 /* Unknown bits from IPA CP must be equal to zero. */
311 gcc_assert (wi::bit_and (ipa_value, mask) == 0);
312 val.mask = mask;
313 if (nonzero_bits != -1)
314 val.mask &= extend_mask (nonzero_bits,
315 TYPE_SIGN (TREE_TYPE (var)));
317 else if (nonzero_bits != -1)
319 val.lattice_val = CONSTANT;
320 val.value = build_zero_cst (TREE_TYPE (var));
321 val.mask = extend_mask (nonzero_bits,
322 TYPE_SIGN (TREE_TYPE (var)));
327 else if (is_gimple_assign (stmt))
329 tree cst;
330 if (gimple_assign_single_p (stmt)
331 && DECL_P (gimple_assign_rhs1 (stmt))
332 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
334 val.lattice_val = CONSTANT;
335 val.value = cst;
337 else
339 /* Any other variable defined by an assignment is considered
340 UNDEFINED. */
341 val.lattice_val = UNDEFINED;
344 else if ((is_gimple_call (stmt)
345 && gimple_call_lhs (stmt) != NULL_TREE)
346 || gimple_code (stmt) == GIMPLE_PHI)
348 /* A variable defined by a call or a PHI node is considered
349 UNDEFINED. */
350 val.lattice_val = UNDEFINED;
352 else
354 /* Otherwise, VAR will never take on a constant value. */
355 val.lattice_val = VARYING;
356 val.mask = -1;
359 return val;
363 /* Get the constant value associated with variable VAR. */
365 static inline ccp_prop_value_t *
366 get_value (tree var)
368 ccp_prop_value_t *val;
370 if (const_val == NULL
371 || SSA_NAME_VERSION (var) >= n_const_val)
372 return NULL;
374 val = &const_val[SSA_NAME_VERSION (var)];
375 if (val->lattice_val == UNINITIALIZED)
376 *val = get_default_value (var);
378 canonicalize_value (val);
380 return val;
383 /* Return the constant tree value associated with VAR. */
385 static inline tree
386 get_constant_value (tree var)
388 ccp_prop_value_t *val;
389 if (TREE_CODE (var) != SSA_NAME)
391 if (is_gimple_min_invariant (var))
392 return var;
393 return NULL_TREE;
395 val = get_value (var);
396 if (val
397 && val->lattice_val == CONSTANT
398 && (TREE_CODE (val->value) != INTEGER_CST
399 || val->mask == 0))
400 return val->value;
401 return NULL_TREE;
404 /* Sets the value associated with VAR to VARYING. */
406 static inline void
407 set_value_varying (tree var)
409 ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
411 val->lattice_val = VARYING;
412 val->value = NULL_TREE;
413 val->mask = -1;
416 /* For integer constants, make sure to drop TREE_OVERFLOW. */
418 static void
419 canonicalize_value (ccp_prop_value_t *val)
421 if (val->lattice_val != CONSTANT)
422 return;
424 if (TREE_OVERFLOW_P (val->value))
425 val->value = drop_tree_overflow (val->value);
428 /* Return whether the lattice transition is valid. */
430 static bool
431 valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
433 /* Lattice transitions must always be monotonically increasing in
434 value. */
435 if (old_val.lattice_val < new_val.lattice_val)
436 return true;
438 if (old_val.lattice_val != new_val.lattice_val)
439 return false;
441 if (!old_val.value && !new_val.value)
442 return true;
444 /* Now both lattice values are CONSTANT. */
446 /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
447 when only a single copy edge is executable. */
448 if (TREE_CODE (old_val.value) == SSA_NAME
449 && TREE_CODE (new_val.value) == SSA_NAME)
450 return true;
452 /* Allow transitioning from a constant to a copy. */
453 if (is_gimple_min_invariant (old_val.value)
454 && TREE_CODE (new_val.value) == SSA_NAME)
455 return true;
457 /* Allow transitioning from PHI <&x, not executable> == &x
458 to PHI <&x, &y> == common alignment. */
459 if (TREE_CODE (old_val.value) != INTEGER_CST
460 && TREE_CODE (new_val.value) == INTEGER_CST)
461 return true;
463 /* Bit-lattices have to agree in the still valid bits. */
464 if (TREE_CODE (old_val.value) == INTEGER_CST
465 && TREE_CODE (new_val.value) == INTEGER_CST)
466 return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
467 == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
469 /* Otherwise constant values have to agree. */
470 if (operand_equal_p (old_val.value, new_val.value, 0))
471 return true;
473 /* At least the kinds and types should agree now. */
474 if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
475 || !types_compatible_p (TREE_TYPE (old_val.value),
476 TREE_TYPE (new_val.value)))
477 return false;
479 /* For floats and !HONOR_NANS allow transitions from (partial) NaN
480 to non-NaN. */
481 tree type = TREE_TYPE (new_val.value);
482 if (SCALAR_FLOAT_TYPE_P (type)
483 && !HONOR_NANS (type))
485 if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
486 return true;
488 else if (VECTOR_FLOAT_TYPE_P (type)
489 && !HONOR_NANS (type))
491 unsigned int count
492 = tree_vector_builder::binary_encoded_nelts (old_val.value,
493 new_val.value);
494 for (unsigned int i = 0; i < count; ++i)
495 if (!REAL_VALUE_ISNAN
496 (TREE_REAL_CST (VECTOR_CST_ENCODED_ELT (old_val.value, i)))
497 && !operand_equal_p (VECTOR_CST_ENCODED_ELT (old_val.value, i),
498 VECTOR_CST_ENCODED_ELT (new_val.value, i), 0))
499 return false;
500 return true;
502 else if (COMPLEX_FLOAT_TYPE_P (type)
503 && !HONOR_NANS (type))
505 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
506 && !operand_equal_p (TREE_REALPART (old_val.value),
507 TREE_REALPART (new_val.value), 0))
508 return false;
509 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
510 && !operand_equal_p (TREE_IMAGPART (old_val.value),
511 TREE_IMAGPART (new_val.value), 0))
512 return false;
513 return true;
515 return false;
518 /* Set the value for variable VAR to NEW_VAL. Return true if the new
519 value is different from VAR's previous value. */
521 static bool
522 set_lattice_value (tree var, ccp_prop_value_t *new_val)
524 /* We can deal with old UNINITIALIZED values just fine here. */
525 ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
527 canonicalize_value (new_val);
529 /* We have to be careful to not go up the bitwise lattice
530 represented by the mask. Instead of dropping to VARYING
531 use the meet operator to retain a conservative value.
532 Missed optimizations like PR65851 makes this necessary.
533 It also ensures we converge to a stable lattice solution. */
534 if (old_val->lattice_val != UNINITIALIZED)
535 ccp_lattice_meet (new_val, old_val);
537 gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));
539 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
540 caller that this was a non-transition. */
541 if (old_val->lattice_val != new_val->lattice_val
542 || (new_val->lattice_val == CONSTANT
543 && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
544 || (TREE_CODE (new_val->value) == INTEGER_CST
545 && (new_val->mask != old_val->mask
546 || (wi::bit_and_not (wi::to_widest (old_val->value),
547 new_val->mask)
548 != wi::bit_and_not (wi::to_widest (new_val->value),
549 new_val->mask))))
550 || (TREE_CODE (new_val->value) != INTEGER_CST
551 && !operand_equal_p (new_val->value, old_val->value, 0)))))
553 /* ??? We would like to delay creation of INTEGER_CSTs from
554 partially constants here. */
556 if (dump_file && (dump_flags & TDF_DETAILS))
558 dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
559 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
562 *old_val = *new_val;
564 gcc_assert (new_val->lattice_val != UNINITIALIZED);
565 return true;
568 return false;
571 static ccp_prop_value_t get_value_for_expr (tree, bool);
572 static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
573 void bit_value_binop (enum tree_code, signop, int, widest_int *, widest_int *,
574 signop, int, const widest_int &, const widest_int &,
575 signop, int, const widest_int &, const widest_int &);
577 /* Return a widest_int that can be used for bitwise simplifications
578 from VAL. */
580 static widest_int
581 value_to_wide_int (ccp_prop_value_t val)
583 if (val.value
584 && TREE_CODE (val.value) == INTEGER_CST)
585 return wi::to_widest (val.value);
587 return 0;
590 /* Return the value for the address expression EXPR based on alignment
591 information. */
593 static ccp_prop_value_t
594 get_value_from_alignment (tree expr)
596 tree type = TREE_TYPE (expr);
597 ccp_prop_value_t val;
598 unsigned HOST_WIDE_INT bitpos;
599 unsigned int align;
601 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
603 get_pointer_alignment_1 (expr, &align, &bitpos);
604 val.mask = wi::bit_and_not
605 (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
606 ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
607 : -1,
608 align / BITS_PER_UNIT - 1);
609 val.lattice_val
610 = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
611 if (val.lattice_val == CONSTANT)
612 val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
613 else
614 val.value = NULL_TREE;
616 return val;
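/* Illustrative example (not from the original comments): for an ADDR_EXPR
   &x where x is known to be 8-byte aligned with no misalignment,
   get_pointer_alignment_1 returns align == 64 and bitpos == 0, so the
   result has the low three bits known (mask == ~7 within the pointer
   precision) and value == 0, i.e. "some multiple of 8".  */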
619 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
620 return constant bits extracted from alignment information for
621 invariant addresses. */
623 static ccp_prop_value_t
624 get_value_for_expr (tree expr, bool for_bits_p)
626 ccp_prop_value_t val;
628 if (TREE_CODE (expr) == SSA_NAME)
630 ccp_prop_value_t *val_ = get_value (expr);
631 if (val_)
632 val = *val_;
633 else
635 val.lattice_val = VARYING;
636 val.value = NULL_TREE;
637 val.mask = -1;
639 if (for_bits_p
640 && val.lattice_val == CONSTANT)
642 if (TREE_CODE (val.value) == ADDR_EXPR)
643 val = get_value_from_alignment (val.value);
644 else if (TREE_CODE (val.value) != INTEGER_CST)
646 val.lattice_val = VARYING;
647 val.value = NULL_TREE;
648 val.mask = -1;
651 /* Fall back to a copy value. */
652 if (!for_bits_p
653 && val.lattice_val == VARYING
654 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
656 val.lattice_val = CONSTANT;
657 val.value = expr;
658 val.mask = -1;
661 else if (is_gimple_min_invariant (expr)
662 && (!for_bits_p || TREE_CODE (expr) == INTEGER_CST))
664 val.lattice_val = CONSTANT;
665 val.value = expr;
666 val.mask = 0;
667 canonicalize_value (&val);
669 else if (TREE_CODE (expr) == ADDR_EXPR)
670 val = get_value_from_alignment (expr);
671 else
673 val.lattice_val = VARYING;
674 val.mask = -1;
675 val.value = NULL_TREE;
678 if (val.lattice_val == VARYING
679 && TYPE_UNSIGNED (TREE_TYPE (expr)))
680 val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));
682 return val;
685 /* Return the likely CCP lattice value for STMT.
687 If STMT has no operands, then return CONSTANT.
689 Else if undefinedness of operands of STMT cause its value to be
690 undefined, then return UNDEFINED.
692 Else if any operands of STMT are constants, then return CONSTANT.
694 Else return VARYING. */
696 static ccp_lattice_t
697 likely_value (gimple *stmt)
699 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
700 bool has_nsa_operand;
701 tree use;
702 ssa_op_iter iter;
703 unsigned i;
705 enum gimple_code code = gimple_code (stmt);
707 /* This function appears to be called only for assignments, calls,
708 conditionals, and switches, due to the logic in visit_stmt. */
709 gcc_assert (code == GIMPLE_ASSIGN
710 || code == GIMPLE_CALL
711 || code == GIMPLE_COND
712 || code == GIMPLE_SWITCH);
714 /* If the statement has volatile operands, it won't fold to a
715 constant value. */
716 if (gimple_has_volatile_ops (stmt))
717 return VARYING;
719 /* Arrive here for more complex cases. */
720 has_constant_operand = false;
721 has_undefined_operand = false;
722 all_undefined_operands = true;
723 has_nsa_operand = false;
724 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
726 ccp_prop_value_t *val = get_value (use);
728 if (val && val->lattice_val == UNDEFINED)
729 has_undefined_operand = true;
730 else
731 all_undefined_operands = false;
733 if (val && val->lattice_val == CONSTANT)
734 has_constant_operand = true;
736 if (SSA_NAME_IS_DEFAULT_DEF (use)
737 || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
738 has_nsa_operand = true;
741 /* There may be constants in regular rhs operands. For calls we
742 have to ignore lhs, fndecl and static chain, otherwise only
743 the lhs. */
744 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
745 i < gimple_num_ops (stmt); ++i)
747 tree op = gimple_op (stmt, i);
748 if (!op || TREE_CODE (op) == SSA_NAME)
749 continue;
750 if (is_gimple_min_invariant (op))
751 has_constant_operand = true;
754 if (has_constant_operand)
755 all_undefined_operands = false;
757 if (has_undefined_operand
758 && code == GIMPLE_CALL
759 && gimple_call_internal_p (stmt))
760 switch (gimple_call_internal_fn (stmt))
762 /* These 3 builtins use the first argument just as a magic
763 way to find out a decl uid. */
764 case IFN_GOMP_SIMD_LANE:
765 case IFN_GOMP_SIMD_VF:
766 case IFN_GOMP_SIMD_LAST_LANE:
767 has_undefined_operand = false;
768 break;
769 default:
770 break;
773 /* If the operation combines operands like COMPLEX_EXPR make sure to
774 not mark the result UNDEFINED if only one part of the result is
775 undefined. */
776 if (has_undefined_operand && all_undefined_operands)
777 return UNDEFINED;
778 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
780 switch (gimple_assign_rhs_code (stmt))
782 /* Unary operators are handled with all_undefined_operands. */
783 case PLUS_EXPR:
784 case MINUS_EXPR:
785 case POINTER_PLUS_EXPR:
786 case BIT_XOR_EXPR:
787 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
788 Not bitwise operators, one VARYING operand may specify the
789 result completely.
790 Not logical operators for the same reason, apart from XOR.
791 Not COMPLEX_EXPR as one VARYING operand makes the result partly
792 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
793 the undefined operand may be promoted. */
794 return UNDEFINED;
796 case ADDR_EXPR:
797 /* If any part of an address is UNDEFINED, like the index
798 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
799 return UNDEFINED;
801 default:
805 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
806 fall back to CONSTANT. During iteration UNDEFINED may still drop
807 to CONSTANT. */
808 if (has_undefined_operand)
809 return CONSTANT;
811 /* We do not consider virtual operands here -- a load from read-only
812 memory may have only VARYING virtual operands, but still be
813 constant. Also we can combine the stmt with definitions from
814 operands whose definitions are not simulated again. */
815 if (has_constant_operand
816 || has_nsa_operand
817 || gimple_references_memory_p (stmt))
818 return CONSTANT;
820 return VARYING;
823 /* Returns true if STMT cannot be constant. */
825 static bool
826 surely_varying_stmt_p (gimple *stmt)
828 /* If the statement has operands that we cannot handle, it cannot be
829 constant. */
830 if (gimple_has_volatile_ops (stmt))
831 return true;
833 /* If it is a call that does not return a value, or is a direct call
834 to a function that is not a builtin and has neither the
835 assume_aligned nor the alloc_align attribute, it is varying. */
836 if (is_gimple_call (stmt))
838 tree fndecl, fntype = gimple_call_fntype (stmt);
839 if (!gimple_call_lhs (stmt)
840 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
841 && !fndecl_built_in_p (fndecl)
842 && !lookup_attribute ("assume_aligned",
843 TYPE_ATTRIBUTES (fntype))
844 && !lookup_attribute ("alloc_align",
845 TYPE_ATTRIBUTES (fntype))))
846 return true;
849 /* Any other store operation is not interesting. */
850 else if (gimple_vdef (stmt))
851 return true;
853 /* Anything other than assignments and conditional jumps are not
854 interesting for CCP. */
855 if (gimple_code (stmt) != GIMPLE_ASSIGN
856 && gimple_code (stmt) != GIMPLE_COND
857 && gimple_code (stmt) != GIMPLE_SWITCH
858 && gimple_code (stmt) != GIMPLE_CALL)
859 return true;
861 return false;
864 /* Initialize local data structures for CCP. */
866 static void
867 ccp_initialize (void)
869 basic_block bb;
871 n_const_val = num_ssa_names;
872 const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
874 /* Initialize simulation flags for PHI nodes and statements. */
875 FOR_EACH_BB_FN (bb, cfun)
877 gimple_stmt_iterator i;
879 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
881 gimple *stmt = gsi_stmt (i);
882 bool is_varying;
884 /* If the statement is a control insn, then we must not skip
885 simulating it at least once. Failure to do so means that
886 its outgoing edges will never get added. */
887 if (stmt_ends_bb_p (stmt))
888 is_varying = false;
889 else
890 is_varying = surely_varying_stmt_p (stmt);
892 if (is_varying)
894 tree def;
895 ssa_op_iter iter;
897 /* If the statement will not produce a constant, mark
898 all its outputs VARYING. */
899 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
900 set_value_varying (def);
902 prop_set_simulate_again (stmt, !is_varying);
906 /* Now process PHI nodes. We never clear the simulate_again flag on
907 phi nodes, since we do not know which edges are executable yet,
908 except for phi nodes for virtual operands when we do not do store ccp. */
909 FOR_EACH_BB_FN (bb, cfun)
911 gphi_iterator i;
913 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
915 gphi *phi = i.phi ();
917 if (virtual_operand_p (gimple_phi_result (phi)))
918 prop_set_simulate_again (phi, false);
919 else
920 prop_set_simulate_again (phi, true);
925 /* Debug count support. Reset the values of ssa names to
926 VARYING when the total number of ssa names analyzed
927 exceeds the specified debug count. */
929 static void
930 do_dbg_cnt (void)
932 unsigned i;
933 for (i = 0; i < num_ssa_names; i++)
935 if (!dbg_cnt (ccp))
937 const_val[i].lattice_val = VARYING;
938 const_val[i].mask = -1;
939 const_val[i].value = NULL_TREE;
945 /* We want to provide our own GET_VALUE and FOLD_STMT virtual methods. */
946 class ccp_folder : public substitute_and_fold_engine
948 public:
949 tree value_of_expr (tree, gimple *) FINAL OVERRIDE;
950 bool fold_stmt (gimple_stmt_iterator *) FINAL OVERRIDE;
953 /* This method just wraps GET_CONSTANT_VALUE for now. Over time
954 naked calls to GET_CONSTANT_VALUE should be eliminated in favor
955 of calling member functions. */
957 tree
958 ccp_folder::value_of_expr (tree op, gimple *)
960 return get_constant_value (op);
963 /* Do final substitution of propagated values, cleanup the flowgraph and
964 free allocated storage. If NONZERO_P, record nonzero bits.
966 Return TRUE when something was optimized. */
968 static bool
969 ccp_finalize (bool nonzero_p)
971 bool something_changed;
972 unsigned i;
973 tree name;
975 do_dbg_cnt ();
977 /* Derive alignment and misalignment information from partially
978 constant pointers in the lattice or nonzero bits from partially
979 constant integers. */
980 FOR_EACH_SSA_NAME (i, name, cfun)
982 ccp_prop_value_t *val;
983 unsigned int tem, align;
985 if (!POINTER_TYPE_P (TREE_TYPE (name))
986 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
987 /* Don't record nonzero bits before IPA to avoid
988 using too much memory. */
989 || !nonzero_p))
990 continue;
992 val = get_value (name);
993 if (val->lattice_val != CONSTANT
994 || TREE_CODE (val->value) != INTEGER_CST
995 || val->mask == 0)
996 continue;
998 if (POINTER_TYPE_P (TREE_TYPE (name)))
1000 /* Trailing mask bits specify the alignment, trailing value
1001 bits the misalignment. */
1002 tem = val->mask.to_uhwi ();
1003 align = least_bit_hwi (tem);
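/* Illustrative example (not from the original comments): a mask of
   ...111000 gives align == 8; the low three bits of the value are
   then the misalignment within that 8-byte alignment.  */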
1004 if (align > 1)
1005 set_ptr_info_alignment (get_ptr_info (name), align,
1006 (TREE_INT_CST_LOW (val->value)
1007 & (align - 1)));
1009 else
1011 unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
1012 wide_int nonzero_bits
1013 = (wide_int::from (val->mask, precision, UNSIGNED)
1014 | wi::to_wide (val->value));
1015 nonzero_bits &= get_nonzero_bits (name);
1016 set_nonzero_bits (name, nonzero_bits);
1020 /* Perform substitutions based on the known constant values. */
1021 class ccp_folder ccp_folder;
1022 something_changed = ccp_folder.substitute_and_fold ();
1024 free (const_val);
1025 const_val = NULL;
1026 return something_changed;
1030 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
1031 in VAL1.
1033 any M UNDEFINED = any
1034 any M VARYING = VARYING
1035 Ci M Cj = Ci if (i == j)
1036 Ci M Cj = VARYING if (i != j)
1039 static void
1040 ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
1042 if (val1->lattice_val == UNDEFINED
1043 /* For UNDEFINED M SSA we can't always use the SSA name because its
1044 definition may not dominate the PHI node. Doing optimistic copy propagation
1045 also causes a lot of gcc.dg/uninit-pred*.c FAILs. */
1046 && (val2->lattice_val != CONSTANT
1047 || TREE_CODE (val2->value) != SSA_NAME))
1049 /* UNDEFINED M any = any */
1050 *val1 = *val2;
1052 else if (val2->lattice_val == UNDEFINED
1053 /* See above. */
1054 && (val1->lattice_val != CONSTANT
1055 || TREE_CODE (val1->value) != SSA_NAME))
1057 /* any M UNDEFINED = any
1058 Nothing to do. VAL1 already contains the value we want. */
1061 else if (val1->lattice_val == VARYING
1062 || val2->lattice_val == VARYING)
1064 /* any M VARYING = VARYING. */
1065 val1->lattice_val = VARYING;
1066 val1->mask = -1;
1067 val1->value = NULL_TREE;
1069 else if (val1->lattice_val == CONSTANT
1070 && val2->lattice_val == CONSTANT
1071 && TREE_CODE (val1->value) == INTEGER_CST
1072 && TREE_CODE (val2->value) == INTEGER_CST)
1074 /* Ci M Cj = Ci if (i == j)
1075 Ci M Cj = VARYING if (i != j)
1077 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
1078 drop to varying. */
1079 val1->mask = (val1->mask | val2->mask
1080 | (wi::to_widest (val1->value)
1081 ^ wi::to_widest (val2->value)));
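/* Illustrative example (not from the original comments): meeting 4
   (value 0b100, mask 0) with 6 (value 0b110, mask 0) yields mask 0b010,
   i.e. "1x0" -- only the bit in which the two constants differ becomes
   unknown, while the value bits outside the mask stay 0b100.  */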
1082 if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
1084 val1->lattice_val = VARYING;
1085 val1->value = NULL_TREE;
1088 else if (val1->lattice_val == CONSTANT
1089 && val2->lattice_val == CONSTANT
1090 && operand_equal_p (val1->value, val2->value, 0))
1092 /* Ci M Cj = Ci if (i == j)
1093 Ci M Cj = VARYING if (i != j)
1095 VAL1 already contains the value we want for equivalent values. */
1097 else if (val1->lattice_val == CONSTANT
1098 && val2->lattice_val == CONSTANT
1099 && (TREE_CODE (val1->value) == ADDR_EXPR
1100 || TREE_CODE (val2->value) == ADDR_EXPR))
1102 /* When not equal addresses are involved try meeting for
1103 alignment. */
1104 ccp_prop_value_t tem = *val2;
1105 if (TREE_CODE (val1->value) == ADDR_EXPR)
1106 *val1 = get_value_for_expr (val1->value, true);
1107 if (TREE_CODE (val2->value) == ADDR_EXPR)
1108 tem = get_value_for_expr (val2->value, true);
1109 ccp_lattice_meet (val1, &tem);
1111 else
1113 /* Any other combination is VARYING. */
1114 val1->lattice_val = VARYING;
1115 val1->mask = -1;
1116 val1->value = NULL_TREE;
1121 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
1122 lattice values to determine PHI_NODE's lattice value. The value of a
1123 PHI node is determined calling ccp_lattice_meet with all the arguments
1124 of the PHI node that are incoming via executable edges. */
1126 enum ssa_prop_result
1127 ccp_propagate::visit_phi (gphi *phi)
1129 unsigned i;
1130 ccp_prop_value_t new_val;
1132 if (dump_file && (dump_flags & TDF_DETAILS))
1134 fprintf (dump_file, "\nVisiting PHI node: ");
1135 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1138 new_val.lattice_val = UNDEFINED;
1139 new_val.value = NULL_TREE;
1140 new_val.mask = 0;
1142 bool first = true;
1143 bool non_exec_edge = false;
1144 for (i = 0; i < gimple_phi_num_args (phi); i++)
1146 /* Compute the meet operator over all the PHI arguments flowing
1147 through executable edges. */
1148 edge e = gimple_phi_arg_edge (phi, i);
1150 if (dump_file && (dump_flags & TDF_DETAILS))
1152 fprintf (dump_file,
1153 "\tArgument #%d (%d -> %d %sexecutable)\n",
1154 i, e->src->index, e->dest->index,
1155 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1158 /* If the incoming edge is executable, compute the meet operator for
1159 the existing value of the PHI node and the current PHI argument. */
1160 if (e->flags & EDGE_EXECUTABLE)
1162 tree arg = gimple_phi_arg (phi, i)->def;
1163 ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
1165 if (first)
1167 new_val = arg_val;
1168 first = false;
1170 else
1171 ccp_lattice_meet (&new_val, &arg_val);
1173 if (dump_file && (dump_flags & TDF_DETAILS))
1175 fprintf (dump_file, "\t");
1176 print_generic_expr (dump_file, arg, dump_flags);
1177 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1178 fprintf (dump_file, "\n");
1181 if (new_val.lattice_val == VARYING)
1182 break;
1184 else
1185 non_exec_edge = true;
1188 /* In case there were non-executable edges and the value is a copy
1189 make sure its definition dominates the PHI node. */
1190 if (non_exec_edge
1191 && new_val.lattice_val == CONSTANT
1192 && TREE_CODE (new_val.value) == SSA_NAME
1193 && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
1194 && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
1195 gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
1197 new_val.lattice_val = VARYING;
1198 new_val.value = NULL_TREE;
1199 new_val.mask = -1;
1202 if (dump_file && (dump_flags & TDF_DETAILS))
1204 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1205 fprintf (dump_file, "\n\n");
1208 /* Make the transition to the new value. */
1209 if (set_lattice_value (gimple_phi_result (phi), &new_val))
1211 if (new_val.lattice_val == VARYING)
1212 return SSA_PROP_VARYING;
1213 else
1214 return SSA_PROP_INTERESTING;
1216 else
1217 return SSA_PROP_NOT_INTERESTING;
1220 /* Return the constant value for OP or OP otherwise. */
1222 static tree
1223 valueize_op (tree op)
1225 if (TREE_CODE (op) == SSA_NAME)
1227 tree tem = get_constant_value (op);
1228 if (tem)
1229 return tem;
1231 return op;
1234 /* Return the constant value for OP, but signal to not follow SSA
1235 edges if the definition may be simulated again. */
1237 static tree
1238 valueize_op_1 (tree op)
1240 if (TREE_CODE (op) == SSA_NAME)
1242 /* If the definition may be simulated again we cannot follow
1243 this SSA edge as the SSA propagator does not necessarily
1244 re-visit the use. */
1245 gimple *def_stmt = SSA_NAME_DEF_STMT (op);
1246 if (!gimple_nop_p (def_stmt)
1247 && prop_simulate_again_p (def_stmt))
1248 return NULL_TREE;
1249 tree tem = get_constant_value (op);
1250 if (tem)
1251 return tem;
1253 return op;
1256 /* CCP specific front-end to the non-destructive constant folding
1257 routines.
1259 Attempt to simplify the RHS of STMT knowing that one or more
1260 operands are constants.
1262 If simplification is possible, return the simplified RHS,
1263 otherwise return the original RHS or NULL_TREE. */
1265 static tree
1266 ccp_fold (gimple *stmt)
1268 location_t loc = gimple_location (stmt);
1269 switch (gimple_code (stmt))
1271 case GIMPLE_COND:
1273 /* Handle comparison operators that can appear in GIMPLE form. */
1274 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1275 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1276 enum tree_code code = gimple_cond_code (stmt);
1277 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1280 case GIMPLE_SWITCH:
1282 /* Return the constant switch index. */
1283 return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
1286 case GIMPLE_ASSIGN:
1287 case GIMPLE_CALL:
1288 return gimple_fold_stmt_to_constant_1 (stmt,
1289 valueize_op, valueize_op_1);
1291 default:
1292 gcc_unreachable ();
1296 /* Determine the minimum and maximum values, *MIN and *MAX respectively,
1297 represented by the mask pair VAL and MASK with signedness SGN and
1298 precision PRECISION. */
1300 void
1301 value_mask_to_min_max (widest_int *min, widest_int *max,
1302 const widest_int &val, const widest_int &mask,
1303 signop sgn, int precision)
1305 *min = wi::bit_and_not (val, mask);
1306 *max = val | mask;
1307 if (sgn == SIGNED && wi::neg_p (mask))
1309 widest_int sign_bit = wi::lshift (1, precision - 1);
1310 *min ^= sign_bit;
1311 *max ^= sign_bit;
1312 /* MAX is zero extended, and MIN is sign extended. */
1313 *min = wi::ext (*min, precision, sgn);
1314 *max = wi::ext (*max, precision, sgn);
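/* Illustrative example (not from the original comments): for value 0b0100,
   mask 0b0011 and UNSIGNED sign the candidates are 4, 5, 6 and 7, giving
   *MIN == 4 and *MAX == 7.  For SIGNED values with the sign bit unknown
   (a negative mask) the sign-bit flip above makes *MIN the most negative
   and *MAX the most positive candidate.  */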
1318 /* Apply the operation CODE in type TYPE to the value, mask pair
1319 RVAL and RMASK representing a value of type RTYPE and set
1320 the value, mask pair *VAL and *MASK to the result. */
1322 void
1323 bit_value_unop (enum tree_code code, signop type_sgn, int type_precision,
1324 widest_int *val, widest_int *mask,
1325 signop rtype_sgn, int rtype_precision,
1326 const widest_int &rval, const widest_int &rmask)
1328 switch (code)
1330 case BIT_NOT_EXPR:
1331 *mask = rmask;
1332 *val = ~rval;
1333 break;
1335 case NEGATE_EXPR:
1337 widest_int temv, temm;
1338 /* Return ~rval + 1. */
1339 bit_value_unop (BIT_NOT_EXPR, type_sgn, type_precision, &temv, &temm,
1340 type_sgn, type_precision, rval, rmask);
1341 bit_value_binop (PLUS_EXPR, type_sgn, type_precision, val, mask,
1342 type_sgn, type_precision, temv, temm,
1343 type_sgn, type_precision, 1, 0);
1344 break;
1347 CASE_CONVERT:
1349 /* First extend mask and value according to the original type. */
1350 *mask = wi::ext (rmask, rtype_precision, rtype_sgn);
1351 *val = wi::ext (rval, rtype_precision, rtype_sgn);
1353 /* Then extend mask and value according to the target type. */
1354 *mask = wi::ext (*mask, type_precision, type_sgn);
1355 *val = wi::ext (*val, type_precision, type_sgn);
1356 break;
1359 case ABS_EXPR:
1360 case ABSU_EXPR:
1361 if (wi::sext (rmask, rtype_precision) == -1)
1362 *mask = -1;
1363 else if (wi::neg_p (rmask))
1365 /* Result is either rval or -rval. */
1366 widest_int temv, temm;
1367 bit_value_unop (NEGATE_EXPR, rtype_sgn, rtype_precision, &temv,
1368 &temm, type_sgn, type_precision, rval, rmask);
1369 temm |= (rmask | (rval ^ temv));
1370 /* Extend the result. */
1371 *mask = wi::ext (temm, type_precision, type_sgn);
1372 *val = wi::ext (temv, type_precision, type_sgn);
1374 else if (wi::neg_p (rval))
1376 bit_value_unop (NEGATE_EXPR, type_sgn, type_precision, val, mask,
1377 type_sgn, type_precision, rval, rmask);
1379 else
1381 *mask = rmask;
1382 *val = rval;
1384 break;
1386 default:
1387 *mask = -1;
1388 break;
1392 /* Determine the mask pair *VAL and *MASK from multiplying the
1393 argument mask pair RVAL, RMASK by the unsigned constant C. */
1394 void
1395 bit_value_mult_const (signop sgn, int width,
1396 widest_int *val, widest_int *mask,
1397 const widest_int &rval, const widest_int &rmask,
1398 widest_int c)
1400 widest_int sum_mask = 0;
1402 /* Ensure rval_lo only contains known bits. */
1403 widest_int rval_lo = wi::bit_and_not (rval, rmask);
1405 if (rval_lo != 0)
1407 /* General case (some bits of multiplicand are known set). */
1408 widest_int sum_val = 0;
1409 while (c != 0)
1411 /* Determine the lowest bit set in the multiplier. */
1412 int bitpos = wi::ctz (c);
1413 widest_int term_mask = rmask << bitpos;
1414 widest_int term_val = rval_lo << bitpos;
1416 /* sum += term. */
1417 widest_int lo = sum_val + term_val;
1418 widest_int hi = (sum_val | sum_mask) + (term_val | term_mask);
1419 sum_mask |= term_mask | (lo ^ hi);
1420 sum_val = lo;
1422 /* Clear this bit in the multiplier. */
1423 c ^= wi::lshift (1, bitpos);
1425 /* Correctly extend the result value. */
1426 *val = wi::ext (sum_val, width, sgn);
1428 else
1430 /* Special case (no bits of multiplicand are known set). */
1431 while (c != 0)
1433 /* Determine the lowest bit set in the multiplier. */
1434 int bitpos = wi::ctz (c);
1435 widest_int term_mask = rmask << bitpos;
1437 /* sum += term. */
1438 widest_int hi = sum_mask + term_mask;
1439 sum_mask |= term_mask | hi;
1441 /* Clear this bit in the multiplier. */
1442 c ^= wi::lshift (1, bitpos);
1444 *val = 0;
1447 /* Correctly extend the result mask. */
1448 *mask = wi::ext (sum_mask, width, sgn);
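/* Illustrative example (not from the original comments): multiplying the
   pair value == 0b10, mask == 0b01 (i.e. 2 or 3) by the constant 4 runs a
   single iteration with bitpos == 2 and yields value == 0b1000 with
   mask == 0b0100, which describes exactly 8 and 12.  */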
1452 /* Apply the operation CODE in type TYPE to the value, mask pairs
1453 R1VAL, R1MASK and R2VAL, R2MASK representing values of types R1TYPE
1454 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1456 void
1457 bit_value_binop (enum tree_code code, signop sgn, int width,
1458 widest_int *val, widest_int *mask,
1459 signop r1type_sgn, int r1type_precision,
1460 const widest_int &r1val, const widest_int &r1mask,
1461 signop r2type_sgn, int r2type_precision ATTRIBUTE_UNUSED,
1462 const widest_int &r2val, const widest_int &r2mask)
1464 bool swap_p = false;
1466 /* Assume we'll get a constant result. Use an initial non-varying
1467 value; we fall back to varying in the end if necessary. */
1468 *mask = -1;
1469 /* Ensure that VAL is initialized (to any value). */
1470 *val = 0;
1472 switch (code)
1474 case BIT_AND_EXPR:
1475 /* The mask is constant where there is a known not
1476 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1477 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1478 *val = r1val & r2val;
1479 break;
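/* Illustrative example (not from the original comments): for
   r1 == 0b11xx (value 0b1100, mask 0b0011) and r2 == 0b1010 fully
   known, the formula gives value 0b1000 and mask 0b0010, i.e. the
   result is 0b10x0 -- either 8 or 10.  */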
1481 case BIT_IOR_EXPR:
1482 /* The mask is constant where there is a known
1483 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1484 *mask = wi::bit_and_not (r1mask | r2mask,
1485 wi::bit_and_not (r1val, r1mask)
1486 | wi::bit_and_not (r2val, r2mask));
1487 *val = r1val | r2val;
1488 break;
1490 case BIT_XOR_EXPR:
1491 /* m1 | m2 */
1492 *mask = r1mask | r2mask;
1493 *val = r1val ^ r2val;
1494 break;
1496 case LROTATE_EXPR:
1497 case RROTATE_EXPR:
1498 if (r2mask == 0)
1500 widest_int shift = r2val;
1501 if (shift == 0)
1503 *mask = r1mask;
1504 *val = r1val;
1506 else
1508 if (wi::neg_p (shift, r2type_sgn))
1510 shift = -shift;
1511 if (code == RROTATE_EXPR)
1512 code = LROTATE_EXPR;
1513 else
1514 code = RROTATE_EXPR;
1516 if (code == RROTATE_EXPR)
1518 *mask = wi::rrotate (r1mask, shift, width);
1519 *val = wi::rrotate (r1val, shift, width);
1521 else
1523 *mask = wi::lrotate (r1mask, shift, width);
1524 *val = wi::lrotate (r1val, shift, width);
1528 break;
1530 case LSHIFT_EXPR:
1531 case RSHIFT_EXPR:
1532 /* ??? We can handle partially known shift counts if we know
1533 their sign. That way we can tell that (x << (y | 8)) & 255
1534 is zero. */
1535 if (r2mask == 0)
1537 widest_int shift = r2val;
1538 if (shift == 0)
1540 *mask = r1mask;
1541 *val = r1val;
1543 else
1545 if (wi::neg_p (shift, r2type_sgn))
1546 break;
1547 if (code == RSHIFT_EXPR)
1549 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1550 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
1552 else
1554 *mask = wi::ext (r1mask << shift, width, sgn);
1555 *val = wi::ext (r1val << shift, width, sgn);
1559 break;
1561 case PLUS_EXPR:
1562 case POINTER_PLUS_EXPR:
1564 /* Do the addition with unknown bits set to zero, to give carry-ins of
1565 zero wherever possible. */
1566 widest_int lo = (wi::bit_and_not (r1val, r1mask)
1567 + wi::bit_and_not (r2val, r2mask));
1568 lo = wi::ext (lo, width, sgn);
1569 /* Do the addition with unknown bits set to one, to give carry-ins of
1570 one wherever possible. */
1571 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
1572 hi = wi::ext (hi, width, sgn);
1573 /* Each bit in the result is known if (a) the corresponding bits in
1574 both inputs are known, and (b) the carry-in to that bit position
1575 is known. We can check condition (b) by seeing if we got the same
1576 result with minimised carries as with maximised carries. */
1577 *mask = r1mask | r2mask | (lo ^ hi);
1578 *mask = wi::ext (*mask, width, sgn);
1579 /* It shouldn't matter whether we choose lo or hi here. */
1580 *val = lo;
1581 break;
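/* Illustrative example (not from the original comments): adding
   0b01x0 (value 0b0100, mask 0b0010) and the constant 1 gives
   lo == 0b0101 and hi == 0b0111, so the result is value 0b0101
   with mask 0b0010, i.e. 0b01x1 -- the unknown bit never
   propagates a carry into the known bits in this case.  */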
1584 case MINUS_EXPR:
1585 case POINTER_DIFF_EXPR:
1587 /* Subtraction is derived from the addition algorithm above. */
1588 widest_int lo = wi::bit_and_not (r1val, r1mask) - (r2val | r2mask);
1589 lo = wi::ext (lo, width, sgn);
1590 widest_int hi = (r1val | r1mask) - wi::bit_and_not (r2val, r2mask);
1591 hi = wi::ext (hi, width, sgn);
1592 *mask = r1mask | r2mask | (lo ^ hi);
1593 *mask = wi::ext (*mask, width, sgn);
1594 *val = lo;
1595 break;
1598 case MULT_EXPR:
1599 if (r2mask == 0
1600 && !wi::neg_p (r2val, sgn)
1601 && (flag_expensive_optimizations || wi::popcount (r2val) < 8))
1602 bit_value_mult_const (sgn, width, val, mask, r1val, r1mask, r2val);
1603 else if (r1mask == 0
1604 && !wi::neg_p (r1val, sgn)
1605 && (flag_expensive_optimizations || wi::popcount (r1val) < 8))
1606 bit_value_mult_const (sgn, width, val, mask, r2val, r2mask, r1val);
1607 else
1609 /* Just track trailing zeros in both operands and transfer
1610 them to the other. */
1611 int r1tz = wi::ctz (r1val | r1mask);
1612 int r2tz = wi::ctz (r2val | r2mask);
1613 if (r1tz + r2tz >= width)
1615 *mask = 0;
1616 *val = 0;
1618 else if (r1tz + r2tz > 0)
1620 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
1621 width, sgn);
1622 *val = 0;
1625 break;
1627 case EQ_EXPR:
1628 case NE_EXPR:
1630 widest_int m = r1mask | r2mask;
1631 if (wi::bit_and_not (r1val, m) != wi::bit_and_not (r2val, m))
1633 *mask = 0;
1634 *val = ((code == EQ_EXPR) ? 0 : 1);
1636 else
1638 /* We know the result of a comparison is always one or zero. */
1639 *mask = 1;
1640 *val = 0;
1642 break;
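/* Illustrative example (not from the original comments): comparing
   0b1x0 (value 0b100, mask 0b010) with the constant 0b001, the
   known bits already differ, so EQ_EXPR is known to be 0 and
   NE_EXPR is known to be 1 regardless of the unknown bit.  */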
1645 case GE_EXPR:
1646 case GT_EXPR:
1647 swap_p = true;
1648 code = swap_tree_comparison (code);
1649 /* Fall through. */
1650 case LT_EXPR:
1651 case LE_EXPR:
1653 widest_int min1, max1, min2, max2;
1654 int minmax, maxmin;
1656 const widest_int &o1val = swap_p ? r2val : r1val;
1657 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1658 const widest_int &o2val = swap_p ? r1val : r2val;
1659 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1661 value_mask_to_min_max (&min1, &max1, o1val, o1mask,
1662 r1type_sgn, r1type_precision);
1663 value_mask_to_min_max (&min2, &max2, o2val, o2mask,
1664 r1type_sgn, r1type_precision);
1666 /* For comparisons the signedness is in the comparison operands. */
1667 /* Do a cross comparison of the max/min pairs. */
1668 maxmin = wi::cmp (max1, min2, r1type_sgn);
1669 minmax = wi::cmp (min1, max2, r1type_sgn);
1670 if (maxmin < (code == LE_EXPR ? 1: 0)) /* o1 < or <= o2. */
1672 *mask = 0;
1673 *val = 1;
1675 else if (minmax > (code == LT_EXPR ? -1 : 0)) /* o1 >= or > o2. */
1677 *mask = 0;
1678 *val = 0;
1680 else if (maxmin == minmax) /* o1 and o2 are equal. */
1682 /* This probably should never happen as we'd have
1683 folded the thing during fully constant value folding. */
1684 *mask = 0;
1685 *val = (code == LE_EXPR ? 1 : 0);
1687 else
1689 /* We know the result of a comparison is always one or zero. */
1690 *mask = 1;
1691 *val = 0;
1693 break;
1696 case MIN_EXPR:
1697 case MAX_EXPR:
1699 widest_int min1, max1, min2, max2;
1701 value_mask_to_min_max (&min1, &max1, r1val, r1mask, sgn, width);
1702 value_mask_to_min_max (&min2, &max2, r2val, r2mask, sgn, width);
1704 if (wi::cmp (max1, min2, sgn) <= 0) /* r1 is less than r2. */
1706 if (code == MIN_EXPR)
1708 *mask = r1mask;
1709 *val = r1val;
1711 else
1713 *mask = r2mask;
1714 *val = r2val;
1717 else if (wi::cmp (min1, max2, sgn) >= 0) /* r2 is less than r1. */
1719 if (code == MIN_EXPR)
1721 *mask = r2mask;
1722 *val = r2val;
1724 else
1726 *mask = r1mask;
1727 *val = r1val;
1730 else
1732 /* The result is either r1 or r2. */
1733 *mask = r1mask | r2mask | (r1val ^ r2val);
1734 *val = r1val;
1736 break;
1739 default:;
1743 /* Return the propagation value when applying the operation CODE to
1744 the value RHS yielding type TYPE. */
1746 static ccp_prop_value_t
1747 bit_value_unop (enum tree_code code, tree type, tree rhs)
1749 ccp_prop_value_t rval = get_value_for_expr (rhs, true);
1750 widest_int value, mask;
1751 ccp_prop_value_t val;
1753 if (rval.lattice_val == UNDEFINED)
1754 return rval;
1756 gcc_assert ((rval.lattice_val == CONSTANT
1757 && TREE_CODE (rval.value) == INTEGER_CST)
1758 || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
1759 bit_value_unop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1760 TYPE_SIGN (TREE_TYPE (rhs)), TYPE_PRECISION (TREE_TYPE (rhs)),
1761 value_to_wide_int (rval), rval.mask);
1762 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1764 val.lattice_val = CONSTANT;
1765 val.mask = mask;
1766 /* ??? Delay building trees here. */
1767 val.value = wide_int_to_tree (type, value);
1769 else
1771 val.lattice_val = VARYING;
1772 val.value = NULL_TREE;
1773 val.mask = -1;
1775 return val;
1778 /* Return the propagation value when applying the operation CODE to
1779 the values RHS1 and RHS2 yielding type TYPE. */
1781 static ccp_prop_value_t
1782 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1784 ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1785 ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
1786 widest_int value, mask;
1787 ccp_prop_value_t val;
1789 if (r1val.lattice_val == UNDEFINED
1790 || r2val.lattice_val == UNDEFINED)
1792 val.lattice_val = VARYING;
1793 val.value = NULL_TREE;
1794 val.mask = -1;
1795 return val;
1798 gcc_assert ((r1val.lattice_val == CONSTANT
1799 && TREE_CODE (r1val.value) == INTEGER_CST)
1800 || wi::sext (r1val.mask,
1801 TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
1802 gcc_assert ((r2val.lattice_val == CONSTANT
1803 && TREE_CODE (r2val.value) == INTEGER_CST)
1804 || wi::sext (r2val.mask,
1805 TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
1806 bit_value_binop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1807 TYPE_SIGN (TREE_TYPE (rhs1)), TYPE_PRECISION (TREE_TYPE (rhs1)),
1808 value_to_wide_int (r1val), r1val.mask,
1809 TYPE_SIGN (TREE_TYPE (rhs2)), TYPE_PRECISION (TREE_TYPE (rhs2)),
1810 value_to_wide_int (r2val), r2val.mask);
1812 /* (x * x) & 2 == 0. */
1813 if (code == MULT_EXPR && rhs1 == rhs2 && TYPE_PRECISION (type) > 1)
1815 widest_int m = 2;
1816 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1817 value = wi::bit_and_not (value, m);
1818 else
1819 value = 0;
1820 mask = wi::bit_and_not (mask, m);
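/* A square is congruent to 0 or 1 modulo 4 depending on the low bit
   of x, so bit 1 of x * x is always zero; the adjustment above makes
   that bit known even when the rest of the product is unknown.  */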
1823 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1825 val.lattice_val = CONSTANT;
1826 val.mask = mask;
1827 /* ??? Delay building trees here. */
1828 val.value = wide_int_to_tree (type, value);
1830 else
1832 val.lattice_val = VARYING;
1833 val.value = NULL_TREE;
1834 val.mask = -1;
1836 return val;
1839 /* Return the propagation value for __builtin_assume_aligned
1840 and functions with the assume_aligned or alloc_align attribute.
1841 For __builtin_assume_aligned, ATTR is NULL_TREE,
1842 for the assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1843 is false, for the alloc_align attribute ATTR is non-NULL and
1844 ALLOC_ALIGNED is true. */
1846 static ccp_prop_value_t
1847 bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
1848 bool alloc_aligned)
1850 tree align, misalign = NULL_TREE, type;
1851 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1852 ccp_prop_value_t alignval;
1853 widest_int value, mask;
1854 ccp_prop_value_t val;
1856 if (attr == NULL_TREE)
1858 tree ptr = gimple_call_arg (stmt, 0);
1859 type = TREE_TYPE (ptr);
1860 ptrval = get_value_for_expr (ptr, true);
1862 else
1864 tree lhs = gimple_call_lhs (stmt);
1865 type = TREE_TYPE (lhs);
1868 if (ptrval.lattice_val == UNDEFINED)
1869 return ptrval;
1870 gcc_assert ((ptrval.lattice_val == CONSTANT
1871 && TREE_CODE (ptrval.value) == INTEGER_CST)
1872 || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
1873 if (attr == NULL_TREE)
1875 /* Get aligni and misaligni from __builtin_assume_aligned. */
1876 align = gimple_call_arg (stmt, 1);
1877 if (!tree_fits_uhwi_p (align))
1878 return ptrval;
1879 aligni = tree_to_uhwi (align);
1880 if (gimple_call_num_args (stmt) > 2)
1882 misalign = gimple_call_arg (stmt, 2);
1883 if (!tree_fits_uhwi_p (misalign))
1884 return ptrval;
1885 misaligni = tree_to_uhwi (misalign);
1888 else
1890 /* Get aligni and misaligni from assume_aligned or
1891 alloc_align attributes. */
1892 if (TREE_VALUE (attr) == NULL_TREE)
1893 return ptrval;
1894 attr = TREE_VALUE (attr);
1895 align = TREE_VALUE (attr);
1896 if (!tree_fits_uhwi_p (align))
1897 return ptrval;
1898 aligni = tree_to_uhwi (align);
1899 if (alloc_aligned)
1901 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1902 return ptrval;
1903 align = gimple_call_arg (stmt, aligni - 1);
1904 if (!tree_fits_uhwi_p (align))
1905 return ptrval;
1906 aligni = tree_to_uhwi (align);
1908 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1910 misalign = TREE_VALUE (TREE_CHAIN (attr));
1911 if (!tree_fits_uhwi_p (misalign))
1912 return ptrval;
1913 misaligni = tree_to_uhwi (misalign);
1916 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1917 return ptrval;
1919 align = build_int_cst_type (type, -aligni);
1920 alignval = get_value_for_expr (align, true);
1921 bit_value_binop (BIT_AND_EXPR, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1922 TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (ptrval), ptrval.mask,
1923 TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (alignval), alignval.mask);
1925 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1927 val.lattice_val = CONSTANT;
1928 val.mask = mask;
1929 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1930 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1931 value |= misaligni;
1932 /* ??? Delay building trees here. */
1933 val.value = wide_int_to_tree (type, value);
1935 else
1937 val.lattice_val = VARYING;
1938 val.value = NULL_TREE;
1939 val.mask = -1;
1941 return val;
1944 /* Evaluate statement STMT.
1945 Valid only for assignments, calls, conditionals, and switches. */
1947 static ccp_prop_value_t
1948 evaluate_stmt (gimple *stmt)
1950 ccp_prop_value_t val;
1951 tree simplified = NULL_TREE;
1952 ccp_lattice_t likelyvalue = likely_value (stmt);
1953 bool is_constant = false;
1954 unsigned int align;
1955 bool ignore_return_flags = false;
1957 if (dump_file && (dump_flags & TDF_DETAILS))
1959 fprintf (dump_file, "which is likely ");
1960 switch (likelyvalue)
1962 case CONSTANT:
1963 fprintf (dump_file, "CONSTANT");
1964 break;
1965 case UNDEFINED:
1966 fprintf (dump_file, "UNDEFINED");
1967 break;
1968 case VARYING:
1969 fprintf (dump_file, "VARYING");
1970 break;
1971 default:;
1973 fprintf (dump_file, "\n");
1976 /* If the statement is likely to have a CONSTANT result, then try
1977 to fold the statement to determine the constant value. */
1978 /* FIXME. This is the only place that we call ccp_fold.
1979 Since likely_value never returns CONSTANT for calls, we will
1980 not attempt to fold them, including builtins that may profit. */
1981 if (likelyvalue == CONSTANT)
1983 fold_defer_overflow_warnings ();
1984 simplified = ccp_fold (stmt);
1985 if (simplified
1986 && TREE_CODE (simplified) == SSA_NAME)
1988 /* We may not use values of something that may be simulated again,
1989 see valueize_op_1. */
1990 if (SSA_NAME_IS_DEFAULT_DEF (simplified)
1991 || ! prop_simulate_again_p (SSA_NAME_DEF_STMT (simplified)))
1993 ccp_prop_value_t *val = get_value (simplified);
1994 if (val && val->lattice_val != VARYING)
1996 fold_undefer_overflow_warnings (true, stmt, 0);
1997 return *val;
2000 else
2001 /* We may also not place a non-valueized copy in the lattice
2002 as that might become stale if we never re-visit this stmt. */
2003 simplified = NULL_TREE;
2005 is_constant = simplified && is_gimple_min_invariant (simplified);
2006 fold_undefer_overflow_warnings (is_constant, stmt, 0);
2007 if (is_constant)
2009 /* The statement produced a constant value. */
2010 val.lattice_val = CONSTANT;
2011 val.value = simplified;
2012 val.mask = 0;
2013 return val;
2016 /* If the statement is likely to have a VARYING result, then do not
2017 bother folding the statement. */
2018 else if (likelyvalue == VARYING)
2020 enum gimple_code code = gimple_code (stmt);
2021 if (code == GIMPLE_ASSIGN)
2023 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2025 /* Other cases cannot satisfy is_gimple_min_invariant
2026 without folding. */
2027 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
2028 simplified = gimple_assign_rhs1 (stmt);
2030 else if (code == GIMPLE_SWITCH)
2031 simplified = gimple_switch_index (as_a <gswitch *> (stmt));
2032 else
2033 /* These cannot satisfy is_gimple_min_invariant without folding. */
2034 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
2035 is_constant = simplified && is_gimple_min_invariant (simplified);
2036 if (is_constant)
2038 /* The statement produced a constant value. */
2039 val.lattice_val = CONSTANT;
2040 val.value = simplified;
2041 val.mask = 0;
2044 /* If the statement result is likely UNDEFINED, make it so. */
2045 else if (likelyvalue == UNDEFINED)
2047 val.lattice_val = UNDEFINED;
2048 val.value = NULL_TREE;
2049 val.mask = 0;
2050 return val;
2053 /* Resort to simplification for bitwise tracking. */
2054 if (flag_tree_bit_ccp
2055 && (likelyvalue == CONSTANT || is_gimple_call (stmt)
2056 || (gimple_assign_single_p (stmt)
2057 && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
2058 && !is_constant)
2060 enum gimple_code code = gimple_code (stmt);
2061 val.lattice_val = VARYING;
2062 val.value = NULL_TREE;
2063 val.mask = -1;
2064 if (code == GIMPLE_ASSIGN)
2066 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2067 tree rhs1 = gimple_assign_rhs1 (stmt);
2068 tree lhs = gimple_assign_lhs (stmt);
2069 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
2070 || POINTER_TYPE_P (TREE_TYPE (lhs)))
2071 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2072 || POINTER_TYPE_P (TREE_TYPE (rhs1))))
2073 switch (get_gimple_rhs_class (subcode))
2075 case GIMPLE_SINGLE_RHS:
2076 val = get_value_for_expr (rhs1, true);
2077 break;
2079 case GIMPLE_UNARY_RHS:
2080 val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
2081 break;
2083 case GIMPLE_BINARY_RHS:
2084 val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
2085 gimple_assign_rhs2 (stmt));
2086 break;
2088 default:;
2091 else if (code == GIMPLE_COND)
2093 enum tree_code code = gimple_cond_code (stmt);
2094 tree rhs1 = gimple_cond_lhs (stmt);
2095 tree rhs2 = gimple_cond_rhs (stmt);
2096 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2097 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2098 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
2100 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2102 tree fndecl = gimple_call_fndecl (stmt);
2103 switch (DECL_FUNCTION_CODE (fndecl))
2105 case BUILT_IN_MALLOC:
2106 case BUILT_IN_REALLOC:
2107 case BUILT_IN_CALLOC:
2108 case BUILT_IN_STRDUP:
2109 case BUILT_IN_STRNDUP:
2110 val.lattice_val = CONSTANT;
2111 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
2112 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
2113 / BITS_PER_UNIT - 1);
2114 break;
2116 CASE_BUILT_IN_ALLOCA:
2117 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2118 ? BIGGEST_ALIGNMENT
2119 : TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
2120 val.lattice_val = CONSTANT;
2121 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
2122 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
2123 break;
2125 case BUILT_IN_ASSUME_ALIGNED:
2126 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
2127 ignore_return_flags = true;
2128 break;
2130 case BUILT_IN_ALIGNED_ALLOC:
2131 case BUILT_IN_GOMP_ALLOC:
2133 tree align = get_constant_value (gimple_call_arg (stmt, 0));
2134 if (align
2135 && tree_fits_uhwi_p (align))
2137 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
2138 if (aligni > 1
2139 /* align must be a power of two. */
2140 && (aligni & (aligni - 1)) == 0)
2142 val.lattice_val = CONSTANT;
2143 val.value = build_int_cst (ptr_type_node, 0);
2144 val.mask = -aligni;
2147 break;
2150 case BUILT_IN_BSWAP16:
2151 case BUILT_IN_BSWAP32:
2152 case BUILT_IN_BSWAP64:
2153 case BUILT_IN_BSWAP128:
2154 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
2155 if (val.lattice_val == UNDEFINED)
2156 break;
2157 else if (val.lattice_val == CONSTANT
2158 && val.value
2159 && TREE_CODE (val.value) == INTEGER_CST)
2161 tree type = TREE_TYPE (gimple_call_lhs (stmt));
2162 int prec = TYPE_PRECISION (type);
2163 wide_int wval = wi::to_wide (val.value);
2164 val.value
2165 = wide_int_to_tree (type,
2166 wide_int::from (wval, prec,
2167 UNSIGNED).bswap ());
2168 val.mask
2169 = widest_int::from (wide_int::from (val.mask, prec,
2170 UNSIGNED).bswap (),
2171 UNSIGNED);
2172 if (wi::sext (val.mask, prec) != -1)
2173 break;
2175 val.lattice_val = VARYING;
2176 val.value = NULL_TREE;
2177 val.mask = -1;
2178 break;
2180 default:;
2183 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
2185 tree fntype = gimple_call_fntype (stmt);
2186 if (fntype)
2188 tree attrs = lookup_attribute ("assume_aligned",
2189 TYPE_ATTRIBUTES (fntype));
2190 if (attrs)
2191 val = bit_value_assume_aligned (stmt, attrs, val, false);
2192 attrs = lookup_attribute ("alloc_align",
2193 TYPE_ATTRIBUTES (fntype));
2194 if (attrs)
2195 val = bit_value_assume_aligned (stmt, attrs, val, true);
2197 int flags = ignore_return_flags
2198 ? 0 : gimple_call_return_flags (as_a <gcall *> (stmt));
2199 if (flags & ERF_RETURNS_ARG
2200 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
2202 val = get_value_for_expr
2203 (gimple_call_arg (stmt,
2204 flags & ERF_RETURN_ARG_MASK), true);
2207 is_constant = (val.lattice_val == CONSTANT);
2210 if (flag_tree_bit_ccp
2211 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
2212 || !is_constant)
2213 && gimple_get_lhs (stmt)
2214 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
2216 tree lhs = gimple_get_lhs (stmt);
2217 wide_int nonzero_bits = get_nonzero_bits (lhs);
2218 if (nonzero_bits != -1)
2220 if (!is_constant)
2222 val.lattice_val = CONSTANT;
2223 val.value = build_zero_cst (TREE_TYPE (lhs));
2224 val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (lhs)));
2225 is_constant = true;
2227 else
2229 if (wi::bit_and_not (wi::to_wide (val.value), nonzero_bits) != 0)
2230 val.value = wide_int_to_tree (TREE_TYPE (lhs),
2231 nonzero_bits
2232 & wi::to_wide (val.value));
2233 if (nonzero_bits == 0)
2234 val.mask = 0;
2235 else
2236 val.mask = val.mask & extend_mask (nonzero_bits,
2237 TYPE_SIGN (TREE_TYPE (lhs)));
2242 /* The statement produced a nonconstant value. */
2243 if (!is_constant)
2245 /* The statement produced a copy. */
2246 if (simplified && TREE_CODE (simplified) == SSA_NAME
2247 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
2249 val.lattice_val = CONSTANT;
2250 val.value = simplified;
2251 val.mask = -1;
2253 /* The statement is VARYING. */
2254 else
2256 val.lattice_val = VARYING;
2257 val.value = NULL_TREE;
2258 val.mask = -1;
2262 return val;
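/* A hypothetical sketch of the bitwise tracking done above (not taken
   from the testsuite): for

     void *p = aligned_alloc (64, n);

   with the alignment argument a compile-time constant, evaluate_stmt
   returns CONSTANT with value 0 and mask -64 for P: the low six bits are
   known to be zero and every other bit is unknown.  */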
2265 typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
2267 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
2268 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
2270 static void
2271 insert_clobber_before_stack_restore (tree saved_val, tree var,
2272 gimple_htab **visited)
2274 gimple *stmt;
2275 gassign *clobber_stmt;
2276 tree clobber;
2277 imm_use_iterator iter;
2278 gimple_stmt_iterator i;
2279 gimple **slot;
2281 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
2282 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
2284 clobber = build_clobber (TREE_TYPE (var));
2285 clobber_stmt = gimple_build_assign (var, clobber);
2287 i = gsi_for_stmt (stmt);
2288 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
2290 else if (gimple_code (stmt) == GIMPLE_PHI)
2292 if (!*visited)
2293 *visited = new gimple_htab (10);
2295 slot = (*visited)->find_slot (stmt, INSERT);
2296 if (*slot != NULL)
2297 continue;
2299 *slot = stmt;
2300 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
2301 visited);
2303 else if (gimple_assign_ssa_name_copy_p (stmt))
2304 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
2305 visited);
2308 /* Advance the iterator to the previous non-debug gimple statement in the same
2309 or dominating basic block. */
2311 static inline void
2312 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
2314 basic_block dom;
2316 gsi_prev_nondebug (i);
2317 while (gsi_end_p (*i))
2319 dom = get_immediate_dominator (CDI_DOMINATORS, gsi_bb (*i));
2320 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2321 return;
2323 *i = gsi_last_bb (dom);
2327 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
2328 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2330 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
2331 a previous pass (such as DOM) duplicated it along multiple paths to a BB.
2332 In that case the function gives up without inserting the clobbers. */
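/* A hypothetical GIMPLE sketch of the effect (identifiers invented for
   illustration):

     saved_1 = __builtin_stack_save ();
     ...
     tmp ={v} {CLOBBER};                  <-- clobber of VAR inserted here
     __builtin_stack_restore (saved_1);

   so the storage of the folded alloca ends its lifetime where the stack
   would have been released.  */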
2334 static void
2335 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2337 gimple *stmt;
2338 tree saved_val;
2339 gimple_htab *visited = NULL;
2341 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
2343 stmt = gsi_stmt (i);
2345 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2346 continue;
2348 saved_val = gimple_call_lhs (stmt);
2349 if (saved_val == NULL_TREE)
2350 continue;
2352 insert_clobber_before_stack_restore (saved_val, var, &visited);
2353 break;
2356 delete visited;
2359 /* Detects a __builtin_alloca_with_align with a constant size argument.
2360 If found, declares a fixed-size array and returns its address; otherwise
2361 returns NULL_TREE. */
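/* A hypothetical sketch (identifiers invented for illustration): once the
   size argument has become constant,

     p_1 = __builtin_alloca_with_align (16, 64);

   can be folded into the address of a fixed-size temporary,

     char tmp[16] __attribute__ ((aligned (8)));
     ...
     p_1 = &tmp;

   subject to the stack-frame size heuristic checked below.  */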
2363 static tree
2364 fold_builtin_alloca_with_align (gimple *stmt)
2366 unsigned HOST_WIDE_INT size, threshold, n_elem;
2367 tree lhs, arg, block, var, elem_type, array_type;
2369 /* Get lhs. */
2370 lhs = gimple_call_lhs (stmt);
2371 if (lhs == NULL_TREE)
2372 return NULL_TREE;
2374 /* Detect constant argument. */
2375 arg = get_constant_value (gimple_call_arg (stmt, 0));
2376 if (arg == NULL_TREE
2377 || TREE_CODE (arg) != INTEGER_CST
2378 || !tree_fits_uhwi_p (arg))
2379 return NULL_TREE;
2381 size = tree_to_uhwi (arg);
2383 /* Heuristic: don't fold large allocas. */
2384 threshold = (unsigned HOST_WIDE_INT)param_large_stack_frame;
2385 /* In case the alloca is located at function entry, it has the same lifetime
2386 as a declared array, so we allow a larger size. */
2387 block = gimple_block (stmt);
2388 if (!(cfun->after_inlining
2389 && block
2390 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2391 threshold /= 10;
2392 if (size > threshold)
2393 return NULL_TREE;
2395 /* We have to be able to move points-to info. We used to assert
2396 that we can but IPA PTA might end up with two UIDs here
2397 as it might need to handle more than one instance being
2398 live at the same time. Instead of trying to detect this case
2399 (using the first UID would be OK) just give up for now. */
2400 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2401 unsigned uid = 0;
2402 if (pi != NULL
2403 && !pi->pt.anything
2404 && !pt_solution_singleton_or_null_p (&pi->pt, &uid))
2405 return NULL_TREE;
2407 /* Declare array. */
2408 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2409 n_elem = size * 8 / BITS_PER_UNIT;
2410 array_type = build_array_type_nelts (elem_type, n_elem);
2412 if (tree ssa_name = SSA_NAME_IDENTIFIER (lhs))
2414 /* Give the temporary a name derived from the name of the VLA
2415 declaration so it can be referenced in diagnostics. */
2416 const char *name = IDENTIFIER_POINTER (ssa_name);
2417 var = create_tmp_var (array_type, name);
2419 else
2420 var = create_tmp_var (array_type);
2422 if (gimple *lhsdef = SSA_NAME_DEF_STMT (lhs))
2424 /* Set the temporary's location to that of the VLA declaration
2425 so it can be pointed to in diagnostics. */
2426 location_t loc = gimple_location (lhsdef);
2427 DECL_SOURCE_LOCATION (var) = loc;
2430 SET_DECL_ALIGN (var, TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
2431 if (uid != 0)
2432 SET_DECL_PT_UID (var, uid);
2434 /* Fold alloca to the address of the array. */
2435 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2438 /* Fold the stmt at *GSI with CCP specific information that propagating
2439 and regular folding does not catch. */
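/* A hypothetical sketch of the GIMPLE_COND case handled below: if
   evaluate_stmt proves the predicate CONSTANT 0 with mask 0, e.g. because
   x_1 is known to be zero in

     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   then gimple_cond_make_false rewrites the condition so the false edge is
   always taken and CFG cleanup can remove the dead branch.  */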
2441 bool
2442 ccp_folder::fold_stmt (gimple_stmt_iterator *gsi)
2444 gimple *stmt = gsi_stmt (*gsi);
2446 switch (gimple_code (stmt))
2448 case GIMPLE_COND:
2450 gcond *cond_stmt = as_a <gcond *> (stmt);
2451 ccp_prop_value_t val;
2452 /* Statement evaluation will handle type mismatches in constants
2453 more gracefully than the final propagation. This allows us to
2454 fold more conditionals here. */
2455 val = evaluate_stmt (stmt);
2456 if (val.lattice_val != CONSTANT
2457 || val.mask != 0)
2458 return false;
2460 if (dump_file)
2462 fprintf (dump_file, "Folding predicate ");
2463 print_gimple_expr (dump_file, stmt, 0);
2464 fprintf (dump_file, " to ");
2465 print_generic_expr (dump_file, val.value);
2466 fprintf (dump_file, "\n");
2469 if (integer_zerop (val.value))
2470 gimple_cond_make_false (cond_stmt);
2471 else
2472 gimple_cond_make_true (cond_stmt);
2474 return true;
2477 case GIMPLE_CALL:
2479 tree lhs = gimple_call_lhs (stmt);
2480 int flags = gimple_call_flags (stmt);
2481 tree val;
2482 tree argt;
2483 bool changed = false;
2484 unsigned i;
2486 /* If the call was folded into a constant make sure it goes
2487 away even if we cannot propagate into all uses because of
2488 type issues. */
2489 if (lhs
2490 && TREE_CODE (lhs) == SSA_NAME
2491 && (val = get_constant_value (lhs))
2492 /* Don't optimize away calls that have side-effects. */
2493 && (flags & (ECF_CONST|ECF_PURE)) != 0
2494 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
2496 tree new_rhs = unshare_expr (val);
2497 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2498 TREE_TYPE (new_rhs)))
2499 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2500 gimplify_and_update_call_from_tree (gsi, new_rhs);
2501 return true;
2504 /* Internal calls provide no argument types, so the extra laxity
2505 for normal calls does not apply. */
2506 if (gimple_call_internal_p (stmt))
2507 return false;
2509 /* The heuristic of fold_builtin_alloca_with_align differs before and
2510 after inlining, so we don't require the arg to be changed into a
2511 constant for folding, but just to be constant. */
2512 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)
2513 || gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX))
2515 tree new_rhs = fold_builtin_alloca_with_align (stmt);
2516 if (new_rhs)
2518 gimplify_and_update_call_from_tree (gsi, new_rhs);
2519 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
2520 insert_clobbers_for_var (*gsi, var);
2521 return true;
2525 /* If there's no extra info from an assume_aligned call,
2526 drop it so it doesn't act as an otherwise useless dataflow
2527 barrier. */
2528 if (gimple_call_builtin_p (stmt, BUILT_IN_ASSUME_ALIGNED))
2530 tree ptr = gimple_call_arg (stmt, 0);
2531 ccp_prop_value_t ptrval = get_value_for_expr (ptr, true);
2532 if (ptrval.lattice_val == CONSTANT
2533 && TREE_CODE (ptrval.value) == INTEGER_CST
2534 && ptrval.mask != 0)
2536 ccp_prop_value_t val
2537 = bit_value_assume_aligned (stmt, NULL_TREE, ptrval, false);
2538 unsigned int ptralign = least_bit_hwi (ptrval.mask.to_uhwi ());
2539 unsigned int align = least_bit_hwi (val.mask.to_uhwi ());
2540 if (ptralign == align
2541 && ((TREE_INT_CST_LOW (ptrval.value) & (align - 1))
2542 == (TREE_INT_CST_LOW (val.value) & (align - 1))))
2544 replace_call_with_value (gsi, ptr);
2545 return true;
2550 /* Propagate into the call arguments. Compared to replace_uses_in
2551 this can use the argument slot types for type verification
2552 instead of the current argument type. We can also safely
2553 drop qualifiers here as we are dealing with constants anyway. */
2554 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
2555 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2556 ++i, argt = TREE_CHAIN (argt))
2558 tree arg = gimple_call_arg (stmt, i);
2559 if (TREE_CODE (arg) == SSA_NAME
2560 && (val = get_constant_value (arg))
2561 && useless_type_conversion_p
2562 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2563 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2565 gimple_call_set_arg (stmt, i, unshare_expr (val));
2566 changed = true;
2570 return changed;
2573 case GIMPLE_ASSIGN:
2575 tree lhs = gimple_assign_lhs (stmt);
2576 tree val;
2578 /* If we have a load that turned out to be constant replace it
2579 as we cannot propagate into all uses in all cases. */
2580 if (gimple_assign_single_p (stmt)
2581 && TREE_CODE (lhs) == SSA_NAME
2582 && (val = get_constant_value (lhs)))
2584 tree rhs = unshare_expr (val);
2585 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2586 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2587 gimple_assign_set_rhs_from_tree (gsi, rhs);
2588 return true;
2591 return false;
2594 default:
2595 return false;
2599 /* Visit the assignment statement STMT. Set the value of its LHS to the
2600 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2601 creates virtual definitions, set the value of each new name to that
2602 of the RHS (if we can derive a constant out of the RHS).
2603 Value-returning call statements also perform an assignment, and
2604 are handled here. */
2606 static enum ssa_prop_result
2607 visit_assignment (gimple *stmt, tree *output_p)
2609 ccp_prop_value_t val;
2610 enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
2612 tree lhs = gimple_get_lhs (stmt);
2613 if (TREE_CODE (lhs) == SSA_NAME)
2615 /* Evaluate the statement, which could be
2616 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2617 val = evaluate_stmt (stmt);
2619 /* If STMT is an assignment to an SSA_NAME, we only have one
2620 value to set. */
2621 if (set_lattice_value (lhs, &val))
2623 *output_p = lhs;
2624 if (val.lattice_val == VARYING)
2625 retval = SSA_PROP_VARYING;
2626 else
2627 retval = SSA_PROP_INTERESTING;
2631 return retval;
2635 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2636 if it can determine which edge will be taken. Otherwise, return
2637 SSA_PROP_VARYING. */
2639 static enum ssa_prop_result
2640 visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
2642 ccp_prop_value_t val;
2643 basic_block block;
2645 block = gimple_bb (stmt);
2646 val = evaluate_stmt (stmt);
2647 if (val.lattice_val != CONSTANT
2648 || val.mask != 0)
2649 return SSA_PROP_VARYING;
2651 /* Find which edge out of the conditional block will be taken and add it
2652 to the worklist. If no single edge can be determined statically,
2653 return SSA_PROP_VARYING to feed all the outgoing edges to the
2654 propagation engine. */
2655 *taken_edge_p = find_taken_edge (block, val.value);
2656 if (*taken_edge_p)
2657 return SSA_PROP_INTERESTING;
2658 else
2659 return SSA_PROP_VARYING;
2663 /* Evaluate statement STMT. If the statement produces an output value and
2664 its evaluation changes the lattice value of its output, return
2665 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2666 output value.
2668 If STMT is a conditional branch and we can determine its truth
2669 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2670 value, return SSA_PROP_VARYING. */
2672 enum ssa_prop_result
2673 ccp_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
2675 tree def;
2676 ssa_op_iter iter;
2678 if (dump_file && (dump_flags & TDF_DETAILS))
2680 fprintf (dump_file, "\nVisiting statement:\n");
2681 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2684 switch (gimple_code (stmt))
2686 case GIMPLE_ASSIGN:
2687 /* If the statement is an assignment that produces a single
2688 output value, evaluate its RHS to see if the lattice value of
2689 its output has changed. */
2690 return visit_assignment (stmt, output_p);
2692 case GIMPLE_CALL:
2693 /* A value-returning call also performs an assignment. */
2694 if (gimple_call_lhs (stmt) != NULL_TREE)
2695 return visit_assignment (stmt, output_p);
2696 break;
2698 case GIMPLE_COND:
2699 case GIMPLE_SWITCH:
2700 /* If STMT is a conditional branch, see if we can determine
2701 which branch will be taken. */
2702 /* FIXME. It appears that we should be able to optimize
2703 computed GOTOs here as well. */
2704 return visit_cond_stmt (stmt, taken_edge_p);
2706 default:
2707 break;
2710 /* Any other kind of statement is not interesting for constant
2711 propagation and, therefore, not worth simulating. */
2712 if (dump_file && (dump_flags & TDF_DETAILS))
2713 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2715 /* Definitions made by statements other than assignments to
2716 SSA_NAMEs represent unknown modifications to their outputs.
2717 Mark them VARYING. */
2718 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2719 set_value_varying (def);
2721 return SSA_PROP_VARYING;
2725 /* Main entry point for SSA Conditional Constant Propagation. If NONZERO_P,
2726 record nonzero bits. */
2728 static unsigned int
2729 do_ssa_ccp (bool nonzero_p)
2731 unsigned int todo = 0;
2732 calculate_dominance_info (CDI_DOMINATORS);
2734 ccp_initialize ();
2735 class ccp_propagate ccp_propagate;
2736 ccp_propagate.ssa_propagate ();
2737 if (ccp_finalize (nonzero_p || flag_ipa_bit_cp))
2739 todo = (TODO_cleanup_cfg | TODO_update_ssa);
2741 /* ccp_finalize does not preserve loop-closed ssa. */
2742 loops_state_clear (LOOP_CLOSED_SSA);
2745 free_dominance_info (CDI_DOMINATORS);
2746 return todo;
2750 namespace {
2752 const pass_data pass_data_ccp =
2754 GIMPLE_PASS, /* type */
2755 "ccp", /* name */
2756 OPTGROUP_NONE, /* optinfo_flags */
2757 TV_TREE_CCP, /* tv_id */
2758 ( PROP_cfg | PROP_ssa ), /* properties_required */
2759 0, /* properties_provided */
2760 0, /* properties_destroyed */
2761 0, /* todo_flags_start */
2762 TODO_update_address_taken, /* todo_flags_finish */
2765 class pass_ccp : public gimple_opt_pass
2767 public:
2768 pass_ccp (gcc::context *ctxt)
2769 : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false)
2772 /* opt_pass methods: */
2773 opt_pass * clone () { return new pass_ccp (m_ctxt); }
2774 void set_pass_param (unsigned int n, bool param)
2776 gcc_assert (n == 0);
2777 nonzero_p = param;
2779 virtual bool gate (function *) { return flag_tree_ccp != 0; }
2780 virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); }
2782 private:
2783 /* Determines whether the pass instance records nonzero bits. */
2784 bool nonzero_p;
2785 }; // class pass_ccp
2787 } // anon namespace
2789 gimple_opt_pass *
2790 make_pass_ccp (gcc::context *ctxt)
2792 return new pass_ccp (ctxt);
2797 /* Try to optimize out __builtin_stack_restore. Optimize it out
2798 if there is another __builtin_stack_restore in the same basic
2799 block and no calls or ASM_EXPRs are in between, or if this block's
2800 only outgoing edge is to EXIT_BLOCK and there are no calls or
2801 ASM_EXPRs after this __builtin_stack_restore. */
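/* An illustrative sketch of the first case (hypothetical):

     __builtin_stack_restore (saved_1);   <-- removed as redundant
     x_2 = y_3 + 1;                       <-- no calls or ASM_EXPRs between
     __builtin_stack_restore (saved_2);

   The earlier restore has no observable effect because the later one
   resets the stack pointer anyway.  */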
2803 static tree
2804 optimize_stack_restore (gimple_stmt_iterator i)
2806 tree callee;
2807 gimple *stmt;
2809 basic_block bb = gsi_bb (i);
2810 gimple *call = gsi_stmt (i);
2812 if (gimple_code (call) != GIMPLE_CALL
2813 || gimple_call_num_args (call) != 1
2814 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2815 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2816 return NULL_TREE;
2818 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2820 stmt = gsi_stmt (i);
2821 if (gimple_code (stmt) == GIMPLE_ASM)
2822 return NULL_TREE;
2823 if (gimple_code (stmt) != GIMPLE_CALL)
2824 continue;
2826 callee = gimple_call_fndecl (stmt);
2827 if (!callee
2828 || !fndecl_built_in_p (callee, BUILT_IN_NORMAL)
2829 /* All regular builtins are ok, just obviously not alloca. */
2830 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee)))
2831 return NULL_TREE;
2833 if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
2834 goto second_stack_restore;
2837 if (!gsi_end_p (i))
2838 return NULL_TREE;
2840 /* Allow a single successor that is the exit block, or zero successors. */
2841 switch (EDGE_COUNT (bb->succs))
2843 case 0:
2844 break;
2845 case 1:
2846 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2847 return NULL_TREE;
2848 break;
2849 default:
2850 return NULL_TREE;
2852 second_stack_restore:
2854 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2855 If there are multiple uses, then the last one should remove the call.
2856 In any case, whether the call to __builtin_stack_save can be removed
2857 or not is irrelevant to removing the call to __builtin_stack_restore. */
2858 if (has_single_use (gimple_call_arg (call, 0)))
2860 gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2861 if (is_gimple_call (stack_save))
2863 callee = gimple_call_fndecl (stack_save);
2864 if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
2866 gimple_stmt_iterator stack_save_gsi;
2867 tree rhs;
2869 stack_save_gsi = gsi_for_stmt (stack_save);
2870 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2871 replace_call_with_value (&stack_save_gsi, rhs);
2876 /* No effect, so the statement will be deleted. */
2877 return integer_zero_node;
2880 /* If va_list type is a simple pointer and nothing special is needed,
2881 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2882 optimize __builtin_va_end (&ap) away as a NOP, and turn __builtin_va_copy
2883 into a simple pointer assignment. */
2885 static tree
2886 optimize_stdarg_builtin (gimple *call)
2888 tree callee, lhs, rhs, cfun_va_list;
2889 bool va_list_simple_ptr;
2890 location_t loc = gimple_location (call);
2892 callee = gimple_call_fndecl (call);
2894 cfun_va_list = targetm.fn_abi_va_list (callee);
2895 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2896 && (TREE_TYPE (cfun_va_list) == void_type_node
2897 || TREE_TYPE (cfun_va_list) == char_type_node);
2899 switch (DECL_FUNCTION_CODE (callee))
2901 case BUILT_IN_VA_START:
2902 if (!va_list_simple_ptr
2903 || targetm.expand_builtin_va_start != NULL
2904 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2905 return NULL_TREE;
2907 if (gimple_call_num_args (call) != 2)
2908 return NULL_TREE;
2910 lhs = gimple_call_arg (call, 0);
2911 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2912 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2913 != TYPE_MAIN_VARIANT (cfun_va_list))
2914 return NULL_TREE;
2916 lhs = build_fold_indirect_ref_loc (loc, lhs);
2917 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2918 1, integer_zero_node);
2919 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2920 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2922 case BUILT_IN_VA_COPY:
2923 if (!va_list_simple_ptr)
2924 return NULL_TREE;
2926 if (gimple_call_num_args (call) != 2)
2927 return NULL_TREE;
2929 lhs = gimple_call_arg (call, 0);
2930 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2931 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2932 != TYPE_MAIN_VARIANT (cfun_va_list))
2933 return NULL_TREE;
2935 lhs = build_fold_indirect_ref_loc (loc, lhs);
2936 rhs = gimple_call_arg (call, 1);
2937 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2938 != TYPE_MAIN_VARIANT (cfun_va_list))
2939 return NULL_TREE;
2941 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2942 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2944 case BUILT_IN_VA_END:
2945 /* No effect, so the statement will be deleted. */
2946 return integer_zero_node;
2948 default:
2949 gcc_unreachable ();
2953 /* Attempt to make the block of __builtin_unreachable I unreachable by changing
2954 the incoming jumps. Return true if at least one jump was changed. */
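/* A hypothetical sketch: given

     <bb 2>:
     if (x_1 > 0)
       goto <bb 3>;   <-- bb 3 starts with __builtin_unreachable ()
     else
       goto <bb 4>;

   the true edge leads to the unreachable block, so the condition is
   rewritten with gimple_cond_make_false and CFG cleanup can delete
   bb 3.  */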
2956 static bool
2957 optimize_unreachable (gimple_stmt_iterator i)
2959 basic_block bb = gsi_bb (i);
2960 gimple_stmt_iterator gsi;
2961 gimple *stmt;
2962 edge_iterator ei;
2963 edge e;
2964 bool ret;
2966 if (flag_sanitize & SANITIZE_UNREACHABLE)
2967 return false;
2969 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2971 stmt = gsi_stmt (gsi);
2973 if (is_gimple_debug (stmt))
2974 continue;
2976 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2978 /* Verify we do not need to preserve the label. */
2979 if (FORCED_LABEL (gimple_label_label (label_stmt)))
2980 return false;
2982 continue;
2985 /* Only handle the case where __builtin_unreachable is the first statement
2986 in the block. We rely on DCE to remove stmts without side-effects
2987 before __builtin_unreachable. */
2988 if (gsi_stmt (gsi) != gsi_stmt (i))
2989 return false;
2992 ret = false;
2993 FOR_EACH_EDGE (e, ei, bb->preds)
2995 gsi = gsi_last_bb (e->src);
2996 if (gsi_end_p (gsi))
2997 continue;
2999 stmt = gsi_stmt (gsi);
3000 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3002 if (e->flags & EDGE_TRUE_VALUE)
3003 gimple_cond_make_false (cond_stmt);
3004 else if (e->flags & EDGE_FALSE_VALUE)
3005 gimple_cond_make_true (cond_stmt);
3006 else
3007 gcc_unreachable ();
3008 update_stmt (cond_stmt);
3010 else
3012 /* TODO: handle other cases. Note that unreachable switch case
3013 statements have already been removed. */
3014 continue;
3017 ret = true;
3020 return ret;
3023 /* Optimize
3024 mask_2 = 1 << cnt_1;
3025 _4 = __atomic_fetch_or_* (ptr_6, mask_2, _3);
3026 _5 = _4 & mask_2;
3027 to
3028 _4 = ATOMIC_BIT_TEST_AND_SET (ptr_6, cnt_1, 0, _3);
3029 _5 = _4;
3030 If _5 is only used in _5 != 0 or _5 == 0 comparisons, 1
3031 is passed instead of 0, and the builtin just returns a zero
3032 or 1 value instead of the actual bit.
3033 Similarly for __sync_fetch_and_or_* (without the ", _3" part
3034 in there), and/or if mask_2 is a power of 2 constant.
3035 Similarly for xor instead of or, use ATOMIC_BIT_TEST_AND_COMPLEMENT
3036 in that case. And similarly for and instead of or, except that
3037 the second argument to the builtin needs to be the one's complement
3038 of the mask rather than the mask itself. */
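/* A user-level sketch of a matching input (hypothetical, not from the
   testsuite):

     if (__atomic_fetch_or (&flags, 1 << bit, __ATOMIC_SEQ_CST) & (1 << bit))
       ...

   On targets providing the atomic_bit_test_and_set optab this becomes a
   call to the internal function .ATOMIC_BIT_TEST_AND_SET that returns
   just the tested bit (or a 0/1 flag) instead of the whole old value.  */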
3040 static void
3041 optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
3042 enum internal_fn fn, bool has_model_arg,
3043 bool after)
3045 gimple *call = gsi_stmt (*gsip);
3046 tree lhs = gimple_call_lhs (call);
3047 use_operand_p use_p;
3048 gimple *use_stmt;
3049 tree mask, bit;
3050 optab optab;
3052 if (!flag_inline_atomics
3053 || optimize_debug
3054 || !gimple_call_builtin_p (call, BUILT_IN_NORMAL)
3055 || !lhs
3056 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
3057 || !single_imm_use (lhs, &use_p, &use_stmt)
3058 || !is_gimple_assign (use_stmt)
3059 || gimple_assign_rhs_code (use_stmt) != BIT_AND_EXPR
3060 || !gimple_vdef (call))
3061 return;
3063 switch (fn)
3065 case IFN_ATOMIC_BIT_TEST_AND_SET:
3066 optab = atomic_bit_test_and_set_optab;
3067 break;
3068 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
3069 optab = atomic_bit_test_and_complement_optab;
3070 break;
3071 case IFN_ATOMIC_BIT_TEST_AND_RESET:
3072 optab = atomic_bit_test_and_reset_optab;
3073 break;
3074 default:
3075 return;
3078 if (optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs))) == CODE_FOR_nothing)
3079 return;
3081 mask = gimple_call_arg (call, 1);
3082 tree use_lhs = gimple_assign_lhs (use_stmt);
3083 if (!use_lhs)
3084 return;
3086 if (TREE_CODE (mask) == INTEGER_CST)
3088 if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
3089 mask = const_unop (BIT_NOT_EXPR, TREE_TYPE (mask), mask);
3090 mask = fold_convert (TREE_TYPE (lhs), mask);
3091 int ibit = tree_log2 (mask);
3092 if (ibit < 0)
3093 return;
3094 bit = build_int_cst (TREE_TYPE (lhs), ibit);
3096 else if (TREE_CODE (mask) == SSA_NAME)
3098 gimple *g = SSA_NAME_DEF_STMT (mask);
3099 if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
3101 if (!is_gimple_assign (g)
3102 || gimple_assign_rhs_code (g) != BIT_NOT_EXPR)
3103 return;
3104 mask = gimple_assign_rhs1 (g);
3105 if (TREE_CODE (mask) != SSA_NAME)
3106 return;
3107 g = SSA_NAME_DEF_STMT (mask);
3109 if (!is_gimple_assign (g)
3110 || gimple_assign_rhs_code (g) != LSHIFT_EXPR
3111 || !integer_onep (gimple_assign_rhs1 (g)))
3112 return;
3113 bit = gimple_assign_rhs2 (g);
3115 else
3116 return;
3118 if (gimple_assign_rhs1 (use_stmt) == lhs)
3120 if (!operand_equal_p (gimple_assign_rhs2 (use_stmt), mask, 0))
3121 return;
3123 else if (gimple_assign_rhs2 (use_stmt) != lhs
3124 || !operand_equal_p (gimple_assign_rhs1 (use_stmt), mask, 0))
3125 return;
3127 bool use_bool = true;
3128 bool has_debug_uses = false;
3129 imm_use_iterator iter;
3130 gimple *g;
3132 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs))
3133 use_bool = false;
3134 FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
3136 enum tree_code code = ERROR_MARK;
3137 tree op0 = NULL_TREE, op1 = NULL_TREE;
3138 if (is_gimple_debug (g))
3140 has_debug_uses = true;
3141 continue;
3143 else if (is_gimple_assign (g))
3144 switch (gimple_assign_rhs_code (g))
3146 case COND_EXPR:
3147 op1 = gimple_assign_rhs1 (g);
3148 code = TREE_CODE (op1);
3149 op0 = TREE_OPERAND (op1, 0);
3150 op1 = TREE_OPERAND (op1, 1);
3151 break;
3152 case EQ_EXPR:
3153 case NE_EXPR:
3154 code = gimple_assign_rhs_code (g);
3155 op0 = gimple_assign_rhs1 (g);
3156 op1 = gimple_assign_rhs2 (g);
3157 break;
3158 default:
3159 break;
3161 else if (gimple_code (g) == GIMPLE_COND)
3163 code = gimple_cond_code (g);
3164 op0 = gimple_cond_lhs (g);
3165 op1 = gimple_cond_rhs (g);
3168 if ((code == EQ_EXPR || code == NE_EXPR)
3169 && op0 == use_lhs
3170 && integer_zerop (op1))
3172 use_operand_p use_p;
3173 int n = 0;
3174 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3175 n++;
3176 if (n == 1)
3177 continue;
3180 use_bool = false;
3181 break;
3184 tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
3185 tree flag = build_int_cst (TREE_TYPE (lhs), use_bool);
3186 if (has_model_arg)
3187 g = gimple_build_call_internal (fn, 4, gimple_call_arg (call, 0),
3188 bit, flag, gimple_call_arg (call, 2));
3189 else
3190 g = gimple_build_call_internal (fn, 3, gimple_call_arg (call, 0),
3191 bit, flag);
3192 gimple_call_set_lhs (g, new_lhs);
3193 gimple_set_location (g, gimple_location (call));
3194 gimple_move_vops (g, call);
3195 bool throws = stmt_can_throw_internal (cfun, call);
3196 gimple_call_set_nothrow (as_a <gcall *> (g),
3197 gimple_call_nothrow_p (as_a <gcall *> (call)));
3198 gimple_stmt_iterator gsi = *gsip;
3199 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3200 edge e = NULL;
3201 if (throws)
3203 maybe_clean_or_replace_eh_stmt (call, g);
3204 if (after || (use_bool && has_debug_uses))
3205 e = find_fallthru_edge (gsi_bb (gsi)->succs);
3207 if (after)
3209 /* The internal function returns the value of the specified bit
3210 before the atomic operation. If we are interested in the value
3211 of the specified bit after the atomic operation (which only makes
3212 sense for xor; otherwise the bit's content is known at compile time),
3213 we need to invert the bit. */
3214 g = gimple_build_assign (make_ssa_name (TREE_TYPE (lhs)),
3215 BIT_XOR_EXPR, new_lhs,
3216 use_bool ? build_int_cst (TREE_TYPE (lhs), 1)
3217 : mask);
3218 new_lhs = gimple_assign_lhs (g);
3219 if (throws)
3221 gsi_insert_on_edge_immediate (e, g);
3222 gsi = gsi_for_stmt (g);
3224 else
3225 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3227 if (use_bool && has_debug_uses)
3229 tree temp = NULL_TREE;
3230 if (!throws || after || single_pred_p (e->dest))
3232 temp = make_node (DEBUG_EXPR_DECL);
3233 DECL_ARTIFICIAL (temp) = 1;
3234 TREE_TYPE (temp) = TREE_TYPE (lhs);
3235 SET_DECL_MODE (temp, TYPE_MODE (TREE_TYPE (lhs)));
3236 tree t = build2 (LSHIFT_EXPR, TREE_TYPE (lhs), new_lhs, bit);
3237 g = gimple_build_debug_bind (temp, t, g);
3238 if (throws && !after)
3240 gsi = gsi_after_labels (e->dest);
3241 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
3243 else
3244 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3246 FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
3247 if (is_gimple_debug (g))
3249 use_operand_p use_p;
3250 if (temp == NULL_TREE)
3251 gimple_debug_bind_reset_value (g);
3252 else
3253 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3254 SET_USE (use_p, temp);
3255 update_stmt (g);
3258 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_lhs)
3259 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs);
3260 replace_uses_by (use_lhs, new_lhs);
3261 gsi = gsi_for_stmt (use_stmt);
3262 gsi_remove (&gsi, true);
3263 release_defs (use_stmt);
3264 gsi_remove (gsip, true);
3265 release_ssa_name (lhs);
3268 /* Optimize
3269 a = {};
3270 b = a;
3271 into
3272 a = {};
3273 b = {};
3274 Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
3275 and/or memcpy (&b, &a, sizeof (a)); instead of b = a; */
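/* A hypothetical sketch of the memset/memcpy form:

     memset (&a, 0, sizeof (a));
     memcpy (&b, &a, sizeof (a));

   is turned into

     memset (&a, 0, sizeof (a));
     memset (&b, 0, sizeof (a));  */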
3277 static void
3278 optimize_memcpy (gimple_stmt_iterator *gsip, tree dest, tree src, tree len)
3280 gimple *stmt = gsi_stmt (*gsip);
3281 if (gimple_has_volatile_ops (stmt))
3282 return;
3284 tree vuse = gimple_vuse (stmt);
3285 if (vuse == NULL)
3286 return;
3288 gimple *defstmt = SSA_NAME_DEF_STMT (vuse);
3289 tree src2 = NULL_TREE, len2 = NULL_TREE;
3290 poly_int64 offset, offset2;
3291 tree val = integer_zero_node;
3292 if (gimple_store_p (defstmt)
3293 && gimple_assign_single_p (defstmt)
3294 && TREE_CODE (gimple_assign_rhs1 (defstmt)) == CONSTRUCTOR
3295 && !gimple_clobber_p (defstmt))
3296 src2 = gimple_assign_lhs (defstmt);
3297 else if (gimple_call_builtin_p (defstmt, BUILT_IN_MEMSET)
3298 && TREE_CODE (gimple_call_arg (defstmt, 0)) == ADDR_EXPR
3299 && TREE_CODE (gimple_call_arg (defstmt, 1)) == INTEGER_CST)
3301 src2 = TREE_OPERAND (gimple_call_arg (defstmt, 0), 0);
3302 len2 = gimple_call_arg (defstmt, 2);
3303 val = gimple_call_arg (defstmt, 1);
3304 /* For non-0 val, we'd have to transform stmt from assignment
3305 into memset (only if dest is addressable). */
3306 if (!integer_zerop (val) && is_gimple_assign (stmt))
3307 src2 = NULL_TREE;
3310 if (src2 == NULL_TREE)
3311 return;
3313 if (len == NULL_TREE)
3314 len = (TREE_CODE (src) == COMPONENT_REF
3315 ? DECL_SIZE_UNIT (TREE_OPERAND (src, 1))
3316 : TYPE_SIZE_UNIT (TREE_TYPE (src)));
3317 if (len2 == NULL_TREE)
3318 len2 = (TREE_CODE (src2) == COMPONENT_REF
3319 ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
3320 : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
3321 if (len == NULL_TREE
3322 || !poly_int_tree_p (len)
3323 || len2 == NULL_TREE
3324 || !poly_int_tree_p (len2))
3325 return;
3327 src = get_addr_base_and_unit_offset (src, &offset);
3328 src2 = get_addr_base_and_unit_offset (src2, &offset2);
3329 if (src == NULL_TREE
3330 || src2 == NULL_TREE
3331 || maybe_lt (offset, offset2))
3332 return;
3334 if (!operand_equal_p (src, src2, 0))
3335 return;
3337 /* [ src + offset2, src + offset2 + len2 - 1 ] is set to val.
3338 Make sure that
3339 [ src + offset, src + offset + len - 1 ] is a subset of that. */
3340 if (maybe_gt (wi::to_poly_offset (len) + (offset - offset2),
3341 wi::to_poly_offset (len2)))
3342 return;
3344 if (dump_file && (dump_flags & TDF_DETAILS))
3346 fprintf (dump_file, "Simplified\n ");
3347 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3348 fprintf (dump_file, "after previous\n ");
3349 print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
3352 /* For simplicity, don't change the kind of the stmt,
3353 turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
3354 into memset (&dest, val, len);
3355 In theory we could change dest = src into memset if dest
3356 is addressable (maybe beneficial if val is not 0), or
3357 memcpy (&dest, &src, len) into dest = {} if len is the size
3358 of dest and dest isn't volatile. */
3359 if (is_gimple_assign (stmt))
3361 tree ctor = build_constructor (TREE_TYPE (dest), NULL);
3362 gimple_assign_set_rhs_from_tree (gsip, ctor);
3363 update_stmt (stmt);
3365 else /* If stmt is memcpy, transform it into memset. */
3367 gcall *call = as_a <gcall *> (stmt);
3368 tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
3369 gimple_call_set_fndecl (call, fndecl);
3370 gimple_call_set_fntype (call, TREE_TYPE (fndecl));
3371 gimple_call_set_arg (call, 1, val);
3372 update_stmt (stmt);
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3377 fprintf (dump_file, "into\n ");
3378 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3382 /* A simple pass that attempts to fold all builtin functions. This pass
3383 is run after we've propagated as many constants as we can. */
3385 namespace {
3387 const pass_data pass_data_fold_builtins =
3389 GIMPLE_PASS, /* type */
3390 "fab", /* name */
3391 OPTGROUP_NONE, /* optinfo_flags */
3392 TV_NONE, /* tv_id */
3393 ( PROP_cfg | PROP_ssa ), /* properties_required */
3394 0, /* properties_provided */
3395 0, /* properties_destroyed */
3396 0, /* todo_flags_start */
3397 TODO_update_ssa, /* todo_flags_finish */
3400 class pass_fold_builtins : public gimple_opt_pass
3402 public:
3403 pass_fold_builtins (gcc::context *ctxt)
3404 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
3407 /* opt_pass methods: */
3408 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
3409 virtual unsigned int execute (function *);
3411 }; // class pass_fold_builtins
3413 unsigned int
3414 pass_fold_builtins::execute (function *fun)
3416 bool cfg_changed = false;
3417 basic_block bb;
3418 unsigned int todoflags = 0;
3420 FOR_EACH_BB_FN (bb, fun)
3422 gimple_stmt_iterator i;
3423 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3425 gimple *stmt, *old_stmt;
3426 tree callee;
3427 enum built_in_function fcode;
3429 stmt = gsi_stmt (i);
3431 if (gimple_code (stmt) != GIMPLE_CALL)
3433 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
3434 after the last GIMPLE DSE they aren't needed and might
3435 unnecessarily keep the SSA_NAMEs live. */
3436 if (gimple_clobber_p (stmt))
3438 tree lhs = gimple_assign_lhs (stmt);
3439 if (TREE_CODE (lhs) == MEM_REF
3440 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
3442 unlink_stmt_vdef (stmt);
3443 gsi_remove (&i, true);
3444 release_defs (stmt);
3445 continue;
3448 else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
3449 optimize_memcpy (&i, gimple_assign_lhs (stmt),
3450 gimple_assign_rhs1 (stmt), NULL_TREE);
3451 gsi_next (&i);
3452 continue;
3455 callee = gimple_call_fndecl (stmt);
3456 if (!callee || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
3458 gsi_next (&i);
3459 continue;
3462 fcode = DECL_FUNCTION_CODE (callee);
3463 if (fold_stmt (&i))
3465 else
3467 tree result = NULL_TREE;
3468 switch (DECL_FUNCTION_CODE (callee))
3470 case BUILT_IN_CONSTANT_P:
3471 /* Resolve __builtin_constant_p. If it hasn't been
3472 folded to integer_one_node by now, it's fairly
3473 certain that the value simply isn't constant. */
3474 result = integer_zero_node;
3475 break;
3477 case BUILT_IN_ASSUME_ALIGNED:
3478 /* Remove __builtin_assume_aligned. */
3479 result = gimple_call_arg (stmt, 0);
3480 break;
3482 case BUILT_IN_STACK_RESTORE:
3483 result = optimize_stack_restore (i);
3484 if (result)
3485 break;
3486 gsi_next (&i);
3487 continue;
3489 case BUILT_IN_UNREACHABLE:
3490 if (optimize_unreachable (i))
3491 cfg_changed = true;
3492 break;
3494 case BUILT_IN_ATOMIC_FETCH_OR_1:
3495 case BUILT_IN_ATOMIC_FETCH_OR_2:
3496 case BUILT_IN_ATOMIC_FETCH_OR_4:
3497 case BUILT_IN_ATOMIC_FETCH_OR_8:
3498 case BUILT_IN_ATOMIC_FETCH_OR_16:
3499 optimize_atomic_bit_test_and (&i,
3500 IFN_ATOMIC_BIT_TEST_AND_SET,
3501 true, false);
3502 break;
3503 case BUILT_IN_SYNC_FETCH_AND_OR_1:
3504 case BUILT_IN_SYNC_FETCH_AND_OR_2:
3505 case BUILT_IN_SYNC_FETCH_AND_OR_4:
3506 case BUILT_IN_SYNC_FETCH_AND_OR_8:
3507 case BUILT_IN_SYNC_FETCH_AND_OR_16:
3508 optimize_atomic_bit_test_and (&i,
3509 IFN_ATOMIC_BIT_TEST_AND_SET,
3510 false, false);
3511 break;
3513 case BUILT_IN_ATOMIC_FETCH_XOR_1:
3514 case BUILT_IN_ATOMIC_FETCH_XOR_2:
3515 case BUILT_IN_ATOMIC_FETCH_XOR_4:
3516 case BUILT_IN_ATOMIC_FETCH_XOR_8:
3517 case BUILT_IN_ATOMIC_FETCH_XOR_16:
3518 optimize_atomic_bit_test_and
3519 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, false);
3520 break;
3521 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
3522 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
3523 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
3524 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
3525 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
3526 optimize_atomic_bit_test_and
3527 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, false);
3528 break;
3530 case BUILT_IN_ATOMIC_XOR_FETCH_1:
3531 case BUILT_IN_ATOMIC_XOR_FETCH_2:
3532 case BUILT_IN_ATOMIC_XOR_FETCH_4:
3533 case BUILT_IN_ATOMIC_XOR_FETCH_8:
3534 case BUILT_IN_ATOMIC_XOR_FETCH_16:
3535 optimize_atomic_bit_test_and
3536 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, true);
3537 break;
3538 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
3539 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
3540 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
3541 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
3542 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
3543 optimize_atomic_bit_test_and
3544 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, true);
3545 break;
3547 case BUILT_IN_ATOMIC_FETCH_AND_1:
3548 case BUILT_IN_ATOMIC_FETCH_AND_2:
3549 case BUILT_IN_ATOMIC_FETCH_AND_4:
3550 case BUILT_IN_ATOMIC_FETCH_AND_8:
3551 case BUILT_IN_ATOMIC_FETCH_AND_16:
3552 optimize_atomic_bit_test_and (&i,
3553 IFN_ATOMIC_BIT_TEST_AND_RESET,
3554 true, false);
3555 break;
3556 case BUILT_IN_SYNC_FETCH_AND_AND_1:
3557 case BUILT_IN_SYNC_FETCH_AND_AND_2:
3558 case BUILT_IN_SYNC_FETCH_AND_AND_4:
3559 case BUILT_IN_SYNC_FETCH_AND_AND_8:
3560 case BUILT_IN_SYNC_FETCH_AND_AND_16:
3561 optimize_atomic_bit_test_and (&i,
3562 IFN_ATOMIC_BIT_TEST_AND_RESET,
3563 false, false);
3564 break;
3566 case BUILT_IN_MEMCPY:
3567 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3568 && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
3569 && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
3570 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST)
3572 tree dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
3573 tree src = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3574 tree len = gimple_call_arg (stmt, 2);
3575 optimize_memcpy (&i, dest, src, len);
3577 break;
3579 case BUILT_IN_VA_START:
3580 case BUILT_IN_VA_END:
3581 case BUILT_IN_VA_COPY:
3582 /* These shouldn't be folded before pass_stdarg. */
3583 result = optimize_stdarg_builtin (stmt);
3584 break;
3586 default:;
3589 if (!result)
3591 gsi_next (&i);
3592 continue;
3595 gimplify_and_update_call_from_tree (&i, result);
3598 todoflags |= TODO_update_address_taken;
3600 if (dump_file && (dump_flags & TDF_DETAILS))
3602 fprintf (dump_file, "Simplified\n ");
3603 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3606 old_stmt = stmt;
3607 stmt = gsi_stmt (i);
3608 update_stmt (stmt);
3610 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3611 && gimple_purge_dead_eh_edges (bb))
3612 cfg_changed = true;
3614 if (dump_file && (dump_flags & TDF_DETAILS))
3616 fprintf (dump_file, "to\n ");
3617 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3618 fprintf (dump_file, "\n");
3621 /* Retry the same statement if it changed into another
3622 builtin; there might be new opportunities now. */
3623 if (gimple_code (stmt) != GIMPLE_CALL)
3625 gsi_next (&i);
3626 continue;
3628 callee = gimple_call_fndecl (stmt);
3629 if (!callee
3630 || !fndecl_built_in_p (callee, fcode))
3631 gsi_next (&i);
3635 /* Delete unreachable blocks. */
3636 if (cfg_changed)
3637 todoflags |= TODO_cleanup_cfg;
3639 return todoflags;
3642 } // anon namespace
3644 gimple_opt_pass *
3645 make_pass_fold_builtins (gcc::context *ctxt)
3647 return new pass_fold_builtins (ctxt);
3650 /* A simple pass that emits some warnings post IPA. */
3652 namespace {
3654 const pass_data pass_data_post_ipa_warn =
3656 GIMPLE_PASS, /* type */
3657 "post_ipa_warn", /* name */
3658 OPTGROUP_NONE, /* optinfo_flags */
3659 TV_NONE, /* tv_id */
3660 ( PROP_cfg | PROP_ssa ), /* properties_required */
3661 0, /* properties_provided */
3662 0, /* properties_destroyed */
3663 0, /* todo_flags_start */
3664 0, /* todo_flags_finish */
3667 class pass_post_ipa_warn : public gimple_opt_pass
3669 public:
3670 pass_post_ipa_warn (gcc::context *ctxt)
3671 : gimple_opt_pass (pass_data_post_ipa_warn, ctxt)
3674 /* opt_pass methods: */
3675 opt_pass * clone () { return new pass_post_ipa_warn (m_ctxt); }
3676 virtual bool gate (function *) { return warn_nonnull != 0; }
3677 virtual unsigned int execute (function *);
3679 }; // class pass_post_ipa_warn
3681 unsigned int
3682 pass_post_ipa_warn::execute (function *fun)
3684 basic_block bb;
3686 FOR_EACH_BB_FN (bb, fun)
3688 gimple_stmt_iterator gsi;
3689 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3691 gimple *stmt = gsi_stmt (gsi);
3692 if (!is_gimple_call (stmt) || warning_suppressed_p (stmt, OPT_Wnonnull))
3693 continue;
3695 tree fntype = gimple_call_fntype (stmt);
3696 bitmap nonnullargs = get_nonnull_args (fntype);
3697 if (!nonnullargs)
3698 continue;
3700 tree fndecl = gimple_call_fndecl (stmt);
3701 const bool closure = fndecl && DECL_LAMBDA_FUNCTION_P (fndecl);
3703 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
3705 tree arg = gimple_call_arg (stmt, i);
3706 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
3707 continue;
3708 if (!integer_zerop (arg))
3709 continue;
3710 if (i == 0 && closure)
3711 /* Avoid warning for the first argument to lambda functions. */
3712 continue;
3713 if (!bitmap_empty_p (nonnullargs)
3714 && !bitmap_bit_p (nonnullargs, i))
3715 continue;
3717 /* In C++ non-static member functions argument 0 refers
3718 to the implicit this pointer. Use the same one-based
3719 numbering for ordinary arguments. */
3720 unsigned argno = TREE_CODE (fntype) == METHOD_TYPE ? i : i + 1;
3721 location_t loc = (EXPR_HAS_LOCATION (arg)
3722 ? EXPR_LOCATION (arg)
3723 : gimple_location (stmt));
3724 auto_diagnostic_group d;
3725 if (argno == 0)
3727 if (warning_at (loc, OPT_Wnonnull,
3728 "%qs pointer is null", "this")
3729 && fndecl)
3730 inform (DECL_SOURCE_LOCATION (fndecl),
3731 "in a call to non-static member function %qD",
3732 fndecl);
3733 continue;
3736 if (!warning_at (loc, OPT_Wnonnull,
3737 "argument %u null where non-null "
3738 "expected", argno))
3739 continue;
3741 tree fndecl = gimple_call_fndecl (stmt);
3742 if (fndecl && DECL_IS_UNDECLARED_BUILTIN (fndecl))
3743 inform (loc, "in a call to built-in function %qD",
3744 fndecl);
3745 else if (fndecl)
3746 inform (DECL_SOURCE_LOCATION (fndecl),
3747 "in a call to function %qD declared %qs",
3748 fndecl, "nonnull");
3750 BITMAP_FREE (nonnullargs);
3753 return 0;
3756 } // anon namespace
3758 gimple_opt_pass *
3759 make_pass_post_ipa_warn (gcc::context *ctxt)
3761 return new pass_post_ipa_warn (ctxt);