gcc/tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000-2013 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 3, or (at your option) any
11 later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
 57    propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main short cuts:
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
 84    that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
101 References:
103 Constant propagation with conditional branches,
104 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
106 Building an Optimizing Compiler,
107 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
109 Advanced Compiler Design and Implementation,
110 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
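/* A minimal illustrative sketch of the meet operation described above,
   written in plain C instead of GCC's internal types.  The names below
   (example_lattice, example_value, example_meet) are invented for this
   sketch only and do not exist elsewhere; the block is kept under #if 0
   so it takes no part in the build.  */
#if 0
enum example_lattice { EX_UNINITIALIZED, EX_UNDEFINED, EX_CONSTANT, EX_VARYING };

struct example_value
{
  enum example_lattice lat;
  long cst;			/* Meaningful only when LAT == EX_CONSTANT.  */
};

/* Meet *V1 with *V2, storing the result back into *V1:

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci if i == j, VARYING otherwise.  */

static void
example_meet (struct example_value *v1, const struct example_value *v2)
{
  if (v1->lat == EX_UNDEFINED)
    *v1 = *v2;
  else if (v2->lat == EX_UNDEFINED)
    ;				/* *V1 already holds the result.  */
  else if (v1->lat == EX_VARYING || v2->lat == EX_VARYING)
    {
      v1->lat = EX_VARYING;
      v1->cst = 0;
    }
  else if (v1->cst != v2->cst)
    {
      v1->lat = EX_VARYING;
      v1->cst = 0;
    }
}
#endif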
112 #include "config.h"
113 #include "system.h"
114 #include "coretypes.h"
115 #include "tm.h"
116 #include "tree.h"
117 #include "flags.h"
118 #include "tm_p.h"
119 #include "basic-block.h"
120 #include "function.h"
121 #include "gimple-pretty-print.h"
122 #include "tree-flow.h"
123 #include "tree-pass.h"
124 #include "tree-ssa-propagate.h"
125 #include "value-prof.h"
126 #include "langhooks.h"
127 #include "target.h"
128 #include "diagnostic-core.h"
129 #include "dbgcnt.h"
130 #include "gimple-fold.h"
131 #include "params.h"
132 #include "hash-table.h"
135 /* Possible lattice values. */
136 typedef enum
138 UNINITIALIZED,
139 UNDEFINED,
140 CONSTANT,
141 VARYING
142 } ccp_lattice_t;
144 struct prop_value_d {
145 /* Lattice value. */
146 ccp_lattice_t lattice_val;
148 /* Propagated value. */
149 tree value;
151 /* Mask that applies to the propagated value during CCP. For
152 X with a CONSTANT lattice value X & ~mask == value & ~mask. */
153 double_int mask;
156 typedef struct prop_value_d prop_value_t;
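/* Example of the VALUE/MASK pair (illustrative numbers only): with
   value == 0x8 and mask == 0x3 the invariant X & ~mask == value & ~mask
   says every bit of X outside the low two is known, so X is one of 0x8,
   0x9, 0xa or 0xb.  A mask of all ones means nothing is known (VARYING),
   a mask of zero means the value is fully known.  */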
158 /* Array of propagated constant values. After propagation,
159 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
160 the constant is held in an SSA name representing a memory store
161 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
162 memory reference used to store (i.e., the LHS of the assignment
163 doing the store). */
164 static prop_value_t *const_val;
165 static unsigned n_const_val;
167 static void canonicalize_float_value (prop_value_t *);
168 static bool ccp_fold_stmt (gimple_stmt_iterator *);
170 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
172 static void
173 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
175 switch (val.lattice_val)
177 case UNINITIALIZED:
178 fprintf (outf, "%sUNINITIALIZED", prefix);
179 break;
180 case UNDEFINED:
181 fprintf (outf, "%sUNDEFINED", prefix);
182 break;
183 case VARYING:
184 fprintf (outf, "%sVARYING", prefix);
185 break;
186 case CONSTANT:
187 if (TREE_CODE (val.value) != INTEGER_CST
188 || val.mask.is_zero ())
190 fprintf (outf, "%sCONSTANT ", prefix);
191 print_generic_expr (outf, val.value, dump_flags);
193 else
195 double_int cval = tree_to_double_int (val.value).and_not (val.mask);
196 fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
197 prefix, cval.high, cval.low);
198 fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
199 val.mask.high, val.mask.low);
201 break;
202 default:
203 gcc_unreachable ();
208 /* Print lattice value VAL to stderr. */
210 void debug_lattice_value (prop_value_t val);
212 DEBUG_FUNCTION void
213 debug_lattice_value (prop_value_t val)
215 dump_lattice_value (stderr, "", val);
216 fprintf (stderr, "\n");
220 /* Compute a default value for variable VAR and store it in the
221 CONST_VAL array. The following rules are used to get default
222 values:
224 1- Global and static variables that are declared constant are
225 considered CONSTANT.
227 2- Any other value is considered UNDEFINED. This is useful when
228 considering PHI nodes. PHI arguments that are undefined do not
229 change the constant value of the PHI node, which allows for more
230 constants to be propagated.
232 3- Variables defined by statements other than assignments and PHI
233 nodes are considered VARYING.
235 4- Initial values of variables that are not GIMPLE registers are
236 considered VARYING. */
238 static prop_value_t
239 get_default_value (tree var)
241 prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
242 gimple stmt;
244 stmt = SSA_NAME_DEF_STMT (var);
246 if (gimple_nop_p (stmt))
248 /* Variables defined by an empty statement are those used
249 before being initialized. If VAR is a local variable, we
250 can assume initially that it is UNDEFINED, otherwise we must
251 consider it VARYING. */
252 if (!virtual_operand_p (var)
253 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
254 val.lattice_val = UNDEFINED;
255 else
257 val.lattice_val = VARYING;
258 val.mask = double_int_minus_one;
261 else if (is_gimple_assign (stmt))
263 tree cst;
264 if (gimple_assign_single_p (stmt)
265 && DECL_P (gimple_assign_rhs1 (stmt))
266 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
268 val.lattice_val = CONSTANT;
269 val.value = cst;
271 else
273 /* Any other variable defined by an assignment is considered
274 UNDEFINED. */
275 val.lattice_val = UNDEFINED;
278 else if ((is_gimple_call (stmt)
279 && gimple_call_lhs (stmt) != NULL_TREE)
280 || gimple_code (stmt) == GIMPLE_PHI)
282 /* A variable defined by a call or a PHI node is considered
283 UNDEFINED. */
284 val.lattice_val = UNDEFINED;
286 else
288 /* Otherwise, VAR will never take on a constant value. */
289 val.lattice_val = VARYING;
290 val.mask = double_int_minus_one;
293 return val;
297 /* Get the constant value associated with variable VAR. */
299 static inline prop_value_t *
300 get_value (tree var)
302 prop_value_t *val;
304 if (const_val == NULL
305 || SSA_NAME_VERSION (var) >= n_const_val)
306 return NULL;
308 val = &const_val[SSA_NAME_VERSION (var)];
309 if (val->lattice_val == UNINITIALIZED)
310 *val = get_default_value (var);
312 canonicalize_float_value (val);
314 return val;
317 /* Return the constant tree value associated with VAR. */
319 static inline tree
320 get_constant_value (tree var)
322 prop_value_t *val;
323 if (TREE_CODE (var) != SSA_NAME)
325 if (is_gimple_min_invariant (var))
326 return var;
327 return NULL_TREE;
329 val = get_value (var);
330 if (val
331 && val->lattice_val == CONSTANT
332 && (TREE_CODE (val->value) != INTEGER_CST
333 || val->mask.is_zero ()))
334 return val->value;
335 return NULL_TREE;
338 /* Sets the value associated with VAR to VARYING. */
340 static inline void
341 set_value_varying (tree var)
343 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
345 val->lattice_val = VARYING;
346 val->value = NULL_TREE;
347 val->mask = double_int_minus_one;
350 /* For float types, modify the value of VAL to make ccp work correctly
351 for non-standard values (-0, NaN):
353 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
354 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
355 This is to fix the following problem (see PR 29921): Suppose we have
357 x = 0.0 * y
 359    and we set the value of y to NaN.  This causes the value of x to be set to NaN.
360 When we later determine that y is in fact VARYING, fold uses the fact
361 that HONOR_NANS is false, and we try to change the value of x to 0,
362 causing an ICE. With HONOR_NANS being false, the real appearance of
363 NaN would cause undefined behavior, though, so claiming that y (and x)
364 are UNDEFINED initially is correct. */
366 static void
367 canonicalize_float_value (prop_value_t *val)
369 enum machine_mode mode;
370 tree type;
371 REAL_VALUE_TYPE d;
373 if (val->lattice_val != CONSTANT
374 || TREE_CODE (val->value) != REAL_CST)
375 return;
377 d = TREE_REAL_CST (val->value);
378 type = TREE_TYPE (val->value);
379 mode = TYPE_MODE (type);
381 if (!HONOR_SIGNED_ZEROS (mode)
382 && REAL_VALUE_MINUS_ZERO (d))
384 val->value = build_real (type, dconst0);
385 return;
388 if (!HONOR_NANS (mode)
389 && REAL_VALUE_ISNAN (d))
391 val->lattice_val = UNDEFINED;
392 val->value = NULL;
393 return;
397 /* Return whether the lattice transition is valid. */
399 static bool
400 valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
402 /* Lattice transitions must always be monotonically increasing in
403 value. */
404 if (old_val.lattice_val < new_val.lattice_val)
405 return true;
407 if (old_val.lattice_val != new_val.lattice_val)
408 return false;
410 if (!old_val.value && !new_val.value)
411 return true;
413 /* Now both lattice values are CONSTANT. */
415 /* Allow transitioning from PHI <&x, not executable> == &x
416 to PHI <&x, &y> == common alignment. */
417 if (TREE_CODE (old_val.value) != INTEGER_CST
418 && TREE_CODE (new_val.value) == INTEGER_CST)
419 return true;
421 /* Bit-lattices have to agree in the still valid bits. */
422 if (TREE_CODE (old_val.value) == INTEGER_CST
423 && TREE_CODE (new_val.value) == INTEGER_CST)
424 return tree_to_double_int (old_val.value).and_not (new_val.mask)
425 == tree_to_double_int (new_val.value).and_not (new_val.mask);
427 /* Otherwise constant values have to agree. */
428 return operand_equal_p (old_val.value, new_val.value, 0);
431 /* Set the value for variable VAR to NEW_VAL. Return true if the new
432 value is different from VAR's previous value. */
434 static bool
435 set_lattice_value (tree var, prop_value_t new_val)
437 /* We can deal with old UNINITIALIZED values just fine here. */
438 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
440 canonicalize_float_value (&new_val);
442 /* We have to be careful to not go up the bitwise lattice
443 represented by the mask.
444 ??? This doesn't seem to be the best place to enforce this. */
445 if (new_val.lattice_val == CONSTANT
446 && old_val->lattice_val == CONSTANT
447 && TREE_CODE (new_val.value) == INTEGER_CST
448 && TREE_CODE (old_val->value) == INTEGER_CST)
450 double_int diff;
451 diff = tree_to_double_int (new_val.value)
452 ^ tree_to_double_int (old_val->value);
453 new_val.mask = new_val.mask | old_val->mask | diff;
456 gcc_assert (valid_lattice_transition (*old_val, new_val));
458 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
459 caller that this was a non-transition. */
460 if (old_val->lattice_val != new_val.lattice_val
461 || (new_val.lattice_val == CONSTANT
462 && TREE_CODE (new_val.value) == INTEGER_CST
463 && (TREE_CODE (old_val->value) != INTEGER_CST
464 || new_val.mask != old_val->mask)))
466 /* ??? We would like to delay creation of INTEGER_CSTs from
467 partially constants here. */
469 if (dump_file && (dump_flags & TDF_DETAILS))
471 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
472 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
475 *old_val = new_val;
477 gcc_assert (new_val.lattice_val != UNINITIALIZED);
478 return true;
481 return false;
484 static prop_value_t get_value_for_expr (tree, bool);
485 static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
486 static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
487 tree, double_int, double_int,
488 tree, double_int, double_int);
490 /* Return a double_int that can be used for bitwise simplifications
491 from VAL. */
493 static double_int
494 value_to_double_int (prop_value_t val)
496 if (val.value
497 && TREE_CODE (val.value) == INTEGER_CST)
498 return tree_to_double_int (val.value);
499 else
500 return double_int_zero;
503 /* Return the value for the address expression EXPR based on alignment
504 information. */
506 static prop_value_t
507 get_value_from_alignment (tree expr)
509 tree type = TREE_TYPE (expr);
510 prop_value_t val;
511 unsigned HOST_WIDE_INT bitpos;
512 unsigned int align;
514 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
516 get_pointer_alignment_1 (expr, &align, &bitpos);
517 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
518 ? double_int::mask (TYPE_PRECISION (type))
519 : double_int_minus_one)
520 .and_not (double_int::from_uhwi (align / BITS_PER_UNIT - 1));
521 val.lattice_val = val.mask.is_minus_one () ? VARYING : CONSTANT;
522 if (val.lattice_val == CONSTANT)
523 val.value
524 = double_int_to_tree (type,
525 double_int::from_uhwi (bitpos / BITS_PER_UNIT));
526 else
527 val.value = NULL_TREE;
529 return val;
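/* For instance (illustrative numbers): if get_pointer_alignment_1 reports
   ALIGN == 128 bits and BITPOS == 32 bits, the mask becomes ~(16 - 1), so
   the low four value bits are known, and the value becomes 32 / 8 == 4,
   i.e. the address is known to be congruent to 4 modulo 16.  */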
532 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
533 return constant bits extracted from alignment information for
534 invariant addresses. */
536 static prop_value_t
537 get_value_for_expr (tree expr, bool for_bits_p)
539 prop_value_t val;
541 if (TREE_CODE (expr) == SSA_NAME)
543 val = *get_value (expr);
544 if (for_bits_p
545 && val.lattice_val == CONSTANT
546 && TREE_CODE (val.value) == ADDR_EXPR)
547 val = get_value_from_alignment (val.value);
549 else if (is_gimple_min_invariant (expr)
550 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
552 val.lattice_val = CONSTANT;
553 val.value = expr;
554 val.mask = double_int_zero;
555 canonicalize_float_value (&val);
557 else if (TREE_CODE (expr) == ADDR_EXPR)
558 val = get_value_from_alignment (expr);
559 else
561 val.lattice_val = VARYING;
562 val.mask = double_int_minus_one;
563 val.value = NULL_TREE;
565 return val;
568 /* Return the likely CCP lattice value for STMT.
570 If STMT has no operands, then return CONSTANT.
572 Else if undefinedness of operands of STMT cause its value to be
573 undefined, then return UNDEFINED.
575 Else if any operands of STMT are constants, then return CONSTANT.
577 Else return VARYING. */
579 static ccp_lattice_t
580 likely_value (gimple stmt)
582 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
583 tree use;
584 ssa_op_iter iter;
585 unsigned i;
587 enum gimple_code code = gimple_code (stmt);
589 /* This function appears to be called only for assignments, calls,
590 conditionals, and switches, due to the logic in visit_stmt. */
591 gcc_assert (code == GIMPLE_ASSIGN
592 || code == GIMPLE_CALL
593 || code == GIMPLE_COND
594 || code == GIMPLE_SWITCH);
596 /* If the statement has volatile operands, it won't fold to a
597 constant value. */
598 if (gimple_has_volatile_ops (stmt))
599 return VARYING;
601 /* Arrive here for more complex cases. */
602 has_constant_operand = false;
603 has_undefined_operand = false;
604 all_undefined_operands = true;
605 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
607 prop_value_t *val = get_value (use);
609 if (val->lattice_val == UNDEFINED)
610 has_undefined_operand = true;
611 else
612 all_undefined_operands = false;
614 if (val->lattice_val == CONSTANT)
615 has_constant_operand = true;
618 /* There may be constants in regular rhs operands. For calls we
 619    have to skip the lhs, fndecl and static chain; for other
 620    statements only the lhs.  */
621 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
622 i < gimple_num_ops (stmt); ++i)
624 tree op = gimple_op (stmt, i);
625 if (!op || TREE_CODE (op) == SSA_NAME)
626 continue;
627 if (is_gimple_min_invariant (op))
628 has_constant_operand = true;
631 if (has_constant_operand)
632 all_undefined_operands = false;
634 if (has_undefined_operand
635 && code == GIMPLE_CALL
636 && gimple_call_internal_p (stmt))
637 switch (gimple_call_internal_fn (stmt))
639 /* These 3 builtins use the first argument just as a magic
 640    way to find out a decl uid.  */
641 case IFN_GOMP_SIMD_LANE:
642 case IFN_GOMP_SIMD_VF:
643 case IFN_GOMP_SIMD_LAST_LANE:
644 has_undefined_operand = false;
645 break;
646 default:
647 break;
650 /* If the operation combines operands like COMPLEX_EXPR make sure to
651 not mark the result UNDEFINED if only one part of the result is
652 undefined. */
653 if (has_undefined_operand && all_undefined_operands)
654 return UNDEFINED;
655 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
657 switch (gimple_assign_rhs_code (stmt))
659 /* Unary operators are handled with all_undefined_operands. */
660 case PLUS_EXPR:
661 case MINUS_EXPR:
662 case POINTER_PLUS_EXPR:
663 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
664 Not bitwise operators, one VARYING operand may specify the
665 result completely. Not logical operators for the same reason.
666 Not COMPLEX_EXPR as one VARYING operand makes the result partly
667 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
668 the undefined operand may be promoted. */
669 return UNDEFINED;
671 case ADDR_EXPR:
672 /* If any part of an address is UNDEFINED, like the index
673 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
674 return UNDEFINED;
676 default:
680 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
681 fall back to CONSTANT. During iteration UNDEFINED may still drop
682 to CONSTANT. */
683 if (has_undefined_operand)
684 return CONSTANT;
686 /* We do not consider virtual operands here -- load from read-only
687 memory may have only VARYING virtual operands, but still be
688 constant. */
689 if (has_constant_operand
690 || gimple_references_memory_p (stmt))
691 return CONSTANT;
693 return VARYING;
696 /* Returns true if STMT cannot be constant. */
698 static bool
699 surely_varying_stmt_p (gimple stmt)
701 /* If the statement has operands that we cannot handle, it cannot be
702 constant. */
703 if (gimple_has_volatile_ops (stmt))
704 return true;
706 /* If it is a call and does not return a value or is not a
707 builtin and not an indirect call, it is varying. */
708 if (is_gimple_call (stmt))
710 tree fndecl;
711 if (!gimple_call_lhs (stmt)
712 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
713 && !DECL_BUILT_IN (fndecl)))
714 return true;
717 /* Any other store operation is not interesting. */
718 else if (gimple_vdef (stmt))
719 return true;
721 /* Anything other than assignments and conditional jumps are not
722 interesting for CCP. */
723 if (gimple_code (stmt) != GIMPLE_ASSIGN
724 && gimple_code (stmt) != GIMPLE_COND
725 && gimple_code (stmt) != GIMPLE_SWITCH
726 && gimple_code (stmt) != GIMPLE_CALL)
727 return true;
729 return false;
732 /* Initialize local data structures for CCP. */
734 static void
735 ccp_initialize (void)
737 basic_block bb;
739 n_const_val = num_ssa_names;
740 const_val = XCNEWVEC (prop_value_t, n_const_val);
742 /* Initialize simulation flags for PHI nodes and statements. */
743 FOR_EACH_BB (bb)
745 gimple_stmt_iterator i;
747 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
749 gimple stmt = gsi_stmt (i);
750 bool is_varying;
 752    /* If the statement is a control insn, we must simulate it at
 753       least once, even if it looks varying; otherwise its outgoing
 754       edges would never get added.  */
755 if (stmt_ends_bb_p (stmt))
756 is_varying = false;
757 else
758 is_varying = surely_varying_stmt_p (stmt);
760 if (is_varying)
762 tree def;
763 ssa_op_iter iter;
765 /* If the statement will not produce a constant, mark
766 all its outputs VARYING. */
767 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
768 set_value_varying (def);
770 prop_set_simulate_again (stmt, !is_varying);
774 /* Now process PHI nodes. We never clear the simulate_again flag on
775 phi nodes, since we do not know which edges are executable yet,
776 except for phi nodes for virtual operands when we do not do store ccp. */
777 FOR_EACH_BB (bb)
779 gimple_stmt_iterator i;
781 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
783 gimple phi = gsi_stmt (i);
785 if (virtual_operand_p (gimple_phi_result (phi)))
786 prop_set_simulate_again (phi, false);
787 else
788 prop_set_simulate_again (phi, true);
793 /* Debug count support. Reset the values of ssa names
 794    VARYING when the total number of ssa names analyzed is
795 beyond the debug count specified. */
797 static void
798 do_dbg_cnt (void)
800 unsigned i;
801 for (i = 0; i < num_ssa_names; i++)
803 if (!dbg_cnt (ccp))
805 const_val[i].lattice_val = VARYING;
806 const_val[i].mask = double_int_minus_one;
807 const_val[i].value = NULL_TREE;
813 /* Do final substitution of propagated values, cleanup the flowgraph and
814 free allocated storage.
816 Return TRUE when something was optimized. */
818 static bool
819 ccp_finalize (void)
821 bool something_changed;
822 unsigned i;
824 do_dbg_cnt ();
826 /* Derive alignment and misalignment information from partially
827 constant pointers in the lattice. */
828 for (i = 1; i < num_ssa_names; ++i)
830 tree name = ssa_name (i);
831 prop_value_t *val;
832 unsigned int tem, align;
834 if (!name
835 || !POINTER_TYPE_P (TREE_TYPE (name)))
836 continue;
838 val = get_value (name);
839 if (val->lattice_val != CONSTANT
840 || TREE_CODE (val->value) != INTEGER_CST)
841 continue;
843 /* Trailing constant bits specify the alignment, trailing value
844 bits the misalignment. */
845 tem = val->mask.low;
846 align = (tem & -tem);
847 if (align > 1)
848 set_ptr_info_alignment (get_ptr_info (name), align,
849 TREE_INT_CST_LOW (val->value) & (align - 1));
852 /* Perform substitutions based on the known constant values. */
853 something_changed = substitute_and_fold (get_constant_value,
854 ccp_fold_stmt, true);
856 free (const_val);
857 const_val = NULL;
 858    return something_changed;
862 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
863 in VAL1.
865 any M UNDEFINED = any
866 any M VARYING = VARYING
867 Ci M Cj = Ci if (i == j)
868 Ci M Cj = VARYING if (i != j)
871 static void
872 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
874 if (val1->lattice_val == UNDEFINED)
876 /* UNDEFINED M any = any */
877 *val1 = *val2;
879 else if (val2->lattice_val == UNDEFINED)
881 /* any M UNDEFINED = any
882 Nothing to do. VAL1 already contains the value we want. */
885 else if (val1->lattice_val == VARYING
886 || val2->lattice_val == VARYING)
888 /* any M VARYING = VARYING. */
889 val1->lattice_val = VARYING;
890 val1->mask = double_int_minus_one;
891 val1->value = NULL_TREE;
893 else if (val1->lattice_val == CONSTANT
894 && val2->lattice_val == CONSTANT
895 && TREE_CODE (val1->value) == INTEGER_CST
896 && TREE_CODE (val2->value) == INTEGER_CST)
898 /* Ci M Cj = Ci if (i == j)
899 Ci M Cj = VARYING if (i != j)
901 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
902 drop to varying. */
903 val1->mask = val1->mask | val2->mask
904 | (tree_to_double_int (val1->value)
905 ^ tree_to_double_int (val2->value));
906 if (val1->mask.is_minus_one ())
908 val1->lattice_val = VARYING;
909 val1->value = NULL_TREE;
912 else if (val1->lattice_val == CONSTANT
913 && val2->lattice_val == CONSTANT
914 && simple_cst_equal (val1->value, val2->value) == 1)
916 /* Ci M Cj = Ci if (i == j)
917 Ci M Cj = VARYING if (i != j)
919 VAL1 already contains the value we want for equivalent values. */
921 else if (val1->lattice_val == CONSTANT
922 && val2->lattice_val == CONSTANT
923 && (TREE_CODE (val1->value) == ADDR_EXPR
924 || TREE_CODE (val2->value) == ADDR_EXPR))
926 /* When not equal addresses are involved try meeting for
927 alignment. */
928 prop_value_t tem = *val2;
929 if (TREE_CODE (val1->value) == ADDR_EXPR)
930 *val1 = get_value_for_expr (val1->value, true);
931 if (TREE_CODE (val2->value) == ADDR_EXPR)
932 tem = get_value_for_expr (val2->value, true);
933 ccp_lattice_meet (val1, &tem);
935 else
937 /* Any other combination is VARYING. */
938 val1->lattice_val = VARYING;
939 val1->mask = double_int_minus_one;
940 val1->value = NULL_TREE;
945 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
946 lattice values to determine PHI_NODE's lattice value. The value of a
947 PHI node is determined calling ccp_lattice_meet with all the arguments
948 of the PHI node that are incoming via executable edges. */
950 static enum ssa_prop_result
951 ccp_visit_phi_node (gimple phi)
953 unsigned i;
954 prop_value_t *old_val, new_val;
956 if (dump_file && (dump_flags & TDF_DETAILS))
958 fprintf (dump_file, "\nVisiting PHI node: ");
959 print_gimple_stmt (dump_file, phi, 0, dump_flags);
962 old_val = get_value (gimple_phi_result (phi));
963 switch (old_val->lattice_val)
965 case VARYING:
966 return SSA_PROP_VARYING;
968 case CONSTANT:
969 new_val = *old_val;
970 break;
972 case UNDEFINED:
973 new_val.lattice_val = UNDEFINED;
974 new_val.value = NULL_TREE;
975 break;
977 default:
978 gcc_unreachable ();
981 for (i = 0; i < gimple_phi_num_args (phi); i++)
983 /* Compute the meet operator over all the PHI arguments flowing
984 through executable edges. */
985 edge e = gimple_phi_arg_edge (phi, i);
987 if (dump_file && (dump_flags & TDF_DETAILS))
989 fprintf (dump_file,
990 "\n Argument #%d (%d -> %d %sexecutable)\n",
991 i, e->src->index, e->dest->index,
992 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
 995    /* If the incoming edge is executable, compute the meet operator for
996 the existing value of the PHI node and the current PHI argument. */
997 if (e->flags & EDGE_EXECUTABLE)
999 tree arg = gimple_phi_arg (phi, i)->def;
1000 prop_value_t arg_val = get_value_for_expr (arg, false);
1002 ccp_lattice_meet (&new_val, &arg_val);
1004 if (dump_file && (dump_flags & TDF_DETAILS))
1006 fprintf (dump_file, "\t");
1007 print_generic_expr (dump_file, arg, dump_flags);
1008 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1009 fprintf (dump_file, "\n");
1012 if (new_val.lattice_val == VARYING)
1013 break;
1017 if (dump_file && (dump_flags & TDF_DETAILS))
1019 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1020 fprintf (dump_file, "\n\n");
1023 /* Make the transition to the new value. */
1024 if (set_lattice_value (gimple_phi_result (phi), new_val))
1026 if (new_val.lattice_val == VARYING)
1027 return SSA_PROP_VARYING;
1028 else
1029 return SSA_PROP_INTERESTING;
1031 else
1032 return SSA_PROP_NOT_INTERESTING;
1035 /* Return the constant value for OP or OP otherwise. */
1037 static tree
1038 valueize_op (tree op)
1040 if (TREE_CODE (op) == SSA_NAME)
1042 tree tem = get_constant_value (op);
1043 if (tem)
1044 return tem;
1046 return op;
1049 /* CCP specific front-end to the non-destructive constant folding
1050 routines.
1052 Attempt to simplify the RHS of STMT knowing that one or more
1053 operands are constants.
1055 If simplification is possible, return the simplified RHS,
1056 otherwise return the original RHS or NULL_TREE. */
1058 static tree
1059 ccp_fold (gimple stmt)
1061 location_t loc = gimple_location (stmt);
1062 switch (gimple_code (stmt))
1064 case GIMPLE_COND:
1066 /* Handle comparison operators that can appear in GIMPLE form. */
1067 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1068 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1069 enum tree_code code = gimple_cond_code (stmt);
1070 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1073 case GIMPLE_SWITCH:
1075 /* Return the constant switch index. */
1076 return valueize_op (gimple_switch_index (stmt));
1079 case GIMPLE_ASSIGN:
1080 case GIMPLE_CALL:
1081 return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
1083 default:
1084 gcc_unreachable ();
1088 /* Apply the operation CODE in type TYPE to the value, mask pair
1089 RVAL and RMASK representing a value of type RTYPE and set
1090 the value, mask pair *VAL and *MASK to the result. */
1092 static void
1093 bit_value_unop_1 (enum tree_code code, tree type,
1094 double_int *val, double_int *mask,
1095 tree rtype, double_int rval, double_int rmask)
1097 switch (code)
1099 case BIT_NOT_EXPR:
1100 *mask = rmask;
1101 *val = ~rval;
1102 break;
1104 case NEGATE_EXPR:
1106 double_int temv, temm;
1107 /* Return ~rval + 1. */
1108 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1109 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1110 type, temv, temm,
1111 type, double_int_one, double_int_zero);
1112 break;
1115 CASE_CONVERT:
1117 bool uns;
1119 /* First extend mask and value according to the original type. */
1120 uns = TYPE_UNSIGNED (rtype);
1121 *mask = rmask.ext (TYPE_PRECISION (rtype), uns);
1122 *val = rval.ext (TYPE_PRECISION (rtype), uns);
1124 /* Then extend mask and value according to the target type. */
1125 uns = TYPE_UNSIGNED (type);
1126 *mask = (*mask).ext (TYPE_PRECISION (type), uns);
1127 *val = (*val).ext (TYPE_PRECISION (type), uns);
1128 break;
1131 default:
1132 *mask = double_int_minus_one;
1133 break;
1137 /* Apply the operation CODE in type TYPE to the value, mask pairs
1138 R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
1139 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1141 static void
1142 bit_value_binop_1 (enum tree_code code, tree type,
1143 double_int *val, double_int *mask,
1144 tree r1type, double_int r1val, double_int r1mask,
1145 tree r2type, double_int r2val, double_int r2mask)
1147 bool uns = TYPE_UNSIGNED (type);
1148 /* Assume we'll get a constant result. Use an initial varying value,
1149 we fall back to varying in the end if necessary. */
1150 *mask = double_int_minus_one;
1151 switch (code)
1153 case BIT_AND_EXPR:
1154 /* The mask is constant where there is a known not
1155 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1156 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1157 *val = r1val & r2val;
1158 break;
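      /* Worked example for the BIT_AND_EXPR rule above (illustrative
	 numbers): with r1 fully known as 0x0f (r1mask == 0) and r2 unknown
	 in its low nibble (r2val == 0, r2mask == 0x0f) we get
	   mask = (0 | 0x0f) & ((0x0f | 0) & (0 | 0x0f)) = 0x0f
	   val  = 0x0f & 0 = 0,
	 so every bit above bit 3 is known to be zero while the low nibble
	 remains unknown.  */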
1160 case BIT_IOR_EXPR:
1161 /* The mask is constant where there is a known
1162 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1163 *mask = (r1mask | r2mask)
1164 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1165 *val = r1val | r2val;
1166 break;
1168 case BIT_XOR_EXPR:
1169 /* m1 | m2 */
1170 *mask = r1mask | r2mask;
1171 *val = r1val ^ r2val;
1172 break;
1174 case LROTATE_EXPR:
1175 case RROTATE_EXPR:
1176 if (r2mask.is_zero ())
1178 HOST_WIDE_INT shift = r2val.low;
1179 if (code == RROTATE_EXPR)
1180 shift = -shift;
1181 *mask = r1mask.lrotate (shift, TYPE_PRECISION (type));
1182 *val = r1val.lrotate (shift, TYPE_PRECISION (type));
1184 break;
1186 case LSHIFT_EXPR:
1187 case RSHIFT_EXPR:
1188 /* ??? We can handle partially known shift counts if we know
1189 its sign. That way we can tell that (x << (y | 8)) & 255
1190 is zero. */
1191 if (r2mask.is_zero ())
1193 HOST_WIDE_INT shift = r2val.low;
1194 if (code == RSHIFT_EXPR)
1195 shift = -shift;
1196 /* We need to know if we are doing a left or a right shift
1197 to properly shift in zeros for left shift and unsigned
1198 right shifts and the sign bit for signed right shifts.
1199 For signed right shifts we shift in varying in case
1200 the sign bit was varying. */
1201 if (shift > 0)
1203 *mask = r1mask.llshift (shift, TYPE_PRECISION (type));
1204 *val = r1val.llshift (shift, TYPE_PRECISION (type));
1206 else if (shift < 0)
1208 shift = -shift;
1209 *mask = r1mask.rshift (shift, TYPE_PRECISION (type), !uns);
1210 *val = r1val.rshift (shift, TYPE_PRECISION (type), !uns);
1212 else
1214 *mask = r1mask;
1215 *val = r1val;
1218 break;
1220 case PLUS_EXPR:
1221 case POINTER_PLUS_EXPR:
1223 double_int lo, hi;
1224 /* Do the addition with unknown bits set to zero, to give carry-ins of
1225 zero wherever possible. */
1226 lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
1227 lo = lo.ext (TYPE_PRECISION (type), uns);
1228 /* Do the addition with unknown bits set to one, to give carry-ins of
1229 one wherever possible. */
1230 hi = (r1val | r1mask) + (r2val | r2mask);
1231 hi = hi.ext (TYPE_PRECISION (type), uns);
1232 /* Each bit in the result is known if (a) the corresponding bits in
1233 both inputs are known, and (b) the carry-in to that bit position
1234 is known. We can check condition (b) by seeing if we got the same
1235 result with minimised carries as with maximised carries. */
1236 *mask = r1mask | r2mask | (lo ^ hi);
1237 *mask = (*mask).ext (TYPE_PRECISION (type), uns);
1238 /* It shouldn't matter whether we choose lo or hi here. */
1239 *val = lo;
1240 break;
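	/* Worked example for the addition above (illustrative numbers):
	   adding a known 1 (r1val == 1, r1mask == 0) to a value whose bit 0
	   is unknown (r2val == 0, r2mask == 1) gives lo = 1 + 0 = 1 and
	   hi = 1 + 1 = 2, so mask = 0 | 1 | (lo ^ hi) = 3 and val = lo = 1:
	   both low bits are unknown because of the possible carry, every
	   higher bit is known to be zero.  */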
1243 case MINUS_EXPR:
1245 double_int temv, temm;
1246 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1247 r2type, r2val, r2mask);
1248 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1249 r1type, r1val, r1mask,
1250 r2type, temv, temm);
1251 break;
1254 case MULT_EXPR:
1256 /* Just track trailing zeros in both operands and transfer
1257 them to the other. */
1258 int r1tz = (r1val | r1mask).trailing_zeros ();
1259 int r2tz = (r2val | r2mask).trailing_zeros ();
1260 if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
1262 *mask = double_int_zero;
1263 *val = double_int_zero;
1265 else if (r1tz + r2tz > 0)
1267 *mask = ~double_int::mask (r1tz + r2tz);
1268 *mask = (*mask).ext (TYPE_PRECISION (type), uns);
1269 *val = double_int_zero;
1271 break;
1274 case EQ_EXPR:
1275 case NE_EXPR:
1277 double_int m = r1mask | r2mask;
1278 if (r1val.and_not (m) != r2val.and_not (m))
1280 *mask = double_int_zero;
1281 *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
1283 else
1285 /* We know the result of a comparison is always one or zero. */
1286 *mask = double_int_one;
1287 *val = double_int_zero;
1289 break;
1292 case GE_EXPR:
1293 case GT_EXPR:
1295 double_int tem = r1val;
1296 r1val = r2val;
1297 r2val = tem;
1298 tem = r1mask;
1299 r1mask = r2mask;
1300 r2mask = tem;
1301 code = swap_tree_comparison (code);
1303 /* Fallthru. */
1304 case LT_EXPR:
1305 case LE_EXPR:
1307 int minmax, maxmin;
1308 /* If the most significant bits are not known we know nothing. */
1309 if (r1mask.is_negative () || r2mask.is_negative ())
1310 break;
1312 /* For comparisons the signedness is in the comparison operands. */
1313 uns = TYPE_UNSIGNED (r1type);
 1315    /* If we know the most significant bits we know the value
 1316       ranges by means of treating varying bits as zero
1317 or one. Do a cross comparison of the max/min pairs. */
1318 maxmin = (r1val | r1mask).cmp (r2val.and_not (r2mask), uns);
1319 minmax = r1val.and_not (r1mask).cmp (r2val | r2mask, uns);
1320 if (maxmin < 0) /* r1 is less than r2. */
1322 *mask = double_int_zero;
1323 *val = double_int_one;
1325 else if (minmax > 0) /* r1 is not less or equal to r2. */
1327 *mask = double_int_zero;
1328 *val = double_int_zero;
1330 else if (maxmin == minmax) /* r1 and r2 are equal. */
1332 /* This probably should never happen as we'd have
1333 folded the thing during fully constant value folding. */
1334 *mask = double_int_zero;
1335 *val = (code == LE_EXPR ? double_int_one : double_int_zero);
1337 else
1339 /* We know the result of a comparison is always one or zero. */
1340 *mask = double_int_one;
1341 *val = double_int_zero;
1343 break;
1346 default:;
1350 /* Return the propagation value when applying the operation CODE to
1351 the value RHS yielding type TYPE. */
1353 static prop_value_t
1354 bit_value_unop (enum tree_code code, tree type, tree rhs)
1356 prop_value_t rval = get_value_for_expr (rhs, true);
1357 double_int value, mask;
1358 prop_value_t val;
1360 if (rval.lattice_val == UNDEFINED)
1361 return rval;
1363 gcc_assert ((rval.lattice_val == CONSTANT
1364 && TREE_CODE (rval.value) == INTEGER_CST)
1365 || rval.mask.is_minus_one ());
1366 bit_value_unop_1 (code, type, &value, &mask,
1367 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
1368 if (!mask.is_minus_one ())
1370 val.lattice_val = CONSTANT;
1371 val.mask = mask;
1372 /* ??? Delay building trees here. */
1373 val.value = double_int_to_tree (type, value);
1375 else
1377 val.lattice_val = VARYING;
1378 val.value = NULL_TREE;
1379 val.mask = double_int_minus_one;
1381 return val;
1384 /* Return the propagation value when applying the operation CODE to
1385 the values RHS1 and RHS2 yielding type TYPE. */
1387 static prop_value_t
1388 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1390 prop_value_t r1val = get_value_for_expr (rhs1, true);
1391 prop_value_t r2val = get_value_for_expr (rhs2, true);
1392 double_int value, mask;
1393 prop_value_t val;
1395 if (r1val.lattice_val == UNDEFINED
1396 || r2val.lattice_val == UNDEFINED)
1398 val.lattice_val = VARYING;
1399 val.value = NULL_TREE;
1400 val.mask = double_int_minus_one;
1401 return val;
1404 gcc_assert ((r1val.lattice_val == CONSTANT
1405 && TREE_CODE (r1val.value) == INTEGER_CST)
1406 || r1val.mask.is_minus_one ());
1407 gcc_assert ((r2val.lattice_val == CONSTANT
1408 && TREE_CODE (r2val.value) == INTEGER_CST)
1409 || r2val.mask.is_minus_one ());
1410 bit_value_binop_1 (code, type, &value, &mask,
1411 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
1412 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
1413 if (!mask.is_minus_one ())
1415 val.lattice_val = CONSTANT;
1416 val.mask = mask;
1417 /* ??? Delay building trees here. */
1418 val.value = double_int_to_tree (type, value);
1420 else
1422 val.lattice_val = VARYING;
1423 val.value = NULL_TREE;
1424 val.mask = double_int_minus_one;
1426 return val;
1429 /* Return the propagation value when applying __builtin_assume_aligned to
1430 its arguments. */
1432 static prop_value_t
1433 bit_value_assume_aligned (gimple stmt)
1435 tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
1436 tree type = TREE_TYPE (ptr);
1437 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1438 prop_value_t ptrval = get_value_for_expr (ptr, true);
1439 prop_value_t alignval;
1440 double_int value, mask;
1441 prop_value_t val;
1442 if (ptrval.lattice_val == UNDEFINED)
1443 return ptrval;
1444 gcc_assert ((ptrval.lattice_val == CONSTANT
1445 && TREE_CODE (ptrval.value) == INTEGER_CST)
1446 || ptrval.mask.is_minus_one ());
1447 align = gimple_call_arg (stmt, 1);
1448 if (!host_integerp (align, 1))
1449 return ptrval;
1450 aligni = tree_low_cst (align, 1);
1451 if (aligni <= 1
1452 || (aligni & (aligni - 1)) != 0)
1453 return ptrval;
1454 if (gimple_call_num_args (stmt) > 2)
1456 misalign = gimple_call_arg (stmt, 2);
1457 if (!host_integerp (misalign, 1))
1458 return ptrval;
1459 misaligni = tree_low_cst (misalign, 1);
1460 if (misaligni >= aligni)
1461 return ptrval;
1463 align = build_int_cst_type (type, -aligni);
1464 alignval = get_value_for_expr (align, true);
1465 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1466 type, value_to_double_int (ptrval), ptrval.mask,
1467 type, value_to_double_int (alignval), alignval.mask);
1468 if (!mask.is_minus_one ())
1470 val.lattice_val = CONSTANT;
1471 val.mask = mask;
1472 gcc_assert ((mask.low & (aligni - 1)) == 0);
1473 gcc_assert ((value.low & (aligni - 1)) == 0);
1474 value.low |= misaligni;
1475 /* ??? Delay building trees here. */
1476 val.value = double_int_to_tree (type, value);
1478 else
1480 val.lattice_val = VARYING;
1481 val.value = NULL_TREE;
1482 val.mask = double_int_minus_one;
1484 return val;
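/* For instance (illustrative call): after
     p2 = __builtin_assume_aligned (p, 16, 4)
   ALIGNI is 16 and MISALIGNI is 4, so the low four bits of the result
   become known and equal to 4 (p2 is congruent to 4 modulo 16), while the
   higher bits keep whatever was already known about P.  */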
1487 /* Evaluate statement STMT.
1488 Valid only for assignments, calls, conditionals, and switches. */
1490 static prop_value_t
1491 evaluate_stmt (gimple stmt)
1493 prop_value_t val;
1494 tree simplified = NULL_TREE;
1495 ccp_lattice_t likelyvalue = likely_value (stmt);
1496 bool is_constant = false;
1497 unsigned int align;
1499 if (dump_file && (dump_flags & TDF_DETAILS))
1501 fprintf (dump_file, "which is likely ");
1502 switch (likelyvalue)
1504 case CONSTANT:
1505 fprintf (dump_file, "CONSTANT");
1506 break;
1507 case UNDEFINED:
1508 fprintf (dump_file, "UNDEFINED");
1509 break;
1510 case VARYING:
1511 fprintf (dump_file, "VARYING");
1512 break;
1513 default:;
1515 fprintf (dump_file, "\n");
1518 /* If the statement is likely to have a CONSTANT result, then try
1519 to fold the statement to determine the constant value. */
1520 /* FIXME. This is the only place that we call ccp_fold.
1521 Since likely_value never returns CONSTANT for calls, we will
1522 not attempt to fold them, including builtins that may profit. */
1523 if (likelyvalue == CONSTANT)
1525 fold_defer_overflow_warnings ();
1526 simplified = ccp_fold (stmt);
1527 is_constant = simplified && is_gimple_min_invariant (simplified);
1528 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1529 if (is_constant)
1531 /* The statement produced a constant value. */
1532 val.lattice_val = CONSTANT;
1533 val.value = simplified;
1534 val.mask = double_int_zero;
1537 /* If the statement is likely to have a VARYING result, then do not
1538 bother folding the statement. */
1539 else if (likelyvalue == VARYING)
1541 enum gimple_code code = gimple_code (stmt);
1542 if (code == GIMPLE_ASSIGN)
1544 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1546 /* Other cases cannot satisfy is_gimple_min_invariant
1547 without folding. */
1548 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1549 simplified = gimple_assign_rhs1 (stmt);
1551 else if (code == GIMPLE_SWITCH)
1552 simplified = gimple_switch_index (stmt);
1553 else
1554 /* These cannot satisfy is_gimple_min_invariant without folding. */
1555 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1556 is_constant = simplified && is_gimple_min_invariant (simplified);
1557 if (is_constant)
1559 /* The statement produced a constant value. */
1560 val.lattice_val = CONSTANT;
1561 val.value = simplified;
1562 val.mask = double_int_zero;
1566 /* Resort to simplification for bitwise tracking. */
1567 if (flag_tree_bit_ccp
1568 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
1569 && !is_constant)
1571 enum gimple_code code = gimple_code (stmt);
1572 val.lattice_val = VARYING;
1573 val.value = NULL_TREE;
1574 val.mask = double_int_minus_one;
1575 if (code == GIMPLE_ASSIGN)
1577 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1578 tree rhs1 = gimple_assign_rhs1 (stmt);
1579 switch (get_gimple_rhs_class (subcode))
1581 case GIMPLE_SINGLE_RHS:
1582 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1583 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1584 val = get_value_for_expr (rhs1, true);
1585 break;
1587 case GIMPLE_UNARY_RHS:
1588 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1589 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1590 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1591 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1592 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1593 break;
1595 case GIMPLE_BINARY_RHS:
1596 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1597 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1599 tree lhs = gimple_assign_lhs (stmt);
1600 tree rhs2 = gimple_assign_rhs2 (stmt);
1601 val = bit_value_binop (subcode,
1602 TREE_TYPE (lhs), rhs1, rhs2);
1604 break;
1606 default:;
1609 else if (code == GIMPLE_COND)
1611 enum tree_code code = gimple_cond_code (stmt);
1612 tree rhs1 = gimple_cond_lhs (stmt);
1613 tree rhs2 = gimple_cond_rhs (stmt);
1614 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1615 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1616 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1618 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1620 tree fndecl = gimple_call_fndecl (stmt);
1621 switch (DECL_FUNCTION_CODE (fndecl))
1623 case BUILT_IN_MALLOC:
1624 case BUILT_IN_REALLOC:
1625 case BUILT_IN_CALLOC:
1626 case BUILT_IN_STRDUP:
1627 case BUILT_IN_STRNDUP:
1628 val.lattice_val = CONSTANT;
1629 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1630 val.mask = double_int::from_shwi
1631 (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
1632 / BITS_PER_UNIT - 1));
1633 break;
1635 case BUILT_IN_ALLOCA:
1636 case BUILT_IN_ALLOCA_WITH_ALIGN:
1637 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1638 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1639 : BIGGEST_ALIGNMENT);
1640 val.lattice_val = CONSTANT;
1641 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1642 val.mask = double_int::from_shwi (~(((HOST_WIDE_INT) align)
1643 / BITS_PER_UNIT - 1));
1644 break;
1646 /* These builtins return their first argument, unmodified. */
1647 case BUILT_IN_MEMCPY:
1648 case BUILT_IN_MEMMOVE:
1649 case BUILT_IN_MEMSET:
1650 case BUILT_IN_STRCPY:
1651 case BUILT_IN_STRNCPY:
1652 case BUILT_IN_MEMCPY_CHK:
1653 case BUILT_IN_MEMMOVE_CHK:
1654 case BUILT_IN_MEMSET_CHK:
1655 case BUILT_IN_STRCPY_CHK:
1656 case BUILT_IN_STRNCPY_CHK:
1657 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1658 break;
1660 case BUILT_IN_ASSUME_ALIGNED:
1661 val = bit_value_assume_aligned (stmt);
1662 break;
1664 default:;
1667 is_constant = (val.lattice_val == CONSTANT);
1670 if (!is_constant)
1672 /* The statement produced a nonconstant value. If the statement
1673 had UNDEFINED operands, then the result of the statement
1674 should be UNDEFINED. Otherwise, the statement is VARYING. */
1675 if (likelyvalue == UNDEFINED)
1677 val.lattice_val = likelyvalue;
1678 val.mask = double_int_zero;
1680 else
1682 val.lattice_val = VARYING;
1683 val.mask = double_int_minus_one;
1686 val.value = NULL_TREE;
1689 return val;
1692 typedef hash_table <pointer_hash <gimple_statement_d> > gimple_htab;
1694 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1695 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1697 static void
1698 insert_clobber_before_stack_restore (tree saved_val, tree var,
1699 gimple_htab *visited)
1701 gimple stmt, clobber_stmt;
1702 tree clobber;
1703 imm_use_iterator iter;
1704 gimple_stmt_iterator i;
1705 gimple *slot;
1707 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1708 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1710 clobber = build_constructor (TREE_TYPE (var),
1711 NULL);
1712 TREE_THIS_VOLATILE (clobber) = 1;
1713 clobber_stmt = gimple_build_assign (var, clobber);
1715 i = gsi_for_stmt (stmt);
1716 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1718 else if (gimple_code (stmt) == GIMPLE_PHI)
1720 if (!visited->is_created ())
1721 visited->create (10);
1723 slot = visited->find_slot (stmt, INSERT);
1724 if (*slot != NULL)
1725 continue;
1727 *slot = stmt;
1728 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1729 visited);
1731 else
1732 gcc_assert (is_gimple_debug (stmt));
1735 /* Advance the iterator to the previous non-debug gimple statement in the same
1736 or dominating basic block. */
1738 static inline void
1739 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1741 basic_block dom;
1743 gsi_prev_nondebug (i);
1744 while (gsi_end_p (*i))
1746 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1747 if (dom == NULL || dom == ENTRY_BLOCK_PTR)
1748 return;
1750 *i = gsi_last_bb (dom);
1754 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1755 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
 1757    It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1758 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
1759 that case the function gives up without inserting the clobbers. */
1761 static void
1762 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1764 gimple stmt;
1765 tree saved_val;
1766 gimple_htab visited;
1768 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
1770 stmt = gsi_stmt (i);
1772 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1773 continue;
1775 saved_val = gimple_call_lhs (stmt);
1776 if (saved_val == NULL_TREE)
1777 continue;
1779 insert_clobber_before_stack_restore (saved_val, var, &visited);
1780 break;
1783 if (visited.is_created ())
1784 visited.dispose ();
 1787 /* Detect a __builtin_alloca_with_align with a constant size argument.
 1788    If found, declare a fixed-size array and return its address;
 1789    otherwise return NULL_TREE.  */
1791 static tree
1792 fold_builtin_alloca_with_align (gimple stmt)
1794 unsigned HOST_WIDE_INT size, threshold, n_elem;
1795 tree lhs, arg, block, var, elem_type, array_type;
1797 /* Get lhs. */
1798 lhs = gimple_call_lhs (stmt);
1799 if (lhs == NULL_TREE)
1800 return NULL_TREE;
1802 /* Detect constant argument. */
1803 arg = get_constant_value (gimple_call_arg (stmt, 0));
1804 if (arg == NULL_TREE
1805 || TREE_CODE (arg) != INTEGER_CST
1806 || !host_integerp (arg, 1))
1807 return NULL_TREE;
1809 size = TREE_INT_CST_LOW (arg);
1811 /* Heuristic: don't fold large allocas. */
1812 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
1813 /* In case the alloca is located at function entry, it has the same lifetime
1814 as a declared array, so we allow a larger size. */
1815 block = gimple_block (stmt);
1816 if (!(cfun->after_inlining
1817 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1818 threshold /= 10;
1819 if (size > threshold)
1820 return NULL_TREE;
1822 /* Declare array. */
1823 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1824 n_elem = size * 8 / BITS_PER_UNIT;
1825 array_type = build_array_type_nelts (elem_type, n_elem);
1826 var = create_tmp_var (array_type, NULL);
1827 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
1829 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1830 if (pi != NULL && !pi->pt.anything)
1832 bool singleton_p;
1833 unsigned uid;
1834 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1835 gcc_assert (singleton_p);
1836 SET_DECL_PT_UID (var, uid);
1840 /* Fold alloca to the address of the array. */
1841 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
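/* For instance (illustrative GIMPLE, D.NNNN stands for the generated
   temporary): a call such as
     p_1 = __builtin_alloca_with_align (64, 32);
   whose size passes the threshold check is replaced by the address of a
   fixed-size 64-byte local array whose DECL_ALIGN is set to 32 bits:
     unsigned char D.NNNN[64];
     p_1 = &D.NNNN;
   Later passes can then treat the memory like any other local array.  */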
1844 /* Fold the stmt at *GSI with CCP specific information that propagating
1845 and regular folding does not catch. */
1847 static bool
1848 ccp_fold_stmt (gimple_stmt_iterator *gsi)
1850 gimple stmt = gsi_stmt (*gsi);
1852 switch (gimple_code (stmt))
1854 case GIMPLE_COND:
1856 prop_value_t val;
1857 /* Statement evaluation will handle type mismatches in constants
1858 more gracefully than the final propagation. This allows us to
1859 fold more conditionals here. */
1860 val = evaluate_stmt (stmt);
1861 if (val.lattice_val != CONSTANT
1862 || !val.mask.is_zero ())
1863 return false;
1865 if (dump_file)
1867 fprintf (dump_file, "Folding predicate ");
1868 print_gimple_expr (dump_file, stmt, 0, 0);
1869 fprintf (dump_file, " to ");
1870 print_generic_expr (dump_file, val.value, 0);
1871 fprintf (dump_file, "\n");
1874 if (integer_zerop (val.value))
1875 gimple_cond_make_false (stmt);
1876 else
1877 gimple_cond_make_true (stmt);
1879 return true;
1882 case GIMPLE_CALL:
1884 tree lhs = gimple_call_lhs (stmt);
1885 int flags = gimple_call_flags (stmt);
1886 tree val;
1887 tree argt;
1888 bool changed = false;
1889 unsigned i;
1891 /* If the call was folded into a constant make sure it goes
1892 away even if we cannot propagate into all uses because of
1893 type issues. */
1894 if (lhs
1895 && TREE_CODE (lhs) == SSA_NAME
1896 && (val = get_constant_value (lhs))
1897 /* Don't optimize away calls that have side-effects. */
1898 && (flags & (ECF_CONST|ECF_PURE)) != 0
1899 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
1901 tree new_rhs = unshare_expr (val);
1902 bool res;
1903 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1904 TREE_TYPE (new_rhs)))
1905 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1906 res = update_call_from_tree (gsi, new_rhs);
1907 gcc_assert (res);
1908 return true;
1911 /* Internal calls provide no argument types, so the extra laxity
1912 for normal calls does not apply. */
1913 if (gimple_call_internal_p (stmt))
1914 return false;
1916 /* The heuristic of fold_builtin_alloca_with_align differs before and
1917 after inlining, so we don't require the arg to be changed into a
1918 constant for folding, but just to be constant. */
1919 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1921 tree new_rhs = fold_builtin_alloca_with_align (stmt);
1922 if (new_rhs)
1924 bool res = update_call_from_tree (gsi, new_rhs);
1925 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
1926 gcc_assert (res);
1927 insert_clobbers_for_var (*gsi, var);
1928 return true;
1932 /* Propagate into the call arguments. Compared to replace_uses_in
1933 this can use the argument slot types for type verification
1934 instead of the current argument type. We also can safely
1935 drop qualifiers here as we are dealing with constants anyway. */
1936 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
1937 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1938 ++i, argt = TREE_CHAIN (argt))
1940 tree arg = gimple_call_arg (stmt, i);
1941 if (TREE_CODE (arg) == SSA_NAME
1942 && (val = get_constant_value (arg))
1943 && useless_type_conversion_p
1944 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
1945 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
1947 gimple_call_set_arg (stmt, i, unshare_expr (val));
1948 changed = true;
1952 return changed;
1955 case GIMPLE_ASSIGN:
1957 tree lhs = gimple_assign_lhs (stmt);
1958 tree val;
1960 /* If we have a load that turned out to be constant replace it
1961 as we cannot propagate into all uses in all cases. */
1962 if (gimple_assign_single_p (stmt)
1963 && TREE_CODE (lhs) == SSA_NAME
1964 && (val = get_constant_value (lhs)))
1966 tree rhs = unshare_expr (val);
1967 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1968 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1969 gimple_assign_set_rhs_from_tree (gsi, rhs);
1970 return true;
1973 return false;
1976 default:
1977 return false;
1981 /* Visit the assignment statement STMT. Set the value of its LHS to the
1982 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1983 creates virtual definitions, set the value of each new name to that
1984 of the RHS (if we can derive a constant out of the RHS).
1985 Value-returning call statements also perform an assignment, and
1986 are handled here. */
static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

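/* Illustrative example (hypothetical GIMPLE, names invented): for

     if (x_1 == 0)

   with x_1 known to be CONSTANT 0, the predicate evaluates to true,
   find_taken_edge returns the true edge, and only that edge is fed to
   the propagation engine.  */
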
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !val.mask.is_zero ())
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_ssa | TODO_update_address_taken
    | TODO_verify_stmts ), /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (ctxt_); }
  bool gate () { return gate_ccp (); }
  unsigned int execute () { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

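/* Illustrative example (hypothetical GIMPLE, names invented):

     saved_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_1);   <- removed
     ...                                  <- no calls or ASMs in between
     __builtin_stack_restore (saved_1);

   The earlier restore is redundant because the later one restores the
   same saved state.  When the restored value has a single remaining
   use, the matching __builtin_stack_save is zapped as well.  */
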
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow either no successors at all, or a single successor that is
     the exit block.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
   into a simple pointer assignment.  */

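/* For example (illustrative only), on targets where va_list is a plain
   character pointer:

     __builtin_va_start (&ap, 0);   becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     becomes   d = s;
     __builtin_va_end (&ap);        is deleted as a no-op.  */
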
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}

/* Attempt to make the basic block containing the __builtin_unreachable
   call at iterator I unreachable, by changing the incoming jumps.
   Return true if at least one jump was changed.  */

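/* Illustrative example (hypothetical GIMPLE, names invented): given

     <bb 3>:
     if (x_1 > 10)
       goto <bb 4>;      <- bb 4 starts with __builtin_unreachable ()
     else
       goto <bb 5>;

   the condition feeding the edge into the unreachable block is forced
   to the opposite constant (here gimple_cond_make_false), so that bb 4
   loses its incoming jump.  */
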
static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (stmt);
	}
      else
	{
	  /* Todo: handle other cases, e.g. switch statements.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

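/* A few illustrative foldings performed here (hypothetical GIMPLE,
   names invented):

     n_1 = __builtin_strlen ("abc");            ->  n_1 = 3;
     b_2 = __builtin_constant_p (x_3);          ->  b_2 = 0;  (if still unresolved)
     p_4 = __builtin_assume_aligned (q_5, 16);  ->  p_4 = q_5;  */
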
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
		 after the last GIMPLE DSE they aren't needed and might
		 unnecessarily keep the SSA_NAMEs live.  */
	      if (gimple_clobber_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  if (TREE_CODE (lhs) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
		    {
		      unlink_stmt_vdef (stmt);
		      gsi_remove (&i, true);
		      release_defs (stmt);
		      continue;
		    }
		}
	      gsi_next (&i);
	      continue;
	    }

	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_ASSUME_ALIGNED:
		/* Remove __builtin_assume_aligned.  */
		result = gimple_call_arg (stmt, 0);
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_UNREACHABLE:
		if (optimize_unreachable (i))
		  cfg_changed = true;
		break;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (result == NULL_TREE)
	    break;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_ssa | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (ctxt_); }
  unsigned int execute () { return execute_fold_all_builtins (); }

}; // class pass_fold_builtins

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}