gcc/tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000-2013 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 3, or (at your option) any
11 later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
 57 propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main short cuts:
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
 84 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
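 As a hedged illustration of this rule (names invented for
 exposition), consider:
 if (PRED)
 a_2 = 5;
 a_3 = PHI (a_1(D), a_2)
 The default definition a_1(D) has no defining statement, so it is
 UNDEFINED and ignored by the meet; a_3 may therefore be taken
 optimistically as CONSTANT 5.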
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
101 References:
103 Constant propagation with conditional branches,
104 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
106 Building an Optimizing Compiler,
107 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
109 Advanced Compiler Design and Implementation,
110 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
112 #include "config.h"
113 #include "system.h"
114 #include "coretypes.h"
115 #include "tm.h"
116 #include "tree.h"
117 #include "stor-layout.h"
118 #include "flags.h"
119 #include "tm_p.h"
120 #include "basic-block.h"
121 #include "function.h"
122 #include "gimple-pretty-print.h"
123 #include "gimple.h"
124 #include "gimplify.h"
125 #include "gimple-iterator.h"
126 #include "gimple-ssa.h"
127 #include "tree-cfg.h"
128 #include "tree-phinodes.h"
129 #include "ssa-iterators.h"
130 #include "stringpool.h"
131 #include "tree-ssanames.h"
132 #include "tree-pass.h"
133 #include "tree-ssa-propagate.h"
134 #include "value-prof.h"
135 #include "langhooks.h"
136 #include "target.h"
137 #include "diagnostic-core.h"
138 #include "dbgcnt.h"
139 #include "params.h"
140 #include "hash-table.h"
143 /* Possible lattice values. */
144 typedef enum
146 UNINITIALIZED,
147 UNDEFINED,
148 CONSTANT,
149 VARYING
150 } ccp_lattice_t;
152 struct prop_value_d {
153 /* Lattice value. */
154 ccp_lattice_t lattice_val;
156 /* Propagated value. */
157 tree value;
159 /* Mask that applies to the propagated value during CCP. For
160 X with a CONSTANT lattice value X & ~mask == value & ~mask. */
161 double_int mask;
164 typedef struct prop_value_d prop_value_t;
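 /* As an illustrative example of the pair above (values invented):
 value = 8 (binary 1000) with mask = 3 (binary 0011) describes an
 SSA name whose two low bits are unknown, i.e. any of 8, 9, 10 or 11
 satisfies X & ~mask == value & ~mask. */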
 166 /* Array of propagated constant values. After propagation,
 167 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). */
172 static prop_value_t *const_val;
173 static unsigned n_const_val;
175 static void canonicalize_value (prop_value_t *);
176 static bool ccp_fold_stmt (gimple_stmt_iterator *);
178 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
180 static void
181 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
183 switch (val.lattice_val)
185 case UNINITIALIZED:
186 fprintf (outf, "%sUNINITIALIZED", prefix);
187 break;
188 case UNDEFINED:
189 fprintf (outf, "%sUNDEFINED", prefix);
190 break;
191 case VARYING:
192 fprintf (outf, "%sVARYING", prefix);
193 break;
194 case CONSTANT:
195 if (TREE_CODE (val.value) != INTEGER_CST
196 || val.mask.is_zero ())
198 fprintf (outf, "%sCONSTANT ", prefix);
199 print_generic_expr (outf, val.value, dump_flags);
201 else
203 double_int cval = tree_to_double_int (val.value).and_not (val.mask);
204 fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
205 prefix, cval.high, cval.low);
206 fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
207 val.mask.high, val.mask.low);
209 break;
210 default:
211 gcc_unreachable ();
216 /* Print lattice value VAL to stderr. */
218 void debug_lattice_value (prop_value_t val);
220 DEBUG_FUNCTION void
221 debug_lattice_value (prop_value_t val)
223 dump_lattice_value (stderr, "", val);
224 fprintf (stderr, "\n");
228 /* Compute a default value for variable VAR and store it in the
229 CONST_VAL array. The following rules are used to get default
230 values:
232 1- Global and static variables that are declared constant are
233 considered CONSTANT.
235 2- Any other value is considered UNDEFINED. This is useful when
236 considering PHI nodes. PHI arguments that are undefined do not
237 change the constant value of the PHI node, which allows for more
238 constants to be propagated.
240 3- Variables defined by statements other than assignments and PHI
241 nodes are considered VARYING.
243 4- Initial values of variables that are not GIMPLE registers are
244 considered VARYING. */
246 static prop_value_t
247 get_default_value (tree var)
249 prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
250 gimple stmt;
252 stmt = SSA_NAME_DEF_STMT (var);
254 if (gimple_nop_p (stmt))
256 /* Variables defined by an empty statement are those used
257 before being initialized. If VAR is a local variable, we
258 can assume initially that it is UNDEFINED, otherwise we must
259 consider it VARYING. */
260 if (!virtual_operand_p (var)
261 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
262 val.lattice_val = UNDEFINED;
263 else
265 val.lattice_val = VARYING;
266 val.mask = double_int_minus_one;
267 if (flag_tree_bit_ccp)
269 double_int nonzero_bits = get_nonzero_bits (var);
270 double_int mask
271 = double_int::mask (TYPE_PRECISION (TREE_TYPE (var)));
272 if (nonzero_bits != double_int_minus_one && nonzero_bits != mask)
274 val.lattice_val = CONSTANT;
275 val.value = build_zero_cst (TREE_TYPE (var));
276 /* CCP wants the bits above precision set. */
277 val.mask = nonzero_bits | ~mask;
282 else if (is_gimple_assign (stmt))
284 tree cst;
285 if (gimple_assign_single_p (stmt)
286 && DECL_P (gimple_assign_rhs1 (stmt))
287 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
289 val.lattice_val = CONSTANT;
290 val.value = cst;
292 else
294 /* Any other variable defined by an assignment is considered
295 UNDEFINED. */
296 val.lattice_val = UNDEFINED;
299 else if ((is_gimple_call (stmt)
300 && gimple_call_lhs (stmt) != NULL_TREE)
301 || gimple_code (stmt) == GIMPLE_PHI)
303 /* A variable defined by a call or a PHI node is considered
304 UNDEFINED. */
305 val.lattice_val = UNDEFINED;
307 else
309 /* Otherwise, VAR will never take on a constant value. */
310 val.lattice_val = VARYING;
311 val.mask = double_int_minus_one;
314 return val;
318 /* Get the constant value associated with variable VAR. */
320 static inline prop_value_t *
321 get_value (tree var)
323 prop_value_t *val;
325 if (const_val == NULL
326 || SSA_NAME_VERSION (var) >= n_const_val)
327 return NULL;
329 val = &const_val[SSA_NAME_VERSION (var)];
330 if (val->lattice_val == UNINITIALIZED)
331 *val = get_default_value (var);
333 canonicalize_value (val);
335 return val;
338 /* Return the constant tree value associated with VAR. */
340 static inline tree
341 get_constant_value (tree var)
343 prop_value_t *val;
344 if (TREE_CODE (var) != SSA_NAME)
346 if (is_gimple_min_invariant (var))
347 return var;
348 return NULL_TREE;
350 val = get_value (var);
351 if (val
352 && val->lattice_val == CONSTANT
353 && (TREE_CODE (val->value) != INTEGER_CST
354 || val->mask.is_zero ()))
355 return val->value;
356 return NULL_TREE;
359 /* Sets the value associated with VAR to VARYING. */
361 static inline void
362 set_value_varying (tree var)
364 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
366 val->lattice_val = VARYING;
367 val->value = NULL_TREE;
368 val->mask = double_int_minus_one;
371 /* For float types, modify the value of VAL to make ccp work correctly
372 for non-standard values (-0, NaN):
374 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
375 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
376 This is to fix the following problem (see PR 29921): Suppose we have
378 x = 0.0 * y
 380 and we set the value of y to NaN. This causes the value of x to be set to NaN.
381 When we later determine that y is in fact VARYING, fold uses the fact
382 that HONOR_NANS is false, and we try to change the value of x to 0,
383 causing an ICE. With HONOR_NANS being false, the real appearance of
384 NaN would cause undefined behavior, though, so claiming that y (and x)
385 are UNDEFINED initially is correct.
387 For other constants, make sure to drop TREE_OVERFLOW. */
389 static void
390 canonicalize_value (prop_value_t *val)
392 enum machine_mode mode;
393 tree type;
394 REAL_VALUE_TYPE d;
396 if (val->lattice_val != CONSTANT)
397 return;
399 if (TREE_OVERFLOW_P (val->value))
400 val->value = drop_tree_overflow (val->value);
402 if (TREE_CODE (val->value) != REAL_CST)
403 return;
405 d = TREE_REAL_CST (val->value);
406 type = TREE_TYPE (val->value);
407 mode = TYPE_MODE (type);
409 if (!HONOR_SIGNED_ZEROS (mode)
410 && REAL_VALUE_MINUS_ZERO (d))
412 val->value = build_real (type, dconst0);
413 return;
416 if (!HONOR_NANS (mode)
417 && REAL_VALUE_ISNAN (d))
419 val->lattice_val = UNDEFINED;
420 val->value = NULL;
421 return;
425 /* Return whether the lattice transition is valid. */
427 static bool
428 valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
430 /* Lattice transitions must always be monotonically increasing in
431 value. */
432 if (old_val.lattice_val < new_val.lattice_val)
433 return true;
435 if (old_val.lattice_val != new_val.lattice_val)
436 return false;
438 if (!old_val.value && !new_val.value)
439 return true;
441 /* Now both lattice values are CONSTANT. */
443 /* Allow transitioning from PHI <&x, not executable> == &x
444 to PHI <&x, &y> == common alignment. */
445 if (TREE_CODE (old_val.value) != INTEGER_CST
446 && TREE_CODE (new_val.value) == INTEGER_CST)
447 return true;
449 /* Bit-lattices have to agree in the still valid bits. */
450 if (TREE_CODE (old_val.value) == INTEGER_CST
451 && TREE_CODE (new_val.value) == INTEGER_CST)
452 return tree_to_double_int (old_val.value).and_not (new_val.mask)
453 == tree_to_double_int (new_val.value).and_not (new_val.mask);
455 /* Otherwise constant values have to agree. */
456 return operand_equal_p (old_val.value, new_val.value, 0);
459 /* Set the value for variable VAR to NEW_VAL. Return true if the new
460 value is different from VAR's previous value. */
462 static bool
463 set_lattice_value (tree var, prop_value_t new_val)
465 /* We can deal with old UNINITIALIZED values just fine here. */
466 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
468 canonicalize_value (&new_val);
470 /* We have to be careful to not go up the bitwise lattice
471 represented by the mask.
472 ??? This doesn't seem to be the best place to enforce this. */
473 if (new_val.lattice_val == CONSTANT
474 && old_val->lattice_val == CONSTANT
475 && TREE_CODE (new_val.value) == INTEGER_CST
476 && TREE_CODE (old_val->value) == INTEGER_CST)
478 double_int diff;
479 diff = tree_to_double_int (new_val.value)
480 ^ tree_to_double_int (old_val->value);
481 new_val.mask = new_val.mask | old_val->mask | diff;
484 gcc_assert (valid_lattice_transition (*old_val, new_val));
486 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
487 caller that this was a non-transition. */
488 if (old_val->lattice_val != new_val.lattice_val
489 || (new_val.lattice_val == CONSTANT
490 && TREE_CODE (new_val.value) == INTEGER_CST
491 && (TREE_CODE (old_val->value) != INTEGER_CST
492 || new_val.mask != old_val->mask)))
494 /* ??? We would like to delay creation of INTEGER_CSTs from
495 partially constants here. */
497 if (dump_file && (dump_flags & TDF_DETAILS))
499 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
500 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
503 *old_val = new_val;
505 gcc_assert (new_val.lattice_val != UNINITIALIZED);
506 return true;
509 return false;
512 static prop_value_t get_value_for_expr (tree, bool);
513 static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
514 static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
515 tree, double_int, double_int,
516 tree, double_int, double_int);
518 /* Return a double_int that can be used for bitwise simplifications
519 from VAL. */
521 static double_int
522 value_to_double_int (prop_value_t val)
524 if (val.value
525 && TREE_CODE (val.value) == INTEGER_CST)
526 return tree_to_double_int (val.value);
527 else
528 return double_int_zero;
531 /* Return the value for the address expression EXPR based on alignment
532 information. */
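 /* For instance (numbers invented for exposition): if
 get_pointer_alignment_1 reports align == 64 and bitpos == 32, i.e.
 an 8-byte aligned pointer with a 4-byte offset, the mask below has
 its low three bits cleared and the value is 4, so the low three
 bits of the address are known to be 100 while the rest stay
 unknown. */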
534 static prop_value_t
535 get_value_from_alignment (tree expr)
537 tree type = TREE_TYPE (expr);
538 prop_value_t val;
539 unsigned HOST_WIDE_INT bitpos;
540 unsigned int align;
542 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
544 get_pointer_alignment_1 (expr, &align, &bitpos);
545 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
546 ? double_int::mask (TYPE_PRECISION (type))
547 : double_int_minus_one)
548 .and_not (double_int::from_uhwi (align / BITS_PER_UNIT - 1));
549 val.lattice_val = val.mask.is_minus_one () ? VARYING : CONSTANT;
550 if (val.lattice_val == CONSTANT)
551 val.value
552 = double_int_to_tree (type,
553 double_int::from_uhwi (bitpos / BITS_PER_UNIT));
554 else
555 val.value = NULL_TREE;
557 return val;
560 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
561 return constant bits extracted from alignment information for
562 invariant addresses. */
564 static prop_value_t
565 get_value_for_expr (tree expr, bool for_bits_p)
567 prop_value_t val;
569 if (TREE_CODE (expr) == SSA_NAME)
571 val = *get_value (expr);
572 if (for_bits_p
573 && val.lattice_val == CONSTANT
574 && TREE_CODE (val.value) == ADDR_EXPR)
575 val = get_value_from_alignment (val.value);
577 else if (is_gimple_min_invariant (expr)
578 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
580 val.lattice_val = CONSTANT;
581 val.value = expr;
582 val.mask = double_int_zero;
583 canonicalize_value (&val);
585 else if (TREE_CODE (expr) == ADDR_EXPR)
586 val = get_value_from_alignment (expr);
587 else
589 val.lattice_val = VARYING;
590 val.mask = double_int_minus_one;
591 val.value = NULL_TREE;
593 return val;
596 /* Return the likely CCP lattice value for STMT.
598 If STMT has no operands, then return CONSTANT.
600 Else if undefinedness of operands of STMT cause its value to be
601 undefined, then return UNDEFINED.
603 Else if any operands of STMT are constants, then return CONSTANT.
605 Else return VARYING. */
607 static ccp_lattice_t
608 likely_value (gimple stmt)
610 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
611 tree use;
612 ssa_op_iter iter;
613 unsigned i;
615 enum gimple_code code = gimple_code (stmt);
617 /* This function appears to be called only for assignments, calls,
618 conditionals, and switches, due to the logic in visit_stmt. */
619 gcc_assert (code == GIMPLE_ASSIGN
620 || code == GIMPLE_CALL
621 || code == GIMPLE_COND
622 || code == GIMPLE_SWITCH);
624 /* If the statement has volatile operands, it won't fold to a
625 constant value. */
626 if (gimple_has_volatile_ops (stmt))
627 return VARYING;
629 /* Arrive here for more complex cases. */
630 has_constant_operand = false;
631 has_undefined_operand = false;
632 all_undefined_operands = true;
633 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
635 prop_value_t *val = get_value (use);
637 if (val->lattice_val == UNDEFINED)
638 has_undefined_operand = true;
639 else
640 all_undefined_operands = false;
642 if (val->lattice_val == CONSTANT)
643 has_constant_operand = true;
646 /* There may be constants in regular rhs operands. For calls we
647 have to ignore lhs, fndecl and static chain, otherwise only
648 the lhs. */
649 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
650 i < gimple_num_ops (stmt); ++i)
652 tree op = gimple_op (stmt, i);
653 if (!op || TREE_CODE (op) == SSA_NAME)
654 continue;
655 if (is_gimple_min_invariant (op))
656 has_constant_operand = true;
659 if (has_constant_operand)
660 all_undefined_operands = false;
662 if (has_undefined_operand
663 && code == GIMPLE_CALL
664 && gimple_call_internal_p (stmt))
665 switch (gimple_call_internal_fn (stmt))
667 /* These 3 builtins use the first argument just as a magic
 668 way to find out a decl uid.
669 case IFN_GOMP_SIMD_LANE:
670 case IFN_GOMP_SIMD_VF:
671 case IFN_GOMP_SIMD_LAST_LANE:
672 has_undefined_operand = false;
673 break;
674 default:
675 break;
678 /* If the operation combines operands like COMPLEX_EXPR make sure to
679 not mark the result UNDEFINED if only one part of the result is
680 undefined. */
681 if (has_undefined_operand && all_undefined_operands)
682 return UNDEFINED;
683 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
685 switch (gimple_assign_rhs_code (stmt))
687 /* Unary operators are handled with all_undefined_operands. */
688 case PLUS_EXPR:
689 case MINUS_EXPR:
690 case POINTER_PLUS_EXPR:
691 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
692 Not bitwise operators, one VARYING operand may specify the
693 result completely. Not logical operators for the same reason.
694 Not COMPLEX_EXPR as one VARYING operand makes the result partly
695 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
696 the undefined operand may be promoted. */
697 return UNDEFINED;
699 case ADDR_EXPR:
700 /* If any part of an address is UNDEFINED, like the index
701 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
702 return UNDEFINED;
704 default:
708 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
709 fall back to CONSTANT. During iteration UNDEFINED may still drop
710 to CONSTANT. */
711 if (has_undefined_operand)
712 return CONSTANT;
714 /* We do not consider virtual operands here -- load from read-only
715 memory may have only VARYING virtual operands, but still be
716 constant. */
717 if (has_constant_operand
718 || gimple_references_memory_p (stmt))
719 return CONSTANT;
721 return VARYING;
724 /* Returns true if STMT cannot be constant. */
726 static bool
727 surely_varying_stmt_p (gimple stmt)
729 /* If the statement has operands that we cannot handle, it cannot be
730 constant. */
731 if (gimple_has_volatile_ops (stmt))
732 return true;
 734 /* If it is a call that does not return a value, or a direct
 735 call to a function that is not a built-in, it is varying. */
736 if (is_gimple_call (stmt))
738 tree fndecl;
739 if (!gimple_call_lhs (stmt)
740 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
741 && !DECL_BUILT_IN (fndecl)))
742 return true;
745 /* Any other store operation is not interesting. */
746 else if (gimple_vdef (stmt))
747 return true;
749 /* Anything other than assignments and conditional jumps are not
750 interesting for CCP. */
751 if (gimple_code (stmt) != GIMPLE_ASSIGN
752 && gimple_code (stmt) != GIMPLE_COND
753 && gimple_code (stmt) != GIMPLE_SWITCH
754 && gimple_code (stmt) != GIMPLE_CALL)
755 return true;
757 return false;
760 /* Initialize local data structures for CCP. */
762 static void
763 ccp_initialize (void)
765 basic_block bb;
767 n_const_val = num_ssa_names;
768 const_val = XCNEWVEC (prop_value_t, n_const_val);
770 /* Initialize simulation flags for PHI nodes and statements. */
771 FOR_EACH_BB (bb)
773 gimple_stmt_iterator i;
775 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
777 gimple stmt = gsi_stmt (i);
778 bool is_varying;
 780 /* If the statement is a control insn, then we always want to
 781 simulate it at least once. Failure to do so means that
 782 those edges will never get added.
783 if (stmt_ends_bb_p (stmt))
784 is_varying = false;
785 else
786 is_varying = surely_varying_stmt_p (stmt);
788 if (is_varying)
790 tree def;
791 ssa_op_iter iter;
793 /* If the statement will not produce a constant, mark
794 all its outputs VARYING. */
795 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
796 set_value_varying (def);
798 prop_set_simulate_again (stmt, !is_varying);
802 /* Now process PHI nodes. We never clear the simulate_again flag on
803 phi nodes, since we do not know which edges are executable yet,
804 except for phi nodes for virtual operands when we do not do store ccp. */
805 FOR_EACH_BB (bb)
807 gimple_stmt_iterator i;
809 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
811 gimple phi = gsi_stmt (i);
813 if (virtual_operand_p (gimple_phi_result (phi)))
814 prop_set_simulate_again (phi, false);
815 else
816 prop_set_simulate_again (phi, true);
 821 /* Debug count support. Reset the values of ssa names to
 822 VARYING when the total number of ssa names analyzed
 823 exceeds the specified debug count. */
825 static void
826 do_dbg_cnt (void)
828 unsigned i;
829 for (i = 0; i < num_ssa_names; i++)
831 if (!dbg_cnt (ccp))
833 const_val[i].lattice_val = VARYING;
834 const_val[i].mask = double_int_minus_one;
835 const_val[i].value = NULL_TREE;
 841 /* Do final substitution of propagated values, clean up the flowgraph and
842 free allocated storage.
844 Return TRUE when something was optimized. */
846 static bool
847 ccp_finalize (void)
849 bool something_changed;
850 unsigned i;
852 do_dbg_cnt ();
854 /* Derive alignment and misalignment information from partially
855 constant pointers in the lattice or nonzero bits from partially
856 constant integers. */
857 for (i = 1; i < num_ssa_names; ++i)
859 tree name = ssa_name (i);
860 prop_value_t *val;
861 unsigned int tem, align;
863 if (!name
864 || (!POINTER_TYPE_P (TREE_TYPE (name))
865 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
866 /* Don't record nonzero bits before IPA to avoid
867 using too much memory. */
868 || first_pass_instance)))
869 continue;
871 val = get_value (name);
872 if (val->lattice_val != CONSTANT
873 || TREE_CODE (val->value) != INTEGER_CST)
874 continue;
876 if (POINTER_TYPE_P (TREE_TYPE (name)))
878 /* Trailing mask bits specify the alignment, trailing value
879 bits the misalignment. */
880 tem = val->mask.low;
881 align = (tem & -tem);
882 if (align > 1)
883 set_ptr_info_alignment (get_ptr_info (name), align,
884 (TREE_INT_CST_LOW (val->value)
885 & (align - 1)));
887 else
889 double_int nonzero_bits = val->mask;
890 nonzero_bits = nonzero_bits | tree_to_double_int (val->value);
891 nonzero_bits &= get_nonzero_bits (name);
892 set_nonzero_bits (name, nonzero_bits);
896 /* Perform substitutions based on the known constant values. */
897 something_changed = substitute_and_fold (get_constant_value,
898 ccp_fold_stmt, true);
900 free (const_val);
901 const_val = NULL;
 902 return something_changed;
906 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
907 in VAL1.
909 any M UNDEFINED = any
910 any M VARYING = VARYING
911 Ci M Cj = Ci if (i == j)
912 Ci M Cj = VARYING if (i != j)
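 For INTEGER_CSTs the meet is refined via the bit lattice: as an
 illustrative example, meeting 12 (binary 1100) with 14 (binary 1110)
 keeps value 12 and adds 12 ^ 14 = 2 to the mask, so only bit 1
 becomes unknown rather than the whole value dropping to VARYING.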
915 static void
916 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
918 if (val1->lattice_val == UNDEFINED)
920 /* UNDEFINED M any = any */
921 *val1 = *val2;
923 else if (val2->lattice_val == UNDEFINED)
925 /* any M UNDEFINED = any
926 Nothing to do. VAL1 already contains the value we want. */
929 else if (val1->lattice_val == VARYING
930 || val2->lattice_val == VARYING)
932 /* any M VARYING = VARYING. */
933 val1->lattice_val = VARYING;
934 val1->mask = double_int_minus_one;
935 val1->value = NULL_TREE;
937 else if (val1->lattice_val == CONSTANT
938 && val2->lattice_val == CONSTANT
939 && TREE_CODE (val1->value) == INTEGER_CST
940 && TREE_CODE (val2->value) == INTEGER_CST)
942 /* Ci M Cj = Ci if (i == j)
943 Ci M Cj = VARYING if (i != j)
945 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
946 drop to varying. */
947 val1->mask = val1->mask | val2->mask
948 | (tree_to_double_int (val1->value)
949 ^ tree_to_double_int (val2->value));
950 if (val1->mask.is_minus_one ())
952 val1->lattice_val = VARYING;
953 val1->value = NULL_TREE;
956 else if (val1->lattice_val == CONSTANT
957 && val2->lattice_val == CONSTANT
958 && simple_cst_equal (val1->value, val2->value) == 1)
960 /* Ci M Cj = Ci if (i == j)
961 Ci M Cj = VARYING if (i != j)
963 VAL1 already contains the value we want for equivalent values. */
965 else if (val1->lattice_val == CONSTANT
966 && val2->lattice_val == CONSTANT
967 && (TREE_CODE (val1->value) == ADDR_EXPR
968 || TREE_CODE (val2->value) == ADDR_EXPR))
 970 /* When unequal addresses are involved, try meeting for
971 alignment. */
972 prop_value_t tem = *val2;
973 if (TREE_CODE (val1->value) == ADDR_EXPR)
974 *val1 = get_value_for_expr (val1->value, true);
975 if (TREE_CODE (val2->value) == ADDR_EXPR)
976 tem = get_value_for_expr (val2->value, true);
977 ccp_lattice_meet (val1, &tem);
979 else
981 /* Any other combination is VARYING. */
982 val1->lattice_val = VARYING;
983 val1->mask = double_int_minus_one;
984 val1->value = NULL_TREE;
989 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
990 lattice values to determine PHI_NODE's lattice value. The value of a
 991 PHI node is determined by calling ccp_lattice_meet with all the arguments
992 of the PHI node that are incoming via executable edges. */
994 static enum ssa_prop_result
995 ccp_visit_phi_node (gimple phi)
997 unsigned i;
998 prop_value_t *old_val, new_val;
1000 if (dump_file && (dump_flags & TDF_DETAILS))
1002 fprintf (dump_file, "\nVisiting PHI node: ");
1003 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1006 old_val = get_value (gimple_phi_result (phi));
1007 switch (old_val->lattice_val)
1009 case VARYING:
1010 return SSA_PROP_VARYING;
1012 case CONSTANT:
1013 new_val = *old_val;
1014 break;
1016 case UNDEFINED:
1017 new_val.lattice_val = UNDEFINED;
1018 new_val.value = NULL_TREE;
1019 break;
1021 default:
1022 gcc_unreachable ();
1025 for (i = 0; i < gimple_phi_num_args (phi); i++)
1027 /* Compute the meet operator over all the PHI arguments flowing
1028 through executable edges. */
1029 edge e = gimple_phi_arg_edge (phi, i);
1031 if (dump_file && (dump_flags & TDF_DETAILS))
1033 fprintf (dump_file,
1034 "\n Argument #%d (%d -> %d %sexecutable)\n",
1035 i, e->src->index, e->dest->index,
1036 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
 1039 /* If the incoming edge is executable, compute the meet operator for
1040 the existing value of the PHI node and the current PHI argument. */
1041 if (e->flags & EDGE_EXECUTABLE)
1043 tree arg = gimple_phi_arg (phi, i)->def;
1044 prop_value_t arg_val = get_value_for_expr (arg, false);
1046 ccp_lattice_meet (&new_val, &arg_val);
1048 if (dump_file && (dump_flags & TDF_DETAILS))
1050 fprintf (dump_file, "\t");
1051 print_generic_expr (dump_file, arg, dump_flags);
1052 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1053 fprintf (dump_file, "\n");
1056 if (new_val.lattice_val == VARYING)
1057 break;
1061 if (dump_file && (dump_flags & TDF_DETAILS))
1063 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1064 fprintf (dump_file, "\n\n");
1067 /* Make the transition to the new value. */
1068 if (set_lattice_value (gimple_phi_result (phi), new_val))
1070 if (new_val.lattice_val == VARYING)
1071 return SSA_PROP_VARYING;
1072 else
1073 return SSA_PROP_INTERESTING;
1075 else
1076 return SSA_PROP_NOT_INTERESTING;
1079 /* Return the constant value for OP or OP otherwise. */
1081 static tree
1082 valueize_op (tree op)
1084 if (TREE_CODE (op) == SSA_NAME)
1086 tree tem = get_constant_value (op);
1087 if (tem)
1088 return tem;
1090 return op;
1093 /* CCP specific front-end to the non-destructive constant folding
1094 routines.
1096 Attempt to simplify the RHS of STMT knowing that one or more
1097 operands are constants.
1099 If simplification is possible, return the simplified RHS,
1100 otherwise return the original RHS or NULL_TREE. */
1102 static tree
1103 ccp_fold (gimple stmt)
1105 location_t loc = gimple_location (stmt);
1106 switch (gimple_code (stmt))
1108 case GIMPLE_COND:
1110 /* Handle comparison operators that can appear in GIMPLE form. */
1111 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1112 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1113 enum tree_code code = gimple_cond_code (stmt);
1114 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1117 case GIMPLE_SWITCH:
1119 /* Return the constant switch index. */
1120 return valueize_op (gimple_switch_index (stmt));
1123 case GIMPLE_ASSIGN:
1124 case GIMPLE_CALL:
1125 return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
1127 default:
1128 gcc_unreachable ();
1132 /* Apply the operation CODE in type TYPE to the value, mask pair
1133 RVAL and RMASK representing a value of type RTYPE and set
1134 the value, mask pair *VAL and *MASK to the result. */
1136 static void
1137 bit_value_unop_1 (enum tree_code code, tree type,
1138 double_int *val, double_int *mask,
1139 tree rtype, double_int rval, double_int rmask)
1141 switch (code)
1143 case BIT_NOT_EXPR:
1144 *mask = rmask;
1145 *val = ~rval;
1146 break;
1148 case NEGATE_EXPR:
1150 double_int temv, temm;
1151 /* Return ~rval + 1. */
1152 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1153 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1154 type, temv, temm,
1155 type, double_int_one, double_int_zero);
1156 break;
1159 CASE_CONVERT:
1161 bool uns;
1163 /* First extend mask and value according to the original type. */
1164 uns = TYPE_UNSIGNED (rtype);
1165 *mask = rmask.ext (TYPE_PRECISION (rtype), uns);
1166 *val = rval.ext (TYPE_PRECISION (rtype), uns);
1168 /* Then extend mask and value according to the target type. */
1169 uns = TYPE_UNSIGNED (type);
1170 *mask = (*mask).ext (TYPE_PRECISION (type), uns);
1171 *val = (*val).ext (TYPE_PRECISION (type), uns);
1172 break;
1175 default:
1176 *mask = double_int_minus_one;
1177 break;
1181 /* Apply the operation CODE in type TYPE to the value, mask pairs
1182 R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
1183 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1185 static void
1186 bit_value_binop_1 (enum tree_code code, tree type,
1187 double_int *val, double_int *mask,
1188 tree r1type, double_int r1val, double_int r1mask,
1189 tree r2type, double_int r2val, double_int r2mask)
1191 bool uns = TYPE_UNSIGNED (type);
 1192 /* Assume we'll get a constant result. Use an initial varying value;
 1193 we fall back to varying in the end if necessary. */
1194 *mask = double_int_minus_one;
1195 switch (code)
1197 case BIT_AND_EXPR:
1198 /* The mask is constant where there is a known not
1199 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
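 /* An illustrative instance (numbers invented): v1 = 1010, m1 = 0100,
 v2 = 0011, m2 = 0000 gives mask = 0100 & 1110 & 0011 = 0000 and
 val = 0010; the known zero bits of the second operand make the
 result fully known even though bit 2 of the first is unknown. */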
1200 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1201 *val = r1val & r2val;
1202 break;
1204 case BIT_IOR_EXPR:
1205 /* The mask is constant where there is a known
1206 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1207 *mask = (r1mask | r2mask)
1208 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1209 *val = r1val | r2val;
1210 break;
1212 case BIT_XOR_EXPR:
1213 /* m1 | m2 */
1214 *mask = r1mask | r2mask;
1215 *val = r1val ^ r2val;
1216 break;
1218 case LROTATE_EXPR:
1219 case RROTATE_EXPR:
1220 if (r2mask.is_zero ())
1222 HOST_WIDE_INT shift = r2val.low;
1223 if (code == RROTATE_EXPR)
1224 shift = -shift;
1225 *mask = r1mask.lrotate (shift, TYPE_PRECISION (type));
1226 *val = r1val.lrotate (shift, TYPE_PRECISION (type));
1228 break;
1230 case LSHIFT_EXPR:
1231 case RSHIFT_EXPR:
1232 /* ??? We can handle partially known shift counts if we know
 1233 their sign. That way we can tell that (x << (y | 8)) & 255
1234 is zero. */
1235 if (r2mask.is_zero ())
1237 HOST_WIDE_INT shift = r2val.low;
1238 if (code == RSHIFT_EXPR)
1239 shift = -shift;
1240 /* We need to know if we are doing a left or a right shift
1241 to properly shift in zeros for left shift and unsigned
1242 right shifts and the sign bit for signed right shifts.
1243 For signed right shifts we shift in varying in case
1244 the sign bit was varying. */
1245 if (shift > 0)
1247 *mask = r1mask.llshift (shift, TYPE_PRECISION (type));
1248 *val = r1val.llshift (shift, TYPE_PRECISION (type));
1250 else if (shift < 0)
1252 shift = -shift;
1253 *mask = r1mask.rshift (shift, TYPE_PRECISION (type), !uns);
1254 *val = r1val.rshift (shift, TYPE_PRECISION (type), !uns);
1256 else
1258 *mask = r1mask;
1259 *val = r1val;
1262 break;
1264 case PLUS_EXPR:
1265 case POINTER_PLUS_EXPR:
1267 double_int lo, hi;
1268 /* Do the addition with unknown bits set to zero, to give carry-ins of
1269 zero wherever possible. */
1270 lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
1271 lo = lo.ext (TYPE_PRECISION (type), uns);
1272 /* Do the addition with unknown bits set to one, to give carry-ins of
1273 one wherever possible. */
1274 hi = (r1val | r1mask) + (r2val | r2mask);
1275 hi = hi.ext (TYPE_PRECISION (type), uns);
1276 /* Each bit in the result is known if (a) the corresponding bits in
1277 both inputs are known, and (b) the carry-in to that bit position
1278 is known. We can check condition (b) by seeing if we got the same
1279 result with minimised carries as with maximised carries. */
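 /* A worked example with invented numbers: r1 = 0100 with mask 0011
 (any of 4..7) plus an exactly known r2 = 1000 gives lo = 12 and
 hi = 15, so lo ^ hi = 0011, mask = 0011 and val = 12: the sum is
 known to lie in 12..15 with only its two low bits unknown. */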
1280 *mask = r1mask | r2mask | (lo ^ hi);
1281 *mask = (*mask).ext (TYPE_PRECISION (type), uns);
1282 /* It shouldn't matter whether we choose lo or hi here. */
1283 *val = lo;
1284 break;
1287 case MINUS_EXPR:
1289 double_int temv, temm;
1290 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1291 r2type, r2val, r2mask);
1292 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1293 r1type, r1val, r1mask,
1294 r2type, temv, temm);
1295 break;
1298 case MULT_EXPR:
 1300 /* Just track trailing zeros in both operands; the result has at
 1301 least the sum of both trailing-zero counts. */
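 /* For instance (illustrative values): a first operand known to be a
 multiple of 4 (two trailing zeros) times a second known to be a
 multiple of 2 (one trailing zero) yields a multiple of 8, so the
 low three bits of the result become known zeros. */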
1302 int r1tz = (r1val | r1mask).trailing_zeros ();
1303 int r2tz = (r2val | r2mask).trailing_zeros ();
1304 if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
1306 *mask = double_int_zero;
1307 *val = double_int_zero;
1309 else if (r1tz + r2tz > 0)
1311 *mask = ~double_int::mask (r1tz + r2tz);
1312 *mask = (*mask).ext (TYPE_PRECISION (type), uns);
1313 *val = double_int_zero;
1315 break;
1318 case EQ_EXPR:
1319 case NE_EXPR:
1321 double_int m = r1mask | r2mask;
1322 if (r1val.and_not (m) != r2val.and_not (m))
1324 *mask = double_int_zero;
1325 *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
1327 else
1329 /* We know the result of a comparison is always one or zero. */
1330 *mask = double_int_one;
1331 *val = double_int_zero;
1333 break;
1336 case GE_EXPR:
1337 case GT_EXPR:
1339 double_int tem = r1val;
1340 r1val = r2val;
1341 r2val = tem;
1342 tem = r1mask;
1343 r1mask = r2mask;
1344 r2mask = tem;
1345 code = swap_tree_comparison (code);
1347 /* Fallthru. */
1348 case LT_EXPR:
1349 case LE_EXPR:
1351 int minmax, maxmin;
1352 /* If the most significant bits are not known we know nothing. */
1353 if (r1mask.is_negative () || r2mask.is_negative ())
1354 break;
1356 /* For comparisons the signedness is in the comparison operands. */
1357 uns = TYPE_UNSIGNED (r1type);
 1359 /* If we know the most significant bits we know the
 1360 value ranges by means of treating varying bits as zero
1361 or one. Do a cross comparison of the max/min pairs. */
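 /* An invented example: r1 with value 0000 and mask 0011 (0..3)
 against r2 with value 1000 and mask 0001 (8 or 9) gives
 maxmin = cmp (3, 8) < 0, so r1 < r2 holds for every possible pair
 and the comparison folds to constant true. */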
1362 maxmin = (r1val | r1mask).cmp (r2val.and_not (r2mask), uns);
1363 minmax = r1val.and_not (r1mask).cmp (r2val | r2mask, uns);
1364 if (maxmin < 0) /* r1 is less than r2. */
1366 *mask = double_int_zero;
1367 *val = double_int_one;
1369 else if (minmax > 0) /* r1 is not less or equal to r2. */
1371 *mask = double_int_zero;
1372 *val = double_int_zero;
1374 else if (maxmin == minmax) /* r1 and r2 are equal. */
1376 /* This probably should never happen as we'd have
1377 folded the thing during fully constant value folding. */
1378 *mask = double_int_zero;
1379 *val = (code == LE_EXPR ? double_int_one : double_int_zero);
1381 else
1383 /* We know the result of a comparison is always one or zero. */
1384 *mask = double_int_one;
1385 *val = double_int_zero;
1387 break;
1390 default:;
1394 /* Return the propagation value when applying the operation CODE to
1395 the value RHS yielding type TYPE. */
1397 static prop_value_t
1398 bit_value_unop (enum tree_code code, tree type, tree rhs)
1400 prop_value_t rval = get_value_for_expr (rhs, true);
1401 double_int value, mask;
1402 prop_value_t val;
1404 if (rval.lattice_val == UNDEFINED)
1405 return rval;
1407 gcc_assert ((rval.lattice_val == CONSTANT
1408 && TREE_CODE (rval.value) == INTEGER_CST)
1409 || rval.mask.is_minus_one ());
1410 bit_value_unop_1 (code, type, &value, &mask,
1411 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
1412 if (!mask.is_minus_one ())
1414 val.lattice_val = CONSTANT;
1415 val.mask = mask;
1416 /* ??? Delay building trees here. */
1417 val.value = double_int_to_tree (type, value);
1419 else
1421 val.lattice_val = VARYING;
1422 val.value = NULL_TREE;
1423 val.mask = double_int_minus_one;
1425 return val;
1428 /* Return the propagation value when applying the operation CODE to
1429 the values RHS1 and RHS2 yielding type TYPE. */
1431 static prop_value_t
1432 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1434 prop_value_t r1val = get_value_for_expr (rhs1, true);
1435 prop_value_t r2val = get_value_for_expr (rhs2, true);
1436 double_int value, mask;
1437 prop_value_t val;
1439 if (r1val.lattice_val == UNDEFINED
1440 || r2val.lattice_val == UNDEFINED)
1442 val.lattice_val = VARYING;
1443 val.value = NULL_TREE;
1444 val.mask = double_int_minus_one;
1445 return val;
1448 gcc_assert ((r1val.lattice_val == CONSTANT
1449 && TREE_CODE (r1val.value) == INTEGER_CST)
1450 || r1val.mask.is_minus_one ());
1451 gcc_assert ((r2val.lattice_val == CONSTANT
1452 && TREE_CODE (r2val.value) == INTEGER_CST)
1453 || r2val.mask.is_minus_one ());
1454 bit_value_binop_1 (code, type, &value, &mask,
1455 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
1456 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
1457 if (!mask.is_minus_one ())
1459 val.lattice_val = CONSTANT;
1460 val.mask = mask;
1461 /* ??? Delay building trees here. */
1462 val.value = double_int_to_tree (type, value);
1464 else
1466 val.lattice_val = VARYING;
1467 val.value = NULL_TREE;
1468 val.mask = double_int_minus_one;
1470 return val;
1473 /* Return the propagation value when applying __builtin_assume_aligned to
1474 its arguments. */
1476 static prop_value_t
1477 bit_value_assume_aligned (gimple stmt)
1479 tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
1480 tree type = TREE_TYPE (ptr);
1481 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1482 prop_value_t ptrval = get_value_for_expr (ptr, true);
1483 prop_value_t alignval;
1484 double_int value, mask;
1485 prop_value_t val;
1486 if (ptrval.lattice_val == UNDEFINED)
1487 return ptrval;
1488 gcc_assert ((ptrval.lattice_val == CONSTANT
1489 && TREE_CODE (ptrval.value) == INTEGER_CST)
1490 || ptrval.mask.is_minus_one ());
1491 align = gimple_call_arg (stmt, 1);
1492 if (!tree_fits_uhwi_p (align))
1493 return ptrval;
1494 aligni = tree_to_uhwi (align);
1495 if (aligni <= 1
1496 || (aligni & (aligni - 1)) != 0)
1497 return ptrval;
1498 if (gimple_call_num_args (stmt) > 2)
1500 misalign = gimple_call_arg (stmt, 2);
1501 if (!tree_fits_uhwi_p (misalign))
1502 return ptrval;
1503 misaligni = tree_to_uhwi (misalign);
1504 if (misaligni >= aligni)
1505 return ptrval;
1507 align = build_int_cst_type (type, -aligni);
1508 alignval = get_value_for_expr (align, true);
1509 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1510 type, value_to_double_int (ptrval), ptrval.mask,
1511 type, value_to_double_int (alignval), alignval.mask);
1512 if (!mask.is_minus_one ())
1514 val.lattice_val = CONSTANT;
1515 val.mask = mask;
1516 gcc_assert ((mask.low & (aligni - 1)) == 0);
1517 gcc_assert ((value.low & (aligni - 1)) == 0);
1518 value.low |= misaligni;
1519 /* ??? Delay building trees here. */
1520 val.value = double_int_to_tree (type, value);
1522 else
1524 val.lattice_val = VARYING;
1525 val.value = NULL_TREE;
1526 val.mask = double_int_minus_one;
1528 return val;
1531 /* Evaluate statement STMT.
1532 Valid only for assignments, calls, conditionals, and switches. */
1534 static prop_value_t
1535 evaluate_stmt (gimple stmt)
1537 prop_value_t val;
1538 tree simplified = NULL_TREE;
1539 ccp_lattice_t likelyvalue = likely_value (stmt);
1540 bool is_constant = false;
1541 unsigned int align;
1543 if (dump_file && (dump_flags & TDF_DETAILS))
1545 fprintf (dump_file, "which is likely ");
1546 switch (likelyvalue)
1548 case CONSTANT:
1549 fprintf (dump_file, "CONSTANT");
1550 break;
1551 case UNDEFINED:
1552 fprintf (dump_file, "UNDEFINED");
1553 break;
1554 case VARYING:
1555 fprintf (dump_file, "VARYING");
1556 break;
1557 default:;
1559 fprintf (dump_file, "\n");
1562 /* If the statement is likely to have a CONSTANT result, then try
1563 to fold the statement to determine the constant value. */
1564 /* FIXME. This is the only place that we call ccp_fold.
1565 Since likely_value never returns CONSTANT for calls, we will
1566 not attempt to fold them, including builtins that may profit. */
1567 if (likelyvalue == CONSTANT)
1569 fold_defer_overflow_warnings ();
1570 simplified = ccp_fold (stmt);
1571 is_constant = simplified && is_gimple_min_invariant (simplified);
1572 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1573 if (is_constant)
1575 /* The statement produced a constant value. */
1576 val.lattice_val = CONSTANT;
1577 val.value = simplified;
1578 val.mask = double_int_zero;
1581 /* If the statement is likely to have a VARYING result, then do not
1582 bother folding the statement. */
1583 else if (likelyvalue == VARYING)
1585 enum gimple_code code = gimple_code (stmt);
1586 if (code == GIMPLE_ASSIGN)
1588 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1590 /* Other cases cannot satisfy is_gimple_min_invariant
1591 without folding. */
1592 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1593 simplified = gimple_assign_rhs1 (stmt);
1595 else if (code == GIMPLE_SWITCH)
1596 simplified = gimple_switch_index (stmt);
1597 else
1598 /* These cannot satisfy is_gimple_min_invariant without folding. */
1599 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1600 is_constant = simplified && is_gimple_min_invariant (simplified);
1601 if (is_constant)
1603 /* The statement produced a constant value. */
1604 val.lattice_val = CONSTANT;
1605 val.value = simplified;
1606 val.mask = double_int_zero;
1610 /* Resort to simplification for bitwise tracking. */
1611 if (flag_tree_bit_ccp
1612 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
1613 && !is_constant)
1615 enum gimple_code code = gimple_code (stmt);
1616 val.lattice_val = VARYING;
1617 val.value = NULL_TREE;
1618 val.mask = double_int_minus_one;
1619 if (code == GIMPLE_ASSIGN)
1621 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1622 tree rhs1 = gimple_assign_rhs1 (stmt);
1623 switch (get_gimple_rhs_class (subcode))
1625 case GIMPLE_SINGLE_RHS:
1626 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1627 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1628 val = get_value_for_expr (rhs1, true);
1629 break;
1631 case GIMPLE_UNARY_RHS:
1632 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1633 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1634 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1635 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1636 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1637 break;
1639 case GIMPLE_BINARY_RHS:
1640 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1641 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1643 tree lhs = gimple_assign_lhs (stmt);
1644 tree rhs2 = gimple_assign_rhs2 (stmt);
1645 val = bit_value_binop (subcode,
1646 TREE_TYPE (lhs), rhs1, rhs2);
1648 break;
1650 default:;
1653 else if (code == GIMPLE_COND)
1655 enum tree_code code = gimple_cond_code (stmt);
1656 tree rhs1 = gimple_cond_lhs (stmt);
1657 tree rhs2 = gimple_cond_rhs (stmt);
1658 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1659 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1660 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1662 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1664 tree fndecl = gimple_call_fndecl (stmt);
1665 switch (DECL_FUNCTION_CODE (fndecl))
1667 case BUILT_IN_MALLOC:
1668 case BUILT_IN_REALLOC:
1669 case BUILT_IN_CALLOC:
1670 case BUILT_IN_STRDUP:
1671 case BUILT_IN_STRNDUP:
1672 val.lattice_val = CONSTANT;
1673 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1674 val.mask = double_int::from_shwi
1675 (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
1676 / BITS_PER_UNIT - 1));
1677 break;
1679 case BUILT_IN_ALLOCA:
1680 case BUILT_IN_ALLOCA_WITH_ALIGN:
1681 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1682 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1683 : BIGGEST_ALIGNMENT);
1684 val.lattice_val = CONSTANT;
1685 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1686 val.mask = double_int::from_shwi (~(((HOST_WIDE_INT) align)
1687 / BITS_PER_UNIT - 1));
1688 break;
1690 /* These builtins return their first argument, unmodified. */
1691 case BUILT_IN_MEMCPY:
1692 case BUILT_IN_MEMMOVE:
1693 case BUILT_IN_MEMSET:
1694 case BUILT_IN_STRCPY:
1695 case BUILT_IN_STRNCPY:
1696 case BUILT_IN_MEMCPY_CHK:
1697 case BUILT_IN_MEMMOVE_CHK:
1698 case BUILT_IN_MEMSET_CHK:
1699 case BUILT_IN_STRCPY_CHK:
1700 case BUILT_IN_STRNCPY_CHK:
1701 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1702 break;
1704 case BUILT_IN_ASSUME_ALIGNED:
1705 val = bit_value_assume_aligned (stmt);
1706 break;
1708 default:;
1711 is_constant = (val.lattice_val == CONSTANT);
1714 if (flag_tree_bit_ccp
1715 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1716 || (!is_constant && likelyvalue != UNDEFINED))
1717 && gimple_get_lhs (stmt)
1718 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1720 tree lhs = gimple_get_lhs (stmt);
1721 double_int nonzero_bits = get_nonzero_bits (lhs);
1722 double_int mask = double_int::mask (TYPE_PRECISION (TREE_TYPE (lhs)));
1723 if (nonzero_bits != double_int_minus_one && nonzero_bits != mask)
1725 if (!is_constant)
1727 val.lattice_val = CONSTANT;
1728 val.value = build_zero_cst (TREE_TYPE (lhs));
1729 /* CCP wants the bits above precision set. */
1730 val.mask = nonzero_bits | ~mask;
1731 is_constant = true;
1733 else
1735 double_int valv = tree_to_double_int (val.value);
1736 if (!(valv & ~nonzero_bits & mask).is_zero ())
1737 val.value = double_int_to_tree (TREE_TYPE (lhs),
1738 valv & nonzero_bits);
1739 if (nonzero_bits.is_zero ())
1740 val.mask = double_int_zero;
1741 else
1742 val.mask = val.mask & (nonzero_bits | ~mask);
1747 if (!is_constant)
1749 /* The statement produced a nonconstant value. If the statement
1750 had UNDEFINED operands, then the result of the statement
1751 should be UNDEFINED. Otherwise, the statement is VARYING. */
1752 if (likelyvalue == UNDEFINED)
1754 val.lattice_val = likelyvalue;
1755 val.mask = double_int_zero;
1757 else
1759 val.lattice_val = VARYING;
1760 val.mask = double_int_minus_one;
1763 val.value = NULL_TREE;
1766 return val;
1769 typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab;
1771 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1772 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1774 static void
1775 insert_clobber_before_stack_restore (tree saved_val, tree var,
1776 gimple_htab *visited)
1778 gimple stmt, clobber_stmt;
1779 tree clobber;
1780 imm_use_iterator iter;
1781 gimple_stmt_iterator i;
1782 gimple *slot;
1784 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1785 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1787 clobber = build_constructor (TREE_TYPE (var),
1788 NULL);
1789 TREE_THIS_VOLATILE (clobber) = 1;
1790 clobber_stmt = gimple_build_assign (var, clobber);
1792 i = gsi_for_stmt (stmt);
1793 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1795 else if (gimple_code (stmt) == GIMPLE_PHI)
1797 if (!visited->is_created ())
1798 visited->create (10);
1800 slot = visited->find_slot (stmt, INSERT);
1801 if (*slot != NULL)
1802 continue;
1804 *slot = stmt;
1805 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1806 visited);
1808 else if (gimple_assign_ssa_name_copy_p (stmt))
1809 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
1810 visited);
1811 else
1812 gcc_assert (is_gimple_debug (stmt));
1815 /* Advance the iterator to the previous non-debug gimple statement in the same
1816 or dominating basic block. */
1818 static inline void
1819 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1821 basic_block dom;
1823 gsi_prev_nondebug (i);
1824 while (gsi_end_p (*i))
1826 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1827 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1828 return;
1830 *i = gsi_last_bb (dom);
1834 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1835 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
 1837 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1838 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
1839 that case the function gives up without inserting the clobbers. */
1841 static void
1842 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1844 gimple stmt;
1845 tree saved_val;
1846 gimple_htab visited;
1848 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
1850 stmt = gsi_stmt (i);
1852 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1853 continue;
1855 saved_val = gimple_call_lhs (stmt);
1856 if (saved_val == NULL_TREE)
1857 continue;
1859 insert_clobber_before_stack_restore (saved_val, var, &visited);
1860 break;
1863 if (visited.is_created ())
1864 visited.dispose ();
 1867 /* Detects a __builtin_alloca_with_align with a constant size argument.
 1868 If found, declares a fixed-size array and returns its address;
 1869 otherwise returns NULL_TREE. */
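 /* A hedged example (argument values invented): for
 p_1 = __builtin_alloca_with_align (16, 64), once the size argument
 is known to be the constant 16 and the size heuristic below is
 satisfied, the call may be folded into a 16-element character array
 with 64-bit alignment and p_1 rewritten to that array's address. */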
1871 static tree
1872 fold_builtin_alloca_with_align (gimple stmt)
1874 unsigned HOST_WIDE_INT size, threshold, n_elem;
1875 tree lhs, arg, block, var, elem_type, array_type;
1877 /* Get lhs. */
1878 lhs = gimple_call_lhs (stmt);
1879 if (lhs == NULL_TREE)
1880 return NULL_TREE;
1882 /* Detect constant argument. */
1883 arg = get_constant_value (gimple_call_arg (stmt, 0));
1884 if (arg == NULL_TREE
1885 || TREE_CODE (arg) != INTEGER_CST
1886 || !tree_fits_uhwi_p (arg))
1887 return NULL_TREE;
1889 size = tree_to_uhwi (arg);
1891 /* Heuristic: don't fold large allocas. */
1892 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
1893 /* In case the alloca is located at function entry, it has the same lifetime
1894 as a declared array, so we allow a larger size. */
1895 block = gimple_block (stmt);
1896 if (!(cfun->after_inlining
1897 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1898 threshold /= 10;
1899 if (size > threshold)
1900 return NULL_TREE;
1902 /* Declare array. */
1903 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1904 n_elem = size * 8 / BITS_PER_UNIT;
1905 array_type = build_array_type_nelts (elem_type, n_elem);
1906 var = create_tmp_var (array_type, NULL);
1907 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
1909 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1910 if (pi != NULL && !pi->pt.anything)
1912 bool singleton_p;
1913 unsigned uid;
1914 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1915 gcc_assert (singleton_p);
1916 SET_DECL_PT_UID (var, uid);
1920 /* Fold alloca to the address of the array. */
1921 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1924 /* Fold the stmt at *GSI with CCP specific information that propagating
1925 and regular folding does not catch. */
1927 static bool
1928 ccp_fold_stmt (gimple_stmt_iterator *gsi)
1930 gimple stmt = gsi_stmt (*gsi);
1932 switch (gimple_code (stmt))
1934 case GIMPLE_COND:
1936 prop_value_t val;
1937 /* Statement evaluation will handle type mismatches in constants
1938 more gracefully than the final propagation. This allows us to
1939 fold more conditionals here. */
1940 val = evaluate_stmt (stmt);
1941 if (val.lattice_val != CONSTANT
1942 || !val.mask.is_zero ())
1943 return false;
1945 if (dump_file)
1947 fprintf (dump_file, "Folding predicate ");
1948 print_gimple_expr (dump_file, stmt, 0, 0);
1949 fprintf (dump_file, " to ");
1950 print_generic_expr (dump_file, val.value, 0);
1951 fprintf (dump_file, "\n");
1954 if (integer_zerop (val.value))
1955 gimple_cond_make_false (stmt);
1956 else
1957 gimple_cond_make_true (stmt);
1959 return true;
1962 case GIMPLE_CALL:
1964 tree lhs = gimple_call_lhs (stmt);
1965 int flags = gimple_call_flags (stmt);
1966 tree val;
1967 tree argt;
1968 bool changed = false;
1969 unsigned i;
1971 /* If the call was folded into a constant make sure it goes
1972 away even if we cannot propagate into all uses because of
1973 type issues. */
1974 if (lhs
1975 && TREE_CODE (lhs) == SSA_NAME
1976 && (val = get_constant_value (lhs))
1977 /* Don't optimize away calls that have side-effects. */
1978 && (flags & (ECF_CONST|ECF_PURE)) != 0
1979 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
1981 tree new_rhs = unshare_expr (val);
1982 bool res;
1983 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1984 TREE_TYPE (new_rhs)))
1985 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1986 res = update_call_from_tree (gsi, new_rhs);
1987 gcc_assert (res);
1988 return true;
1991 /* Internal calls provide no argument types, so the extra laxity
1992 for normal calls does not apply. */
1993 if (gimple_call_internal_p (stmt))
1994 return false;
1996 /* The heuristic of fold_builtin_alloca_with_align differs before and
1997    after inlining, so we don't require that this propagation just made
1998    the argument constant; it only needs to be constant for folding. */
1999 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
2001 tree new_rhs = fold_builtin_alloca_with_align (stmt);
2002 if (new_rhs)
2004 bool res = update_call_from_tree (gsi, new_rhs);
2005 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
2006 gcc_assert (res);
2007 insert_clobbers_for_var (*gsi, var);
2008 return true;
2012 /* Propagate into the call arguments. Compared to replace_uses_in
2013 this can use the argument slot types for type verification
2014 instead of the current argument type. We also can safely
2015 drop qualifiers here as we are dealing with constants anyway. */
2016 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
2017 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2018 ++i, argt = TREE_CHAIN (argt))
2020 tree arg = gimple_call_arg (stmt, i);
2021 if (TREE_CODE (arg) == SSA_NAME
2022 && (val = get_constant_value (arg))
2023 && useless_type_conversion_p
2024 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2025 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2027 gimple_call_set_arg (stmt, i, unshare_expr (val));
2028 changed = true;
2032 return changed;
2035 case GIMPLE_ASSIGN:
2037 tree lhs = gimple_assign_lhs (stmt);
2038 tree val;
2040 /* If we have a load that turned out to be constant, replace it,
2041    as we cannot propagate into all uses in all cases. */
2042 if (gimple_assign_single_p (stmt)
2043 && TREE_CODE (lhs) == SSA_NAME
2044 && (val = get_constant_value (lhs)))
2046 tree rhs = unshare_expr (val);
2047 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2048 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2049 gimple_assign_set_rhs_from_tree (gsi, rhs);
2050 return true;
2053 return false;
2056 default:
2057 return false;
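/* An illustrative sketch (editorial example; F and COMPUTE are
   hypothetical names):

     extern int compute (void);

     int
     f (void)
     {
       int x = 0;
       if (x)
         return compute ();
       return 1;
     }

   After propagation the GIMPLE_COND tests a value known to be zero, so
   the GIMPLE_COND case above rewrites it with gimple_cond_make_false;
   the branch to COMPUTE becomes unreachable and is removed by the later
   CFG cleanup requested via TODO_cleanup_cfg.  */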
2061 /* Visit the assignment statement STMT. Set the value of its LHS to the
2062 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2063 creates virtual definitions, set the value of each new name to that
2064 of the RHS (if we can derive a constant out of the RHS).
2065 Value-returning call statements also perform an assignment, and
2066 are handled here. */
2068 static enum ssa_prop_result
2069 visit_assignment (gimple stmt, tree *output_p)
2071 prop_value_t val;
2072 enum ssa_prop_result retval;
2074 tree lhs = gimple_get_lhs (stmt);
2076 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2077 || gimple_call_lhs (stmt) != NULL_TREE);
2079 if (gimple_assign_single_p (stmt)
2080 && gimple_assign_rhs_code (stmt) == SSA_NAME)
2081 /* For a simple copy operation, we copy the lattice values. */
2082 val = *get_value (gimple_assign_rhs1 (stmt));
2083 else
2084 /* Evaluate the statement, which could be
2085 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2086 val = evaluate_stmt (stmt);
2088 retval = SSA_PROP_NOT_INTERESTING;
2090 /* Set the lattice value of the statement's output. */
2091 if (TREE_CODE (lhs) == SSA_NAME)
2093 /* If STMT is an assignment to an SSA_NAME, we only have one
2094 value to set. */
2095 if (set_lattice_value (lhs, val))
2097 *output_p = lhs;
2098 if (val.lattice_val == VARYING)
2099 retval = SSA_PROP_VARYING;
2100 else
2101 retval = SSA_PROP_INTERESTING;
2105 return retval;
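/* A small example (added for illustration, using the SSA numbering of
   the dumps):

     x_2 = 4;
     y_3 = x_2;

   Visiting the first statement evaluates its right-hand side and sets
   X_2 to CONSTANT 4.  The second statement is a plain SSA_NAME copy, so
   Y_3 simply inherits X_2's lattice value instead of being re-evaluated,
   and the result is reported as interesting only if Y_3's lattice value
   actually changed.  */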
2109 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2110 if it can determine which edge will be taken. Otherwise, return
2111 SSA_PROP_VARYING. */
2113 static enum ssa_prop_result
2114 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
2116 prop_value_t val;
2117 basic_block block;
2119 block = gimple_bb (stmt);
2120 val = evaluate_stmt (stmt);
2121 if (val.lattice_val != CONSTANT
2122 || !val.mask.is_zero ())
2123 return SSA_PROP_VARYING;
2125 /* Find which edge out of the conditional block will be taken and add it
2126 to the worklist. If no single edge can be determined statically,
2127 return SSA_PROP_VARYING to feed all the outgoing edges to the
2128 propagation engine. */
2129 *taken_edge_p = find_taken_edge (block, val.value);
2130 if (*taken_edge_p)
2131 return SSA_PROP_INTERESTING;
2132 else
2133 return SSA_PROP_VARYING;
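/* A small example (added for illustration): for the conditional

     if (x_1 > 10)
       goto <bb 3>;
     else
       goto <bb 4>;

   where X_1 has the lattice value CONSTANT 3, evaluate_stmt yields a
   false predicate, find_taken_edge selects the edge to <bb 4>, and that
   single edge is returned as SSA_PROP_INTERESTING so only the reachable
   successor needs to be simulated.  */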
2137 /* Evaluate statement STMT. If the statement produces an output value and
2138 its evaluation changes the lattice value of its output, return
2139 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2140 output value.
2142 If STMT is a conditional branch and we can determine its truth
2143 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2144 value, return SSA_PROP_VARYING. */
2146 static enum ssa_prop_result
2147 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
2149 tree def;
2150 ssa_op_iter iter;
2152 if (dump_file && (dump_flags & TDF_DETAILS))
2154 fprintf (dump_file, "\nVisiting statement:\n");
2155 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2158 switch (gimple_code (stmt))
2160 case GIMPLE_ASSIGN:
2161 /* If the statement is an assignment that produces a single
2162 output value, evaluate its RHS to see if the lattice value of
2163 its output has changed. */
2164 return visit_assignment (stmt, output_p);
2166 case GIMPLE_CALL:
2167 /* A value-returning call also performs an assignment. */
2168 if (gimple_call_lhs (stmt) != NULL_TREE)
2169 return visit_assignment (stmt, output_p);
2170 break;
2172 case GIMPLE_COND:
2173 case GIMPLE_SWITCH:
2174 /* If STMT is a conditional branch, see if we can determine
2175 which branch will be taken. */
2176 /* FIXME. It appears that we should be able to optimize
2177 computed GOTOs here as well. */
2178 return visit_cond_stmt (stmt, taken_edge_p);
2180 default:
2181 break;
2184 /* Any other kind of statement is not interesting for constant
2185 propagation and, therefore, not worth simulating. */
2186 if (dump_file && (dump_flags & TDF_DETAILS))
2187 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2189 /* Definitions made by statements other than assignments to
2190 SSA_NAMEs represent unknown modifications to their outputs.
2191 Mark them VARYING. */
2192 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2194 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
2195 set_lattice_value (def, v);
2198 return SSA_PROP_VARYING;
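/* A small example (added for illustration; the asm is hypothetical): a
   statement such as

     __asm__ ("" : "=r" (t_5));

   is neither an assignment nor a call with an LHS, so it reaches the
   fall-through code above and every SSA name it defines, here T_5, is
   pinned to VARYING.  */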
2202 /* Main entry point for SSA Conditional Constant Propagation. */
2204 static unsigned int
2205 do_ssa_ccp (void)
2207 unsigned int todo = 0;
2208 calculate_dominance_info (CDI_DOMINATORS);
2209 ccp_initialize ();
2210 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
2211 if (ccp_finalize ())
2212 todo = (TODO_cleanup_cfg | TODO_update_ssa);
2213 free_dominance_info (CDI_DOMINATORS);
2214 return todo;
2218 static bool
2219 gate_ccp (void)
2221 return flag_tree_ccp != 0;
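/* Usage note (editorial): the pass is gated on -ftree-ccp, which is
   enabled by default at -O1 and above; its decisions can be inspected
   with a tree dump, e.g.

     gcc -O2 -fdump-tree-ccp-details test.c

   which records the visited statements and folded predicates in the
   ccp dump file.  */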
2225 namespace {
2227 const pass_data pass_data_ccp =
2229 GIMPLE_PASS, /* type */
2230 "ccp", /* name */
2231 OPTGROUP_NONE, /* optinfo_flags */
2232 true, /* has_gate */
2233 true, /* has_execute */
2234 TV_TREE_CCP, /* tv_id */
2235 ( PROP_cfg | PROP_ssa ), /* properties_required */
2236 0, /* properties_provided */
2237 0, /* properties_destroyed */
2238 0, /* todo_flags_start */
2239 ( TODO_verify_ssa | TODO_update_address_taken
2240 | TODO_verify_stmts ), /* todo_flags_finish */
2243 class pass_ccp : public gimple_opt_pass
2245 public:
2246 pass_ccp (gcc::context *ctxt)
2247 : gimple_opt_pass (pass_data_ccp, ctxt)
2250 /* opt_pass methods: */
2251 opt_pass * clone () { return new pass_ccp (m_ctxt); }
2252 bool gate () { return gate_ccp (); }
2253 unsigned int execute () { return do_ssa_ccp (); }
2255 }; // class pass_ccp
2257 } // anon namespace
2259 gimple_opt_pass *
2260 make_pass_ccp (gcc::context *ctxt)
2262 return new pass_ccp (ctxt);
2267 /* Try to optimize out __builtin_stack_restore.  It can be removed
2268    if there is another __builtin_stack_restore in the same basic
2269    block and no calls or ASM_EXPRs are in between, or if this block's
2270    only outgoing edge is to EXIT_BLOCK and there are no calls or
2271    ASM_EXPRs after this __builtin_stack_restore. */
2273 static tree
2274 optimize_stack_restore (gimple_stmt_iterator i)
2276 tree callee;
2277 gimple stmt;
2279 basic_block bb = gsi_bb (i);
2280 gimple call = gsi_stmt (i);
2282 if (gimple_code (call) != GIMPLE_CALL
2283 || gimple_call_num_args (call) != 1
2284 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2285 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2286 return NULL_TREE;
2288 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2290 stmt = gsi_stmt (i);
2291 if (gimple_code (stmt) == GIMPLE_ASM)
2292 return NULL_TREE;
2293 if (gimple_code (stmt) != GIMPLE_CALL)
2294 continue;
2296 callee = gimple_call_fndecl (stmt);
2297 if (!callee
2298 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2299 /* All regular builtins are ok, just obviously not alloca. */
2300 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2301 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
2302 return NULL_TREE;
2304 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2305 goto second_stack_restore;
2308 if (!gsi_end_p (i))
2309 return NULL_TREE;
2311 /* Allow a single outgoing edge to the exit block, or no successors at all. */
2312 switch (EDGE_COUNT (bb->succs))
2314 case 0:
2315 break;
2316 case 1:
2317 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2318 return NULL_TREE;
2319 break;
2320 default:
2321 return NULL_TREE;
2323 second_stack_restore:
2325 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2326 If there are multiple uses, then the last one should remove the call.
2327 In any case, whether the call to __builtin_stack_save can be removed
2328 or not is irrelevant to removing the call to __builtin_stack_restore. */
2329 if (has_single_use (gimple_call_arg (call, 0)))
2331 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2332 if (is_gimple_call (stack_save))
2334 callee = gimple_call_fndecl (stack_save);
2335 if (callee
2336 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2337 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2339 gimple_stmt_iterator stack_save_gsi;
2340 tree rhs;
2342 stack_save_gsi = gsi_for_stmt (stack_save);
2343 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2344 update_call_from_tree (&stack_save_gsi, rhs);
2349 /* No effect, so the statement will be deleted. */
2350 return integer_zero_node;
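/* An illustrative sketch (editorial example; F and CONSUME are
   hypothetical names):

     extern void consume (char *);

     void
     f (int n)
     {
       {
         char buf[n];
         consume (buf);
       }
     }

   Gimplification brackets the variable-length array with a
   __builtin_stack_save / __builtin_stack_restore pair.  Here the restore
   is followed only by the function's return, i.e. the block's single
   successor is the exit block and no call or asm follows, so it is
   deleted; because that restore was the sole use of the saved pointer,
   the matching __builtin_stack_save call is replaced by a constant zero
   and later cleaned up as dead code.  */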
2353 /* If the va_list type is a simple pointer and nothing special is needed,
2354    optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2355    fold __builtin_va_end (&ap) away as a no-op, and turn __builtin_va_copy
2356    into a simple pointer assignment. */
2358 static tree
2359 optimize_stdarg_builtin (gimple call)
2361 tree callee, lhs, rhs, cfun_va_list;
2362 bool va_list_simple_ptr;
2363 location_t loc = gimple_location (call);
2365 if (gimple_code (call) != GIMPLE_CALL)
2366 return NULL_TREE;
2368 callee = gimple_call_fndecl (call);
2370 cfun_va_list = targetm.fn_abi_va_list (callee);
2371 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2372 && (TREE_TYPE (cfun_va_list) == void_type_node
2373 || TREE_TYPE (cfun_va_list) == char_type_node);
2375 switch (DECL_FUNCTION_CODE (callee))
2377 case BUILT_IN_VA_START:
2378 if (!va_list_simple_ptr
2379 || targetm.expand_builtin_va_start != NULL
2380 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2381 return NULL_TREE;
2383 if (gimple_call_num_args (call) != 2)
2384 return NULL_TREE;
2386 lhs = gimple_call_arg (call, 0);
2387 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2388 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2389 != TYPE_MAIN_VARIANT (cfun_va_list))
2390 return NULL_TREE;
2392 lhs = build_fold_indirect_ref_loc (loc, lhs);
2393 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2394 1, integer_zero_node);
2395 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2396 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2398 case BUILT_IN_VA_COPY:
2399 if (!va_list_simple_ptr)
2400 return NULL_TREE;
2402 if (gimple_call_num_args (call) != 2)
2403 return NULL_TREE;
2405 lhs = gimple_call_arg (call, 0);
2406 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2407 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2408 != TYPE_MAIN_VARIANT (cfun_va_list))
2409 return NULL_TREE;
2411 lhs = build_fold_indirect_ref_loc (loc, lhs);
2412 rhs = gimple_call_arg (call, 1);
2413 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2414 != TYPE_MAIN_VARIANT (cfun_va_list))
2415 return NULL_TREE;
2417 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2418 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2420 case BUILT_IN_VA_END:
2421 /* No effect, so the statement will be deleted. */
2422 return integer_zero_node;
2424 default:
2425 gcc_unreachable ();
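/* An illustrative sketch (editorial example; FIRST_ARG is a hypothetical
   name) for a target whose va_list ABI type is a plain character or void
   pointer and which has no custom va_start expander:

     #include <stdarg.h>

     int
     first_arg (int count, ...)
     {
       va_list ap;
       int v;

       va_start (ap, count);
       v = va_arg (ap, int);
       va_end (ap);
       return v;
     }

   The va_start call becomes the pointer assignment
   ap = __builtin_next_arg (0), the va_end call is folded away as having
   no effect, and a va_copy (not used here) would become a plain pointer
   copy.  */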
2429 /* Attempt to make the block containing the __builtin_unreachable call at I
2430    unreachable by changing the incoming jumps. Return true if at least one jump was changed. */
2432 static bool
2433 optimize_unreachable (gimple_stmt_iterator i)
2435 basic_block bb = gsi_bb (i);
2436 gimple_stmt_iterator gsi;
2437 gimple stmt;
2438 edge_iterator ei;
2439 edge e;
2440 bool ret;
2442 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2444 stmt = gsi_stmt (gsi);
2446 if (is_gimple_debug (stmt))
2447 continue;
2449 if (gimple_code (stmt) == GIMPLE_LABEL)
2451 /* Verify we do not need to preserve the label. */
2452 if (FORCED_LABEL (gimple_label_label (stmt)))
2453 return false;
2455 continue;
2458 /* Only handle the case that __builtin_unreachable is the first statement
2459 in the block. We rely on DCE to remove stmts without side-effects
2460 before __builtin_unreachable. */
2461 if (gsi_stmt (gsi) != gsi_stmt (i))
2462 return false;
2465 ret = false;
2466 FOR_EACH_EDGE (e, ei, bb->preds)
2468 gsi = gsi_last_bb (e->src);
2469 if (gsi_end_p (gsi))
2470 continue;
2472 stmt = gsi_stmt (gsi);
2473 if (gimple_code (stmt) == GIMPLE_COND)
2475 if (e->flags & EDGE_TRUE_VALUE)
2476 gimple_cond_make_false (stmt);
2477 else if (e->flags & EDGE_FALSE_VALUE)
2478 gimple_cond_make_true (stmt);
2479 else
2480 gcc_unreachable ();
2481 update_stmt (stmt);
2483 else
2485 /* TODO: handle other cases, e.g. a switch statement. */
2486 continue;
2489 ret = true;
2492 return ret;
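/* An illustrative sketch (editorial example; F and GET are hypothetical
   names):

     extern int get (void);

     int
     f (int x)
     {
       if (x < 0)
         __builtin_unreachable ();
       return get () + x;
     }

   The block holding the __builtin_unreachable call starts with that
   call, and its only predecessor ends in a GIMPLE_COND whose true edge
   leads to it, so that condition is rewritten to constant false; the now
   dead block is removed by the CFG cleanup the caller requests via
   TODO_cleanup_cfg.  */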
2495 /* A simple pass that attempts to fold all builtin functions. This pass
2496 is run after we've propagated as many constants as we can. */
2498 static unsigned int
2499 execute_fold_all_builtins (void)
2501 bool cfg_changed = false;
2502 basic_block bb;
2503 unsigned int todoflags = 0;
2505 FOR_EACH_BB (bb)
2507 gimple_stmt_iterator i;
2508 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
2510 gimple stmt, old_stmt;
2511 tree callee, result;
2512 enum built_in_function fcode;
2514 stmt = gsi_stmt (i);
2516 if (gimple_code (stmt) != GIMPLE_CALL)
2518 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
2519    after the last GIMPLE DSE they aren't needed and might
2520    unnecessarily keep the SSA_NAMEs live. */
2521 if (gimple_clobber_p (stmt))
2523 tree lhs = gimple_assign_lhs (stmt);
2524 if (TREE_CODE (lhs) == MEM_REF
2525 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2527 unlink_stmt_vdef (stmt);
2528 gsi_remove (&i, true);
2529 release_defs (stmt);
2530 continue;
2533 gsi_next (&i);
2534 continue;
2536 callee = gimple_call_fndecl (stmt);
2537 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2539 gsi_next (&i);
2540 continue;
2542 fcode = DECL_FUNCTION_CODE (callee);
2544 result = gimple_fold_builtin (stmt);
2546 if (result)
2547 gimple_remove_stmt_histograms (cfun, stmt);
2549 if (!result)
2550 switch (DECL_FUNCTION_CODE (callee))
2552 case BUILT_IN_CONSTANT_P:
2553 /* Resolve __builtin_constant_p. If it hasn't been
2554 folded to integer_one_node by now, it's fairly
2555 certain that the value simply isn't constant. */
2556 result = integer_zero_node;
2557 break;
2559 case BUILT_IN_ASSUME_ALIGNED:
2560 /* Remove __builtin_assume_aligned. */
2561 result = gimple_call_arg (stmt, 0);
2562 break;
2564 case BUILT_IN_STACK_RESTORE:
2565 result = optimize_stack_restore (i);
2566 if (result)
2567 break;
2568 gsi_next (&i);
2569 continue;
2571 case BUILT_IN_UNREACHABLE:
2572 if (optimize_unreachable (i))
2573 cfg_changed = true;
2574 break;
2576 case BUILT_IN_VA_START:
2577 case BUILT_IN_VA_END:
2578 case BUILT_IN_VA_COPY:
2579 /* These shouldn't be folded before pass_stdarg. */
2580 result = optimize_stdarg_builtin (stmt);
2581 if (result)
2582 break;
2583 /* FALLTHRU */
2585 default:
2586 gsi_next (&i);
2587 continue;
2590 if (result == NULL_TREE)
2591 break;
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2595 fprintf (dump_file, "Simplified\n ");
2596 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2599 old_stmt = stmt;
2600 if (!update_call_from_tree (&i, result))
2602 gimplify_and_update_call_from_tree (&i, result);
2603 todoflags |= TODO_update_address_taken;
2606 stmt = gsi_stmt (i);
2607 update_stmt (stmt);
2609 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2610 && gimple_purge_dead_eh_edges (bb))
2611 cfg_changed = true;
2613 if (dump_file && (dump_flags & TDF_DETAILS))
2615 fprintf (dump_file, "to\n ");
2616 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2617 fprintf (dump_file, "\n");
2620 /* Retry the same statement if it changed into another
2621    builtin; there might be new opportunities now. */
2622 if (gimple_code (stmt) != GIMPLE_CALL)
2624 gsi_next (&i);
2625 continue;
2627 callee = gimple_call_fndecl (stmt);
2628 if (!callee
2629 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2630 || DECL_FUNCTION_CODE (callee) == fcode)
2631 gsi_next (&i);
2635 /* Delete unreachable blocks. */
2636 if (cfg_changed)
2637 todoflags |= TODO_cleanup_cfg;
2639 return todoflags;
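/* A small example (added for illustration; N is a hypothetical global):

     extern int n;

     int
     f (void)
     {
       return __builtin_constant_p (n);
     }

   If the __builtin_constant_p call has not been folded to 1 by the time
   this late pass runs, the BUILT_IN_CONSTANT_P case above resolves it to
   the constant 0, on the assumption that a value not proven constant by
   now never will be.  */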
2643 namespace {
2645 const pass_data pass_data_fold_builtins =
2647 GIMPLE_PASS, /* type */
2648 "fab", /* name */
2649 OPTGROUP_NONE, /* optinfo_flags */
2650 false, /* has_gate */
2651 true, /* has_execute */
2652 TV_NONE, /* tv_id */
2653 ( PROP_cfg | PROP_ssa ), /* properties_required */
2654 0, /* properties_provided */
2655 0, /* properties_destroyed */
2656 0, /* todo_flags_start */
2657 ( TODO_verify_ssa | TODO_update_ssa ), /* todo_flags_finish */
2660 class pass_fold_builtins : public gimple_opt_pass
2662 public:
2663 pass_fold_builtins (gcc::context *ctxt)
2664 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
2667 /* opt_pass methods: */
2668 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
2669 unsigned int execute () { return execute_fold_all_builtins (); }
2671 }; // class pass_fold_builtins
2673 } // anon namespace
2675 gimple_opt_pass *
2676 make_pass_fold_builtins (gcc::context *ctxt)
2678 return new pass_fold_builtins (ctxt);