1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000-2014 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 3, or (at your option) any
11 later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26    a four-level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
57    propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71    values as often as possible, it uses two main shortcuts:
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
84    that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
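   As an illustrative sketch (not taken from any particular test
   case), given

     x_1 = 4;
     y_2 = x_1 + 1;
     if (y_2 > 10) ...

   the pass records CONSTANT 4 for x_1, folds the addition to
   CONSTANT 5 for y_2, and evaluates the predicate to false, so the
   edge for the true arm is marked not executable.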
101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
107   but this causes no loss of information since these variables will
108 never be extended.
110 References:
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "tm.h"
125 #include "tree.h"
126 #include "stor-layout.h"
127 #include "flags.h"
128 #include "tm_p.h"
129 #include "basic-block.h"
130 #include "function.h"
131 #include "gimple-pretty-print.h"
132 #include "hash-table.h"
133 #include "tree-ssa-alias.h"
134 #include "internal-fn.h"
135 #include "gimple-fold.h"
136 #include "tree-eh.h"
137 #include "gimple-expr.h"
138 #include "is-a.h"
139 #include "gimple.h"
140 #include "gimplify.h"
141 #include "gimple-iterator.h"
142 #include "gimple-ssa.h"
143 #include "tree-cfg.h"
144 #include "tree-phinodes.h"
145 #include "ssa-iterators.h"
146 #include "stringpool.h"
147 #include "tree-ssanames.h"
148 #include "tree-pass.h"
149 #include "tree-ssa-propagate.h"
150 #include "value-prof.h"
151 #include "langhooks.h"
152 #include "target.h"
153 #include "diagnostic-core.h"
154 #include "dbgcnt.h"
155 #include "params.h"
156 #include "wide-int-print.h"
157 #include "builtins.h"
160 /* Possible lattice values. */
161 typedef enum
163 UNINITIALIZED,
164 UNDEFINED,
165 CONSTANT,
166 VARYING
167 } ccp_lattice_t;
169 struct prop_value_d {
170 /* Lattice value. */
171 ccp_lattice_t lattice_val;
173 /* Propagated value. */
174 tree value;
176 /* Mask that applies to the propagated value during CCP. For X
177 with a CONSTANT lattice value X & ~mask == value & ~mask. The
178 zero bits in the mask cover constant values. The ones mean no
179 information. */
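  /* An illustrative example: value 0x8 with mask 0x3 represents the
     set { 8, 9, 10, 11 }: the two low bits are unknown and every
     other bit is known to match 0x8.  */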
180 widest_int mask;
183 typedef struct prop_value_d prop_value_t;
185 /* Array of propagated constant values. After propagation,
186 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
187 the constant is held in an SSA name representing a memory store
188 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
189 memory reference used to store (i.e., the LHS of the assignment
190 doing the store). */
191 static prop_value_t *const_val;
192 static unsigned n_const_val;
194 static void canonicalize_value (prop_value_t *);
195 static bool ccp_fold_stmt (gimple_stmt_iterator *);
197 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
199 static void
200 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
202 switch (val.lattice_val)
204 case UNINITIALIZED:
205 fprintf (outf, "%sUNINITIALIZED", prefix);
206 break;
207 case UNDEFINED:
208 fprintf (outf, "%sUNDEFINED", prefix);
209 break;
210 case VARYING:
211 fprintf (outf, "%sVARYING", prefix);
212 break;
213 case CONSTANT:
214 if (TREE_CODE (val.value) != INTEGER_CST
215 || val.mask == 0)
217 fprintf (outf, "%sCONSTANT ", prefix);
218 print_generic_expr (outf, val.value, dump_flags);
220 else
222 widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
223 val.mask);
224 fprintf (outf, "%sCONSTANT ", prefix);
225 print_hex (cval, outf);
226 fprintf (outf, " (");
227 print_hex (val.mask, outf);
228 fprintf (outf, ")");
230 break;
231 default:
232 gcc_unreachable ();
237 /* Print lattice value VAL to stderr. */
239 void debug_lattice_value (prop_value_t val);
241 DEBUG_FUNCTION void
242 debug_lattice_value (prop_value_t val)
244 dump_lattice_value (stderr, "", val);
245 fprintf (stderr, "\n");
248 /* Extend NONZERO_BITS to a full mask, with the upper bits being set. */
250 static widest_int
251 extend_mask (const wide_int &nonzero_bits)
253 return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
254 | widest_int::from (nonzero_bits, UNSIGNED));
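/* As a hedged example, assuming a 16-bit NONZERO_BITS of 0x00ff: the
   resulting mask has the low 8 bits set (unknown), bits 8-15 clear
   (known zero, since the combined value is zero) and all bits above
   the precision set, as nothing is known about the extension.  */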
257 /* Compute a default value for variable VAR and store it in the
258 CONST_VAL array. The following rules are used to get default
259 values:
261 1- Global and static variables that are declared constant are
262 considered CONSTANT.
264 2- Any other value is considered UNDEFINED. This is useful when
265 considering PHI nodes. PHI arguments that are undefined do not
266 change the constant value of the PHI node, which allows for more
267 constants to be propagated.
269 3- Variables defined by statements other than assignments and PHI
270 nodes are considered VARYING.
272 4- Initial values of variables that are not GIMPLE registers are
273 considered VARYING. */
275 static prop_value_t
276 get_default_value (tree var)
278 prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
279 gimple stmt;
281 stmt = SSA_NAME_DEF_STMT (var);
283 if (gimple_nop_p (stmt))
285 /* Variables defined by an empty statement are those used
286 before being initialized. If VAR is a local variable, we
287 can assume initially that it is UNDEFINED, otherwise we must
288 consider it VARYING. */
289 if (!virtual_operand_p (var)
290 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
291 val.lattice_val = UNDEFINED;
292 else
294 val.lattice_val = VARYING;
295 val.mask = -1;
296 if (flag_tree_bit_ccp)
298 wide_int nonzero_bits = get_nonzero_bits (var);
299 if (nonzero_bits != -1)
301 val.lattice_val = CONSTANT;
302 val.value = build_zero_cst (TREE_TYPE (var));
303 val.mask = extend_mask (nonzero_bits);
308 else if (is_gimple_assign (stmt))
310 tree cst;
311 if (gimple_assign_single_p (stmt)
312 && DECL_P (gimple_assign_rhs1 (stmt))
313 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
315 val.lattice_val = CONSTANT;
316 val.value = cst;
318 else
320 /* Any other variable defined by an assignment is considered
321 UNDEFINED. */
322 val.lattice_val = UNDEFINED;
325 else if ((is_gimple_call (stmt)
326 && gimple_call_lhs (stmt) != NULL_TREE)
327 || gimple_code (stmt) == GIMPLE_PHI)
329 /* A variable defined by a call or a PHI node is considered
330 UNDEFINED. */
331 val.lattice_val = UNDEFINED;
333 else
335 /* Otherwise, VAR will never take on a constant value. */
336 val.lattice_val = VARYING;
337 val.mask = -1;
340 return val;
344 /* Get the constant value associated with variable VAR. */
346 static inline prop_value_t *
347 get_value (tree var)
349 prop_value_t *val;
351 if (const_val == NULL
352 || SSA_NAME_VERSION (var) >= n_const_val)
353 return NULL;
355 val = &const_val[SSA_NAME_VERSION (var)];
356 if (val->lattice_val == UNINITIALIZED)
357 *val = get_default_value (var);
359 canonicalize_value (val);
361 return val;
364 /* Return the constant tree value associated with VAR. */
366 static inline tree
367 get_constant_value (tree var)
369 prop_value_t *val;
370 if (TREE_CODE (var) != SSA_NAME)
372 if (is_gimple_min_invariant (var))
373 return var;
374 return NULL_TREE;
376 val = get_value (var);
377 if (val
378 && val->lattice_val == CONSTANT
379 && (TREE_CODE (val->value) != INTEGER_CST
380 || val->mask == 0))
381 return val->value;
382 return NULL_TREE;
385 /* Sets the value associated with VAR to VARYING. */
387 static inline void
388 set_value_varying (tree var)
390 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
392 val->lattice_val = VARYING;
393 val->value = NULL_TREE;
394 val->mask = -1;
397 /* For float types, modify the value of VAL to make ccp work correctly
398 for non-standard values (-0, NaN):
400 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
401 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
402 This is to fix the following problem (see PR 29921): Suppose we have
404 x = 0.0 * y
406 and we set value of y to NaN. This causes value of x to be set to NaN.
407 When we later determine that y is in fact VARYING, fold uses the fact
408 that HONOR_NANS is false, and we try to change the value of x to 0,
409 causing an ICE. With HONOR_NANS being false, the real appearance of
410 NaN would cause undefined behavior, though, so claiming that y (and x)
411 are UNDEFINED initially is correct.
413 For other constants, make sure to drop TREE_OVERFLOW. */
415 static void
416 canonicalize_value (prop_value_t *val)
418 enum machine_mode mode;
419 tree type;
420 REAL_VALUE_TYPE d;
422 if (val->lattice_val != CONSTANT)
423 return;
425 if (TREE_OVERFLOW_P (val->value))
426 val->value = drop_tree_overflow (val->value);
428 if (TREE_CODE (val->value) != REAL_CST)
429 return;
431 d = TREE_REAL_CST (val->value);
432 type = TREE_TYPE (val->value);
433 mode = TYPE_MODE (type);
435 if (!HONOR_SIGNED_ZEROS (mode)
436 && REAL_VALUE_MINUS_ZERO (d))
438 val->value = build_real (type, dconst0);
439 return;
442 if (!HONOR_NANS (mode)
443 && REAL_VALUE_ISNAN (d))
445 val->lattice_val = UNDEFINED;
446 val->value = NULL;
447 return;
451 /* Return whether the lattice transition is valid. */
453 static bool
454 valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
456 /* Lattice transitions must always be monotonically increasing in
457 value. */
458 if (old_val.lattice_val < new_val.lattice_val)
459 return true;
461 if (old_val.lattice_val != new_val.lattice_val)
462 return false;
464 if (!old_val.value && !new_val.value)
465 return true;
467 /* Now both lattice values are CONSTANT. */
469 /* Allow transitioning from PHI <&x, not executable> == &x
470 to PHI <&x, &y> == common alignment. */
471 if (TREE_CODE (old_val.value) != INTEGER_CST
472 && TREE_CODE (new_val.value) == INTEGER_CST)
473 return true;
475 /* Bit-lattices have to agree in the still valid bits. */
476 if (TREE_CODE (old_val.value) == INTEGER_CST
477 && TREE_CODE (new_val.value) == INTEGER_CST)
478 return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
479 == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
481 /* Otherwise constant values have to agree. */
482 return operand_equal_p (old_val.value, new_val.value, 0);
485 /* Set the value for variable VAR to NEW_VAL. Return true if the new
486 value is different from VAR's previous value. */
488 static bool
489 set_lattice_value (tree var, prop_value_t new_val)
491 /* We can deal with old UNINITIALIZED values just fine here. */
492 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
494 canonicalize_value (&new_val);
496 /* We have to be careful to not go up the bitwise lattice
497 represented by the mask.
498 ??? This doesn't seem to be the best place to enforce this. */
499 if (new_val.lattice_val == CONSTANT
500 && old_val->lattice_val == CONSTANT
501 && TREE_CODE (new_val.value) == INTEGER_CST
502 && TREE_CODE (old_val->value) == INTEGER_CST)
504 widest_int diff = (wi::to_widest (new_val.value)
505 ^ wi::to_widest (old_val->value));
506 new_val.mask = new_val.mask | old_val->mask | diff;
509 gcc_assert (valid_lattice_transition (*old_val, new_val));
511 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
512 caller that this was a non-transition. */
513 if (old_val->lattice_val != new_val.lattice_val
514 || (new_val.lattice_val == CONSTANT
515 && TREE_CODE (new_val.value) == INTEGER_CST
516 && (TREE_CODE (old_val->value) != INTEGER_CST
517 || new_val.mask != old_val->mask)))
519 /* ??? We would like to delay creation of INTEGER_CSTs from
520 partially constants here. */
522 if (dump_file && (dump_flags & TDF_DETAILS))
524 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
525 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
528 *old_val = new_val;
530 gcc_assert (new_val.lattice_val != UNINITIALIZED);
531 return true;
534 return false;
537 static prop_value_t get_value_for_expr (tree, bool);
538 static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
539 static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
540 tree, const widest_int &, const widest_int &,
541 tree, const widest_int &, const widest_int &);
543 /* Return a widest_int that can be used for bitwise simplifications
544 from VAL. */
546 static widest_int
547 value_to_wide_int (prop_value_t val)
549 if (val.value
550 && TREE_CODE (val.value) == INTEGER_CST)
551 return wi::to_widest (val.value);
553 return 0;
556 /* Return the value for the address expression EXPR based on alignment
557 information. */
559 static prop_value_t
560 get_value_from_alignment (tree expr)
562 tree type = TREE_TYPE (expr);
563 prop_value_t val;
564 unsigned HOST_WIDE_INT bitpos;
565 unsigned int align;
567 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
569 get_pointer_alignment_1 (expr, &align, &bitpos);
570 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
571 ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
572 : -1).and_not (align / BITS_PER_UNIT - 1);
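  /* Illustrative example: for a pointer known to be 8-byte aligned at
     a byte offset of 2, the three low mask bits are cleared (known)
     and the value below becomes 2.  */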
573 val.lattice_val = val.mask == -1 ? VARYING : CONSTANT;
574 if (val.lattice_val == CONSTANT)
575 val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
576 else
577 val.value = NULL_TREE;
579 return val;
582 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
583 return constant bits extracted from alignment information for
584 invariant addresses. */
586 static prop_value_t
587 get_value_for_expr (tree expr, bool for_bits_p)
589 prop_value_t val;
591 if (TREE_CODE (expr) == SSA_NAME)
593 val = *get_value (expr);
594 if (for_bits_p
595 && val.lattice_val == CONSTANT
596 && TREE_CODE (val.value) == ADDR_EXPR)
597 val = get_value_from_alignment (val.value);
599 else if (is_gimple_min_invariant (expr)
600 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
602 val.lattice_val = CONSTANT;
603 val.value = expr;
604 val.mask = 0;
605 canonicalize_value (&val);
607 else if (TREE_CODE (expr) == ADDR_EXPR)
608 val = get_value_from_alignment (expr);
609 else
611 val.lattice_val = VARYING;
612 val.mask = -1;
613 val.value = NULL_TREE;
615 return val;
618 /* Return the likely CCP lattice value for STMT.
620 If STMT has no operands, then return CONSTANT.
622    Else if undefinedness of operands of STMT causes its value to be
623 undefined, then return UNDEFINED.
625 Else if any operands of STMT are constants, then return CONSTANT.
627 Else return VARYING. */
629 static ccp_lattice_t
630 likely_value (gimple stmt)
632 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
633 tree use;
634 ssa_op_iter iter;
635 unsigned i;
637 enum gimple_code code = gimple_code (stmt);
639 /* This function appears to be called only for assignments, calls,
640 conditionals, and switches, due to the logic in visit_stmt. */
641 gcc_assert (code == GIMPLE_ASSIGN
642 || code == GIMPLE_CALL
643 || code == GIMPLE_COND
644 || code == GIMPLE_SWITCH);
646 /* If the statement has volatile operands, it won't fold to a
647 constant value. */
648 if (gimple_has_volatile_ops (stmt))
649 return VARYING;
651 /* Arrive here for more complex cases. */
652 has_constant_operand = false;
653 has_undefined_operand = false;
654 all_undefined_operands = true;
655 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
657 prop_value_t *val = get_value (use);
659 if (val->lattice_val == UNDEFINED)
660 has_undefined_operand = true;
661 else
662 all_undefined_operands = false;
664 if (val->lattice_val == CONSTANT)
665 has_constant_operand = true;
668   /* There may be constants in regular rhs operands.  For calls we
669      have to skip the lhs, fndecl and static chain; otherwise we
670      skip only the lhs.  */
671 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
672 i < gimple_num_ops (stmt); ++i)
674 tree op = gimple_op (stmt, i);
675 if (!op || TREE_CODE (op) == SSA_NAME)
676 continue;
677 if (is_gimple_min_invariant (op))
678 has_constant_operand = true;
681 if (has_constant_operand)
682 all_undefined_operands = false;
684 if (has_undefined_operand
685 && code == GIMPLE_CALL
686 && gimple_call_internal_p (stmt))
687 switch (gimple_call_internal_fn (stmt))
689       /* These 3 builtins use the first argument just as a magic
690          way to find out a decl uid.  */
691 case IFN_GOMP_SIMD_LANE:
692 case IFN_GOMP_SIMD_VF:
693 case IFN_GOMP_SIMD_LAST_LANE:
694 has_undefined_operand = false;
695 break;
696 default:
697 break;
700 /* If the operation combines operands like COMPLEX_EXPR make sure to
701 not mark the result UNDEFINED if only one part of the result is
702 undefined. */
703 if (has_undefined_operand && all_undefined_operands)
704 return UNDEFINED;
705 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
707 switch (gimple_assign_rhs_code (stmt))
709 /* Unary operators are handled with all_undefined_operands. */
710 case PLUS_EXPR:
711 case MINUS_EXPR:
712 case POINTER_PLUS_EXPR:
713 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
714 Not bitwise operators, one VARYING operand may specify the
715 result completely. Not logical operators for the same reason.
716 Not COMPLEX_EXPR as one VARYING operand makes the result partly
717 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
718 the undefined operand may be promoted. */
719 return UNDEFINED;
721 case ADDR_EXPR:
722 /* If any part of an address is UNDEFINED, like the index
723 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
724 return UNDEFINED;
726 default:
730 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
731 fall back to CONSTANT. During iteration UNDEFINED may still drop
732 to CONSTANT. */
733 if (has_undefined_operand)
734 return CONSTANT;
736 /* We do not consider virtual operands here -- load from read-only
737 memory may have only VARYING virtual operands, but still be
738 constant. */
739 if (has_constant_operand
740 || gimple_references_memory_p (stmt))
741 return CONSTANT;
743 return VARYING;
746 /* Returns true if STMT cannot be constant. */
748 static bool
749 surely_varying_stmt_p (gimple stmt)
751 /* If the statement has operands that we cannot handle, it cannot be
752 constant. */
753 if (gimple_has_volatile_ops (stmt))
754 return true;
756   /* If it is a call and does not return a value, or is neither a
757      builtin, an indirect call, nor a call to a function with the
758      assume_aligned/alloc_align attribute, it is varying.  */
759 if (is_gimple_call (stmt))
761 tree fndecl, fntype = gimple_call_fntype (stmt);
762 if (!gimple_call_lhs (stmt)
763 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
764 && !DECL_BUILT_IN (fndecl)
765 && !lookup_attribute ("assume_aligned",
766 TYPE_ATTRIBUTES (fntype))
767 && !lookup_attribute ("alloc_align",
768 TYPE_ATTRIBUTES (fntype))))
769 return true;
772 /* Any other store operation is not interesting. */
773 else if (gimple_vdef (stmt))
774 return true;
776 /* Anything other than assignments and conditional jumps are not
777 interesting for CCP. */
778 if (gimple_code (stmt) != GIMPLE_ASSIGN
779 && gimple_code (stmt) != GIMPLE_COND
780 && gimple_code (stmt) != GIMPLE_SWITCH
781 && gimple_code (stmt) != GIMPLE_CALL)
782 return true;
784 return false;
787 /* Initialize local data structures for CCP. */
789 static void
790 ccp_initialize (void)
792 basic_block bb;
794 n_const_val = num_ssa_names;
795 const_val = XCNEWVEC (prop_value_t, n_const_val);
797 /* Initialize simulation flags for PHI nodes and statements. */
798 FOR_EACH_BB_FN (bb, cfun)
800 gimple_stmt_iterator i;
802 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
804 gimple stmt = gsi_stmt (i);
805 bool is_varying;
807           /* If the statement is a control insn, then we always
808              want to simulate it at least once; otherwise its
809              outgoing edges will never get added.  */
810 if (stmt_ends_bb_p (stmt))
811 is_varying = false;
812 else
813 is_varying = surely_varying_stmt_p (stmt);
815 if (is_varying)
817 tree def;
818 ssa_op_iter iter;
820 /* If the statement will not produce a constant, mark
821 all its outputs VARYING. */
822 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
823 set_value_varying (def);
825 prop_set_simulate_again (stmt, !is_varying);
829 /* Now process PHI nodes. We never clear the simulate_again flag on
830 phi nodes, since we do not know which edges are executable yet,
831 except for phi nodes for virtual operands when we do not do store ccp. */
832 FOR_EACH_BB_FN (bb, cfun)
834 gimple_stmt_iterator i;
836 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
838 gimple phi = gsi_stmt (i);
840 if (virtual_operand_p (gimple_phi_result (phi)))
841 prop_set_simulate_again (phi, false);
842 else
843 prop_set_simulate_again (phi, true);
848 /* Debug count support.  Reset the values of ssa names to
849    VARYING when the total number of ssa names analyzed is
850    beyond the specified debug count.  */
852 static void
853 do_dbg_cnt (void)
855 unsigned i;
856 for (i = 0; i < num_ssa_names; i++)
858 if (!dbg_cnt (ccp))
860 const_val[i].lattice_val = VARYING;
861 const_val[i].mask = -1;
862 const_val[i].value = NULL_TREE;
868 /* Do final substitution of propagated values, cleanup the flowgraph and
869 free allocated storage.
871 Return TRUE when something was optimized. */
873 static bool
874 ccp_finalize (void)
876 bool something_changed;
877 unsigned i;
879 do_dbg_cnt ();
881 /* Derive alignment and misalignment information from partially
882 constant pointers in the lattice or nonzero bits from partially
883 constant integers. */
884 for (i = 1; i < num_ssa_names; ++i)
886 tree name = ssa_name (i);
887 prop_value_t *val;
888 unsigned int tem, align;
890 if (!name
891 || (!POINTER_TYPE_P (TREE_TYPE (name))
892 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
893 /* Don't record nonzero bits before IPA to avoid
894 using too much memory. */
895 || first_pass_instance)))
896 continue;
898 val = get_value (name);
899 if (val->lattice_val != CONSTANT
900 || TREE_CODE (val->value) != INTEGER_CST)
901 continue;
903 if (POINTER_TYPE_P (TREE_TYPE (name)))
905 /* Trailing mask bits specify the alignment, trailing value
906 bits the misalignment. */
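          /* Illustrative example: mask ...11111000 with value 0b100
             gives align == 8 and a recorded misalignment of 4.  */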
907 tem = val->mask.to_uhwi ();
908 align = (tem & -tem);
909 if (align > 1)
910 set_ptr_info_alignment (get_ptr_info (name), align,
911 (TREE_INT_CST_LOW (val->value)
912 & (align - 1)));
914 else
916 unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
917 wide_int nonzero_bits = wide_int::from (val->mask, precision,
918 UNSIGNED) | val->value;
919 nonzero_bits &= get_nonzero_bits (name);
920 set_nonzero_bits (name, nonzero_bits);
924 /* Perform substitutions based on the known constant values. */
925 something_changed = substitute_and_fold (get_constant_value,
926 ccp_fold_stmt, true);
928 free (const_val);
929 const_val = NULL;
930   return something_changed;
934 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
935 in VAL1.
937 any M UNDEFINED = any
938 any M VARYING = VARYING
939 Ci M Cj = Ci if (i == j)
940 Ci M Cj = VARYING if (i != j)
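   For INTEGER_CST operands the bit-lattice below refines this:
   e.g. (illustrative) CONSTANT 4 M CONSTANT 5 need not drop all the
   way to VARYING; the differing bit is masked instead, yielding
   value 4 with mask 1, i.e. "4 or 5 with the low bit unknown".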
943 static void
944 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
946 if (val1->lattice_val == UNDEFINED)
948 /* UNDEFINED M any = any */
949 *val1 = *val2;
951 else if (val2->lattice_val == UNDEFINED)
953 /* any M UNDEFINED = any
954 Nothing to do. VAL1 already contains the value we want. */
957 else if (val1->lattice_val == VARYING
958 || val2->lattice_val == VARYING)
960 /* any M VARYING = VARYING. */
961 val1->lattice_val = VARYING;
962 val1->mask = -1;
963 val1->value = NULL_TREE;
965 else if (val1->lattice_val == CONSTANT
966 && val2->lattice_val == CONSTANT
967 && TREE_CODE (val1->value) == INTEGER_CST
968 && TREE_CODE (val2->value) == INTEGER_CST)
970 /* Ci M Cj = Ci if (i == j)
971 Ci M Cj = VARYING if (i != j)
973 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
974 drop to varying. */
975 val1->mask = (val1->mask | val2->mask
976 | (wi::to_widest (val1->value)
977 ^ wi::to_widest (val2->value)));
978 if (val1->mask == -1)
980 val1->lattice_val = VARYING;
981 val1->value = NULL_TREE;
984 else if (val1->lattice_val == CONSTANT
985 && val2->lattice_val == CONSTANT
986 && simple_cst_equal (val1->value, val2->value) == 1)
988 /* Ci M Cj = Ci if (i == j)
989 Ci M Cj = VARYING if (i != j)
991 VAL1 already contains the value we want for equivalent values. */
993 else if (val1->lattice_val == CONSTANT
994 && val2->lattice_val == CONSTANT
995 && (TREE_CODE (val1->value) == ADDR_EXPR
996 || TREE_CODE (val2->value) == ADDR_EXPR))
998 /* When not equal addresses are involved try meeting for
999 alignment. */
1000 prop_value_t tem = *val2;
1001 if (TREE_CODE (val1->value) == ADDR_EXPR)
1002 *val1 = get_value_for_expr (val1->value, true);
1003 if (TREE_CODE (val2->value) == ADDR_EXPR)
1004 tem = get_value_for_expr (val2->value, true);
1005 ccp_lattice_meet (val1, &tem);
1007 else
1009 /* Any other combination is VARYING. */
1010 val1->lattice_val = VARYING;
1011 val1->mask = -1;
1012 val1->value = NULL_TREE;
1017 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
1018 lattice values to determine PHI_NODE's lattice value. The value of a
1019    PHI node is determined by calling ccp_lattice_meet with all the arguments
1020 of the PHI node that are incoming via executable edges. */
1022 static enum ssa_prop_result
1023 ccp_visit_phi_node (gimple phi)
1025 unsigned i;
1026 prop_value_t *old_val, new_val;
1028 if (dump_file && (dump_flags & TDF_DETAILS))
1030 fprintf (dump_file, "\nVisiting PHI node: ");
1031 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1034 old_val = get_value (gimple_phi_result (phi));
1035 switch (old_val->lattice_val)
1037 case VARYING:
1038 return SSA_PROP_VARYING;
1040 case CONSTANT:
1041 new_val = *old_val;
1042 break;
1044 case UNDEFINED:
1045 new_val.lattice_val = UNDEFINED;
1046 new_val.value = NULL_TREE;
1047 break;
1049 default:
1050 gcc_unreachable ();
1053 for (i = 0; i < gimple_phi_num_args (phi); i++)
1055 /* Compute the meet operator over all the PHI arguments flowing
1056 through executable edges. */
1057 edge e = gimple_phi_arg_edge (phi, i);
1059 if (dump_file && (dump_flags & TDF_DETAILS))
1061 fprintf (dump_file,
1062 "\n Argument #%d (%d -> %d %sexecutable)\n",
1063 i, e->src->index, e->dest->index,
1064 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1067       /* If the incoming edge is executable, compute the meet operator for
1068 the existing value of the PHI node and the current PHI argument. */
1069 if (e->flags & EDGE_EXECUTABLE)
1071 tree arg = gimple_phi_arg (phi, i)->def;
1072 prop_value_t arg_val = get_value_for_expr (arg, false);
1074 ccp_lattice_meet (&new_val, &arg_val);
1076 if (dump_file && (dump_flags & TDF_DETAILS))
1078 fprintf (dump_file, "\t");
1079 print_generic_expr (dump_file, arg, dump_flags);
1080 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1081 fprintf (dump_file, "\n");
1084 if (new_val.lattice_val == VARYING)
1085 break;
1089 if (dump_file && (dump_flags & TDF_DETAILS))
1091 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1092 fprintf (dump_file, "\n\n");
1095 /* Make the transition to the new value. */
1096 if (set_lattice_value (gimple_phi_result (phi), new_val))
1098 if (new_val.lattice_val == VARYING)
1099 return SSA_PROP_VARYING;
1100 else
1101 return SSA_PROP_INTERESTING;
1103 else
1104 return SSA_PROP_NOT_INTERESTING;
1107 /* Return the constant value for OP or OP otherwise. */
1109 static tree
1110 valueize_op (tree op)
1112 if (TREE_CODE (op) == SSA_NAME)
1114 tree tem = get_constant_value (op);
1115 if (tem)
1116 return tem;
1118 return op;
1121 /* CCP specific front-end to the non-destructive constant folding
1122 routines.
1124 Attempt to simplify the RHS of STMT knowing that one or more
1125 operands are constants.
1127 If simplification is possible, return the simplified RHS,
1128 otherwise return the original RHS or NULL_TREE. */
1130 static tree
1131 ccp_fold (gimple stmt)
1133 location_t loc = gimple_location (stmt);
1134 switch (gimple_code (stmt))
1136 case GIMPLE_COND:
1138 /* Handle comparison operators that can appear in GIMPLE form. */
1139 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1140 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1141 enum tree_code code = gimple_cond_code (stmt);
1142 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1145 case GIMPLE_SWITCH:
1147 /* Return the constant switch index. */
1148 return valueize_op (gimple_switch_index (stmt));
1151 case GIMPLE_ASSIGN:
1152 case GIMPLE_CALL:
1153 return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
1155 default:
1156 gcc_unreachable ();
1160 /* Apply the operation CODE in type TYPE to the value, mask pair
1161 RVAL and RMASK representing a value of type RTYPE and set
1162 the value, mask pair *VAL and *MASK to the result. */
1164 static void
1165 bit_value_unop_1 (enum tree_code code, tree type,
1166 widest_int *val, widest_int *mask,
1167 tree rtype, const widest_int &rval, const widest_int &rmask)
1169 switch (code)
1171 case BIT_NOT_EXPR:
1172 *mask = rmask;
1173 *val = ~rval;
1174 break;
1176 case NEGATE_EXPR:
1178 widest_int temv, temm;
1179 /* Return ~rval + 1. */
1180 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1181 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1182 type, temv, temm, type, 1, 0);
1183 break;
1186 CASE_CONVERT:
1188 signop sgn;
1190 /* First extend mask and value according to the original type. */
1191 sgn = TYPE_SIGN (rtype);
1192 *mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
1193 *val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);
1195 /* Then extend mask and value according to the target type. */
1196 sgn = TYPE_SIGN (type);
1197 *mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
1198 *val = wi::ext (*val, TYPE_PRECISION (type), sgn);
1199 break;
1202 default:
1203 *mask = -1;
1204 break;
1208 /* Apply the operation CODE in type TYPE to the value, mask pairs
1209    R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1210 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1212 static void
1213 bit_value_binop_1 (enum tree_code code, tree type,
1214 widest_int *val, widest_int *mask,
1215 tree r1type, const widest_int &r1val,
1216 const widest_int &r1mask, tree r2type,
1217 const widest_int &r2val, const widest_int &r2mask)
1219 signop sgn = TYPE_SIGN (type);
1220 int width = TYPE_PRECISION (type);
1221 bool swap_p = false;
1223 /* Assume we'll get a constant result. Use an initial non varying
1224 value, we fall back to varying in the end if necessary. */
1225 *mask = -1;
1227 switch (code)
1229 case BIT_AND_EXPR:
1230 /* The mask is constant where there is a known not
1231 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1232 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1233 *val = r1val & r2val;
1234 break;
1236 case BIT_IOR_EXPR:
1237 /* The mask is constant where there is a known
1238 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1239 *mask = (r1mask | r2mask)
1240 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1241 *val = r1val | r2val;
1242 break;
1244 case BIT_XOR_EXPR:
1245 /* m1 | m2 */
1246 *mask = r1mask | r2mask;
1247 *val = r1val ^ r2val;
1248 break;
1250 case LROTATE_EXPR:
1251 case RROTATE_EXPR:
1252 if (r2mask == 0)
1254 widest_int shift = r2val;
1255 if (shift == 0)
1257 *mask = r1mask;
1258 *val = r1val;
1260 else
1262 if (wi::neg_p (shift))
1264 shift = -shift;
1265 if (code == RROTATE_EXPR)
1266 code = LROTATE_EXPR;
1267 else
1268 code = RROTATE_EXPR;
1270 if (code == RROTATE_EXPR)
1272 *mask = wi::rrotate (r1mask, shift, width);
1273 *val = wi::rrotate (r1val, shift, width);
1275 else
1277 *mask = wi::lrotate (r1mask, shift, width);
1278 *val = wi::lrotate (r1val, shift, width);
1282 break;
1284 case LSHIFT_EXPR:
1285 case RSHIFT_EXPR:
1286 /* ??? We can handle partially known shift counts if we know
1287 its sign. That way we can tell that (x << (y | 8)) & 255
1288 is zero. */
1289 if (r2mask == 0)
1291 widest_int shift = r2val;
1292 if (shift == 0)
1294 *mask = r1mask;
1295 *val = r1val;
1297 else
1299 if (wi::neg_p (shift))
1301 shift = -shift;
1302 if (code == RSHIFT_EXPR)
1303 code = LSHIFT_EXPR;
1304 else
1305 code = RSHIFT_EXPR;
1307 if (code == RSHIFT_EXPR)
1309 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1310 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
1312 else
1314 *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
1315 *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
1319 break;
1321 case PLUS_EXPR:
1322 case POINTER_PLUS_EXPR:
1324 /* Do the addition with unknown bits set to zero, to give carry-ins of
1325 zero wherever possible. */
1326 widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
1327 lo = wi::ext (lo, width, sgn);
1328 /* Do the addition with unknown bits set to one, to give carry-ins of
1329 one wherever possible. */
1330 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
1331 hi = wi::ext (hi, width, sgn);
1332 /* Each bit in the result is known if (a) the corresponding bits in
1333 both inputs are known, and (b) the carry-in to that bit position
1334 is known. We can check condition (b) by seeing if we got the same
1335 result with minimised carries as with maximised carries. */
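        /* Worked example (illustrative): r1 = {value 4, mask 1}
           (4 or 5) plus r2 = {value 1, mask 0} gives lo = 5 and
           hi = 6, so mask = 1 | 0 | (5 ^ 6) = 3: the result is 5 or 6
           with the two low bits unknown.  */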
1336 *mask = r1mask | r2mask | (lo ^ hi);
1337 *mask = wi::ext (*mask, width, sgn);
1338 /* It shouldn't matter whether we choose lo or hi here. */
1339 *val = lo;
1340 break;
1343 case MINUS_EXPR:
1345 widest_int temv, temm;
1346 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1347 r2type, r2val, r2mask);
1348 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1349 r1type, r1val, r1mask,
1350 r2type, temv, temm);
1351 break;
1354 case MULT_EXPR:
1356 /* Just track trailing zeros in both operands and transfer
1357 them to the other. */
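        /* E.g. (illustrative): if r1 is known to be a multiple of 4
           (two trailing zero bits) and r2 a multiple of 2, the product
           is a multiple of 8, so its three low bits are known zero.  */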
1358 int r1tz = wi::ctz (r1val | r1mask);
1359 int r2tz = wi::ctz (r2val | r2mask);
1360 if (r1tz + r2tz >= width)
1362 *mask = 0;
1363 *val = 0;
1365 else if (r1tz + r2tz > 0)
1367 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
1368 width, sgn);
1369 *val = 0;
1371 break;
1374 case EQ_EXPR:
1375 case NE_EXPR:
1377 widest_int m = r1mask | r2mask;
1378 if (r1val.and_not (m) != r2val.and_not (m))
1380 *mask = 0;
1381 *val = ((code == EQ_EXPR) ? 0 : 1);
1383 else
1385 /* We know the result of a comparison is always one or zero. */
1386 *mask = 1;
1387 *val = 0;
1389 break;
1392 case GE_EXPR:
1393 case GT_EXPR:
1394 swap_p = true;
1395 code = swap_tree_comparison (code);
1396 /* Fall through. */
1397 case LT_EXPR:
1398 case LE_EXPR:
1400 int minmax, maxmin;
1402 const widest_int &o1val = swap_p ? r2val : r1val;
1403 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1404 const widest_int &o2val = swap_p ? r1val : r2val;
1405 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1407 /* If the most significant bits are not known we know nothing. */
1408 if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
1409 break;
1411 /* For comparisons the signedness is in the comparison operands. */
1412 sgn = TYPE_SIGN (r1type);
1414        /* If we know the most significant bits we know the value
1415           ranges by means of treating varying bits as zero
1416 or one. Do a cross comparison of the max/min pairs. */
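        /* Illustrative example: with unsigned o1 = {value 8, mask 3},
           i.e. 8..11, and o2 = {value 16, mask 1}, i.e. 16 or 17, the
           maximum of o1 (11) compares below the minimum of o2 (16),
           so o1 < o2 is known to be true.  */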
1417 maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
1418 minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
1419 if (maxmin < 0) /* o1 is less than o2. */
1421 *mask = 0;
1422 *val = 1;
1424 else if (minmax > 0) /* o1 is not less or equal to o2. */
1426 *mask = 0;
1427 *val = 0;
1429 else if (maxmin == minmax) /* o1 and o2 are equal. */
1431 /* This probably should never happen as we'd have
1432 folded the thing during fully constant value folding. */
1433 *mask = 0;
1434 *val = (code == LE_EXPR ? 1 : 0);
1436 else
1438 /* We know the result of a comparison is always one or zero. */
1439 *mask = 1;
1440 *val = 0;
1442 break;
1445 default:;
1449 /* Return the propagation value when applying the operation CODE to
1450 the value RHS yielding type TYPE. */
1452 static prop_value_t
1453 bit_value_unop (enum tree_code code, tree type, tree rhs)
1455 prop_value_t rval = get_value_for_expr (rhs, true);
1456 widest_int value, mask;
1457 prop_value_t val;
1459 if (rval.lattice_val == UNDEFINED)
1460 return rval;
1462 gcc_assert ((rval.lattice_val == CONSTANT
1463 && TREE_CODE (rval.value) == INTEGER_CST)
1464 || rval.mask == -1);
1465 bit_value_unop_1 (code, type, &value, &mask,
1466 TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
1467 if (mask != -1)
1469 val.lattice_val = CONSTANT;
1470 val.mask = mask;
1471 /* ??? Delay building trees here. */
1472 val.value = wide_int_to_tree (type, value);
1474 else
1476 val.lattice_val = VARYING;
1477 val.value = NULL_TREE;
1478 val.mask = -1;
1480 return val;
1483 /* Return the propagation value when applying the operation CODE to
1484 the values RHS1 and RHS2 yielding type TYPE. */
1486 static prop_value_t
1487 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1489 prop_value_t r1val = get_value_for_expr (rhs1, true);
1490 prop_value_t r2val = get_value_for_expr (rhs2, true);
1491 widest_int value, mask;
1492 prop_value_t val;
1494 if (r1val.lattice_val == UNDEFINED
1495 || r2val.lattice_val == UNDEFINED)
1497 val.lattice_val = VARYING;
1498 val.value = NULL_TREE;
1499 val.mask = -1;
1500 return val;
1503 gcc_assert ((r1val.lattice_val == CONSTANT
1504 && TREE_CODE (r1val.value) == INTEGER_CST)
1505 || r1val.mask == -1);
1506 gcc_assert ((r2val.lattice_val == CONSTANT
1507 && TREE_CODE (r2val.value) == INTEGER_CST)
1508 || r2val.mask == -1);
1509 bit_value_binop_1 (code, type, &value, &mask,
1510 TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
1511 TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
1512 if (mask != -1)
1514 val.lattice_val = CONSTANT;
1515 val.mask = mask;
1516 /* ??? Delay building trees here. */
1517 val.value = wide_int_to_tree (type, value);
1519 else
1521 val.lattice_val = VARYING;
1522 val.value = NULL_TREE;
1523 val.mask = -1;
1525 return val;
1528 /* Return the propagation value for __builtin_assume_aligned
1529 and functions with assume_aligned or alloc_aligned attribute.
1530 For __builtin_assume_aligned, ATTR is NULL_TREE,
1531 for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1532    is false, for alloc_align attribute ATTR is non-NULL and
1533 ALLOC_ALIGNED is true. */
1535 static prop_value_t
1536 bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
1537 bool alloc_aligned)
1539 tree align, misalign = NULL_TREE, type;
1540 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1541 prop_value_t alignval;
1542 widest_int value, mask;
1543 prop_value_t val;
1545 if (attr == NULL_TREE)
1547 tree ptr = gimple_call_arg (stmt, 0);
1548 type = TREE_TYPE (ptr);
1549 ptrval = get_value_for_expr (ptr, true);
1551 else
1553 tree lhs = gimple_call_lhs (stmt);
1554 type = TREE_TYPE (lhs);
1557 if (ptrval.lattice_val == UNDEFINED)
1558 return ptrval;
1559 gcc_assert ((ptrval.lattice_val == CONSTANT
1560 && TREE_CODE (ptrval.value) == INTEGER_CST)
1561 || ptrval.mask == -1);
1562 if (attr == NULL_TREE)
1564 /* Get aligni and misaligni from __builtin_assume_aligned. */
1565 align = gimple_call_arg (stmt, 1);
1566 if (!tree_fits_uhwi_p (align))
1567 return ptrval;
1568 aligni = tree_to_uhwi (align);
1569 if (gimple_call_num_args (stmt) > 2)
1571 misalign = gimple_call_arg (stmt, 2);
1572 if (!tree_fits_uhwi_p (misalign))
1573 return ptrval;
1574 misaligni = tree_to_uhwi (misalign);
1577 else
1579 /* Get aligni and misaligni from assume_aligned or
1580 alloc_align attributes. */
1581 if (TREE_VALUE (attr) == NULL_TREE)
1582 return ptrval;
1583 attr = TREE_VALUE (attr);
1584 align = TREE_VALUE (attr);
1585 if (!tree_fits_uhwi_p (align))
1586 return ptrval;
1587 aligni = tree_to_uhwi (align);
1588 if (alloc_aligned)
1590 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1591 return ptrval;
1592 align = gimple_call_arg (stmt, aligni - 1);
1593 if (!tree_fits_uhwi_p (align))
1594 return ptrval;
1595 aligni = tree_to_uhwi (align);
1597 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1599 misalign = TREE_VALUE (TREE_CHAIN (attr));
1600 if (!tree_fits_uhwi_p (misalign))
1601 return ptrval;
1602 misaligni = tree_to_uhwi (misalign);
1605 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1606 return ptrval;
1608 align = build_int_cst_type (type, -aligni);
1609 alignval = get_value_for_expr (align, true);
1610 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1611 type, value_to_wide_int (ptrval), ptrval.mask,
1612 type, value_to_wide_int (alignval), alignval.mask);
1613 if (mask != -1)
1615 val.lattice_val = CONSTANT;
1616 val.mask = mask;
1617 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1618 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1619 value |= misaligni;
1620 /* ??? Delay building trees here. */
1621 val.value = wide_int_to_tree (type, value);
1623 else
1625 val.lattice_val = VARYING;
1626 val.value = NULL_TREE;
1627 val.mask = -1;
1629 return val;
1632 /* Evaluate statement STMT.
1633 Valid only for assignments, calls, conditionals, and switches. */
1635 static prop_value_t
1636 evaluate_stmt (gimple stmt)
1638 prop_value_t val;
1639 tree simplified = NULL_TREE;
1640 ccp_lattice_t likelyvalue = likely_value (stmt);
1641 bool is_constant = false;
1642 unsigned int align;
1644 if (dump_file && (dump_flags & TDF_DETAILS))
1646 fprintf (dump_file, "which is likely ");
1647 switch (likelyvalue)
1649 case CONSTANT:
1650 fprintf (dump_file, "CONSTANT");
1651 break;
1652 case UNDEFINED:
1653 fprintf (dump_file, "UNDEFINED");
1654 break;
1655 case VARYING:
1656 fprintf (dump_file, "VARYING");
1657 break;
1658 default:;
1660 fprintf (dump_file, "\n");
1663 /* If the statement is likely to have a CONSTANT result, then try
1664 to fold the statement to determine the constant value. */
1665 /* FIXME. This is the only place that we call ccp_fold.
1666 Since likely_value never returns CONSTANT for calls, we will
1667 not attempt to fold them, including builtins that may profit. */
1668 if (likelyvalue == CONSTANT)
1670 fold_defer_overflow_warnings ();
1671 simplified = ccp_fold (stmt);
1672 is_constant = simplified && is_gimple_min_invariant (simplified);
1673 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1674 if (is_constant)
1676 /* The statement produced a constant value. */
1677 val.lattice_val = CONSTANT;
1678 val.value = simplified;
1679 val.mask = 0;
1682 /* If the statement is likely to have a VARYING result, then do not
1683 bother folding the statement. */
1684 else if (likelyvalue == VARYING)
1686 enum gimple_code code = gimple_code (stmt);
1687 if (code == GIMPLE_ASSIGN)
1689 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1691 /* Other cases cannot satisfy is_gimple_min_invariant
1692 without folding. */
1693 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1694 simplified = gimple_assign_rhs1 (stmt);
1696 else if (code == GIMPLE_SWITCH)
1697 simplified = gimple_switch_index (stmt);
1698 else
1699 /* These cannot satisfy is_gimple_min_invariant without folding. */
1700 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1701 is_constant = simplified && is_gimple_min_invariant (simplified);
1702 if (is_constant)
1704 /* The statement produced a constant value. */
1705 val.lattice_val = CONSTANT;
1706 val.value = simplified;
1707 val.mask = 0;
1711 /* Resort to simplification for bitwise tracking. */
1712 if (flag_tree_bit_ccp
1713 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
1714 && !is_constant)
1716 enum gimple_code code = gimple_code (stmt);
1717 val.lattice_val = VARYING;
1718 val.value = NULL_TREE;
1719 val.mask = -1;
1720 if (code == GIMPLE_ASSIGN)
1722 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1723 tree rhs1 = gimple_assign_rhs1 (stmt);
1724 switch (get_gimple_rhs_class (subcode))
1726 case GIMPLE_SINGLE_RHS:
1727 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1728 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1729 val = get_value_for_expr (rhs1, true);
1730 break;
1732 case GIMPLE_UNARY_RHS:
1733 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1734 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1735 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1736 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1737 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1738 break;
1740 case GIMPLE_BINARY_RHS:
1741 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1742 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1744 tree lhs = gimple_assign_lhs (stmt);
1745 tree rhs2 = gimple_assign_rhs2 (stmt);
1746 val = bit_value_binop (subcode,
1747 TREE_TYPE (lhs), rhs1, rhs2);
1749 break;
1751 default:;
1754 else if (code == GIMPLE_COND)
1756 enum tree_code code = gimple_cond_code (stmt);
1757 tree rhs1 = gimple_cond_lhs (stmt);
1758 tree rhs2 = gimple_cond_rhs (stmt);
1759 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1760 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1761 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1763 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1765 tree fndecl = gimple_call_fndecl (stmt);
1766 switch (DECL_FUNCTION_CODE (fndecl))
1768 case BUILT_IN_MALLOC:
1769 case BUILT_IN_REALLOC:
1770 case BUILT_IN_CALLOC:
1771 case BUILT_IN_STRDUP:
1772 case BUILT_IN_STRNDUP:
1773 val.lattice_val = CONSTANT;
1774 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1775 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1776 / BITS_PER_UNIT - 1);
1777 break;
1779 case BUILT_IN_ALLOCA:
1780 case BUILT_IN_ALLOCA_WITH_ALIGN:
1781 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1782 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1783 : BIGGEST_ALIGNMENT);
1784 val.lattice_val = CONSTANT;
1785 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1786 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
1787 break;
1789 /* These builtins return their first argument, unmodified. */
1790 case BUILT_IN_MEMCPY:
1791 case BUILT_IN_MEMMOVE:
1792 case BUILT_IN_MEMSET:
1793 case BUILT_IN_STRCPY:
1794 case BUILT_IN_STRNCPY:
1795 case BUILT_IN_MEMCPY_CHK:
1796 case BUILT_IN_MEMMOVE_CHK:
1797 case BUILT_IN_MEMSET_CHK:
1798 case BUILT_IN_STRCPY_CHK:
1799 case BUILT_IN_STRNCPY_CHK:
1800 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1801 break;
1803 case BUILT_IN_ASSUME_ALIGNED:
1804 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
1805 break;
1807 case BUILT_IN_ALIGNED_ALLOC:
1809 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1810 if (align
1811 && tree_fits_uhwi_p (align))
1813 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1814 if (aligni > 1
1815 /* align must be power-of-two */
1816 && (aligni & (aligni - 1)) == 0)
1818 val.lattice_val = CONSTANT;
1819 val.value = build_int_cst (ptr_type_node, 0);
1820 val.mask = -aligni;
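                  /* Illustrative: aligni == 16 yields mask -16,
                     i.e. the four low bits are known zero, recording
                     16-byte alignment.  */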
1823 break;
1826 default:;
1829 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1831 tree fntype = gimple_call_fntype (stmt);
1832 if (fntype)
1834 tree attrs = lookup_attribute ("assume_aligned",
1835 TYPE_ATTRIBUTES (fntype));
1836 if (attrs)
1837 val = bit_value_assume_aligned (stmt, attrs, val, false);
1838 attrs = lookup_attribute ("alloc_align",
1839 TYPE_ATTRIBUTES (fntype));
1840 if (attrs)
1841 val = bit_value_assume_aligned (stmt, attrs, val, true);
1844 is_constant = (val.lattice_val == CONSTANT);
1847 if (flag_tree_bit_ccp
1848 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1849 || (!is_constant && likelyvalue != UNDEFINED))
1850 && gimple_get_lhs (stmt)
1851 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1853 tree lhs = gimple_get_lhs (stmt);
1854 wide_int nonzero_bits = get_nonzero_bits (lhs);
1855 if (nonzero_bits != -1)
1857 if (!is_constant)
1859 val.lattice_val = CONSTANT;
1860 val.value = build_zero_cst (TREE_TYPE (lhs));
1861 val.mask = extend_mask (nonzero_bits);
1862 is_constant = true;
1864 else
1866 if (wi::bit_and_not (val.value, nonzero_bits) != 0)
1867 val.value = wide_int_to_tree (TREE_TYPE (lhs),
1868 nonzero_bits & val.value);
1869 if (nonzero_bits == 0)
1870 val.mask = 0;
1871 else
1872 val.mask = val.mask & extend_mask (nonzero_bits);
1877 if (!is_constant)
1879 /* The statement produced a nonconstant value. If the statement
1880 had UNDEFINED operands, then the result of the statement
1881 should be UNDEFINED. Otherwise, the statement is VARYING. */
1882 if (likelyvalue == UNDEFINED)
1884 val.lattice_val = likelyvalue;
1885 val.mask = 0;
1887 else
1889 val.lattice_val = VARYING;
1890 val.mask = -1;
1893 val.value = NULL_TREE;
1896 return val;
1899 typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab;
1901 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1902 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1904 static void
1905 insert_clobber_before_stack_restore (tree saved_val, tree var,
1906 gimple_htab *visited)
1908 gimple stmt, clobber_stmt;
1909 tree clobber;
1910 imm_use_iterator iter;
1911 gimple_stmt_iterator i;
1912 gimple *slot;
1914 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1915 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1917 clobber = build_constructor (TREE_TYPE (var),
1918 NULL);
1919 TREE_THIS_VOLATILE (clobber) = 1;
1920 clobber_stmt = gimple_build_assign (var, clobber);
1922 i = gsi_for_stmt (stmt);
1923 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1925 else if (gimple_code (stmt) == GIMPLE_PHI)
1927 if (!visited->is_created ())
1928 visited->create (10);
1930 slot = visited->find_slot (stmt, INSERT);
1931 if (*slot != NULL)
1932 continue;
1934 *slot = stmt;
1935 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1936 visited);
1938 else if (gimple_assign_ssa_name_copy_p (stmt))
1939 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
1940 visited);
1941 else
1942 gcc_assert (is_gimple_debug (stmt));
1945 /* Advance the iterator to the previous non-debug gimple statement in the same
1946 or dominating basic block. */
1948 static inline void
1949 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1951 basic_block dom;
1953 gsi_prev_nondebug (i);
1954 while (gsi_end_p (*i))
1956 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1957 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1958 return;
1960 *i = gsi_last_bb (dom);
1964 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1965 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
1967    It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1968 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
1969 that case the function gives up without inserting the clobbers. */
1971 static void
1972 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1974 gimple stmt;
1975 tree saved_val;
1976 gimple_htab visited;
1978 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
1980 stmt = gsi_stmt (i);
1982 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1983 continue;
1985 saved_val = gimple_call_lhs (stmt);
1986 if (saved_val == NULL_TREE)
1987 continue;
1989 insert_clobber_before_stack_restore (saved_val, var, &visited);
1990 break;
1993 if (visited.is_created ())
1994 visited.dispose ();
1997 /* Detects a __builtin_alloca_with_align with constant size argument. Declares
1998    a fixed-size array and returns its address if found; otherwise returns
1999 NULL_TREE. */
2001 static tree
2002 fold_builtin_alloca_with_align (gimple stmt)
2004 unsigned HOST_WIDE_INT size, threshold, n_elem;
2005 tree lhs, arg, block, var, elem_type, array_type;
2007 /* Get lhs. */
2008 lhs = gimple_call_lhs (stmt);
2009 if (lhs == NULL_TREE)
2010 return NULL_TREE;
2012 /* Detect constant argument. */
2013 arg = get_constant_value (gimple_call_arg (stmt, 0));
2014 if (arg == NULL_TREE
2015 || TREE_CODE (arg) != INTEGER_CST
2016 || !tree_fits_uhwi_p (arg))
2017 return NULL_TREE;
2019 size = tree_to_uhwi (arg);
2021 /* Heuristic: don't fold large allocas. */
2022 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
2023 /* In case the alloca is located at function entry, it has the same lifetime
2024 as a declared array, so we allow a larger size. */
2025 block = gimple_block (stmt);
2026 if (!(cfun->after_inlining
2027 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2028 threshold /= 10;
2029 if (size > threshold)
2030 return NULL_TREE;
2032 /* Declare array. */
2033 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2034 n_elem = size * 8 / BITS_PER_UNIT;
2035 array_type = build_array_type_nelts (elem_type, n_elem);
2036 var = create_tmp_var (array_type, NULL);
2037 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
2039 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2040 if (pi != NULL && !pi->pt.anything)
2042 bool singleton_p;
2043 unsigned uid;
2044 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
2045 gcc_assert (singleton_p);
2046 SET_DECL_PT_UID (var, uid);
2050 /* Fold alloca to the address of the array. */
2051 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
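
/* A hypothetical before/after sketch of the folding above (the SSA name,
   temporary name and alignment operand are invented for the example):

     p_5 = __builtin_alloca_with_align (16, ALIGN);

   with a propagated constant size of 16 conceptually becomes

     char tmp[16];       suitably aligned fixed-size temporary
     p_5 = &tmp;

   replacing the dynamic stack allocation with an ordinary local array.  */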

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || val.mask != 0)
          return false;

        if (dump_file)
          {
            fprintf (dump_file, "Folding predicate ");
            print_gimple_expr (dump_file, stmt, 0, 0);
            fprintf (dump_file, " to ");
            print_generic_expr (dump_file, val.value, 0);
            fprintf (dump_file, "\n");
          }

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        int flags = gimple_call_flags (stmt);
        tree val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs))
            /* Don't optimize away calls that have side-effects.  */
            && (flags & (ECF_CONST|ECF_PURE)) != 0
            && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
          {
            tree new_rhs = unshare_expr (val);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Internal calls provide no argument types, so the extra laxity
           for normal calls does not apply.  */
        if (gimple_call_internal_p (stmt))
          return false;

        /* The heuristic of fold_builtin_alloca_with_align differs before and
           after inlining, so we don't require the arg to be changed into a
           constant for folding, but just to be constant.  */
        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
          {
            tree new_rhs = fold_builtin_alloca_with_align (stmt);
            if (new_rhs)
              {
                bool res = update_call_from_tree (gsi, new_rhs);
                tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
                gcc_assert (res);
                insert_clobbers_for_var (*gsi, var);
                return true;
              }
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}
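
/* For illustration only (the SSA name and block numbers are hypothetical):
   if the lattice proved the controlling value constant, the GIMPLE_COND
   case above rewrites

     if (x_2 != 0) goto <bb 3>; else goto <bb 4>;    with x_2 known zero

   into an always-false condition via gimple_cond_make_false, leaving the
   actual removal of the dead edge and block to later CFG cleanup.  */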

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || val.mask != 0)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, -1 };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
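
/* For illustration only (SSA names and block numbers hypothetical): during
   simulation the visitor above drives the lattice.  Given

     x_1 = 4;                     visit_assignment: x_1 -> CONSTANT 4
     if (x_1 > 3)                 visit_cond_stmt: only the true edge
       goto <bb 3>;                 is marked executable
     else
       goto <bb 4>;

   <bb 4> is never simulated, so PHI nodes at later merge points can ignore
   the values flowing in along that dead edge.  */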

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}
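
/* For illustration only: with this pass enabled (-ftree-ccp, on by default
   at -O1 and above), a function such as

     int f (void)
     {
       int x = 3;
       if (x > 2)
         return x * 10;
       return 0;
     }

   has x replaced by 3, the predicate folded to true, and the dead branch
   removed by the requested TODO_cleanup_cfg, leaving just "return 30;".  */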

namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow a single successor that is the exit block, or no successors
     at all.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
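
/* Illustrative sketch only (names hypothetical): a __builtin_stack_restore
   followed in its block by another restore with no intervening calls or
   asms, e.g. after inlining two callees that each used a VLA,

     __builtin_stack_restore (sp_1);   removed: the next restore wins
     __builtin_stack_restore (sp_2);   kept

   is deleted, as is a restore in a block that falls straight through to
   the function exit, where the epilogue releases the stack anyway.  */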

/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   optimize __builtin_va_end (&ap) out as a NOP, and optimize
   __builtin_va_copy into a simple pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
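
/* For illustration only: on a target whose va_list is a plain char * or
   void * pointer (32-bit x86, for instance; x86-64 is not such a target),
   a sequence like

     va_list ap, ap2;
     va_start (ap, fmt);    becomes: ap = __builtin_next_arg (0);
     va_copy (ap2, ap);     becomes: ap2 = ap;
     va_end (ap);           deleted as a no-op

   is simplified to plain pointer operations by the function above.  */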

/* Attempt to make the block of __builtin_unreachable I unreachable by
   changing the incoming jumps.  Return true if at least one jump was
   changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          /* Verify we do not need to preserve the label.  */
          if (FORCED_LABEL (gimple_label_label (stmt)))
            return false;

          continue;
        }

      /* Only handle the case that __builtin_unreachable is the first statement
         in the block.  We rely on DCE to remove stmts without side-effects
         before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
        return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
        continue;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          if (e->flags & EDGE_TRUE_VALUE)
            gimple_cond_make_false (stmt);
          else if (e->flags & EDGE_FALSE_VALUE)
            gimple_cond_make_true (stmt);
          else
            gcc_unreachable ();
          update_stmt (stmt);
        }
      else
        {
          /* TODO: handle other cases, e.g. a switch statement.  */
          continue;
        }

      ret = true;
    }

  return ret;
}
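
/* Illustrative sketch only: given source such as

     if (p == NULL)
       __builtin_unreachable ();
     use (p);

   the block holding the __builtin_unreachable call has one incoming edge,
   the true edge of the GIMPLE_COND for p == NULL.  optimize_unreachable
   forces that condition false, so the unreachable block loses all incoming
   edges and later CFG cleanup deletes it.  */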

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins

unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
                 after the last GIMPLE DSE they aren't needed and might
                 unnecessarily keep the SSA_NAMEs live.  */
              if (gimple_clobber_p (stmt))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  if (TREE_CODE (lhs) == MEM_REF
                      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
                    {
                      unlink_stmt_vdef (stmt);
                      gsi_remove (&i, true);
                      release_defs (stmt);
                      continue;
                    }
                }
              gsi_next (&i);
              continue;
            }

          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }

          fcode = DECL_FUNCTION_CODE (callee);

          result = gimple_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (fun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_ASSUME_ALIGNED:
                /* Remove __builtin_assume_aligned.  */
                result = gimple_call_arg (stmt, 0);
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_UNREACHABLE:
                if (optimize_unreachable (i))
                  cfg_changed = true;
                break;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (result == NULL_TREE)
            break;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
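
/* For illustration only: the "fab" pass handles late leftovers such as

     if (__builtin_constant_p (x))         resolved to 0 here if x is
       ...                                   still not known constant
     p = __builtin_assume_aligned (q, 16); becomes: p = q;

   i.e. builtins that had to survive the earlier passes (to carry alignment
   or constancy information) are finally folded away.  */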

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}