/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
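/* For illustration only (a sketch, not part of the pass; exact GIMPLE
   dumps vary by compiler version and options), given

	int f (void)
	{
	  int x = 4;
	  int y;
	  if (x > 3)
	    y = x + 5;
	  else
	    y = 0;
	  return y;
	}

   CCP records x_1 as CONSTANT 4, folds the predicate to true, marks
   the else edge non-executable so the PHI for y meets only the value
   9, and substitute_and_fold rewrites the function to return 9.  */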
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
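/* For illustration (hypothetical numbers, not from a real run): a
   CONSTANT lattice value with value == 0x10 and mask == 0x0f says
   that bits 4 and up are known (X & ~0x0f == 0x10) while the low four
   bits are unknown; mask == 0 means the value is fully known, and an
   all-ones mask carries no bit information at all.  */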
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
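/* Two illustrative defaults (hypothetical declarations): a use of an
   uninitialized local "int i;" is reached through a GIMPLE_NOP
   definition and starts out UNDEFINED, while a copy from
   "static const int limit = 8;" satisfies get_symbol_constant_value
   and starts out CONSTANT 8.  */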
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

     and we set value of y to NaN.  This causes value of x to be set to NaN.
     When we later determine that y is in fact VARYING, fold uses the fact
     that HONOR_NANS is false, and we try to change the value of x to 0,
     causing an ICE.  With HONOR_NANS being false, the real appearance of
     NaN would cause undefined behavior, though, so claiming that y (and x)
     are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
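/* Sketch of two valid transitions (hypothetical values): UNDEFINED ->
   CONSTANT is monotonic because UNINITIALIZED < UNDEFINED < CONSTANT
   < VARYING in the enum order, and CONSTANT 4 (mask 0) -> CONSTANT 6
   (mask 2) is accepted because both values agree outside the new
   mask: 4 & ~2 == 6 & ~2 == 4.  */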
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partial constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
			      &bitsize, &bitpos, &offset,
			      &mode, &align, &align, false);
  if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
			   TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
	   && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
	       > BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
		   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
		    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr),
			 shwi_to_double_int (bitpos / BITS_PER_UNIT),
			 double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
	val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
	val.value = NULL_TREE;
    }
  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr), value_to_double_int (oval),
			 oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	}
      else
	{
	  val.lattice_val = CONSTANT;
	  val.value = double_int_to_tree (TREE_TYPE (expr), value);
	}
    }

  return val;
}
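/* As a sketch (hypothetical declaration): for &buf where buf is known
   to be 16-byte aligned, this returns CONSTANT with value 0 and a
   mask of ~15 within the pointer precision, i.e. the low four address
   bits are known zeros while everything above them is unknown.  */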
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call to
     something that is not a builtin, it is varying.  (Indirect calls
     may still turn out to be calls to builtins.)  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we always want to
	     simulate it at least once; otherwise its outgoing edges
	     would never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
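/* A small worked example (hypothetical lattice values): meeting
   CONSTANT 4 (mask 0) with CONSTANT 5 (mask 0) yields mask
   0 | 0 | (4 ^ 5) == 1, i.e. CONSTANT 4 with only bit 0 unknown,
   rather than dropping straight to VARYING.  */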
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_constant_value (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  The folding we do here matches that in
		 tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case MEM_REF:
	  /* ??? We could handle this case.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  base = get_base_address (base);
	  if (!base
	      || TREE_CODE (base) != VAR_DECL)
	    return NULL_TREE;

	  /* Fallthru.  */
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
		  && !varpool_get_node (base)->const_value_known))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((tem = get_constant_value (idx))
	      && TREE_CODE (tem) == INTEGER_CST)
	    idx = tem;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  tree low_bound = array_ref_low_bound (t);
	  double_int low_bound_cst;
	  double_int index_cst;
	  double_int length_cst;
	  bool signed_p = TYPE_UNSIGNED (TREE_TYPE (idx));

	  if (TREE_CODE (idx) != INTEGER_CST
	      || !INTEGRAL_TYPE_P (TREE_TYPE (t))
	      || TREE_CODE (low_bound) != INTEGER_CST)
	    return NULL_TREE;
	  low_bound_cst = tree_to_double_int (low_bound);
	  index_cst = tree_to_double_int (idx);
	  length_cst = uhwi_to_double_int (TREE_STRING_LENGTH (ctor));
	  index_cst = double_int_sub (index_cst, low_bound_cst);
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && double_int_cmp (index_cst, length_cst, signed_p) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[double_int_to_uhwi (index_cst)]));
	  return NULL_TREE;
	}
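      /* E.g. (an illustrative case): for static const char msg[] = "abc",
	 a read of msg[1] passes the checks above and folds to the
	 character constant 98, i.e. 'b'.  */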
      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return canonicalize_constructor_val (cval);
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
		  && !varpool_get_node (base)->const_value_known))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return canonicalize_constructor_val (cval);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
	  && (tem = get_constant_value (base)))
	base = tem;
      if (TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (DECL_P (base)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (base))
	      && integer_zerop (TREE_OPERAND (t, 1)))
	    {
	      tree res = get_symbol_constant_value (base);
	      if (res
		  && !useless_type_conversion_p
			(TREE_TYPE (t), TREE_TYPE (res)))
		res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
	      return res;
	    }

	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
		  && !varpool_get_node (base)->const_value_known))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		cval = canonicalize_constructor_val (cval);
		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
		else
		  return NULL_TREE;
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;
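      /* A worked example with hypothetical four-bit operands:
	 r1 = {val 1100, mask 0011} and r2 = {val 1010, mask 0000}
	 give mask = (0011 | 0000) & ((1100 | 0011) & (1010 | 0000))
	 == 0010 and val = 1100 & 1010 == 1000; bit 1 stays unknown,
	 but bit 0 becomes a known zero because r2 supplies one.  */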
    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 their sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
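      /* A worked example (hypothetical four-bit operands): adding
	 r1 = {val 0100, mask 0001} and r2 = {val 0010, mask 0000}
	 gives lo = 0100 + 0010 == 0110 and hi = 0101 + 0010 == 0111,
	 so mask = 0001 | (0110 ^ 0111) == 0001; only bit 0 is
	 unknown, since the unknown bit never produces a carry.  */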
    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }
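      /* E.g. (hypothetical operands): if r1 is known to end in two
	 zero bits and r2 in one, the product ends in at least three,
	 so the low three bits become known zeros while the higher
	 bits stay unknown.  */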
    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }
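      /* For instance (hypothetical unsigned operands): with
	 r1 = {val 0100, mask 0011}, i.e. a value in [4, 7], and r2
	 the constant 8, maxmin compares 7 against 8 and is negative,
	 so r1 < r2 is known true although r1 is not fully known.  */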
1858 default:;
1862 /* Return the propagation value when applying the operation CODE to
1863 the value RHS yielding type TYPE. */
1865 static prop_value_t
1866 bit_value_unop (enum tree_code code, tree type, tree rhs)
1868 prop_value_t rval = get_value_for_expr (rhs, true);
1869 double_int value, mask;
1870 prop_value_t val;
1871 gcc_assert ((rval.lattice_val == CONSTANT
1872 && TREE_CODE (rval.value) == INTEGER_CST)
1873 || double_int_minus_one_p (rval.mask));
1874 bit_value_unop_1 (code, type, &value, &mask,
1875 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
1876 if (!double_int_minus_one_p (mask))
1878 val.lattice_val = CONSTANT;
1879 val.mask = mask;
1880 /* ??? Delay building trees here. */
1881 val.value = double_int_to_tree (type, value);
1883 else
1885 val.lattice_val = VARYING;
1886 val.value = NULL_TREE;
1887 val.mask = double_int_minus_one;
1889 return val;
1892 /* Return the propagation value when applying the operation CODE to
1893 the values RHS1 and RHS2 yielding type TYPE. */
1895 static prop_value_t
1896 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1898 prop_value_t r1val = get_value_for_expr (rhs1, true);
1899 prop_value_t r2val = get_value_for_expr (rhs2, true);
1900 double_int value, mask;
1901 prop_value_t val;
1902 gcc_assert ((r1val.lattice_val == CONSTANT
1903 && TREE_CODE (r1val.value) == INTEGER_CST)
1904 || double_int_minus_one_p (r1val.mask));
1905 gcc_assert ((r2val.lattice_val == CONSTANT
1906 && TREE_CODE (r2val.value) == INTEGER_CST)
1907 || double_int_minus_one_p (r2val.mask));
1908 bit_value_binop_1 (code, type, &value, &mask,
1909 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
1910 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
1911 if (!double_int_minus_one_p (mask))
1913 val.lattice_val = CONSTANT;
1914 val.mask = mask;
1915 /* ??? Delay building trees here. */
1916 val.value = double_int_to_tree (type, value);
1918 else
1920 val.lattice_val = VARYING;
1921 val.value = NULL_TREE;
1922 val.mask = double_int_minus_one;
1924 return val;

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (rhs1), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;
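	      /* Worked example for the mask above: with a
		 MALLOC_ABI_ALIGNMENT of 128 bits the result is 16-byte
		 aligned, so the mask is ~15 -- the low four bits are
		 known zeros (matching the zero value), all higher bits
		 are unknown.  */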

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}
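
/* Note the optimistic fallback above: for x_2 = x_1 + 1 where x_1 is
   still UNDEFINED, the result stays UNDEFINED rather than dropping to
   VARYING, so a later visit of the defining statement can still turn
   x_2 into a constant.  */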

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }
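    /* For example, if x_5 was proven to be 0 with all bits known, a
       predicate like "if (x_5 != 0)" is rewritten in place to "if (0)",
       and the later CFG cleanup can remove the dead arm.  */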

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }
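    /* E.g. with n_3 known to be 4, "memcpy (dst_1, src_2, n_3)" becomes
       "memcpy (dst_1, src_2, 4)", provided the constant's type matches
       the declared parameter slot.  */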

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
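
/* So for a plain copy x_2 = x_1, no re-evaluation happens: if x_1 is
   CONSTANT 3, x_2 immediately becomes CONSTANT 3 as well.  */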

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
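
/* For example, once the predicate of "if (x_5 > 10)" evaluates to the
   constant 1, find_taken_edge returns the true edge and only that
   edge's destination needs to be simulated further.  */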

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow a single successor edge to the exit block, or no successors
     at all.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
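
/* A sketch of the pattern this targets:

     p_1 = __builtin_stack_save ();
     ...                                <- no calls or asms
     __builtin_stack_restore (p_1);     <- removable
     ...                                <- no calls or asms
     __builtin_stack_restore (p_1);

   The earlier restore is redundant because the later one (or the
   function's epilogue, when the block falls through to exit) restores
   the same or an older stack pointer anyway.  */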

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
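
/* Concretely, on targets where va_list is a plain char or void pointer,
   the cases above rewrite

     __builtin_va_start (&ap, 0);   ->  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     ->  d = s;
     __builtin_va_end (&ap);        ->  (deleted)

   exposing ap and d to the ordinary scalar optimizers.  */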

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin; there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
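
/* The retry at the bottom of the loop above matters in practice: e.g.
   gimple_fold_builtin may turn a strcpy with known length into a
   memcpy, and the resulting memcpy may then fold further on the next
   iteration.  */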

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};