1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010, 2011 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58      propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85      that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
102 References:
104 Constant propagation with conditional branches,
105     Wegman and Zadeck, ACM TOPLAS 13(2):181-210, 1991.
107 Building an Optimizing Compiler,
108 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
110 Advanced Compiler Design and Implementation,
111 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
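/* A small worked example (illustrative only; the SSA names below are
   invented for the example):

     i_1 = 10;
     j_2 = i_1 + 5;
     if (j_2 > 20)
       k_3 = 1;
     else
       k_4 = 2;
     k_5 = PHI <k_3, k_4>

   ccp_visit_stmt records i_1 = CONSTANT 10 and j_2 = CONSTANT 15 and
   folds the predicate to false, so only the else edge is marked
   executable.  ccp_visit_phi_node then ignores k_3 and sets
   k_5 = CONSTANT 2, which substitute_and_fold propagates into the
   uses of k_5.  */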
113 #include "config.h"
114 #include "system.h"
115 #include "coretypes.h"
116 #include "tm.h"
117 #include "tree.h"
118 #include "flags.h"
119 #include "tm_p.h"
120 #include "basic-block.h"
121 #include "output.h"
122 #include "function.h"
123 #include "tree-pretty-print.h"
124 #include "gimple-pretty-print.h"
125 #include "timevar.h"
126 #include "tree-dump.h"
127 #include "tree-flow.h"
128 #include "tree-pass.h"
129 #include "tree-ssa-propagate.h"
130 #include "value-prof.h"
131 #include "langhooks.h"
132 #include "target.h"
133 #include "diagnostic-core.h"
134 #include "dbgcnt.h"
135 #include "gimple-fold.h"
136 #include "params.h"
139 /* Possible lattice values. */
140 typedef enum
142 UNINITIALIZED,
143 UNDEFINED,
144 CONSTANT,
145 VARYING
146 } ccp_lattice_t;
148 struct prop_value_d {
149 /* Lattice value. */
150 ccp_lattice_t lattice_val;
152 /* Propagated value. */
153 tree value;
155 /* Mask that applies to the propagated value during CCP. For
156 X with a CONSTANT lattice value X & ~mask == value & ~mask. */
157 double_int mask;
160 typedef struct prop_value_d prop_value_t;
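/* Concrete example of the value/mask encoding (illustrative): a lattice
   entry with lattice_val == CONSTANT, value == 8 (binary 1000) and
   mask == 7 (binary 0111) says that the low three bits are unknown and
   every other bit matches VALUE, i.e. X & ~7 == 8, so X is one of
   8..15.  A mask of zero means X is exactly VALUE.  */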
162 /* Array of propagated constant values.  After propagation,
163    CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
168 static prop_value_t *const_val;
170 static void canonicalize_float_value (prop_value_t *);
171 static bool ccp_fold_stmt (gimple_stmt_iterator *);
173 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
175 static void
176 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
178 switch (val.lattice_val)
180 case UNINITIALIZED:
181 fprintf (outf, "%sUNINITIALIZED", prefix);
182 break;
183 case UNDEFINED:
184 fprintf (outf, "%sUNDEFINED", prefix);
185 break;
186 case VARYING:
187 fprintf (outf, "%sVARYING", prefix);
188 break;
189 case CONSTANT:
190 fprintf (outf, "%sCONSTANT ", prefix);
191 if (TREE_CODE (val.value) != INTEGER_CST
192 || double_int_zero_p (val.mask))
193 print_generic_expr (outf, val.value, dump_flags);
194 else
196 double_int cval = double_int_and_not (tree_to_double_int (val.value),
197 val.mask);
198           fprintf (outf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
199                    cval.high, cval.low);
200 fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
201 val.mask.high, val.mask.low);
203 break;
204 default:
205 gcc_unreachable ();
210 /* Print lattice value VAL to stderr. */
212 void debug_lattice_value (prop_value_t val);
214 DEBUG_FUNCTION void
215 debug_lattice_value (prop_value_t val)
217 dump_lattice_value (stderr, "", val);
218 fprintf (stderr, "\n");
222 /* Compute a default value for variable VAR and store it in the
223 CONST_VAL array. The following rules are used to get default
224 values:
226 1- Global and static variables that are declared constant are
227 considered CONSTANT.
229 2- Any other value is considered UNDEFINED. This is useful when
230 considering PHI nodes. PHI arguments that are undefined do not
231 change the constant value of the PHI node, which allows for more
232 constants to be propagated.
234 3- Variables defined by statements other than assignments and PHI
235 nodes are considered VARYING.
237 4- Initial values of variables that are not GIMPLE registers are
238 considered VARYING. */
240 static prop_value_t
241 get_default_value (tree var)
243 tree sym = SSA_NAME_VAR (var);
244 prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
245 gimple stmt;
247 stmt = SSA_NAME_DEF_STMT (var);
249 if (gimple_nop_p (stmt))
251 /* Variables defined by an empty statement are those used
252 before being initialized. If VAR is a local variable, we
253 can assume initially that it is UNDEFINED, otherwise we must
254 consider it VARYING. */
255 if (is_gimple_reg (sym)
256 && TREE_CODE (sym) == VAR_DECL)
257 val.lattice_val = UNDEFINED;
258 else
260 val.lattice_val = VARYING;
261 val.mask = double_int_minus_one;
264 else if (is_gimple_assign (stmt)
265 /* Value-returning GIMPLE_CALL statements assign to
266 a variable, and are treated similarly to GIMPLE_ASSIGN. */
267 || (is_gimple_call (stmt)
268 && gimple_call_lhs (stmt) != NULL_TREE)
269 || gimple_code (stmt) == GIMPLE_PHI)
271 tree cst;
272 if (gimple_assign_single_p (stmt)
273 && DECL_P (gimple_assign_rhs1 (stmt))
274 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
276 val.lattice_val = CONSTANT;
277 val.value = cst;
279 else
280 /* Any other variable defined by an assignment or a PHI node
281 is considered UNDEFINED. */
282 val.lattice_val = UNDEFINED;
284 else
286 /* Otherwise, VAR will never take on a constant value. */
287 val.lattice_val = VARYING;
288 val.mask = double_int_minus_one;
291 return val;
295 /* Get the constant value associated with variable VAR. */
297 static inline prop_value_t *
298 get_value (tree var)
300 prop_value_t *val;
302 if (const_val == NULL)
303 return NULL;
305 val = &const_val[SSA_NAME_VERSION (var)];
306 if (val->lattice_val == UNINITIALIZED)
307 *val = get_default_value (var);
309 canonicalize_float_value (val);
311 return val;
314 /* Return the constant tree value associated with VAR. */
316 static inline tree
317 get_constant_value (tree var)
319 prop_value_t *val;
320 if (TREE_CODE (var) != SSA_NAME)
322 if (is_gimple_min_invariant (var))
323 return var;
324 return NULL_TREE;
326 val = get_value (var);
327 if (val
328 && val->lattice_val == CONSTANT
329 && (TREE_CODE (val->value) != INTEGER_CST
330 || double_int_zero_p (val->mask)))
331 return val->value;
332 return NULL_TREE;
335 /* Sets the value associated with VAR to VARYING. */
337 static inline void
338 set_value_varying (tree var)
340 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
342 val->lattice_val = VARYING;
343 val->value = NULL_TREE;
344 val->mask = double_int_minus_one;
347 /* For float types, modify the value of VAL to make ccp work correctly
348 for non-standard values (-0, NaN):
350 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
351 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
352 This is to fix the following problem (see PR 29921): Suppose we have
354 x = 0.0 * y
356    and we set the value of y to NaN.  This causes the value of x to be set to NaN.
357 When we later determine that y is in fact VARYING, fold uses the fact
358 that HONOR_NANS is false, and we try to change the value of x to 0,
359 causing an ICE. With HONOR_NANS being false, the real appearance of
360 NaN would cause undefined behavior, though, so claiming that y (and x)
361 are UNDEFINED initially is correct. */
363 static void
364 canonicalize_float_value (prop_value_t *val)
366 enum machine_mode mode;
367 tree type;
368 REAL_VALUE_TYPE d;
370 if (val->lattice_val != CONSTANT
371 || TREE_CODE (val->value) != REAL_CST)
372 return;
374 d = TREE_REAL_CST (val->value);
375 type = TREE_TYPE (val->value);
376 mode = TYPE_MODE (type);
378 if (!HONOR_SIGNED_ZEROS (mode)
379 && REAL_VALUE_MINUS_ZERO (d))
381 val->value = build_real (type, dconst0);
382 return;
385 if (!HONOR_NANS (mode)
386 && REAL_VALUE_ISNAN (d))
388 val->lattice_val = UNDEFINED;
389 val->value = NULL;
390 return;
394 /* Return whether the lattice transition is valid. */
396 static bool
397 valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
399 /* Lattice transitions must always be monotonically increasing in
400 value. */
401 if (old_val.lattice_val < new_val.lattice_val)
402 return true;
404 if (old_val.lattice_val != new_val.lattice_val)
405 return false;
407 if (!old_val.value && !new_val.value)
408 return true;
410 /* Now both lattice values are CONSTANT. */
412 /* Allow transitioning from &x to &x & ~3. */
413 if (TREE_CODE (old_val.value) != INTEGER_CST
414 && TREE_CODE (new_val.value) == INTEGER_CST)
415 return true;
417 /* Bit-lattices have to agree in the still valid bits. */
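  /* For example (illustrative), an old value of 8 with any mask and a new
     value of 12 with new mask 4 agree: masking out the still-unknown bit 2
     leaves 8 on both sides, so the transition is valid.  */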
418 if (TREE_CODE (old_val.value) == INTEGER_CST
419 && TREE_CODE (new_val.value) == INTEGER_CST)
420 return double_int_equal_p
421 (double_int_and_not (tree_to_double_int (old_val.value),
422 new_val.mask),
423 double_int_and_not (tree_to_double_int (new_val.value),
424 new_val.mask));
426 /* Otherwise constant values have to agree. */
427 return operand_equal_p (old_val.value, new_val.value, 0);
430 /* Set the value for variable VAR to NEW_VAL. Return true if the new
431 value is different from VAR's previous value. */
433 static bool
434 set_lattice_value (tree var, prop_value_t new_val)
436 /* We can deal with old UNINITIALIZED values just fine here. */
437 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
439 canonicalize_float_value (&new_val);
441 /* We have to be careful to not go up the bitwise lattice
442 represented by the mask.
443 ??? This doesn't seem to be the best place to enforce this. */
444 if (new_val.lattice_val == CONSTANT
445 && old_val->lattice_val == CONSTANT
446 && TREE_CODE (new_val.value) == INTEGER_CST
447 && TREE_CODE (old_val->value) == INTEGER_CST)
449 double_int diff;
450 diff = double_int_xor (tree_to_double_int (new_val.value),
451 tree_to_double_int (old_val->value));
452 new_val.mask = double_int_ior (new_val.mask,
453 double_int_ior (old_val->mask, diff));
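      /* Illustrative example: if the old value was 4 with mask 1 and the
         new value is 6 with mask 0, DIFF is 2 and the widened mask becomes
         0 | 1 | 2 == 3, so only the bits on which both values agree
         (everything above bit 1) remain known.  */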
456 gcc_assert (valid_lattice_transition (*old_val, new_val));
458 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
459 caller that this was a non-transition. */
460 if (old_val->lattice_val != new_val.lattice_val
461 || (new_val.lattice_val == CONSTANT
462 && TREE_CODE (new_val.value) == INTEGER_CST
463 && (TREE_CODE (old_val->value) != INTEGER_CST
464 || !double_int_equal_p (new_val.mask, old_val->mask))))
466 /* ??? We would like to delay creation of INTEGER_CSTs from
467 partially constants here. */
469 if (dump_file && (dump_flags & TDF_DETAILS))
471 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
472 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
475 *old_val = new_val;
477 gcc_assert (new_val.lattice_val != UNINITIALIZED);
478 return true;
481 return false;
484 static prop_value_t get_value_for_expr (tree, bool);
485 static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
486 static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
487 tree, double_int, double_int,
488 tree, double_int, double_int);
490 /* Return a double_int that can be used for bitwise simplifications
491 from VAL. */
493 static double_int
494 value_to_double_int (prop_value_t val)
496 if (val.value
497 && TREE_CODE (val.value) == INTEGER_CST)
498 return tree_to_double_int (val.value);
499 else
500 return double_int_zero;
503 /* Return the value for the address expression EXPR based on alignment
504 information. */
506 static prop_value_t
507 get_value_from_alignment (tree expr)
509 tree type = TREE_TYPE (expr);
510 prop_value_t val;
511 unsigned HOST_WIDE_INT bitpos;
512 unsigned int align;
514 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
516 align = get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitpos);
517 val.mask
518 = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
519 ? double_int_mask (TYPE_PRECISION (type))
520 : double_int_minus_one,
521 uhwi_to_double_int (align / BITS_PER_UNIT - 1));
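  /* Illustrative example: with ALIGN == 64 bits and BITPOS == 16 bits, the
     low three bits of the pointer are known and everything above them is
     unknown, so the mask is ~7 within the type's precision and the value
     computed below is 2, i.e. the pointer is 2 mod 8.  */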
522 val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
523 if (val.lattice_val == CONSTANT)
524 val.value
525 = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
526 else
527 val.value = NULL_TREE;
529 return val;
532 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
533 return constant bits extracted from alignment information for
534 invariant addresses. */
536 static prop_value_t
537 get_value_for_expr (tree expr, bool for_bits_p)
539 prop_value_t val;
541 if (TREE_CODE (expr) == SSA_NAME)
543 val = *get_value (expr);
544 if (for_bits_p
545 && val.lattice_val == CONSTANT
546 && TREE_CODE (val.value) == ADDR_EXPR)
547 val = get_value_from_alignment (val.value);
549 else if (is_gimple_min_invariant (expr)
550 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
552 val.lattice_val = CONSTANT;
553 val.value = expr;
554 val.mask = double_int_zero;
555 canonicalize_float_value (&val);
557 else if (TREE_CODE (expr) == ADDR_EXPR)
558 val = get_value_from_alignment (expr);
559 else
561 val.lattice_val = VARYING;
562 val.mask = double_int_minus_one;
563 val.value = NULL_TREE;
565 return val;
568 /* Return the likely CCP lattice value for STMT.
570 If STMT has no operands, then return CONSTANT.
572 Else if undefinedness of operands of STMT cause its value to be
573 undefined, then return UNDEFINED.
575 Else if any operands of STMT are constants, then return CONSTANT.
577 Else return VARYING. */
579 static ccp_lattice_t
580 likely_value (gimple stmt)
582 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
583 tree use;
584 ssa_op_iter iter;
585 unsigned i;
587 enum gimple_code code = gimple_code (stmt);
589 /* This function appears to be called only for assignments, calls,
590 conditionals, and switches, due to the logic in visit_stmt. */
591 gcc_assert (code == GIMPLE_ASSIGN
592 || code == GIMPLE_CALL
593 || code == GIMPLE_COND
594 || code == GIMPLE_SWITCH);
596 /* If the statement has volatile operands, it won't fold to a
597 constant value. */
598 if (gimple_has_volatile_ops (stmt))
599 return VARYING;
601 /* Arrive here for more complex cases. */
602 has_constant_operand = false;
603 has_undefined_operand = false;
604 all_undefined_operands = true;
605 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
607 prop_value_t *val = get_value (use);
609 if (val->lattice_val == UNDEFINED)
610 has_undefined_operand = true;
611 else
612 all_undefined_operands = false;
614 if (val->lattice_val == CONSTANT)
615 has_constant_operand = true;
618 /* There may be constants in regular rhs operands. For calls we
619 have to ignore lhs, fndecl and static chain, otherwise only
620 the lhs. */
621 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
622 i < gimple_num_ops (stmt); ++i)
624 tree op = gimple_op (stmt, i);
625 if (!op || TREE_CODE (op) == SSA_NAME)
626 continue;
627 if (is_gimple_min_invariant (op))
628 has_constant_operand = true;
631 if (has_constant_operand)
632 all_undefined_operands = false;
634 /* If the operation combines operands like COMPLEX_EXPR make sure to
635 not mark the result UNDEFINED if only one part of the result is
636 undefined. */
637 if (has_undefined_operand && all_undefined_operands)
638 return UNDEFINED;
639 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
641 switch (gimple_assign_rhs_code (stmt))
643 /* Unary operators are handled with all_undefined_operands. */
644 case PLUS_EXPR:
645 case MINUS_EXPR:
646 case POINTER_PLUS_EXPR:
647 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
648 Not bitwise operators, one VARYING operand may specify the
649 result completely. Not logical operators for the same reason.
650 Not COMPLEX_EXPR as one VARYING operand makes the result partly
651 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
652 the undefined operand may be promoted. */
653 return UNDEFINED;
655 default:
659   /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
660 fall back to VARYING even if there were CONSTANT operands. */
661 if (has_undefined_operand)
662 return VARYING;
664 /* We do not consider virtual operands here -- load from read-only
665 memory may have only VARYING virtual operands, but still be
666 constant. */
667 if (has_constant_operand
668 || gimple_references_memory_p (stmt))
669 return CONSTANT;
671 return VARYING;
674 /* Returns true if STMT cannot be constant. */
676 static bool
677 surely_varying_stmt_p (gimple stmt)
679 /* If the statement has operands that we cannot handle, it cannot be
680 constant. */
681 if (gimple_has_volatile_ops (stmt))
682 return true;
684   /* If it is a call that does not return a value, or a direct call
685      to something that is not a builtin, it is varying. */
686 if (is_gimple_call (stmt))
688 tree fndecl;
689 if (!gimple_call_lhs (stmt)
690 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
691 && !DECL_BUILT_IN (fndecl)))
692 return true;
695 /* Any other store operation is not interesting. */
696 else if (gimple_vdef (stmt))
697 return true;
699 /* Anything other than assignments and conditional jumps are not
700 interesting for CCP. */
701 if (gimple_code (stmt) != GIMPLE_ASSIGN
702 && gimple_code (stmt) != GIMPLE_COND
703 && gimple_code (stmt) != GIMPLE_SWITCH
704 && gimple_code (stmt) != GIMPLE_CALL)
705 return true;
707 return false;
710 /* Initialize local data structures for CCP. */
712 static void
713 ccp_initialize (void)
715 basic_block bb;
717 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
719 /* Initialize simulation flags for PHI nodes and statements. */
720 FOR_EACH_BB (bb)
722 gimple_stmt_iterator i;
724 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
726 gimple stmt = gsi_stmt (i);
727 bool is_varying;
729           /* If the statement is a control insn, we always want to
730              simulate it at least once; failing to do so means that its
731              outgoing edges would never get added. */
732 if (stmt_ends_bb_p (stmt))
733 is_varying = false;
734 else
735 is_varying = surely_varying_stmt_p (stmt);
737 if (is_varying)
739 tree def;
740 ssa_op_iter iter;
742 /* If the statement will not produce a constant, mark
743 all its outputs VARYING. */
744 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
745 set_value_varying (def);
747 prop_set_simulate_again (stmt, !is_varying);
751 /* Now process PHI nodes. We never clear the simulate_again flag on
752 phi nodes, since we do not know which edges are executable yet,
753 except for phi nodes for virtual operands when we do not do store ccp. */
754 FOR_EACH_BB (bb)
756 gimple_stmt_iterator i;
758 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
760 gimple phi = gsi_stmt (i);
762 if (!is_gimple_reg (gimple_phi_result (phi)))
763 prop_set_simulate_again (phi, false);
764 else
765 prop_set_simulate_again (phi, true);
770 /* Debug count support.  Reset the lattice values of SSA names to
771    VARYING when the total number of SSA names analyzed exceeds
772    the specified debug count. */
774 static void
775 do_dbg_cnt (void)
777 unsigned i;
778 for (i = 0; i < num_ssa_names; i++)
780 if (!dbg_cnt (ccp))
782 const_val[i].lattice_val = VARYING;
783 const_val[i].mask = double_int_minus_one;
784 const_val[i].value = NULL_TREE;
790 /* Do final substitution of propagated values, cleanup the flowgraph and
791 free allocated storage.
793 Return TRUE when something was optimized. */
795 static bool
796 ccp_finalize (void)
798 bool something_changed;
799 unsigned i;
801 do_dbg_cnt ();
803 /* Derive alignment and misalignment information from partially
804 constant pointers in the lattice. */
805 for (i = 1; i < num_ssa_names; ++i)
807 tree name = ssa_name (i);
808 prop_value_t *val;
809 struct ptr_info_def *pi;
810 unsigned int tem, align;
812 if (!name
813 || !POINTER_TYPE_P (TREE_TYPE (name)))
814 continue;
816 val = get_value (name);
817 if (val->lattice_val != CONSTANT
818 || TREE_CODE (val->value) != INTEGER_CST)
819 continue;
821 /* Trailing constant bits specify the alignment, trailing value
822 bits the misalignment. */
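      /* Illustrative example: a mask whose low word is 0xfffffff8 has bit 3
         as its least significant set bit, so TEM & -TEM yields 8; the pointer
         is then recorded as 8-byte aligned and its misalignment is the low
         three bits of the value.  */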
823 tem = val->mask.low;
824 align = (tem & -tem);
825 if (align == 1)
826 continue;
828 pi = get_ptr_info (name);
829 pi->align = align;
830 pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
833 /* Perform substitutions based on the known constant values. */
834 something_changed = substitute_and_fold (get_constant_value,
835 ccp_fold_stmt, true);
837 free (const_val);
838 const_val = NULL;
839   return something_changed;
843 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
844 in VAL1.
846 any M UNDEFINED = any
847 any M VARYING = VARYING
848 Ci M Cj = Ci if (i == j)
849 Ci M Cj = VARYING if (i != j)
852 static void
853 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
855 if (val1->lattice_val == UNDEFINED)
857 /* UNDEFINED M any = any */
858 *val1 = *val2;
860 else if (val2->lattice_val == UNDEFINED)
862 /* any M UNDEFINED = any
863 Nothing to do. VAL1 already contains the value we want. */
866 else if (val1->lattice_val == VARYING
867 || val2->lattice_val == VARYING)
869 /* any M VARYING = VARYING. */
870 val1->lattice_val = VARYING;
871 val1->mask = double_int_minus_one;
872 val1->value = NULL_TREE;
874 else if (val1->lattice_val == CONSTANT
875 && val2->lattice_val == CONSTANT
876 && TREE_CODE (val1->value) == INTEGER_CST
877 && TREE_CODE (val2->value) == INTEGER_CST)
879 /* Ci M Cj = Ci if (i == j)
880 Ci M Cj = VARYING if (i != j)
882 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
883 drop to varying. */
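      /* Illustrative example: meeting CONSTANT 4 (mask 0) with CONSTANT 6
         (mask 0) produces mask 0 | 0 | (4 ^ 6) == 2, i.e. bit 1 becomes
         unknown while every other bit stays known.  */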
884 val1->mask
885 = double_int_ior (double_int_ior (val1->mask,
886 val2->mask),
887 double_int_xor (tree_to_double_int (val1->value),
888 tree_to_double_int (val2->value)));
889 if (double_int_minus_one_p (val1->mask))
891 val1->lattice_val = VARYING;
892 val1->value = NULL_TREE;
895 else if (val1->lattice_val == CONSTANT
896 && val2->lattice_val == CONSTANT
897 && simple_cst_equal (val1->value, val2->value) == 1)
899 /* Ci M Cj = Ci if (i == j)
900 Ci M Cj = VARYING if (i != j)
902 VAL1 already contains the value we want for equivalent values. */
904 else if (val1->lattice_val == CONSTANT
905 && val2->lattice_val == CONSTANT
906 && (TREE_CODE (val1->value) == ADDR_EXPR
907 || TREE_CODE (val2->value) == ADDR_EXPR))
909 /* When not equal addresses are involved try meeting for
910 alignment. */
911 prop_value_t tem = *val2;
912 if (TREE_CODE (val1->value) == ADDR_EXPR)
913 *val1 = get_value_for_expr (val1->value, true);
914 if (TREE_CODE (val2->value) == ADDR_EXPR)
915 tem = get_value_for_expr (val2->value, true);
916 ccp_lattice_meet (val1, &tem);
918 else
920 /* Any other combination is VARYING. */
921 val1->lattice_val = VARYING;
922 val1->mask = double_int_minus_one;
923 val1->value = NULL_TREE;
928 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
929 lattice values to determine PHI_NODE's lattice value. The value of a
930 PHI node is determined calling ccp_lattice_meet with all the arguments
931 of the PHI node that are incoming via executable edges. */
933 static enum ssa_prop_result
934 ccp_visit_phi_node (gimple phi)
936 unsigned i;
937 prop_value_t *old_val, new_val;
939 if (dump_file && (dump_flags & TDF_DETAILS))
941 fprintf (dump_file, "\nVisiting PHI node: ");
942 print_gimple_stmt (dump_file, phi, 0, dump_flags);
945 old_val = get_value (gimple_phi_result (phi));
946 switch (old_val->lattice_val)
948 case VARYING:
949 return SSA_PROP_VARYING;
951 case CONSTANT:
952 new_val = *old_val;
953 break;
955 case UNDEFINED:
956 new_val.lattice_val = UNDEFINED;
957 new_val.value = NULL_TREE;
958 break;
960 default:
961 gcc_unreachable ();
964 for (i = 0; i < gimple_phi_num_args (phi); i++)
966 /* Compute the meet operator over all the PHI arguments flowing
967 through executable edges. */
968 edge e = gimple_phi_arg_edge (phi, i);
970 if (dump_file && (dump_flags & TDF_DETAILS))
972 fprintf (dump_file,
973 "\n Argument #%d (%d -> %d %sexecutable)\n",
974 i, e->src->index, e->dest->index,
975 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
978       /* If the incoming edge is executable, compute the meet operator for
979 the existing value of the PHI node and the current PHI argument. */
980 if (e->flags & EDGE_EXECUTABLE)
982 tree arg = gimple_phi_arg (phi, i)->def;
983 prop_value_t arg_val = get_value_for_expr (arg, false);
985 ccp_lattice_meet (&new_val, &arg_val);
987 if (dump_file && (dump_flags & TDF_DETAILS))
989 fprintf (dump_file, "\t");
990 print_generic_expr (dump_file, arg, dump_flags);
991 dump_lattice_value (dump_file, "\tValue: ", arg_val);
992 fprintf (dump_file, "\n");
995 if (new_val.lattice_val == VARYING)
996 break;
1000 if (dump_file && (dump_flags & TDF_DETAILS))
1002 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1003 fprintf (dump_file, "\n\n");
1006 /* Make the transition to the new value. */
1007 if (set_lattice_value (gimple_phi_result (phi), new_val))
1009 if (new_val.lattice_val == VARYING)
1010 return SSA_PROP_VARYING;
1011 else
1012 return SSA_PROP_INTERESTING;
1014 else
1015 return SSA_PROP_NOT_INTERESTING;
1018 /* Return the constant value for OP, or OP itself otherwise. */
1020 static tree
1021 valueize_op (tree op)
1023 if (TREE_CODE (op) == SSA_NAME)
1025 tree tem = get_constant_value (op);
1026 if (tem)
1027 return tem;
1029 return op;
1032 /* CCP specific front-end to the non-destructive constant folding
1033 routines.
1035 Attempt to simplify the RHS of STMT knowing that one or more
1036 operands are constants.
1038 If simplification is possible, return the simplified RHS,
1039 otherwise return the original RHS or NULL_TREE. */
1041 static tree
1042 ccp_fold (gimple stmt)
1044 location_t loc = gimple_location (stmt);
1045 switch (gimple_code (stmt))
1047 case GIMPLE_COND:
1049 /* Handle comparison operators that can appear in GIMPLE form. */
1050 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1051 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1052 enum tree_code code = gimple_cond_code (stmt);
1053 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1056 case GIMPLE_SWITCH:
1058 /* Return the constant switch index. */
1059 return valueize_op (gimple_switch_index (stmt));
1062 case GIMPLE_ASSIGN:
1063 case GIMPLE_CALL:
1064 return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
1066 default:
1067 gcc_unreachable ();
1071 /* Apply the operation CODE in type TYPE to the value, mask pair
1072 RVAL and RMASK representing a value of type RTYPE and set
1073 the value, mask pair *VAL and *MASK to the result. */
1075 static void
1076 bit_value_unop_1 (enum tree_code code, tree type,
1077 double_int *val, double_int *mask,
1078 tree rtype, double_int rval, double_int rmask)
1080 switch (code)
1082 case BIT_NOT_EXPR:
1083 *mask = rmask;
1084 *val = double_int_not (rval);
1085 break;
1087 case NEGATE_EXPR:
1089 double_int temv, temm;
1090 /* Return ~rval + 1. */
1091 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1092 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1093 type, temv, temm,
1094 type, double_int_one, double_int_zero);
1095 break;
1098 CASE_CONVERT:
1100 bool uns;
1102 /* First extend mask and value according to the original type. */
1103 uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
1104 ? 0 : TYPE_UNSIGNED (rtype));
1105 *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
1106 *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);
1108 /* Then extend mask and value according to the target type. */
1109 uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1110 ? 0 : TYPE_UNSIGNED (type));
1111 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1112 *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
1113 break;
1116 default:
1117 *mask = double_int_minus_one;
1118 break;
1122 /* Apply the operation CODE in type TYPE to the value, mask pairs
1123    R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1124 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1126 static void
1127 bit_value_binop_1 (enum tree_code code, tree type,
1128 double_int *val, double_int *mask,
1129 tree r1type, double_int r1val, double_int r1mask,
1130 tree r2type, double_int r2val, double_int r2mask)
1132 bool uns = (TREE_CODE (type) == INTEGER_TYPE
1133 && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
1134 /* Assume we'll get a constant result. Use an initial varying value,
1135 we fall back to varying in the end if necessary. */
1136 *mask = double_int_minus_one;
1137 switch (code)
1139 case BIT_AND_EXPR:
1140 /* The mask is constant where there is a known not
1141 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
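      /* Illustrative example: r1 = value 0, mask 1 (bit 0 unknown) and
         r2 = value 0, mask 0 (known zero) give
         (1 | 0) & ((0 | 1) & (0 | 0)) == 0, so the result is the fully
         known constant 0: the known zero in r2 forces every bit of the
         AND to zero.  */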
1142 *mask = double_int_and (double_int_ior (r1mask, r2mask),
1143 double_int_and (double_int_ior (r1val, r1mask),
1144 double_int_ior (r2val, r2mask)));
1145 *val = double_int_and (r1val, r2val);
1146 break;
1148 case BIT_IOR_EXPR:
1149 /* The mask is constant where there is a known
1150 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1151 *mask = double_int_and_not
1152 (double_int_ior (r1mask, r2mask),
1153 double_int_ior (double_int_and_not (r1val, r1mask),
1154 double_int_and_not (r2val, r2mask)));
1155 *val = double_int_ior (r1val, r2val);
1156 break;
1158 case BIT_XOR_EXPR:
1159 /* m1 | m2 */
1160 *mask = double_int_ior (r1mask, r2mask);
1161 *val = double_int_xor (r1val, r2val);
1162 break;
1164 case LROTATE_EXPR:
1165 case RROTATE_EXPR:
1166 if (double_int_zero_p (r2mask))
1168 HOST_WIDE_INT shift = r2val.low;
1169 if (code == RROTATE_EXPR)
1170 shift = -shift;
1171 *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
1172 *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
1174 break;
1176 case LSHIFT_EXPR:
1177 case RSHIFT_EXPR:
1178 /* ??? We can handle partially known shift counts if we know
1179 its sign. That way we can tell that (x << (y | 8)) & 255
1180 is zero. */
1181 if (double_int_zero_p (r2mask))
1183 HOST_WIDE_INT shift = r2val.low;
1184 if (code == RSHIFT_EXPR)
1185 shift = -shift;
1186 /* We need to know if we are doing a left or a right shift
1187 to properly shift in zeros for left shift and unsigned
1188 right shifts and the sign bit for signed right shifts.
1189 For signed right shifts we shift in varying in case
1190 the sign bit was varying. */
1191 if (shift > 0)
1193 *mask = double_int_lshift (r1mask, shift,
1194 TYPE_PRECISION (type), false);
1195 *val = double_int_lshift (r1val, shift,
1196 TYPE_PRECISION (type), false);
1198 else if (shift < 0)
1200 /* ??? We can have sizetype related inconsistencies in
1201 the IL. */
1202 if ((TREE_CODE (r1type) == INTEGER_TYPE
1203 && (TYPE_IS_SIZETYPE (r1type)
1204 ? 0 : TYPE_UNSIGNED (r1type))) != uns)
1205 break;
1207 shift = -shift;
1208 *mask = double_int_rshift (r1mask, shift,
1209 TYPE_PRECISION (type), !uns);
1210 *val = double_int_rshift (r1val, shift,
1211 TYPE_PRECISION (type), !uns);
1213 else
1215 *mask = r1mask;
1216 *val = r1val;
1219 break;
1221 case PLUS_EXPR:
1222 case POINTER_PLUS_EXPR:
1224 double_int lo, hi;
1225 /* Do the addition with unknown bits set to zero, to give carry-ins of
1226 zero wherever possible. */
1227 lo = double_int_add (double_int_and_not (r1val, r1mask),
1228 double_int_and_not (r2val, r2mask));
1229 lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
1230 /* Do the addition with unknown bits set to one, to give carry-ins of
1231 one wherever possible. */
1232 hi = double_int_add (double_int_ior (r1val, r1mask),
1233 double_int_ior (r2val, r2mask));
1234 hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
1235 /* Each bit in the result is known if (a) the corresponding bits in
1236 both inputs are known, and (b) the carry-in to that bit position
1237 is known. We can check condition (b) by seeing if we got the same
1238 result with minimised carries as with maximised carries. */
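      /* Illustrative example: adding r1 = value 4, mask 2 (binary 1?0) and
         r2 = value 1, mask 0 gives LO == 5 and HI == 7, so the mask becomes
         (2 | 0) | (5 ^ 7) == 2 and the sum is known to be binary 1?1,
         i.e. either 5 or 7.  */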
1239 *mask = double_int_ior (double_int_ior (r1mask, r2mask),
1240 double_int_xor (lo, hi));
1241 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1242 /* It shouldn't matter whether we choose lo or hi here. */
1243 *val = lo;
1244 break;
1247 case MINUS_EXPR:
1249 double_int temv, temm;
1250 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1251 r2type, r2val, r2mask);
1252 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1253 r1type, r1val, r1mask,
1254 r2type, temv, temm);
1255 break;
1258 case MULT_EXPR:
1260 /* Just track trailing zeros in both operands and transfer
1261 them to the other. */
1262 int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
1263 int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
1264 if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
1266 *mask = double_int_zero;
1267 *val = double_int_zero;
1269 else if (r1tz + r2tz > 0)
1271 *mask = double_int_not (double_int_mask (r1tz + r2tz));
1272 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1273 *val = double_int_zero;
1275 break;
1278 case EQ_EXPR:
1279 case NE_EXPR:
1281 double_int m = double_int_ior (r1mask, r2mask);
1282 if (!double_int_equal_p (double_int_and_not (r1val, m),
1283 double_int_and_not (r2val, m)))
1285 *mask = double_int_zero;
1286 *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
1288 else
1290 /* We know the result of a comparison is always one or zero. */
1291 *mask = double_int_one;
1292 *val = double_int_zero;
1294 break;
1297 case GE_EXPR:
1298 case GT_EXPR:
1300 double_int tem = r1val;
1301 r1val = r2val;
1302 r2val = tem;
1303 tem = r1mask;
1304 r1mask = r2mask;
1305 r2mask = tem;
1306 code = swap_tree_comparison (code);
1308 /* Fallthru. */
1309 case LT_EXPR:
1310 case LE_EXPR:
1312 int minmax, maxmin;
1313 /* If the most significant bits are not known we know nothing. */
1314 if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
1315 break;
1317 /* For comparisons the signedness is in the comparison operands. */
1318 uns = (TREE_CODE (r1type) == INTEGER_TYPE
1319 && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
1320 /* ??? We can have sizetype related inconsistencies in the IL. */
1321 if ((TREE_CODE (r2type) == INTEGER_TYPE
1322 && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
1323 break;
1325    /* If we know the most significant bits we know the value
1326       ranges by means of treating varying bits as zero
1327 or one. Do a cross comparison of the max/min pairs. */
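          /* Illustrative example: r1 = value 0, mask 3 (so r1 is 0..3) and
             r2 = value 8, mask 3 (so r2 is 8..11).  MAXMIN compares 3 with 8
             and MINMAX compares 0 with 11; MAXMIN < 0, so r1 < r2 (and
             r1 <= r2) is known to be true.  */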
1328 maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
1329 double_int_and_not (r2val, r2mask), uns);
1330 minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
1331 double_int_ior (r2val, r2mask), uns);
1332 if (maxmin < 0) /* r1 is less than r2. */
1334 *mask = double_int_zero;
1335 *val = double_int_one;
1337 else if (minmax > 0) /* r1 is not less or equal to r2. */
1339 *mask = double_int_zero;
1340 *val = double_int_zero;
1342 else if (maxmin == minmax) /* r1 and r2 are equal. */
1344 /* This probably should never happen as we'd have
1345 folded the thing during fully constant value folding. */
1346 *mask = double_int_zero;
1347 *val = (code == LE_EXPR ? double_int_one : double_int_zero);
1349 else
1351 /* We know the result of a comparison is always one or zero. */
1352 *mask = double_int_one;
1353 *val = double_int_zero;
1355 break;
1358 default:;
1362 /* Return the propagation value when applying the operation CODE to
1363 the value RHS yielding type TYPE. */
1365 static prop_value_t
1366 bit_value_unop (enum tree_code code, tree type, tree rhs)
1368 prop_value_t rval = get_value_for_expr (rhs, true);
1369 double_int value, mask;
1370 prop_value_t val;
1371 gcc_assert ((rval.lattice_val == CONSTANT
1372 && TREE_CODE (rval.value) == INTEGER_CST)
1373 || double_int_minus_one_p (rval.mask));
1374 bit_value_unop_1 (code, type, &value, &mask,
1375 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
1376 if (!double_int_minus_one_p (mask))
1378 val.lattice_val = CONSTANT;
1379 val.mask = mask;
1380 /* ??? Delay building trees here. */
1381 val.value = double_int_to_tree (type, value);
1383 else
1385 val.lattice_val = VARYING;
1386 val.value = NULL_TREE;
1387 val.mask = double_int_minus_one;
1389 return val;
1392 /* Return the propagation value when applying the operation CODE to
1393 the values RHS1 and RHS2 yielding type TYPE. */
1395 static prop_value_t
1396 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1398 prop_value_t r1val = get_value_for_expr (rhs1, true);
1399 prop_value_t r2val = get_value_for_expr (rhs2, true);
1400 double_int value, mask;
1401 prop_value_t val;
1402 gcc_assert ((r1val.lattice_val == CONSTANT
1403 && TREE_CODE (r1val.value) == INTEGER_CST)
1404 || double_int_minus_one_p (r1val.mask));
1405 gcc_assert ((r2val.lattice_val == CONSTANT
1406 && TREE_CODE (r2val.value) == INTEGER_CST)
1407 || double_int_minus_one_p (r2val.mask));
1408 bit_value_binop_1 (code, type, &value, &mask,
1409 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
1410 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
1411 if (!double_int_minus_one_p (mask))
1413 val.lattice_val = CONSTANT;
1414 val.mask = mask;
1415 /* ??? Delay building trees here. */
1416 val.value = double_int_to_tree (type, value);
1418 else
1420 val.lattice_val = VARYING;
1421 val.value = NULL_TREE;
1422 val.mask = double_int_minus_one;
1424 return val;
1427 /* Return the propagation value when applying __builtin_assume_aligned to
1428 its arguments. */
1430 static prop_value_t
1431 bit_value_assume_aligned (gimple stmt)
1433 tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
1434 tree type = TREE_TYPE (ptr);
1435 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1436 prop_value_t ptrval = get_value_for_expr (ptr, true);
1437 prop_value_t alignval;
1438 double_int value, mask;
1439 prop_value_t val;
1440 if (ptrval.lattice_val == UNDEFINED)
1441 return ptrval;
1442 gcc_assert ((ptrval.lattice_val == CONSTANT
1443 && TREE_CODE (ptrval.value) == INTEGER_CST)
1444 || double_int_minus_one_p (ptrval.mask));
1445 align = gimple_call_arg (stmt, 1);
1446 if (!host_integerp (align, 1))
1447 return ptrval;
1448 aligni = tree_low_cst (align, 1);
1449 if (aligni <= 1
1450 || (aligni & (aligni - 1)) != 0)
1451 return ptrval;
1452 if (gimple_call_num_args (stmt) > 2)
1454 misalign = gimple_call_arg (stmt, 2);
1455 if (!host_integerp (misalign, 1))
1456 return ptrval;
1457 misaligni = tree_low_cst (misalign, 1);
1458 if (misaligni >= aligni)
1459 return ptrval;
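  /* Illustrative example: for __builtin_assume_aligned (p, 16, 1), ALIGN
     below is the all-ones constant with its low four bits cleared, the
     BIT_AND_EXPR evaluation marks those four bits of the result as known
     zeros, and MISALIGNI (here 1) is then ORed back into the low bits,
     so the pointer is known to be 1 mod 16.  */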
1461 align = build_int_cst_type (type, -aligni);
1462 alignval = get_value_for_expr (align, true);
1463 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1464 type, value_to_double_int (ptrval), ptrval.mask,
1465 type, value_to_double_int (alignval), alignval.mask);
1466 if (!double_int_minus_one_p (mask))
1468 val.lattice_val = CONSTANT;
1469 val.mask = mask;
1470 gcc_assert ((mask.low & (aligni - 1)) == 0);
1471 gcc_assert ((value.low & (aligni - 1)) == 0);
1472 value.low |= misaligni;
1473 /* ??? Delay building trees here. */
1474 val.value = double_int_to_tree (type, value);
1476 else
1478 val.lattice_val = VARYING;
1479 val.value = NULL_TREE;
1480 val.mask = double_int_minus_one;
1482 return val;
1485 /* Evaluate statement STMT.
1486 Valid only for assignments, calls, conditionals, and switches. */
1488 static prop_value_t
1489 evaluate_stmt (gimple stmt)
1491 prop_value_t val;
1492 tree simplified = NULL_TREE;
1493 ccp_lattice_t likelyvalue = likely_value (stmt);
1494 bool is_constant = false;
1495 unsigned int align;
1497 if (dump_file && (dump_flags & TDF_DETAILS))
1499 fprintf (dump_file, "which is likely ");
1500 switch (likelyvalue)
1502 case CONSTANT:
1503 fprintf (dump_file, "CONSTANT");
1504 break;
1505 case UNDEFINED:
1506 fprintf (dump_file, "UNDEFINED");
1507 break;
1508 case VARYING:
1509 fprintf (dump_file, "VARYING");
1510 break;
1511 default:;
1513 fprintf (dump_file, "\n");
1516 /* If the statement is likely to have a CONSTANT result, then try
1517 to fold the statement to determine the constant value. */
1518 /* FIXME. This is the only place that we call ccp_fold.
1519 Since likely_value never returns CONSTANT for calls, we will
1520 not attempt to fold them, including builtins that may profit. */
1521 if (likelyvalue == CONSTANT)
1523 fold_defer_overflow_warnings ();
1524 simplified = ccp_fold (stmt);
1525 is_constant = simplified && is_gimple_min_invariant (simplified);
1526 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1527 if (is_constant)
1529 /* The statement produced a constant value. */
1530 val.lattice_val = CONSTANT;
1531 val.value = simplified;
1532 val.mask = double_int_zero;
1535 /* If the statement is likely to have a VARYING result, then do not
1536 bother folding the statement. */
1537 else if (likelyvalue == VARYING)
1539 enum gimple_code code = gimple_code (stmt);
1540 if (code == GIMPLE_ASSIGN)
1542 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1544 /* Other cases cannot satisfy is_gimple_min_invariant
1545 without folding. */
1546 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1547 simplified = gimple_assign_rhs1 (stmt);
1549 else if (code == GIMPLE_SWITCH)
1550 simplified = gimple_switch_index (stmt);
1551 else
1552 /* These cannot satisfy is_gimple_min_invariant without folding. */
1553 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1554 is_constant = simplified && is_gimple_min_invariant (simplified);
1555 if (is_constant)
1557 /* The statement produced a constant value. */
1558 val.lattice_val = CONSTANT;
1559 val.value = simplified;
1560 val.mask = double_int_zero;
1564 /* Resort to simplification for bitwise tracking. */
1565 if (flag_tree_bit_ccp
1566 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
1567 && !is_constant)
1569 enum gimple_code code = gimple_code (stmt);
1570 tree fndecl;
1571 val.lattice_val = VARYING;
1572 val.value = NULL_TREE;
1573 val.mask = double_int_minus_one;
1574 if (code == GIMPLE_ASSIGN)
1576 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1577 tree rhs1 = gimple_assign_rhs1 (stmt);
1578 switch (get_gimple_rhs_class (subcode))
1580 case GIMPLE_SINGLE_RHS:
1581 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1582 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1583 val = get_value_for_expr (rhs1, true);
1584 break;
1586 case GIMPLE_UNARY_RHS:
1587 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1588 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1589 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1590 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1591 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1592 break;
1594 case GIMPLE_BINARY_RHS:
1595 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1596 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1598 tree lhs = gimple_assign_lhs (stmt);
1599 tree rhs2 = gimple_assign_rhs2 (stmt);
1600 val = bit_value_binop (subcode,
1601 TREE_TYPE (lhs), rhs1, rhs2);
1603 break;
1605 default:;
1608 else if (code == GIMPLE_COND)
1610 enum tree_code code = gimple_cond_code (stmt);
1611 tree rhs1 = gimple_cond_lhs (stmt);
1612 tree rhs2 = gimple_cond_rhs (stmt);
1613 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1614 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1615 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1617 else if (code == GIMPLE_CALL
1618 && (fndecl = gimple_call_fndecl (stmt))
1619 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1621 switch (DECL_FUNCTION_CODE (fndecl))
1623 case BUILT_IN_MALLOC:
1624 case BUILT_IN_REALLOC:
1625 case BUILT_IN_CALLOC:
1626 case BUILT_IN_STRDUP:
1627 case BUILT_IN_STRNDUP:
1628 val.lattice_val = CONSTANT;
1629 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1630 val.mask = shwi_to_double_int
1631 (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
1632 / BITS_PER_UNIT - 1));
1633 break;
1635 case BUILT_IN_ALLOCA:
1636 case BUILT_IN_ALLOCA_WITH_ALIGN:
1637 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1638 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1639 : BIGGEST_ALIGNMENT);
1640 val.lattice_val = CONSTANT;
1641 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1642 val.mask = shwi_to_double_int
1643 (~(((HOST_WIDE_INT) align)
1644 / BITS_PER_UNIT - 1));
1645 break;
1647 /* These builtins return their first argument, unmodified. */
1648 case BUILT_IN_MEMCPY:
1649 case BUILT_IN_MEMMOVE:
1650 case BUILT_IN_MEMSET:
1651 case BUILT_IN_STRCPY:
1652 case BUILT_IN_STRNCPY:
1653 case BUILT_IN_MEMCPY_CHK:
1654 case BUILT_IN_MEMMOVE_CHK:
1655 case BUILT_IN_MEMSET_CHK:
1656 case BUILT_IN_STRCPY_CHK:
1657 case BUILT_IN_STRNCPY_CHK:
1658 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1659 break;
1661 case BUILT_IN_ASSUME_ALIGNED:
1662 val = bit_value_assume_aligned (stmt);
1663 break;
1665 default:;
1668 is_constant = (val.lattice_val == CONSTANT);
1671 if (!is_constant)
1673 /* The statement produced a nonconstant value. If the statement
1674 had UNDEFINED operands, then the result of the statement
1675 should be UNDEFINED. Otherwise, the statement is VARYING. */
1676 if (likelyvalue == UNDEFINED)
1678 val.lattice_val = likelyvalue;
1679 val.mask = double_int_zero;
1681 else
1683 val.lattice_val = VARYING;
1684 val.mask = double_int_minus_one;
1687 val.value = NULL_TREE;
1690 return val;
1693 /* Detects a __builtin_alloca_with_align with constant size argument. Declares
1694 fixed-size array and returns the address, if found, otherwise returns
1695 NULL_TREE. */
1697 static tree
1698 fold_builtin_alloca_with_align (gimple stmt)
1700 unsigned HOST_WIDE_INT size, threshold, n_elem;
1701 tree lhs, arg, block, var, elem_type, array_type;
1703 /* Get lhs. */
1704 lhs = gimple_call_lhs (stmt);
1705 if (lhs == NULL_TREE)
1706 return NULL_TREE;
1708 /* Detect constant argument. */
1709 arg = get_constant_value (gimple_call_arg (stmt, 0));
1710 if (arg == NULL_TREE
1711 || TREE_CODE (arg) != INTEGER_CST
1712 || !host_integerp (arg, 1))
1713 return NULL_TREE;
1715 size = TREE_INT_CST_LOW (arg);
1717 /* Heuristic: don't fold large allocas. */
1718 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
1719 /* In case the alloca is located at function entry, it has the same lifetime
1720 as a declared array, so we allow a larger size. */
1721 block = gimple_block (stmt);
1722 if (!(cfun->after_inlining
1723 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1724 threshold /= 10;
1725 if (size > threshold)
1726 return NULL_TREE;
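  /* Illustrative example (names invented): a call
       p_1 = __builtin_alloca_with_align (16, 64)
     that passes the size check above is turned into the address of a
     16-element character array whose DECL_ALIGN is 64 bits; ccp_fold_stmt
     then replaces the call with that address.  */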
1728 /* Declare array. */
1729 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1730 n_elem = size * 8 / BITS_PER_UNIT;
1731 array_type = build_array_type_nelts (elem_type, n_elem);
1732 var = create_tmp_var (array_type, NULL);
1733 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
1735 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1736 if (pi != NULL && !pi->pt.anything)
1738 bool singleton_p;
1739 unsigned uid;
1740 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1741 gcc_assert (singleton_p);
1742 SET_DECL_PT_UID (var, uid);
1746 /* Fold alloca to the address of the array. */
1747 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1750 /* Fold the stmt at *GSI with CCP specific information that propagating
1751 and regular folding does not catch. */
1753 static bool
1754 ccp_fold_stmt (gimple_stmt_iterator *gsi)
1756 gimple stmt = gsi_stmt (*gsi);
1758 switch (gimple_code (stmt))
1760 case GIMPLE_COND:
1762 prop_value_t val;
1763 /* Statement evaluation will handle type mismatches in constants
1764 more gracefully than the final propagation. This allows us to
1765 fold more conditionals here. */
1766 val = evaluate_stmt (stmt);
1767 if (val.lattice_val != CONSTANT
1768 || !double_int_zero_p (val.mask))
1769 return false;
1771 if (dump_file)
1773 fprintf (dump_file, "Folding predicate ");
1774 print_gimple_expr (dump_file, stmt, 0, 0);
1775 fprintf (dump_file, " to ");
1776 print_generic_expr (dump_file, val.value, 0);
1777 fprintf (dump_file, "\n");
1780 if (integer_zerop (val.value))
1781 gimple_cond_make_false (stmt);
1782 else
1783 gimple_cond_make_true (stmt);
1785 return true;
1788 case GIMPLE_CALL:
1790 tree lhs = gimple_call_lhs (stmt);
1791 tree val;
1792 tree argt;
1793 bool changed = false;
1794 unsigned i;
1796 /* If the call was folded into a constant make sure it goes
1797 away even if we cannot propagate into all uses because of
1798 type issues. */
1799 if (lhs
1800 && TREE_CODE (lhs) == SSA_NAME
1801 && (val = get_constant_value (lhs)))
1803 tree new_rhs = unshare_expr (val);
1804 bool res;
1805 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1806 TREE_TYPE (new_rhs)))
1807 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1808 res = update_call_from_tree (gsi, new_rhs);
1809 gcc_assert (res);
1810 return true;
1813 /* Internal calls provide no argument types, so the extra laxity
1814 for normal calls does not apply. */
1815 if (gimple_call_internal_p (stmt))
1816 return false;
1818 /* The heuristic of fold_builtin_alloca_with_align differs before and
1819 after inlining, so we don't require the arg to be changed into a
1820 constant for folding, but just to be constant. */
1821 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1823 tree new_rhs = fold_builtin_alloca_with_align (stmt);
1824 if (new_rhs)
1826 bool res = update_call_from_tree (gsi, new_rhs);
1827 gcc_assert (res);
1828 return true;
1832 /* Propagate into the call arguments. Compared to replace_uses_in
1833 this can use the argument slot types for type verification
1834 instead of the current argument type. We also can safely
1835 drop qualifiers here as we are dealing with constants anyway. */
1836 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
1837 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1838 ++i, argt = TREE_CHAIN (argt))
1840 tree arg = gimple_call_arg (stmt, i);
1841 if (TREE_CODE (arg) == SSA_NAME
1842 && (val = get_constant_value (arg))
1843 && useless_type_conversion_p
1844 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
1845 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
1847 gimple_call_set_arg (stmt, i, unshare_expr (val));
1848 changed = true;
1852 return changed;
1855 case GIMPLE_ASSIGN:
1857 tree lhs = gimple_assign_lhs (stmt);
1858 tree val;
1860 /* If we have a load that turned out to be constant replace it
1861 as we cannot propagate into all uses in all cases. */
1862 if (gimple_assign_single_p (stmt)
1863 && TREE_CODE (lhs) == SSA_NAME
1864 && (val = get_constant_value (lhs)))
1866 tree rhs = unshare_expr (val);
1867 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1868 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1869 gimple_assign_set_rhs_from_tree (gsi, rhs);
1870 return true;
1873 return false;
1876 default:
1877 return false;
1881 /* Visit the assignment statement STMT. Set the value of its LHS to the
1882 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1883 creates virtual definitions, set the value of each new name to that
1884 of the RHS (if we can derive a constant out of the RHS).
1885 Value-returning call statements also perform an assignment, and
1886 are handled here. */
1888 static enum ssa_prop_result
1889 visit_assignment (gimple stmt, tree *output_p)
1891 prop_value_t val;
1892 enum ssa_prop_result retval;
1894 tree lhs = gimple_get_lhs (stmt);
1896 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1897 || gimple_call_lhs (stmt) != NULL_TREE);
1899 if (gimple_assign_single_p (stmt)
1900 && gimple_assign_rhs_code (stmt) == SSA_NAME)
1901 /* For a simple copy operation, we copy the lattice values. */
1902 val = *get_value (gimple_assign_rhs1 (stmt));
1903 else
1904 /* Evaluate the statement, which could be
1905 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1906 val = evaluate_stmt (stmt);
1908 retval = SSA_PROP_NOT_INTERESTING;
1910 /* Set the lattice value of the statement's output. */
1911 if (TREE_CODE (lhs) == SSA_NAME)
1913 /* If STMT is an assignment to an SSA_NAME, we only have one
1914 value to set. */
1915 if (set_lattice_value (lhs, val))
1917 *output_p = lhs;
1918 if (val.lattice_val == VARYING)
1919 retval = SSA_PROP_VARYING;
1920 else
1921 retval = SSA_PROP_INTERESTING;
1925 return retval;
1929 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1930 if it can determine which edge will be taken. Otherwise, return
1931 SSA_PROP_VARYING. */
1933 static enum ssa_prop_result
1934 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1936 prop_value_t val;
1937 basic_block block;
1939 block = gimple_bb (stmt);
1940 val = evaluate_stmt (stmt);
1941 if (val.lattice_val != CONSTANT
1942 || !double_int_zero_p (val.mask))
1943 return SSA_PROP_VARYING;
1945 /* Find which edge out of the conditional block will be taken and add it
1946 to the worklist. If no single edge can be determined statically,
1947 return SSA_PROP_VARYING to feed all the outgoing edges to the
1948 propagation engine. */
1949 *taken_edge_p = find_taken_edge (block, val.value);
1950 if (*taken_edge_p)
1951 return SSA_PROP_INTERESTING;
1952 else
1953 return SSA_PROP_VARYING;
1957 /* Evaluate statement STMT. If the statement produces an output value and
1958 its evaluation changes the lattice value of its output, return
1959 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1960 output value.
1962 If STMT is a conditional branch and we can determine its truth
1963 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1964 value, return SSA_PROP_VARYING. */
1966 static enum ssa_prop_result
1967 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1969 tree def;
1970 ssa_op_iter iter;
1972 if (dump_file && (dump_flags & TDF_DETAILS))
1974 fprintf (dump_file, "\nVisiting statement:\n");
1975 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1978 switch (gimple_code (stmt))
1980 case GIMPLE_ASSIGN:
1981 /* If the statement is an assignment that produces a single
1982 output value, evaluate its RHS to see if the lattice value of
1983 its output has changed. */
1984 return visit_assignment (stmt, output_p);
1986 case GIMPLE_CALL:
1987 /* A value-returning call also performs an assignment. */
1988 if (gimple_call_lhs (stmt) != NULL_TREE)
1989 return visit_assignment (stmt, output_p);
1990 break;
1992 case GIMPLE_COND:
1993 case GIMPLE_SWITCH:
1994 /* If STMT is a conditional branch, see if we can determine
1995 which branch will be taken. */
1996 /* FIXME. It appears that we should be able to optimize
1997 computed GOTOs here as well. */
1998 return visit_cond_stmt (stmt, taken_edge_p);
2000 default:
2001 break;
2004 /* Any other kind of statement is not interesting for constant
2005 propagation and, therefore, not worth simulating. */
2006 if (dump_file && (dump_flags & TDF_DETAILS))
2007 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2009 /* Definitions made by statements other than assignments to
2010 SSA_NAMEs represent unknown modifications to their outputs.
2011 Mark them VARYING. */
2012 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2014 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
2015 set_lattice_value (def, v);
2018 return SSA_PROP_VARYING;
2022 /* Main entry point for SSA Conditional Constant Propagation. */
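/* Illustrative sketch, not from the original sources: on input like

       int
       f (int flag)
       {
         int n = 4;
         if (n > 3)
           return n * 10;
         return flag;
       }

   the pass propagates n == 4, folds the condition to true and the
   return value to 40, and the CFG cleanup requested via
   TODO_cleanup_cfg then removes the dead branch.  */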
2024 static unsigned int
2025 do_ssa_ccp (void)
2027 ccp_initialize ();
2028 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
2029 if (ccp_finalize ())
2030 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
2031 else
2032 return 0;
2036 static bool
2037 gate_ccp (void)
2039 return flag_tree_ccp != 0;
2043 struct gimple_opt_pass pass_ccp =
2044 {
2045  {
2046   GIMPLE_PASS,
2047 "ccp", /* name */
2048 gate_ccp, /* gate */
2049 do_ssa_ccp, /* execute */
2050 NULL, /* sub */
2051 NULL, /* next */
2052 0, /* static_pass_number */
2053 TV_TREE_CCP, /* tv_id */
2054 PROP_cfg | PROP_ssa, /* properties_required */
2055 0, /* properties_provided */
2056 0, /* properties_destroyed */
2057 0, /* todo_flags_start */
2058 TODO_verify_ssa
2059 | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
2060  }
2061 };
2065 /* Try to optimize out __builtin_stack_restore. It can be removed
2066 if there is another __builtin_stack_restore in the same basic
2067 block with no calls or ASM_EXPRs in between, or if this block's
2068 only outgoing edge leads to EXIT_BLOCK and there are no calls or
2069 ASM_EXPRs after this __builtin_stack_restore. */
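/* Illustrative sketch, not from the original sources; "use" stands for
   some external function.  The VLA below makes GCC emit a
   __builtin_stack_save / __builtin_stack_restore pair around the inner
   block:

       extern void use (char *);

       void
       g (int n)
       {
         {
           char buf[n];
           use (buf);
         }
       }

   The restore emitted when the inner block ends is followed only by the
   function's exit, with no calls or asms after it, so it can be removed;
   if the matching __builtin_stack_save is then left without uses, it
   goes away as well.  */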
2071 static tree
2072 optimize_stack_restore (gimple_stmt_iterator i)
2074 tree callee;
2075 gimple stmt;
2077 basic_block bb = gsi_bb (i);
2078 gimple call = gsi_stmt (i);
2080 if (gimple_code (call) != GIMPLE_CALL
2081 || gimple_call_num_args (call) != 1
2082 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2083 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2084 return NULL_TREE;
2086 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2088 stmt = gsi_stmt (i);
2089 if (gimple_code (stmt) == GIMPLE_ASM)
2090 return NULL_TREE;
2091 if (gimple_code (stmt) != GIMPLE_CALL)
2092 continue;
2094 callee = gimple_call_fndecl (stmt);
2095 if (!callee
2096 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2097 /* All regular builtins are ok, just obviously not alloca. */
2098 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2099 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
2100 return NULL_TREE;
2102 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2103 goto second_stack_restore;
2106 if (!gsi_end_p (i))
2107 return NULL_TREE;
2109 /* Allow a single successor edge to the exit block, or no successors. */
2110 switch (EDGE_COUNT (bb->succs))
2112 case 0:
2113 break;
2114 case 1:
2115 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2116 return NULL_TREE;
2117 break;
2118 default:
2119 return NULL_TREE;
2121 second_stack_restore:
2123 /* If the saved stack pointer has exactly one use, zap the call to
2124 __builtin_stack_save now; with multiple uses it will be zapped when
2125 its last remaining use is optimized away. Either way, this has no
2126 bearing on removing the call to __builtin_stack_restore. */
2127 if (has_single_use (gimple_call_arg (call, 0)))
2129 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2130 if (is_gimple_call (stack_save))
2132 callee = gimple_call_fndecl (stack_save);
2133 if (callee
2134 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2135 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2137 gimple_stmt_iterator stack_save_gsi;
2138 tree rhs;
2140 stack_save_gsi = gsi_for_stmt (stack_save);
2141 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2142 update_call_from_tree (&stack_save_gsi, rhs);
2147 /* No effect, so the statement will be deleted. */
2148 return integer_zero_node;
2151 /* If the va_list type is a simple pointer and nothing special is needed,
2152 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2153 remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
2154 into a simple pointer assignment. */
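/* Illustrative sketch, not from the original sources: on a target whose
   va_list is a plain character pointer, the builtins in

       void
       h (int first, ...)
       {
         __builtin_va_list ap, ap2;
         __builtin_va_start (ap, first);
         __builtin_va_copy (ap2, ap);
         __builtin_va_end (ap);
         __builtin_va_end (ap2);
       }

   are lowered to ap = __builtin_next_arg (0) and a plain pointer copy
   ap2 = ap, while both va_end calls are deleted.  */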
2156 static tree
2157 optimize_stdarg_builtin (gimple call)
2159 tree callee, lhs, rhs, cfun_va_list;
2160 bool va_list_simple_ptr;
2161 location_t loc = gimple_location (call);
2163 if (gimple_code (call) != GIMPLE_CALL)
2164 return NULL_TREE;
2166 callee = gimple_call_fndecl (call);
2168 cfun_va_list = targetm.fn_abi_va_list (callee);
2169 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2170 && (TREE_TYPE (cfun_va_list) == void_type_node
2171 || TREE_TYPE (cfun_va_list) == char_type_node);
2173 switch (DECL_FUNCTION_CODE (callee))
2175 case BUILT_IN_VA_START:
2176 if (!va_list_simple_ptr
2177 || targetm.expand_builtin_va_start != NULL
2178 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2179 return NULL_TREE;
2181 if (gimple_call_num_args (call) != 2)
2182 return NULL_TREE;
2184 lhs = gimple_call_arg (call, 0);
2185 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2186 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2187 != TYPE_MAIN_VARIANT (cfun_va_list))
2188 return NULL_TREE;
2190 lhs = build_fold_indirect_ref_loc (loc, lhs);
2191 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2192 1, integer_zero_node);
2193 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2194 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2196 case BUILT_IN_VA_COPY:
2197 if (!va_list_simple_ptr)
2198 return NULL_TREE;
2200 if (gimple_call_num_args (call) != 2)
2201 return NULL_TREE;
2203 lhs = gimple_call_arg (call, 0);
2204 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2205 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2206 != TYPE_MAIN_VARIANT (cfun_va_list))
2207 return NULL_TREE;
2209 lhs = build_fold_indirect_ref_loc (loc, lhs);
2210 rhs = gimple_call_arg (call, 1);
2211 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2212 != TYPE_MAIN_VARIANT (cfun_va_list))
2213 return NULL_TREE;
2215 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2216 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2218 case BUILT_IN_VA_END:
2219 /* No effect, so the statement will be deleted. */
2220 return integer_zero_node;
2222 default:
2223 gcc_unreachable ();
2227 /* A simple pass that attempts to fold all builtin functions. This pass
2228 is run after we've propagated as many constants as we can. */
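/* Illustrative sketch, not from the original sources: after constant
   propagation a call such as

       n_1 = __builtin_strlen ("abcd");

   folds to n_1 = 4, and any __builtin_constant_p call still left at
   this point is resolved to 0, since its argument evidently never
   became a compile-time constant.  */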
2230 static unsigned int
2231 execute_fold_all_builtins (void)
2233 bool cfg_changed = false;
2234 basic_block bb;
2235 unsigned int todoflags = 0;
2237 FOR_EACH_BB (bb)
2239 gimple_stmt_iterator i;
2240 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
2242 gimple stmt, old_stmt;
2243 tree callee, result;
2244 enum built_in_function fcode;
2246 stmt = gsi_stmt (i);
2248 if (gimple_code (stmt) != GIMPLE_CALL)
2250 gsi_next (&i);
2251 continue;
2253 callee = gimple_call_fndecl (stmt);
2254 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2256 gsi_next (&i);
2257 continue;
2259 fcode = DECL_FUNCTION_CODE (callee);
2261 result = gimple_fold_builtin (stmt);
2263 if (result)
2264 gimple_remove_stmt_histograms (cfun, stmt);
2266 if (!result)
2267 switch (DECL_FUNCTION_CODE (callee))
2269 case BUILT_IN_CONSTANT_P:
2270 /* Resolve __builtin_constant_p. If it hasn't been
2271 folded to integer_one_node by now, it's fairly
2272 certain that the value simply isn't constant. */
2273 result = integer_zero_node;
2274 break;
2276 case BUILT_IN_ASSUME_ALIGNED:
2277 /* Remove __builtin_assume_aligned. */
2278 result = gimple_call_arg (stmt, 0);
2279 break;
2281 case BUILT_IN_STACK_RESTORE:
2282 result = optimize_stack_restore (i);
2283 if (result)
2284 break;
2285 gsi_next (&i);
2286 continue;
2288 case BUILT_IN_VA_START:
2289 case BUILT_IN_VA_END:
2290 case BUILT_IN_VA_COPY:
2291 /* These shouldn't be folded before pass_stdarg. */
2292 result = optimize_stdarg_builtin (stmt);
2293 if (result)
2294 break;
2295 /* FALLTHRU */
2297 default:
2298 gsi_next (&i);
2299 continue;
2302 if (dump_file && (dump_flags & TDF_DETAILS))
2304 fprintf (dump_file, "Simplified\n ");
2305 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2308 old_stmt = stmt;
2309 if (!update_call_from_tree (&i, result))
2311 gimplify_and_update_call_from_tree (&i, result);
2312 todoflags |= TODO_update_address_taken;
2315 stmt = gsi_stmt (i);
2316 update_stmt (stmt);
2318 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2319 && gimple_purge_dead_eh_edges (bb))
2320 cfg_changed = true;
2322 if (dump_file && (dump_flags & TDF_DETAILS))
2324 fprintf (dump_file, "to\n ");
2325 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2326 fprintf (dump_file, "\n");
2329 /* Retry the same statement if it changed into another
2330 builtin; there might be new folding opportunities now. */
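/* Illustrative sketch, not from the original sources: gimple_fold_builtin
   may turn sprintf (buf, "%s", s) into a call to strcpy; retrying the
   resulting statement can then fold it further, e.g. into memcpy when
   the length of S is known at compile time.  */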
2331 if (gimple_code (stmt) != GIMPLE_CALL)
2333 gsi_next (&i);
2334 continue;
2336 callee = gimple_call_fndecl (stmt);
2337 if (!callee
2338 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2339 || DECL_FUNCTION_CODE (callee) == fcode)
2340 gsi_next (&i);
2344 /* Delete unreachable blocks. */
2345 if (cfg_changed)
2346 todoflags |= TODO_cleanup_cfg;
2348 return todoflags;
2352 struct gimple_opt_pass pass_fold_builtins =
2353 {
2354  {
2355   GIMPLE_PASS,
2356 "fab", /* name */
2357 NULL, /* gate */
2358 execute_fold_all_builtins, /* execute */
2359 NULL, /* sub */
2360 NULL, /* next */
2361 0, /* static_pass_number */
2362 TV_NONE, /* tv_id */
2363 PROP_cfg | PROP_ssa, /* properties_required */
2364 0, /* properties_provided */
2365 0, /* properties_destroyed */
2366 0, /* todo_flags_start */
2367 TODO_verify_ssa
2368 | TODO_update_ssa /* todo_flags_finish */
2369  }
2370 };