gcc/tree-ssa-ccp.c
/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
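   As an illustrative sketch (not code from this file), given

	x_1 = 4;
	y_2 = x_1 + 6;
	if (y_2 > 5)
	  ...

   the pass records CONSTANT 4 for x_1, evaluates y_2 to CONSTANT 10,
   folds the predicate to true and marks the false outgoing edge as
   not executable.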
   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
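/* Illustrative example (not used by the pass itself): lattice_val ==
   CONSTANT with value == 8 and mask == 3 says the low two bits are
   unknown while every other bit is known, i.e. it stands for any X
   with (X & ~3) == (8 & ~3) -- the set { 8, 9, 10, 11 }.  A zero mask
   means the value is fully known.  */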
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  /* The prefix and "CONSTANT " were already printed above;
	     print only the value and the mask here.  */
	  fprintf (outf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

     and we set value of y to NaN.  This causes value of x to be set to NaN.
     When we later determine that y is in fact VARYING, fold uses the fact
     that HONOR_NANS is false, and we try to change the value of x to 0,
     causing an ICE.  With HONOR_NANS being false, the real appearance of
     NaN would cause undefined behavior, though, so claiming that y (and x)
     are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
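/* Illustrative examples (not part of the pass): UNDEFINED -> CONSTANT 4
   and CONSTANT 4 -> VARYING are monotonic and therefore valid, while
   CONSTANT 4 -> CONSTANT 5 with zero masks is not, since the two values
   disagree in bits both claim to know.  CONSTANT 4 with mask 0 ->
   CONSTANT 4 with mask 3 is fine: the still-known bits agree.  */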
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }
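  /* Illustrative example: going from old CONSTANT 4 (0b100) to new
     CONSTANT 6 (0b110), both with zero masks, computes diff == 0b010,
     so the new mask gains bit 1 and the pair (6, mask 0b010) covers
     both 4 and 6 instead of flip-flopping between two constants.  */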
  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constant values here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_object_alignment_1 (TREE_OPERAND (expr, 0), &align, &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
			  ? double_int_mask (TYPE_PRECISION (type))
			  : double_int_minus_one,
			  uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
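/* Illustrative example: for an address with ALIGN == 64 bits and
   BITPOS == 8 bits, the mask clears the low three bits
   (64 / BITS_PER_UNIT - 1 == 7), so the result is value 1 with mask
   ~7 (truncated to the pointer precision): the address is known to be
   1 modulo 8, with all higher bits unknown.  */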
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to skip the lhs, fndecl and static chain; otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }
  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we always want to
	     simulate it at least once.  Failure to do so means that
	     its outgoing edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the value of SSA names to VARYING when
   the total number of SSA names analyzed exceeds the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align > 1)
	set_ptr_info_alignment (get_ptr_info (name), align,
				TREE_INT_CST_LOW (val->value) & (align - 1));
    }
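  /* Illustrative example: a pointer with value 21 and mask ~7 has its
     low three bits known, so tem & -tem yields align == 8 and the
     recorded misalignment is 21 & 7 == 5: the pointer is 5 modulo 8.  */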
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
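      /* Illustrative example: meeting 4 (0b100) and 6 (0b110), both
	 with zero masks, sets the mask to 0b010: the result is "1x0",
	 bit 1 unknown and all other bits known, covering {4, 6}.  */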
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = TYPE_UNSIGNED (rtype);
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = TYPE_UNSIGNED (type);
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = TYPE_UNSIGNED (type);
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;
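      /* Illustrative example: r1 = (val 0b1100, mask 0b0011), i.e. the
	 set {12..15}, ANDed with the exact constant r2 = 0b1010 gives
	 mask 0b0010 and val 0b1000: every possible result is 8 or 10,
	 exactly what (val 0b1000, bit 1 unknown) describes.  */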
    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 their sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
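      /* Illustrative example: adding the exact constant 1 to a value
	 known to be 0 or 1 (val 0, mask 1) computes lo == 1 and
	 hi == 2, so lo ^ hi == 3 and the result is (val 1, mask 3):
	 only bits above the possible carry remain known.  */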
    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }
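      /* Illustrative example: multiplying the exact constant 4 (two
	 trailing zeros) by a value whose low three bits are known zero
	 (val 0, mask ~7) yields (val 0, mask ~31): the product is
	 known to be a multiple of 32.  */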
    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = TYPE_UNSIGNED (r1type);

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
	return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_double_int (ptrval), ptrval.mask,
		     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
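/* Illustrative example: for p2_2 = __builtin_assume_aligned (p1_1, 16, 4)
   the pointer value is ANDed with -16, which clears the low four bits
   of the mask, and the misalignment 4 is then ORed back into the
   value: p2_2 becomes known to be 4 modulo 16.  */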
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) align)
			      / BITS_PER_UNIT - 1));
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt);
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}
/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
{
  gimple stmt, clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var), NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (*visited == NULL)
	  *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);

	slot = (gimple *) htab_find_slot (*visited, stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else
      gcc_assert (is_gimple_debug (stmt));
}
/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR)
	return;

      *i = gsi_last_bb (dom);
    }
}
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator
   when a previous pass (such as DOM) duplicated it along multiple paths
   to a BB.  In that case the function gives up without inserting the
   clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  htab_t visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  if (visited != NULL)
    htab_delete (visited);
}
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   a fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !host_integerp (arg, 1))
    return NULL_TREE;

  size = TREE_INT_CST_LOW (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT) PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type, NULL);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
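/* Illustrative sketch (assuming the default PARAM_LARGE_STACK_FRAME):
   a call such as p_1 = __builtin_alloca_with_align (16, 64) that
   passes the size heuristic is replaced by the address of a local
   16-element byte array whose DECL_ALIGN is taken from the second
   argument, letting the stack slot be allocated statically.  */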
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
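
/* An illustrative sketch of what visit_assignment computes; the SSA
   names are hypothetical, not taken from this file:

     y_3 = x_2;	      y_3 inherits x_2's lattice value (simple copy)
     z_4 = x_2 + 1;   evaluate_stmt folds the RHS, so z_4 becomes
		      CONSTANT whenever x_2 is CONSTANT

   Only when set_lattice_value reports a change is the result deemed
   interesting; since the lattice value of a name only ever lowers,
   each name settles after a bounded number of visits.  */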

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
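
/* An illustrative sketch: if the predicate below evaluates to a known
   constant (hypothetical names, not from this file),

     if (a_5 != 0)	with a_5 known to be CONSTANT 1,

   find_taken_edge hands back just the true edge, so the propagator
   never simulates the dead arm.  */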

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
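
/* An illustrative sketch of the fall-through case above: a statement
   such as a GIMPLE_ASM that defines SSA names offers no value CCP can
   track, so every definition it makes is dropped to VARYING (value
   unknown, no known mask bits) and simulation simply continues through
   its uses.  */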

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow zero successors, or a single successor that is the exit
     block.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
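
/* An illustrative sketch of the transformation above; the names are
   hypothetical, not taken from this file:

     p_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (p_1);   <-- removable: no call or asm
     ...				  occurs before the next
     __builtin_stack_restore (p_2);	  restore (or the function exit)

   If p_1 has no other use, the paired __builtin_stack_save is also
   rewritten to a dead constant so DCE can delete it.  */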

/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   delete __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
   into a simple pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
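
/* An illustrative sketch, for a target whose va_list ABI type is a
   plain char or void pointer (hypothetical variables, not from this
   file):

     __builtin_va_start (&ap, 0);  -->  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);	   -->  d = s;
     __builtin_va_end (&ap);	   -->  deleted as a no-op  */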

/* Attempt to make the basic block containing the __builtin_unreachable
   call at I unreachable by changing the incoming jumps.  Return true
   if at least one jump was changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first
	 statement in the block.  We rely on DCE to remove stmts without
	 side-effects before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (stmt);
	  else
	    gcc_unreachable ();
	}
      else
	{
	  /* TODO: handle other cases, e.g. a switch statement.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}
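
/* An illustrative sketch of the rewrite above (hypothetical GIMPLE,
   not from this file):

     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;
     ...
   <bb 3>:
     __builtin_unreachable ();

   The true edge into bb 3 cannot be taken in any valid execution, so
   the predecessor's condition is forced false and bb 3 is left for
   CFG cleanup to delete.  */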

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_ASSUME_ALIGNED:
		/* Remove __builtin_assume_aligned.  */
		result = gimple_call_arg (stmt, 0);
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_UNREACHABLE:
		if (optimize_unreachable (i))
		  cfg_changed = true;
		break;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (result == NULL_TREE)
	    break;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
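
/* An illustrative sketch of foldings this pass performs once constant
   propagation is exhausted (hypothetical SSA names, not from this
   file):

     b_2 = __builtin_constant_p (n_1);		-->  b_2 = 0;
     p_3 = __builtin_assume_aligned (p_1, 16);	-->  p_3 = p_1;  */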

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_update_ssa			/* todo_flags_finish */
 }
};