1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four-level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
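   As a small illustration (a sketch, not output from an actual run),
   consider the SSA fragment

     x_1 = 4;
     y_2 = x_1 + 5;

   Both names start out UNINITIALIZED.  Visiting the first statement
   moves x_1 to CONSTANT 4; visiting the second then moves y_2 to
   CONSTANT 9.  A name defined by, say, a volatile load would instead
   go straight to VARYING.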
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main shortcuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
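   For instance (an illustrative sketch), if CONST_VAL records
   CONSTANT 10 for x_2, the final substitution rewrites

     y_3 = x_2 + 1;	into	y_3 = 11;

   and conditionals folded to a constant predicate let the subsequent
   CFG cleanup remove the edges that can never be taken.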
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
113 For instance, consider the following code fragment:
115 struct A a;
116 const int B = 42;
118 void foo (int i)
120 if (i > 10)
121 a.a = 42;
122 else
124 a.b = 21;
125 a.a = a.b + 21;
128 if (a.a != B)
129 never_executed ();
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
139 # a_5 = VDEF <a_4>
140 a.a = 2;
142 # VUSE <a_5>
143 x_3 = a.b;
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
152 int A;
154 foo (int i)
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
160 # VUSE <A_5>
161 A.0_6 = A;
163 return A.0_6;
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
177 References:
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "flags.h"
194 #include "rtl.h"
195 #include "tm_p.h"
196 #include "ggc.h"
197 #include "basic-block.h"
198 #include "output.h"
199 #include "expr.h"
200 #include "function.h"
201 #include "diagnostic.h"
202 #include "timevar.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
209 #include "target.h"
210 #include "toplev.h"
211 #include "dbgcnt.h"
214 /* Possible lattice values. */
215 typedef enum
216 {
217 UNINITIALIZED,
218 UNDEFINED,
219 CONSTANT,
220 VARYING
221 } ccp_lattice_t;
223 /* Array of propagated constant values. After propagation,
224 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
225 the constant is held in an SSA name representing a memory store
226 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
227 memory reference used to store (i.e., the LHS of the assignment
228 doing the store). */
229 static prop_value_t *const_val;
231 static void canonicalize_float_value (prop_value_t *);
233 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
235 static void
236 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
238 switch (val.lattice_val)
240 case UNINITIALIZED:
241 fprintf (outf, "%sUNINITIALIZED", prefix);
242 break;
243 case UNDEFINED:
244 fprintf (outf, "%sUNDEFINED", prefix);
245 break;
246 case VARYING:
247 fprintf (outf, "%sVARYING", prefix);
248 break;
249 case CONSTANT:
250 fprintf (outf, "%sCONSTANT ", prefix);
251 print_generic_expr (outf, val.value, dump_flags);
252 break;
253 default:
254 gcc_unreachable ();
259 /* Print lattice value VAL to stderr. */
261 void debug_lattice_value (prop_value_t val);
263 void
264 debug_lattice_value (prop_value_t val)
266 dump_lattice_value (stderr, "", val);
267 fprintf (stderr, "\n");
272 /* If SYM is a constant variable with known value, return the value.
273 NULL_TREE is returned otherwise. */
275 tree
276 get_symbol_constant_value (tree sym)
278 if (TREE_STATIC (sym)
279 && TREE_READONLY (sym))
281 tree val = DECL_INITIAL (sym);
282 if (val)
284 STRIP_USELESS_TYPE_CONVERSION (val);
285 if (is_gimple_min_invariant (val))
287 if (TREE_CODE (val) == ADDR_EXPR)
289 tree base = get_base_address (TREE_OPERAND (val, 0));
290 if (base && TREE_CODE (base) == VAR_DECL)
291 add_referenced_var (base);
293 return val;
296 /* Variables declared 'const' without an initializer
297 have zero as the initializer if they may not be
298 overridden at link or run time. */
299 if (!val
300 && !DECL_EXTERNAL (sym)
301 && targetm.binds_local_p (sym)
302 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
303 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
304 return fold_convert (TREE_TYPE (sym), integer_zero_node);
307 return NULL_TREE;
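/* Example (an illustrative sketch, not from the sources): given

     static const int answer = 42;
     ... x_1 = answer; ...

   get_symbol_constant_value returns 42 for 'answer', so the load can
   be replaced by the constant.  An 'extern const int' without a
   visible initializer yields NULL_TREE instead, since its value may
   be provided by another translation unit.  */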
310 /* Compute a default value for variable VAR and store it in the
311 CONST_VAL array. The following rules are used to get default
312 values:
314 1- Global and static variables that are declared constant are
315 considered CONSTANT.
317 2- Any other value is considered UNDEFINED. This is useful when
318 considering PHI nodes. PHI arguments that are undefined do not
319 change the constant value of the PHI node, which allows for more
320 constants to be propagated.
322 3- Variables defined by statements other than assignments and PHI
323 nodes are considered VARYING.
325 4- Initial values of variables that are not GIMPLE registers are
326 considered VARYING. */
328 static prop_value_t
329 get_default_value (tree var)
331 tree sym = SSA_NAME_VAR (var);
332 prop_value_t val = { UNINITIALIZED, NULL_TREE };
333 gimple stmt;
335 stmt = SSA_NAME_DEF_STMT (var);
337 if (gimple_nop_p (stmt))
339 /* Variables defined by an empty statement are those used
340 before being initialized. If VAR is a local variable, we
341 can assume initially that it is UNDEFINED, otherwise we must
342 consider it VARYING. */
343 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
344 val.lattice_val = UNDEFINED;
345 else
346 val.lattice_val = VARYING;
348 else if (is_gimple_assign (stmt)
349 /* Value-returning GIMPLE_CALL statements assign to
350 a variable, and are treated similarly to GIMPLE_ASSIGN. */
351 || (is_gimple_call (stmt)
352 && gimple_call_lhs (stmt) != NULL_TREE)
353 || gimple_code (stmt) == GIMPLE_PHI)
355 tree cst;
356 if (gimple_assign_single_p (stmt)
357 && DECL_P (gimple_assign_rhs1 (stmt))
358 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
360 val.lattice_val = CONSTANT;
361 val.value = cst;
363 else
364 /* Any other variable defined by an assignment or a PHI node
365 is considered UNDEFINED. */
366 val.lattice_val = UNDEFINED;
368 else
370 /* Otherwise, VAR will never take on a constant value. */
371 val.lattice_val = VARYING;
374 return val;
378 /* Get the constant value associated with variable VAR. */
380 static inline prop_value_t *
381 get_value (tree var)
383 prop_value_t *val;
385 if (const_val == NULL)
386 return NULL;
388 val = &const_val[SSA_NAME_VERSION (var)];
389 if (val->lattice_val == UNINITIALIZED)
390 *val = get_default_value (var);
392 canonicalize_float_value (val);
394 return val;
397 /* Sets the value associated with VAR to VARYING. */
399 static inline void
400 set_value_varying (tree var)
402 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
404 val->lattice_val = VARYING;
405 val->value = NULL_TREE;
408 /* For float types, modify the value of VAL to make ccp work correctly
409 for non-standard values (-0, NaN):
411 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
412 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
413 This is to fix the following problem (see PR 29921): Suppose we have
415 x = 0.0 * y
417 and we set the value of y to NaN.  This causes the value of x to be set to NaN.
418 When we later determine that y is in fact VARYING, fold uses the fact
419 that HONOR_NANS is false, and we try to change the value of x to 0,
420 causing an ICE. With HONOR_NANS being false, the real appearance of
421 NaN would cause undefined behavior, though, so claiming that y (and x)
422 are UNDEFINED initially is correct. */
424 static void
425 canonicalize_float_value (prop_value_t *val)
427 enum machine_mode mode;
428 tree type;
429 REAL_VALUE_TYPE d;
431 if (val->lattice_val != CONSTANT
432 || TREE_CODE (val->value) != REAL_CST)
433 return;
435 d = TREE_REAL_CST (val->value);
436 type = TREE_TYPE (val->value);
437 mode = TYPE_MODE (type);
439 if (!HONOR_SIGNED_ZEROS (mode)
440 && REAL_VALUE_MINUS_ZERO (d))
442 val->value = build_real (type, dconst0);
443 return;
446 if (!HONOR_NANS (mode)
447 && REAL_VALUE_ISNAN (d))
449 val->lattice_val = UNDEFINED;
450 val->value = NULL;
451 return;
455 /* Set the value for variable VAR to NEW_VAL. Return true if the new
456 value is different from VAR's previous value. */
458 static bool
459 set_lattice_value (tree var, prop_value_t new_val)
461 prop_value_t *old_val = get_value (var);
463 canonicalize_float_value (&new_val);
465 /* Lattice transitions must always be monotonically increasing in
466 value. If *OLD_VAL and NEW_VAL are the same, return false to
467 inform the caller that this was a non-transition. */
469 gcc_assert (old_val->lattice_val < new_val.lattice_val
470 || (old_val->lattice_val == new_val.lattice_val
471 && ((!old_val->value && !new_val.value)
472 || operand_equal_p (old_val->value, new_val.value, 0))));
474 if (old_val->lattice_val != new_val.lattice_val)
476 if (dump_file && (dump_flags & TDF_DETAILS))
478 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
479 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
482 *old_val = new_val;
484 gcc_assert (new_val.lattice_val != UNDEFINED);
485 return true;
488 return false;
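/* For instance, legal transitions are monotonic only:
   UNDEFINED -> CONSTANT -> VARYING, or a state to itself with an
   equal value.  Lowering a name from CONSTANT 5 back to CONSTANT 4,
   or from VARYING to CONSTANT, would trip the assertion above.  */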
492 /* Return the likely CCP lattice value for STMT.
494 If STMT has no operands, then return CONSTANT.
496 Else if undefinedness of operands of STMT causes its value to be
497 undefined, then return UNDEFINED.
499 Else if any operands of STMT are constants, then return CONSTANT.
501 Else return VARYING. */
503 static ccp_lattice_t
504 likely_value (gimple stmt)
506 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
507 tree use;
508 ssa_op_iter iter;
509 unsigned i;
511 enum gimple_code code = gimple_code (stmt);
513 /* This function appears to be called only for assignments, calls,
514 conditionals, and switches, due to the logic in visit_stmt. */
515 gcc_assert (code == GIMPLE_ASSIGN
516 || code == GIMPLE_CALL
517 || code == GIMPLE_COND
518 || code == GIMPLE_SWITCH);
520 /* If the statement has volatile operands, it won't fold to a
521 constant value. */
522 if (gimple_has_volatile_ops (stmt))
523 return VARYING;
525 /* Arrive here for more complex cases. */
526 has_constant_operand = false;
527 has_undefined_operand = false;
528 all_undefined_operands = true;
529 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
531 prop_value_t *val = get_value (use);
533 if (val->lattice_val == UNDEFINED)
534 has_undefined_operand = true;
535 else
536 all_undefined_operands = false;
538 if (val->lattice_val == CONSTANT)
539 has_constant_operand = true;
542 /* There may be constants in regular rhs operands. For calls we
543 have to ignore lhs, fndecl and static chain, otherwise only
544 the lhs. */
545 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
546 i < gimple_num_ops (stmt); ++i)
548 tree op = gimple_op (stmt, i);
549 if (!op || TREE_CODE (op) == SSA_NAME)
550 continue;
551 if (is_gimple_min_invariant (op))
552 has_constant_operand = true;
555 /* If the operation combines operands like COMPLEX_EXPR, make sure not
556 to mark the result UNDEFINED if only one part of the result is
557 undefined. */
558 if (has_undefined_operand && all_undefined_operands)
559 return UNDEFINED;
560 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
562 switch (gimple_assign_rhs_code (stmt))
564 /* Unary operators are handled with all_undefined_operands. */
565 case PLUS_EXPR:
566 case MINUS_EXPR:
567 case POINTER_PLUS_EXPR:
568 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
569 Not bitwise operators, one VARYING operand may specify the
570 result completely. Not logical operators for the same reason.
571 Not COMPLEX_EXPR as one VARYING operand makes the result partly
572 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
573 the undefined operand may be promoted. */
574 return UNDEFINED;
576 default:
580 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
581 fall back to VARYING even if there were CONSTANT operands.  */
582 if (has_undefined_operand)
583 return VARYING;
585 /* We do not consider virtual operands here -- load from read-only
586 memory may have only VARYING virtual operands, but still be
587 constant. */
588 if (has_constant_operand
589 || gimple_references_memory_p (stmt))
590 return CONSTANT;
592 return VARYING;
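/* A sketch of the heuristic above: for 'z_4 = x_2 + y_3' with x_2
   CONSTANT 1 and y_3 VARYING, likely_value returns CONSTANT, so the
   caller still tries to fold the statement; only when folding fails
   does the result become VARYING.  If both operands were UNDEFINED,
   the sum itself would be UNDEFINED.  */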
595 /* Returns true if STMT cannot be constant. */
597 static bool
598 surely_varying_stmt_p (gimple stmt)
600 /* If the statement has operands that we cannot handle, it cannot be
601 constant. */
602 if (gimple_has_volatile_ops (stmt))
603 return true;
605 /* If it is a call that does not return a value, or a direct
606 call to a non-builtin function, it is varying.  */
607 if (is_gimple_call (stmt))
609 tree fndecl;
610 if (!gimple_call_lhs (stmt)
611 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
612 && !DECL_BUILT_IN (fndecl)))
613 return true;
616 /* Any other store operation is not interesting. */
617 else if (gimple_vdef (stmt))
618 return true;
620 /* Anything other than assignments and conditional jumps are not
621 interesting for CCP. */
622 if (gimple_code (stmt) != GIMPLE_ASSIGN
623 && gimple_code (stmt) != GIMPLE_COND
624 && gimple_code (stmt) != GIMPLE_SWITCH
625 && gimple_code (stmt) != GIMPLE_CALL)
626 return true;
628 return false;
631 /* Initialize local data structures for CCP. */
633 static void
634 ccp_initialize (void)
636 basic_block bb;
638 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
640 /* Initialize simulation flags for PHI nodes and statements. */
641 FOR_EACH_BB (bb)
643 gimple_stmt_iterator i;
645 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
647 gimple stmt = gsi_stmt (i);
648 bool is_varying = surely_varying_stmt_p (stmt);
650 if (is_varying)
652 tree def;
653 ssa_op_iter iter;
655 /* If the statement will not produce a constant, mark
656 all its outputs VARYING. */
657 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
658 set_value_varying (def);
660 prop_set_simulate_again (stmt, !is_varying);
664 /* Now process PHI nodes. We never clear the simulate_again flag on
665 phi nodes, since we do not know which edges are executable yet,
666 except for phi nodes for virtual operands when we do not do store ccp. */
667 FOR_EACH_BB (bb)
669 gimple_stmt_iterator i;
671 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
673 gimple phi = gsi_stmt (i);
675 if (!is_gimple_reg (gimple_phi_result (phi)))
676 prop_set_simulate_again (phi, false);
677 else
678 prop_set_simulate_again (phi, true);
683 /* Debug count support.  Reset the lattice values of SSA names to
684 VARYING once the total number of SSA names analyzed goes
685 beyond the specified debug count.  */
687 static void
688 do_dbg_cnt (void)
690 unsigned i;
691 for (i = 0; i < num_ssa_names; i++)
693 if (!dbg_cnt (ccp))
695 const_val[i].lattice_val = VARYING;
696 const_val[i].value = NULL_TREE;
702 /* Do final substitution of propagated values, cleanup the flowgraph and
703 free allocated storage.
705 Return TRUE when something was optimized. */
707 static bool
708 ccp_finalize (void)
710 bool something_changed;
712 do_dbg_cnt ();
713 /* Perform substitutions based on the known constant values. */
714 something_changed = substitute_and_fold (const_val, false);
716 free (const_val);
717 const_val = NULL;
718 return something_changed;
722 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
723 in VAL1.
725 any M UNDEFINED = any
726 any M VARYING = VARYING
727 Ci M Cj = Ci if (i == j)
728 Ci M Cj = VARYING if (i != j)
731 static void
732 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
734 if (val1->lattice_val == UNDEFINED)
736 /* UNDEFINED M any = any */
737 *val1 = *val2;
739 else if (val2->lattice_val == UNDEFINED)
741 /* any M UNDEFINED = any
742 Nothing to do. VAL1 already contains the value we want. */
745 else if (val1->lattice_val == VARYING
746 || val2->lattice_val == VARYING)
748 /* any M VARYING = VARYING. */
749 val1->lattice_val = VARYING;
750 val1->value = NULL_TREE;
752 else if (val1->lattice_val == CONSTANT
753 && val2->lattice_val == CONSTANT
754 && simple_cst_equal (val1->value, val2->value) == 1)
756 /* Ci M Cj = Ci if (i == j)
757 Ci M Cj = VARYING if (i != j)
759 If these two values come from memory stores, make sure that
760 they come from the same memory reference. */
761 val1->lattice_val = CONSTANT;
762 val1->value = val1->value;
764 else
766 /* Any other combination is VARYING. */
767 val1->lattice_val = VARYING;
768 val1->value = NULL_TREE;
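/* A worked sketch of the meet: for a_3 = PHI <a_1, a_2> with a_1
   CONSTANT 7 and a_2 still UNDEFINED, the meet optimistically keeps
   CONSTANT 7.  If a_2 later becomes CONSTANT 9, the next visit meets
   7 with 9 and the result drops to VARYING.  */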
773 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
774 lattice values to determine PHI_NODE's lattice value. The value of a
775 PHI node is determined by calling ccp_lattice_meet with all the arguments
776 of the PHI node that are incoming via executable edges. */
778 static enum ssa_prop_result
779 ccp_visit_phi_node (gimple phi)
781 unsigned i;
782 prop_value_t *old_val, new_val;
784 if (dump_file && (dump_flags & TDF_DETAILS))
786 fprintf (dump_file, "\nVisiting PHI node: ");
787 print_gimple_stmt (dump_file, phi, 0, dump_flags);
790 old_val = get_value (gimple_phi_result (phi));
791 switch (old_val->lattice_val)
793 case VARYING:
794 return SSA_PROP_VARYING;
796 case CONSTANT:
797 new_val = *old_val;
798 break;
800 case UNDEFINED:
801 new_val.lattice_val = UNDEFINED;
802 new_val.value = NULL_TREE;
803 break;
805 default:
806 gcc_unreachable ();
809 for (i = 0; i < gimple_phi_num_args (phi); i++)
811 /* Compute the meet operator over all the PHI arguments flowing
812 through executable edges. */
813 edge e = gimple_phi_arg_edge (phi, i);
815 if (dump_file && (dump_flags & TDF_DETAILS))
817 fprintf (dump_file,
818 "\n Argument #%d (%d -> %d %sexecutable)\n",
819 i, e->src->index, e->dest->index,
820 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
823 /* If the incoming edge is executable, compute the meet operator for
824 the existing value of the PHI node and the current PHI argument. */
825 if (e->flags & EDGE_EXECUTABLE)
827 tree arg = gimple_phi_arg (phi, i)->def;
828 prop_value_t arg_val;
830 if (is_gimple_min_invariant (arg))
832 arg_val.lattice_val = CONSTANT;
833 arg_val.value = arg;
835 else
836 arg_val = *(get_value (arg));
838 ccp_lattice_meet (&new_val, &arg_val);
840 if (dump_file && (dump_flags & TDF_DETAILS))
842 fprintf (dump_file, "\t");
843 print_generic_expr (dump_file, arg, dump_flags);
844 dump_lattice_value (dump_file, "\tValue: ", arg_val);
845 fprintf (dump_file, "\n");
848 if (new_val.lattice_val == VARYING)
849 break;
853 if (dump_file && (dump_flags & TDF_DETAILS))
855 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
856 fprintf (dump_file, "\n\n");
859 /* Make the transition to the new value. */
860 if (set_lattice_value (gimple_phi_result (phi), new_val))
862 if (new_val.lattice_val == VARYING)
863 return SSA_PROP_VARYING;
864 else
865 return SSA_PROP_INTERESTING;
867 else
868 return SSA_PROP_NOT_INTERESTING;
871 /* Return true if we may propagate the address expression ADDR into the
872 dereference DEREF and cancel them. */
874 bool
875 may_propagate_address_into_dereference (tree addr, tree deref)
877 gcc_assert (INDIRECT_REF_P (deref)
878 && TREE_CODE (addr) == ADDR_EXPR);
880 /* Don't propagate if ADDR's operand has incomplete type. */
881 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
882 return false;
884 /* If the address is invariant then we do not need to preserve restrict
885 qualifications. But we do need to preserve volatile qualifiers until
886 we can annotate the folded dereference itself properly. */
887 if (is_gimple_min_invariant (addr)
888 && (!TREE_THIS_VOLATILE (deref)
889 || TYPE_VOLATILE (TREE_TYPE (addr))))
890 return useless_type_conversion_p (TREE_TYPE (deref),
891 TREE_TYPE (TREE_OPERAND (addr, 0)));
893 /* Else both the address substitution and the folding must result in
894 a valid useless type conversion sequence. */
895 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
896 TREE_TYPE (addr))
897 && useless_type_conversion_p (TREE_TYPE (deref),
898 TREE_TYPE (TREE_OPERAND (addr, 0))));
901 /* CCP specific front-end to the non-destructive constant folding
902 routines.
904 Attempt to simplify the RHS of STMT knowing that one or more
905 operands are constants.
907 If simplification is possible, return the simplified RHS,
908 otherwise return the original RHS or NULL_TREE. */
910 static tree
911 ccp_fold (gimple stmt)
913 switch (gimple_code (stmt))
915 case GIMPLE_ASSIGN:
917 enum tree_code subcode = gimple_assign_rhs_code (stmt);
919 switch (get_gimple_rhs_class (subcode))
921 case GIMPLE_SINGLE_RHS:
923 tree rhs = gimple_assign_rhs1 (stmt);
924 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
926 if (TREE_CODE (rhs) == SSA_NAME)
928 /* If the RHS is an SSA_NAME, return its known constant value,
929 if any. */
930 return get_value (rhs)->value;
932 /* Handle propagating invariant addresses into address operations.
933 The folding we do here matches that in tree-ssa-forwprop.c. */
934 else if (TREE_CODE (rhs) == ADDR_EXPR)
936 tree *base;
937 base = &TREE_OPERAND (rhs, 0);
938 while (handled_component_p (*base))
939 base = &TREE_OPERAND (*base, 0);
940 if (TREE_CODE (*base) == INDIRECT_REF
941 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
943 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
944 if (val->lattice_val == CONSTANT
945 && TREE_CODE (val->value) == ADDR_EXPR
946 && may_propagate_address_into_dereference
947 (val->value, *base))
949 /* We need to return a new tree, not modify the IL
950 or share parts of it. So play some tricks to
951 avoid manually building it. */
952 tree ret, save = *base;
953 *base = TREE_OPERAND (val->value, 0);
954 ret = unshare_expr (rhs);
955 recompute_tree_invariant_for_addr_expr (ret);
956 *base = save;
957 return ret;
961 else if (TREE_CODE (rhs) == CONSTRUCTOR
962 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
963 && (CONSTRUCTOR_NELTS (rhs)
964 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
966 unsigned i;
967 tree val, list;
969 list = NULL_TREE;
970 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
972 if (TREE_CODE (val) == SSA_NAME
973 && get_value (val)->lattice_val == CONSTANT)
974 val = get_value (val)->value;
975 if (TREE_CODE (val) == INTEGER_CST
976 || TREE_CODE (val) == REAL_CST
977 || TREE_CODE (val) == FIXED_CST)
978 list = tree_cons (NULL_TREE, val, list);
979 else
980 return NULL_TREE;
983 return build_vector (TREE_TYPE (rhs), nreverse (list));
986 if (kind == tcc_reference)
988 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
989 || TREE_CODE (rhs) == REALPART_EXPR
990 || TREE_CODE (rhs) == IMAGPART_EXPR)
991 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
993 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
994 if (val->lattice_val == CONSTANT)
995 return fold_unary (TREE_CODE (rhs),
996 TREE_TYPE (rhs), val->value);
998 else if (TREE_CODE (rhs) == INDIRECT_REF
999 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1001 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
1002 if (val->lattice_val == CONSTANT
1003 && TREE_CODE (val->value) == ADDR_EXPR
1004 && useless_type_conversion_p (TREE_TYPE (rhs),
1005 TREE_TYPE (TREE_TYPE (val->value))))
1006 rhs = TREE_OPERAND (val->value, 0);
1008 return fold_const_aggregate_ref (rhs);
1010 else if (kind == tcc_declaration)
1011 return get_symbol_constant_value (rhs);
1012 return rhs;
1015 case GIMPLE_UNARY_RHS:
1017 /* Handle unary operators that can appear in GIMPLE form.
1018 Note that we know the single operand must be a constant,
1019 so this should almost always return a simplified RHS. */
1020 tree lhs = gimple_assign_lhs (stmt);
1021 tree op0 = gimple_assign_rhs1 (stmt);
1023 /* Simplify the operand down to a constant. */
1024 if (TREE_CODE (op0) == SSA_NAME)
1026 prop_value_t *val = get_value (op0);
1027 if (val->lattice_val == CONSTANT)
1028 op0 = get_value (op0)->value;
1031 /* Conversions are useless for CCP purposes if they are
1032 value-preserving. Thus the restrictions that
1033 useless_type_conversion_p places for pointer type conversions
1034 do not apply here. Substitution later will only substitute to
1035 allowed places. */
1036 if (CONVERT_EXPR_CODE_P (subcode)
1037 && POINTER_TYPE_P (TREE_TYPE (lhs))
1038 && POINTER_TYPE_P (TREE_TYPE (op0))
1039 /* Do not allow differences in volatile qualification
1040 as this might get us confused as to whether a
1041 propagation destination statement is volatile
1042 or not. See PR36988. */
1043 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1044 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1046 tree tem;
1047 /* Still try to generate a constant of correct type. */
1048 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1049 TREE_TYPE (op0))
1050 && ((tem = maybe_fold_offset_to_address
1051 (op0, integer_zero_node, TREE_TYPE (lhs)))
1052 != NULL_TREE))
1053 return tem;
1054 return op0;
1057 return fold_unary_ignore_overflow (subcode,
1058 gimple_expr_type (stmt), op0);
1061 case GIMPLE_BINARY_RHS:
1063 /* Handle binary operators that can appear in GIMPLE form. */
1064 tree op0 = gimple_assign_rhs1 (stmt);
1065 tree op1 = gimple_assign_rhs2 (stmt);
1067 /* Simplify the operands down to constants when appropriate. */
1068 if (TREE_CODE (op0) == SSA_NAME)
1070 prop_value_t *val = get_value (op0);
1071 if (val->lattice_val == CONSTANT)
1072 op0 = val->value;
1075 if (TREE_CODE (op1) == SSA_NAME)
1077 prop_value_t *val = get_value (op1);
1078 if (val->lattice_val == CONSTANT)
1079 op1 = val->value;
1082 /* Fold &foo + CST into an invariant reference if possible. */
1083 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1084 && TREE_CODE (op0) == ADDR_EXPR
1085 && TREE_CODE (op1) == INTEGER_CST)
1087 tree lhs = gimple_assign_lhs (stmt);
1088 tree tem = maybe_fold_offset_to_address (op0, op1,
1089 TREE_TYPE (lhs));
1090 if (tem != NULL_TREE)
1091 return tem;
1094 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1097 default:
1098 gcc_unreachable ();
1101 break;
1103 case GIMPLE_CALL:
1105 tree fn = gimple_call_fn (stmt);
1106 prop_value_t *val;
1108 if (TREE_CODE (fn) == SSA_NAME)
1110 val = get_value (fn);
1111 if (val->lattice_val == CONSTANT)
1112 fn = val->value;
1114 if (TREE_CODE (fn) == ADDR_EXPR
1115 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1116 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1118 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1119 tree call, retval;
1120 unsigned i;
1121 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1123 args[i] = gimple_call_arg (stmt, i);
1124 if (TREE_CODE (args[i]) == SSA_NAME)
1126 val = get_value (args[i]);
1127 if (val->lattice_val == CONSTANT)
1128 args[i] = val->value;
1131 call = build_call_array (gimple_call_return_type (stmt),
1132 fn, gimple_call_num_args (stmt), args);
1133 retval = fold_call_expr (call, false);
1134 if (retval)
1135 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1136 STRIP_NOPS (retval);
1137 return retval;
1139 return NULL_TREE;
1142 case GIMPLE_COND:
1144 /* Handle comparison operators that can appear in GIMPLE form. */
1145 tree op0 = gimple_cond_lhs (stmt);
1146 tree op1 = gimple_cond_rhs (stmt);
1147 enum tree_code code = gimple_cond_code (stmt);
1149 /* Simplify the operands down to constants when appropriate. */
1150 if (TREE_CODE (op0) == SSA_NAME)
1152 prop_value_t *val = get_value (op0);
1153 if (val->lattice_val == CONSTANT)
1154 op0 = val->value;
1157 if (TREE_CODE (op1) == SSA_NAME)
1159 prop_value_t *val = get_value (op1);
1160 if (val->lattice_val == CONSTANT)
1161 op1 = val->value;
1164 return fold_binary (code, boolean_type_node, op0, op1);
1167 case GIMPLE_SWITCH:
1169 tree rhs = gimple_switch_index (stmt);
1171 if (TREE_CODE (rhs) == SSA_NAME)
1173 /* If the RHS is an SSA_NAME, return its known constant value,
1174 if any. */
1175 return get_value (rhs)->value;
1178 return rhs;
1181 default:
1182 gcc_unreachable ();
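/* For example (a sketch): given 'x_2 = y_1 + 3' where the lattice
   holds CONSTANT 4 for y_1, ccp_fold substitutes the known value and
   hands '4 + 3' to fold_binary, which yields 7.  */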
1187 /* Return the tree representing the element referenced by T if T is an
1188 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1189 NULL_TREE otherwise. */
1191 tree
1192 fold_const_aggregate_ref (tree t)
1194 prop_value_t *value;
1195 tree base, ctor, idx, field;
1196 unsigned HOST_WIDE_INT cnt;
1197 tree cfield, cval;
1199 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1200 return get_symbol_constant_value (t);
1202 switch (TREE_CODE (t))
1204 case ARRAY_REF:
1205 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1206 DECL_INITIAL. If BASE is a nested reference into another
1207 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1208 the inner reference. */
1209 base = TREE_OPERAND (t, 0);
1210 switch (TREE_CODE (base))
1212 case VAR_DECL:
1213 if (!TREE_READONLY (base)
1214 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1215 || !targetm.binds_local_p (base))
1216 return NULL_TREE;
1218 ctor = DECL_INITIAL (base);
1219 break;
1221 case ARRAY_REF:
1222 case COMPONENT_REF:
1223 ctor = fold_const_aggregate_ref (base);
1224 break;
1226 case STRING_CST:
1227 case CONSTRUCTOR:
1228 ctor = base;
1229 break;
1231 default:
1232 return NULL_TREE;
1235 if (ctor == NULL_TREE
1236 || (TREE_CODE (ctor) != CONSTRUCTOR
1237 && TREE_CODE (ctor) != STRING_CST)
1238 || !TREE_STATIC (ctor))
1239 return NULL_TREE;
1241 /* Get the index. If we have an SSA_NAME, try to resolve it
1242 with the current lattice value for the SSA_NAME. */
1243 idx = TREE_OPERAND (t, 1);
1244 switch (TREE_CODE (idx))
1246 case SSA_NAME:
1247 if ((value = get_value (idx))
1248 && value->lattice_val == CONSTANT
1249 && TREE_CODE (value->value) == INTEGER_CST)
1250 idx = value->value;
1251 else
1252 return NULL_TREE;
1253 break;
1255 case INTEGER_CST:
1256 break;
1258 default:
1259 return NULL_TREE;
1262 /* Fold read from constant string. */
1263 if (TREE_CODE (ctor) == STRING_CST)
1265 if ((TYPE_MODE (TREE_TYPE (t))
1266 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1267 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1268 == MODE_INT)
1269 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1270 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1271 return build_int_cst_type (TREE_TYPE (t),
1272 (TREE_STRING_POINTER (ctor)
1273 [TREE_INT_CST_LOW (idx)]));
1274 return NULL_TREE;
1277 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1278 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1279 if (tree_int_cst_equal (cfield, idx))
1281 STRIP_USELESS_TYPE_CONVERSION (cval);
1282 if (TREE_CODE (cval) == ADDR_EXPR)
1284 tree base = get_base_address (TREE_OPERAND (cval, 0));
1285 if (base && TREE_CODE (base) == VAR_DECL)
1286 add_referenced_var (base);
1288 return cval;
1290 break;
1292 case COMPONENT_REF:
1293 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1294 DECL_INITIAL. If BASE is a nested reference into another
1295 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1296 the inner reference. */
1297 base = TREE_OPERAND (t, 0);
1298 switch (TREE_CODE (base))
1300 case VAR_DECL:
1301 if (!TREE_READONLY (base)
1302 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1303 || !targetm.binds_local_p (base))
1304 return NULL_TREE;
1306 ctor = DECL_INITIAL (base);
1307 break;
1309 case ARRAY_REF:
1310 case COMPONENT_REF:
1311 ctor = fold_const_aggregate_ref (base);
1312 break;
1314 default:
1315 return NULL_TREE;
1318 if (ctor == NULL_TREE
1319 || TREE_CODE (ctor) != CONSTRUCTOR
1320 || !TREE_STATIC (ctor))
1321 return NULL_TREE;
1323 field = TREE_OPERAND (t, 1);
1325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1326 if (cfield == field
1327 /* FIXME: Handle bit-fields. */
1328 && ! DECL_BIT_FIELD (cfield))
1330 STRIP_USELESS_TYPE_CONVERSION (cval);
1331 if (TREE_CODE (cval) == ADDR_EXPR)
1333 tree base = get_base_address (TREE_OPERAND (cval, 0));
1334 if (base && TREE_CODE (base) == VAR_DECL)
1335 add_referenced_var (base);
1337 return cval;
1339 break;
1341 case REALPART_EXPR:
1342 case IMAGPART_EXPR:
1344 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1345 if (c && TREE_CODE (c) == COMPLEX_CST)
1346 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1347 break;
1350 case INDIRECT_REF:
1352 tree base = TREE_OPERAND (t, 0);
1353 if (TREE_CODE (base) == SSA_NAME
1354 && (value = get_value (base))
1355 && value->lattice_val == CONSTANT
1356 && TREE_CODE (value->value) == ADDR_EXPR
1357 && useless_type_conversion_p (TREE_TYPE (t),
1358 TREE_TYPE (TREE_TYPE (value->value))))
1359 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1360 break;
1363 default:
1364 break;
1367 return NULL_TREE;
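/* Example (illustrative): given 'static const int t[3] = {10, 20, 30};',
   a load 't[i_2]' where i_2 has lattice value CONSTANT 1 folds to 20.
   Likewise, a read such as "abc"[1] folds to 'b' through the
   STRING_CST path above.  */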
1370 /* Evaluate statement STMT.
1371 Valid only for assignments, calls, conditionals, and switches. */
1373 static prop_value_t
1374 evaluate_stmt (gimple stmt)
1376 prop_value_t val;
1377 tree simplified = NULL_TREE;
1378 ccp_lattice_t likelyvalue = likely_value (stmt);
1379 bool is_constant;
1381 fold_defer_overflow_warnings ();
1383 /* If the statement is likely to have a CONSTANT result, then try
1384 to fold the statement to determine the constant value. */
1385 /* FIXME. This is the only place that we call ccp_fold.
1386 Since likely_value never returns CONSTANT for calls, we will
1387 not attempt to fold them, including builtins that may profit. */
1388 if (likelyvalue == CONSTANT)
1389 simplified = ccp_fold (stmt);
1390 /* If the statement is likely to have a VARYING result, then do not
1391 bother folding the statement. */
1392 else if (likelyvalue == VARYING)
1394 enum gimple_code code = gimple_code (stmt);
1395 if (code == GIMPLE_ASSIGN)
1397 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1399 /* Other cases cannot satisfy is_gimple_min_invariant
1400 without folding. */
1401 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1402 simplified = gimple_assign_rhs1 (stmt);
1404 else if (code == GIMPLE_SWITCH)
1405 simplified = gimple_switch_index (stmt);
1406 else
1407 /* These cannot satisfy is_gimple_min_invariant without folding. */
1408 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1411 is_constant = simplified && is_gimple_min_invariant (simplified);
1413 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1415 if (dump_file && (dump_flags & TDF_DETAILS))
1417 fprintf (dump_file, "which is likely ");
1418 switch (likelyvalue)
1420 case CONSTANT:
1421 fprintf (dump_file, "CONSTANT");
1422 break;
1423 case UNDEFINED:
1424 fprintf (dump_file, "UNDEFINED");
1425 break;
1426 case VARYING:
1427 fprintf (dump_file, "VARYING");
1428 break;
1429 default:;
1431 fprintf (dump_file, "\n");
1434 if (is_constant)
1436 /* The statement produced a constant value. */
1437 val.lattice_val = CONSTANT;
1438 val.value = simplified;
1440 else
1442 /* The statement produced a nonconstant value. If the statement
1443 had UNDEFINED operands, then the result of the statement
1444 should be UNDEFINED. Otherwise, the statement is VARYING. */
1445 if (likelyvalue == UNDEFINED)
1446 val.lattice_val = likelyvalue;
1447 else
1448 val.lattice_val = VARYING;
1450 val.value = NULL_TREE;
1453 return val;
1456 /* Visit the assignment statement STMT. Set the value of its LHS to the
1457 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1458 creates virtual definitions, set the value of each new name to that
1459 of the RHS (if we can derive a constant out of the RHS).
1460 Value-returning call statements also perform an assignment, and
1461 are handled here. */
1463 static enum ssa_prop_result
1464 visit_assignment (gimple stmt, tree *output_p)
1466 prop_value_t val;
1467 enum ssa_prop_result retval;
1469 tree lhs = gimple_get_lhs (stmt);
1471 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1472 || gimple_call_lhs (stmt) != NULL_TREE);
1474 if (gimple_assign_copy_p (stmt))
1476 tree rhs = gimple_assign_rhs1 (stmt);
1478 if (TREE_CODE (rhs) == SSA_NAME)
1480 /* For a simple copy operation, we copy the lattice values. */
1481 prop_value_t *nval = get_value (rhs);
1482 val = *nval;
1484 else
1485 val = evaluate_stmt (stmt);
1487 else
1488 /* Evaluate the statement, which could be
1489 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1490 val = evaluate_stmt (stmt);
1492 retval = SSA_PROP_NOT_INTERESTING;
1494 /* Set the lattice value of the statement's output. */
1495 if (TREE_CODE (lhs) == SSA_NAME)
1497 /* If STMT is an assignment to an SSA_NAME, we only have one
1498 value to set. */
1499 if (set_lattice_value (lhs, val))
1501 *output_p = lhs;
1502 if (val.lattice_val == VARYING)
1503 retval = SSA_PROP_VARYING;
1504 else
1505 retval = SSA_PROP_INTERESTING;
1509 return retval;
1513 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1514 if it can determine which edge will be taken. Otherwise, return
1515 SSA_PROP_VARYING. */
1517 static enum ssa_prop_result
1518 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1520 prop_value_t val;
1521 basic_block block;
1523 block = gimple_bb (stmt);
1524 val = evaluate_stmt (stmt);
1526 /* Find which edge out of the conditional block will be taken and add it
1527 to the worklist. If no single edge can be determined statically,
1528 return SSA_PROP_VARYING to feed all the outgoing edges to the
1529 propagation engine. */
1530 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1531 if (*taken_edge_p)
1532 return SSA_PROP_INTERESTING;
1533 else
1534 return SSA_PROP_VARYING;
1538 /* Evaluate statement STMT. If the statement produces an output value and
1539 its evaluation changes the lattice value of its output, return
1540 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1541 output value.
1543 If STMT is a conditional branch and we can determine its truth
1544 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1545 value, return SSA_PROP_VARYING. */
1547 static enum ssa_prop_result
1548 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1550 tree def;
1551 ssa_op_iter iter;
1553 if (dump_file && (dump_flags & TDF_DETAILS))
1555 fprintf (dump_file, "\nVisiting statement:\n");
1556 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1559 switch (gimple_code (stmt))
1561 case GIMPLE_ASSIGN:
1562 /* If the statement is an assignment that produces a single
1563 output value, evaluate its RHS to see if the lattice value of
1564 its output has changed. */
1565 return visit_assignment (stmt, output_p);
1567 case GIMPLE_CALL:
1568 /* A value-returning call also performs an assignment. */
1569 if (gimple_call_lhs (stmt) != NULL_TREE)
1570 return visit_assignment (stmt, output_p);
1571 break;
1573 case GIMPLE_COND:
1574 case GIMPLE_SWITCH:
1575 /* If STMT is a conditional branch, see if we can determine
1576 which branch will be taken. */
1577 /* FIXME. It appears that we should be able to optimize
1578 computed GOTOs here as well. */
1579 return visit_cond_stmt (stmt, taken_edge_p);
1581 default:
1582 break;
1585 /* Any other kind of statement is not interesting for constant
1586 propagation and, therefore, not worth simulating. */
1587 if (dump_file && (dump_flags & TDF_DETAILS))
1588 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1590 /* Definitions made by statements other than assignments to
1591 SSA_NAMEs represent unknown modifications to their outputs.
1592 Mark them VARYING. */
1593 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1595 prop_value_t v = { VARYING, NULL_TREE };
1596 set_lattice_value (def, v);
1599 return SSA_PROP_VARYING;
1603 /* Main entry point for SSA Conditional Constant Propagation. */
1605 static unsigned int
1606 do_ssa_ccp (void)
1608 ccp_initialize ();
1609 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1610 if (ccp_finalize ())
1611 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1612 else
1613 return 0;
1617 static bool
1618 gate_ccp (void)
1620 return flag_tree_ccp != 0;
1624 struct gimple_opt_pass pass_ccp =
1627 GIMPLE_PASS,
1628 "ccp", /* name */
1629 gate_ccp, /* gate */
1630 do_ssa_ccp, /* execute */
1631 NULL, /* sub */
1632 NULL, /* next */
1633 0, /* static_pass_number */
1634 TV_TREE_CCP, /* tv_id */
1635 PROP_cfg | PROP_ssa, /* properties_required */
1636 0, /* properties_provided */
1637 0, /* properties_destroyed */
1638 0, /* todo_flags_start */
1639 TODO_dump_func | TODO_verify_ssa
1640 | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
1645 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
1646 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1647 is the desired result type. */
1649 static tree
1650 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1651 bool allow_negative_idx)
1653 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1654 tree array_type, elt_type, elt_size;
1655 tree domain_type;
1657 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1658 measured in units of the size of the element type) from that ARRAY_REF.
1659 We can't do anything if either is variable.
1661 The case we handle here is *(&A[N]+O). */
1662 if (TREE_CODE (base) == ARRAY_REF)
1664 tree low_bound = array_ref_low_bound (base);
1666 elt_offset = TREE_OPERAND (base, 1);
1667 if (TREE_CODE (low_bound) != INTEGER_CST
1668 || TREE_CODE (elt_offset) != INTEGER_CST)
1669 return NULL_TREE;
1671 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1672 base = TREE_OPERAND (base, 0);
1675 /* Ignore stupid user tricks of indexing non-array variables. */
1676 array_type = TREE_TYPE (base);
1677 if (TREE_CODE (array_type) != ARRAY_TYPE)
1678 return NULL_TREE;
1679 elt_type = TREE_TYPE (array_type);
1680 if (!useless_type_conversion_p (orig_type, elt_type))
1681 return NULL_TREE;
1683 /* Use signed size type for intermediate computation on the index. */
1684 idx_type = signed_type_for (size_type_node);
1686 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1687 element type (so we can use the alignment if it's not constant).
1688 Otherwise, compute the offset as an index by using a division. If the
1689 division isn't exact, then don't do anything. */
1690 elt_size = TYPE_SIZE_UNIT (elt_type);
1691 if (!elt_size)
1692 return NULL;
1693 if (integer_zerop (offset))
1695 if (TREE_CODE (elt_size) != INTEGER_CST)
1696 elt_size = size_int (TYPE_ALIGN (elt_type));
1698 idx = build_int_cst (idx_type, 0);
1700 else
1702 unsigned HOST_WIDE_INT lquo, lrem;
1703 HOST_WIDE_INT hquo, hrem;
1704 double_int soffset;
1706 /* The final array offset should be signed, so we need
1707 to sign-extend the (possibly pointer) offset here
1708 and use signed division. */
1709 soffset = double_int_sext (tree_to_double_int (offset),
1710 TYPE_PRECISION (TREE_TYPE (offset)));
1711 if (TREE_CODE (elt_size) != INTEGER_CST
1712 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1713 soffset.low, soffset.high,
1714 TREE_INT_CST_LOW (elt_size),
1715 TREE_INT_CST_HIGH (elt_size),
1716 &lquo, &hquo, &lrem, &hrem)
1717 || lrem || hrem)
1718 return NULL_TREE;
1720 idx = build_int_cst_wide (idx_type, lquo, hquo);
1723 /* Assume the low bound is zero. If there is a domain type, get the
1724 low bound, if any, convert the index into that type, and add the
1725 low bound. */
1726 min_idx = build_int_cst (idx_type, 0);
1727 domain_type = TYPE_DOMAIN (array_type);
1728 if (domain_type)
1730 idx_type = domain_type;
1731 if (TYPE_MIN_VALUE (idx_type))
1732 min_idx = TYPE_MIN_VALUE (idx_type);
1733 else
1734 min_idx = fold_convert (idx_type, min_idx);
1736 if (TREE_CODE (min_idx) != INTEGER_CST)
1737 return NULL_TREE;
1739 elt_offset = fold_convert (idx_type, elt_offset);
1742 if (!integer_zerop (min_idx))
1743 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1744 if (!integer_zerop (elt_offset))
1745 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1747 /* Make sure to possibly truncate late after offsetting. */
1748 idx = fold_convert (idx_type, idx);
1750 /* We don't want to construct accesses past the array bounds.  For example
1751 char *(c[4]);
1752 c[3][2];
1753 should not be simplified into (*c)[14] or tree-vrp will
1754 give false warnings. The same is true for
1755 struct A { long x; char d[0]; } *a;
1756 (char *)a - 4;
1757 which should be not folded to &a->d[-8]. */
1758 if (domain_type
1759 && TYPE_MAX_VALUE (domain_type)
1760 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1762 tree up_bound = TYPE_MAX_VALUE (domain_type);
1764 if (tree_int_cst_lt (up_bound, idx)
1765 /* Accesses after the end of arrays of size 0 (gcc
1766 extension) and 1 are likely intentional ("struct
1767 hack"). */
1768 && compare_tree_int (up_bound, 1) > 0)
1769 return NULL_TREE;
1771 if (domain_type
1772 && TYPE_MIN_VALUE (domain_type))
1774 if (!allow_negative_idx
1775 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1776 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1777 return NULL_TREE;
1779 else if (!allow_negative_idx
1780 && compare_tree_int (idx, 0) < 0)
1781 return NULL_TREE;
1783 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
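/* For instance (a sketch, assuming a 4-byte int): with 'int a[10];',
   folding *(&a[2] + 8) divides the byte offset by the element size,
   8 / 4 = 2, adds the existing index 2, and yields a[4].  A division
   with a remainder, or an index past the array bound, gives up and
   returns NULL_TREE.  */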
1787 /* Attempt to fold *(S+O) to S.X.
1788 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1789 is the desired result type. */
1791 static tree
1792 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1793 tree orig_type, bool base_is_ptr)
1795 tree f, t, field_type, tail_array_field, field_offset;
1796 tree ret;
1797 tree new_base;
1799 if (TREE_CODE (record_type) != RECORD_TYPE
1800 && TREE_CODE (record_type) != UNION_TYPE
1801 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1802 return NULL_TREE;
1804 /* Short-circuit silly cases. */
1805 if (useless_type_conversion_p (record_type, orig_type))
1806 return NULL_TREE;
1808 tail_array_field = NULL_TREE;
1809 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1811 int cmp;
1813 if (TREE_CODE (f) != FIELD_DECL)
1814 continue;
1815 if (DECL_BIT_FIELD (f))
1816 continue;
1818 if (!DECL_FIELD_OFFSET (f))
1819 continue;
1820 field_offset = byte_position (f);
1821 if (TREE_CODE (field_offset) != INTEGER_CST)
1822 continue;
1824 /* ??? Java creates "interesting" fields for representing base classes.
1825 They have no name, and have no context. With no context, we get into
1826 trouble with nonoverlapping_component_refs_p. Skip them. */
1827 if (!DECL_FIELD_CONTEXT (f))
1828 continue;
1830 /* The previous array field isn't at the end. */
1831 tail_array_field = NULL_TREE;
1833 /* Check to see if this offset overlaps with the field. */
1834 cmp = tree_int_cst_compare (field_offset, offset);
1835 if (cmp > 0)
1836 continue;
1838 field_type = TREE_TYPE (f);
1840 /* Here we exactly match the offset being checked. If the types match,
1841 then we can return that field. */
1842 if (cmp == 0
1843 && useless_type_conversion_p (orig_type, field_type))
1845 if (base_is_ptr)
1846 base = build1 (INDIRECT_REF, record_type, base);
1847 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1848 return t;
1851 /* Don't care about offsets into the middle of scalars. */
1852 if (!AGGREGATE_TYPE_P (field_type))
1853 continue;
1855 /* Check for array at the end of the struct. This is often
1856 used for flexible array members.  We should be able to
1857 turn this into an array access anyway. */
1858 if (TREE_CODE (field_type) == ARRAY_TYPE)
1859 tail_array_field = f;
1861 /* Check the end of the field against the offset. */
1862 if (!DECL_SIZE_UNIT (f)
1863 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1864 continue;
1865 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1866 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1867 continue;
1869 /* If we matched, then set offset to the displacement into
1870 this field. */
1871 if (base_is_ptr)
1872 new_base = build1 (INDIRECT_REF, record_type, base);
1873 else
1874 new_base = base;
1875 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1877 /* Recurse to possibly find the match. */
1878 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1879 f == TYPE_FIELDS (record_type));
1880 if (ret)
1881 return ret;
1882 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1883 orig_type, false);
1884 if (ret)
1885 return ret;
1888 if (!tail_array_field)
1889 return NULL_TREE;
1891 f = tail_array_field;
1892 field_type = TREE_TYPE (f);
1893 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1895 /* If we get here, we've got an aggregate field, and a possibly
1896 nonzero offset into them. Recurse and hope for a valid match. */
1897 if (base_is_ptr)
1898 base = build1 (INDIRECT_REF, record_type, base);
1899 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1901 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1902 f == TYPE_FIELDS (record_type));
1903 if (t)
1904 return t;
1905 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1906 orig_type, false);
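/* Sketch (assuming a 4-byte int and no padding): for
   'struct S { int i; int j; } s;', a dereference of '&s' at byte
   offset 4 walks the fields of S, matches the offset against field
   'j', and produces the COMPONENT_REF s.j.  */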
1909 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1910 or BASE[index] or by combination of those.
1912 Before attempting the conversion strip off existing ADDR_EXPRs and
1913 handled component refs. */
1915 tree
1916 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1918 tree ret;
1919 tree type;
1920 bool base_is_ptr = true;
1922 STRIP_NOPS (base);
1923 if (TREE_CODE (base) == ADDR_EXPR)
1925 base_is_ptr = false;
1927 base = TREE_OPERAND (base, 0);
1929 /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
1930 field of a union, so it needs to be removed and a new COMPONENT_REF
1931 constructed.  Such wrong COMPONENT_REFs are often constructed by folding
1932 the (type *)&object within the expression (type *)&object+offset.  */
1933 if (handled_component_p (base))
1935 HOST_WIDE_INT sub_offset, size, maxsize;
1936 tree newbase;
1937 newbase = get_ref_base_and_extent (base, &sub_offset,
1938 &size, &maxsize);
1939 gcc_assert (newbase);
1940 if (size == maxsize
1941 && size != -1
1942 && !(sub_offset & (BITS_PER_UNIT - 1)))
1944 base = newbase;
1945 if (sub_offset)
1946 offset = int_const_binop (PLUS_EXPR, offset,
1947 build_int_cst (TREE_TYPE (offset),
1948 sub_offset / BITS_PER_UNIT), 1);
1951 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1952 && integer_zerop (offset))
1953 return base;
1954 type = TREE_TYPE (base);
1956 else
1958 base_is_ptr = true;
1959 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1960 return NULL_TREE;
1961 type = TREE_TYPE (TREE_TYPE (base));
1963 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1964 orig_type, base_is_ptr);
1965 if (!ret)
1967 if (base_is_ptr)
1968 base = build1 (INDIRECT_REF, type, base);
1969 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1971 return ret;
1974 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1975 or &BASE[index] or by combination of those.
1977 Before attempting the conversion strip off existing component refs. */
1979 tree
1980 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1982 tree t;
1984 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1985 && POINTER_TYPE_P (orig_type));
1987 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1988 if (t != NULL_TREE)
1990 tree orig = addr;
1991 tree ptr_type;
1993 /* For __builtin_object_size to function correctly we need to
1994 make sure not to fold address arithmetic so that we change
1995 reference from one array to another. This would happen for
1996 example for
1998 struct X { char s1[10]; char s2[10]; } s;
1999 char *foo (void) { return &s.s2[-4]; }
2001 where we need to avoid generating &s.s1[6]. As the C and
2002 C++ frontends create different initial trees
2003 (char *) &s.s1 + -4 vs. &s.s1[-4], we have to do some
2004 sophisticated comparisons here. Note that checking for the
2005 condition after the fact is easier than trying to avoid doing
2006 the folding. */
2007 STRIP_NOPS (orig);
2008 if (TREE_CODE (orig) == ADDR_EXPR)
2009 orig = TREE_OPERAND (orig, 0);
2010 if ((TREE_CODE (orig) == ARRAY_REF
2011 || (TREE_CODE (orig) == COMPONENT_REF
2012 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
2013 && (TREE_CODE (t) == ARRAY_REF
2014 || TREE_CODE (t) == COMPONENT_REF)
2015 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
2016 ? TREE_OPERAND (orig, 0) : orig,
2017 TREE_CODE (t) == ARRAY_REF
2018 ? TREE_OPERAND (t, 0) : t, 0))
2019 return NULL_TREE;
2021 ptr_type = build_pointer_type (TREE_TYPE (t));
2022 if (!useless_type_conversion_p (orig_type, ptr_type))
2023 return NULL_TREE;
2024 return build_fold_addr_expr_with_type (t, ptr_type);
2027 return NULL_TREE;
2030 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
2031 Return the simplified expression, or NULL if nothing could be done. */
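/* E.g. (an assumed sketch): for *(&b + 4) where b is a structure
   whose second field starts at byte 4, the expression folds to that
   field; a dereference of a string constant's address instead folds
   to the constant character read from it. */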
2033 static tree
2034 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2036 tree t;
2037 bool volatile_p = TREE_THIS_VOLATILE (expr);
2039 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2040 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2041 are sometimes added. */
2042 base = fold (base);
2043 STRIP_TYPE_NOPS (base);
2044 TREE_OPERAND (expr, 0) = base;
2046 /* One possibility is that the address reduces to a string constant. */
2047 t = fold_read_from_constant_string (expr);
2048 if (t)
2049 return t;
2051 /* Add in any offset from a POINTER_PLUS_EXPR. */
2052 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
2054 tree offset2;
2056 offset2 = TREE_OPERAND (base, 1);
2057 if (TREE_CODE (offset2) != INTEGER_CST)
2058 return NULL_TREE;
2059 base = TREE_OPERAND (base, 0);
2061 offset = fold_convert (sizetype,
2062 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2065 if (TREE_CODE (base) == ADDR_EXPR)
2067 tree base_addr = base;
2069 /* Strip the ADDR_EXPR. */
2070 base = TREE_OPERAND (base, 0);
2072 /* Fold away CONST_DECL to its value, if the type is scalar. */
2073 if (TREE_CODE (base) == CONST_DECL
2074 && is_gimple_min_invariant (DECL_INITIAL (base)))
2075 return DECL_INITIAL (base);
2077 /* Try folding *(&B+O) to B.X. */
2078 t = maybe_fold_offset_to_reference (base_addr, offset,
2079 TREE_TYPE (expr));
2080 if (t)
2082 /* Preserve volatileness of the original expression.
2083 We can end up with a plain decl here which is shared,
2084 and we shouldn't mess with its flags. */
2085 if (!SSA_VAR_P (t))
2086 TREE_THIS_VOLATILE (t) = volatile_p;
2087 return t;
2090 else
2092 /* We can get here for out-of-range string constant accesses,
2093 such as "_"[3]. Bail out of the entire substitution search
2094 and arrange for the entire statement to be replaced by a
2095 call to __builtin_trap. In all likelihood this will all be
2096 constant-folded away, but in the meantime we can't leave with
2097 something that get_expr_operands can't understand. */
2099 t = base;
2100 STRIP_NOPS (t);
2101 if (TREE_CODE (t) == ADDR_EXPR
2102 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2104 /* FIXME: Except that this causes problems elsewhere with dead
2105 code not being deleted, and we die in the rtl expanders
2106 because we failed to remove some ssa_name. In the meantime,
2107 just return zero. */
2108 /* FIXME2: This condition should be signaled by
2109 fold_read_from_constant_string directly, rather than
2110 re-checking for it here. */
2111 return integer_zero_node;
2114 /* Try folding *(B+O) to B->X. Still an improvement. */
2115 if (POINTER_TYPE_P (TREE_TYPE (base)))
2117 t = maybe_fold_offset_to_reference (base, offset,
2118 TREE_TYPE (expr));
2119 if (t)
2120 return t;
2124 /* Otherwise we had an offset that we could not simplify. */
2125 return NULL_TREE;
2129 /* A quaint feature extant in our address arithmetic is that there
2130 can be hidden type changes here. The type of the result need
2131 not be the same as the type of the input pointer.
2133 What we're after here is an expression of the form
2134 (T *)(&array + const)
2135 where array is OP0, const is OP1, RES_TYPE is T and
2136 the cast doesn't actually exist, but is implicit in the
2137 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2138 &array[x]
2139 which may be able to propagate further. */
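/* An illustrative sketch (assumed 4-byte int): given

     int array[10];

   a POINTER_PLUS_EXPR of &array and byte offset 8 whose result type
   is int * is rewritten as &array[2]. The loop below additionally
   peels any existing ARRAY_REF off OP0 and folds its index into the
   byte offset first. */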
2141 tree
2142 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2144 tree ptd_type;
2145 tree t;
2147 /* The first operand should be an ADDR_EXPR. */
2148 if (TREE_CODE (op0) != ADDR_EXPR)
2149 return NULL_TREE;
2150 op0 = TREE_OPERAND (op0, 0);
2152 /* It had better be a constant. */
2153 if (TREE_CODE (op1) != INTEGER_CST)
2155 /* Or op0 should now be A[0] and the non-constant offset defined
2156 via a multiplication by the array element size. */
2157 if (TREE_CODE (op0) == ARRAY_REF
2158 && integer_zerop (TREE_OPERAND (op0, 1))
2159 && TREE_CODE (op1) == SSA_NAME
2160 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2162 gimple offset_def = SSA_NAME_DEF_STMT (op1);
2163 if (!is_gimple_assign (offset_def))
2164 return NULL_TREE;
2166 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2167 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2168 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2169 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2170 return build1 (ADDR_EXPR, res_type,
2171 build4 (ARRAY_REF, TREE_TYPE (op0),
2172 TREE_OPERAND (op0, 0),
2173 gimple_assign_rhs1 (offset_def),
2174 TREE_OPERAND (op0, 2),
2175 TREE_OPERAND (op0, 3)));
2176 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2177 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2178 return build1 (ADDR_EXPR, res_type,
2179 build4 (ARRAY_REF, TREE_TYPE (op0),
2180 TREE_OPERAND (op0, 0),
2181 op1,
2182 TREE_OPERAND (op0, 2),
2183 TREE_OPERAND (op0, 3)));
2185 return NULL_TREE;
2188 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2189 the offset into it. */
2190 while (TREE_CODE (op0) == ARRAY_REF)
2192 tree array_obj = TREE_OPERAND (op0, 0);
2193 tree array_idx = TREE_OPERAND (op0, 1);
2194 tree elt_type = TREE_TYPE (op0);
2195 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2196 tree min_idx;
2198 if (TREE_CODE (array_idx) != INTEGER_CST)
2199 break;
2200 if (TREE_CODE (elt_size) != INTEGER_CST)
2201 break;
2203 /* Un-bias the index by the min index of the array type. */
2204 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2205 if (min_idx)
2207 min_idx = TYPE_MIN_VALUE (min_idx);
2208 if (min_idx)
2210 if (TREE_CODE (min_idx) != INTEGER_CST)
2211 break;
2213 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2214 if (!integer_zerop (min_idx))
2215 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2216 min_idx, 0);
2220 /* Convert the index to a byte offset. */
2221 array_idx = fold_convert (sizetype, array_idx);
2222 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2224 /* Update the operands for the next round, or for folding. */
2225 op1 = int_const_binop (PLUS_EXPR,
2226 array_idx, op1, 0);
2227 op0 = array_obj;
2230 ptd_type = TREE_TYPE (res_type);
2231 /* If we want a pointer to void, reconstruct the reference from the
2232 array element type. A pointer to that can be trivially converted
2233 to void *. This happens as we fold (void *)(ptr p+ off). */
2234 if (VOID_TYPE_P (ptd_type)
2235 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2236 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2238 /* At which point we can try some of the same things as for indirects. */
2239 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2240 if (!t)
2241 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2242 ptd_type, false);
2243 if (t)
2244 t = build1 (ADDR_EXPR, res_type, t);
2246 return t;
2249 /* Subroutine of fold_stmt. We perform several simplifications of the
2250 memory reference tree EXPR and make sure to re-gimplify them properly
2251 after propagation of constant addresses. IS_LHS is true if the
2252 reference is supposed to be an lvalue. */
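/* For instance (a sketch): after &x has been propagated into a
   dereference, *&x simplifies back to plain x, and a read such as
   "abc"[1] folds to the character constant 'b' when the reference is
   not used as an lvalue. */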
2254 static tree
2255 maybe_fold_reference (tree expr, bool is_lhs)
2257 tree *t = &expr;
2259 if (TREE_CODE (expr) == ARRAY_REF
2260 && !is_lhs)
2262 tree tem = fold_read_from_constant_string (expr);
2263 if (tem)
2264 return tem;
2267 /* ??? We might want to open-code the relevant remaining cases
2268 to avoid using the generic fold. */
2269 if (handled_component_p (*t)
2270 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
2272 tree tem = fold (*t);
2273 if (tem != *t)
2274 return tem;
2277 while (handled_component_p (*t))
2278 t = &TREE_OPERAND (*t, 0);
2280 if (TREE_CODE (*t) == INDIRECT_REF)
2282 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2283 integer_zero_node);
2284 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2285 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2286 tem = NULL_TREE;
2287 if (!tem
2288 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
2289 /* If we had a good reason for propagating the address here,
2290 make sure we end up with valid gimple. See PR34989. */
2291 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2293 if (tem)
2295 *t = tem;
2296 tem = maybe_fold_reference (expr, is_lhs);
2297 if (tem)
2298 return tem;
2299 return expr;
2303 return NULL_TREE;
2307 /* Return the string length, maximum string length or maximum value of
2308 ARG in LENGTH.
2309 If ARG is an SSA name variable, follow its use-def chains. Return
2310 false if we are unable to determine the length or value, or, when
2311 TYPE is 0, if LENGTH is not NULL and its value is not equal to the
2312 length we determine. VISITED is a bitmap of visited variables.
2313 TYPE is 0 if string length should be returned, 1 for maximum string
2314 length and 2 for maximum value ARG can have. */
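/* A sketch of the TYPE == 0 case (assumed GIMPLE):

     s_1 = PHI ("foo", "bar")
     n_2 = strlen (s_1)

   Both PHI arguments have string length 3, so *LENGTH is set to 3.
   Had the lengths differed, this function would return false. */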
2316 static bool
2317 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2319 tree var, val;
2320 gimple def_stmt;
2322 if (TREE_CODE (arg) != SSA_NAME)
2324 if (TREE_CODE (arg) == COND_EXPR)
2325 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2326 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2327 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2328 else if (TREE_CODE (arg) == ADDR_EXPR
2329 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2330 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2332 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2333 if (TREE_CODE (aop0) == INDIRECT_REF
2334 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2335 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2336 length, visited, type);
2339 if (type == 2)
2341 val = arg;
2342 if (TREE_CODE (val) != INTEGER_CST
2343 || tree_int_cst_sgn (val) < 0)
2344 return false;
2346 else
2347 val = c_strlen (arg, 1);
2348 if (!val)
2349 return false;
2351 if (*length)
2353 if (type > 0)
2355 if (TREE_CODE (*length) != INTEGER_CST
2356 || TREE_CODE (val) != INTEGER_CST)
2357 return false;
2359 if (tree_int_cst_lt (*length, val))
2360 *length = val;
2361 return true;
2363 else if (simple_cst_equal (val, *length) != 1)
2364 return false;
2367 *length = val;
2368 return true;
2371 /* If we were already here, break the infinite cycle. */
2372 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2373 return true;
2374 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2376 var = arg;
2377 def_stmt = SSA_NAME_DEF_STMT (var);
2379 switch (gimple_code (def_stmt))
2381 case GIMPLE_ASSIGN:
2382 /* The RHS of the statement defining VAR must either have a
2383 constant length or come from another SSA_NAME with a constant
2384 length. */
2385 if (gimple_assign_single_p (def_stmt)
2386 || gimple_assign_unary_nop_p (def_stmt))
2388 tree rhs = gimple_assign_rhs1 (def_stmt);
2389 return get_maxval_strlen (rhs, length, visited, type);
2391 return false;
2393 case GIMPLE_PHI:
2395 /* All the arguments of the PHI node must have the same constant
2396 length. */
2397 unsigned i;
2399 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2401 tree arg = gimple_phi_arg (def_stmt, i)->def;
2403 /* If this PHI has itself as an argument, we cannot
2404 determine the string length of this argument. However,
2405 if we can find a constant string length for the other
2406 PHI args then we can still be sure that this is a
2407 constant string length. So be optimistic and just
2408 continue with the next argument. */
2409 if (arg == gimple_phi_result (def_stmt))
2410 continue;
2412 if (!get_maxval_strlen (arg, length, visited, type))
2413 return false;
2416 return true;
2418 default:
2419 return false;
2424 /* Fold builtin call in statement STMT. Returns a simplified tree.
2425 We may return a non-constant expression, including another call
2426 to a different function and with different arguments, e.g.,
2427 substituting memcpy for strcpy when the string length is known.
2428 Note that some builtins expand into inline code that may not
2429 be valid in GIMPLE. Callers must take care. */
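/* An assumed example: if get_maxval_strlen above determines that the
   source of strcpy (dst, src) has known length 3, fold_builtin_strcpy
   can rewrite the call as memcpy (dst, src, 4), copying the
   terminating nul as well. */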
2431 static tree
2432 ccp_fold_builtin (gimple stmt)
2434 tree result, val[3];
2435 tree callee, a;
2436 int arg_idx, type;
2437 bitmap visited;
2438 bool ignore;
2439 int nargs;
2441 gcc_assert (is_gimple_call (stmt));
2443 ignore = (gimple_call_lhs (stmt) == NULL);
2445 /* First try the generic builtin folder. If that succeeds, return the
2446 result directly. */
2447 result = fold_call_stmt (stmt, ignore);
2448 if (result)
2450 if (ignore)
2451 STRIP_NOPS (result);
2452 return result;
2455 /* Ignore MD builtins. */
2456 callee = gimple_call_fndecl (stmt);
2457 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2458 return NULL_TREE;
2460 /* If the builtin could not be folded, and it has no argument list,
2461 we're done. */
2462 nargs = gimple_call_num_args (stmt);
2463 if (nargs == 0)
2464 return NULL_TREE;
2466 /* Limit the work only for builtins we know how to simplify. */
2467 switch (DECL_FUNCTION_CODE (callee))
2469 case BUILT_IN_STRLEN:
2470 case BUILT_IN_FPUTS:
2471 case BUILT_IN_FPUTS_UNLOCKED:
2472 arg_idx = 0;
2473 type = 0;
2474 break;
2475 case BUILT_IN_STRCPY:
2476 case BUILT_IN_STRNCPY:
2477 arg_idx = 1;
2478 type = 0;
2479 break;
2480 case BUILT_IN_MEMCPY_CHK:
2481 case BUILT_IN_MEMPCPY_CHK:
2482 case BUILT_IN_MEMMOVE_CHK:
2483 case BUILT_IN_MEMSET_CHK:
2484 case BUILT_IN_STRNCPY_CHK:
2485 arg_idx = 2;
2486 type = 2;
2487 break;
2488 case BUILT_IN_STRCPY_CHK:
2489 case BUILT_IN_STPCPY_CHK:
2490 arg_idx = 1;
2491 type = 1;
2492 break;
2493 case BUILT_IN_SNPRINTF_CHK:
2494 case BUILT_IN_VSNPRINTF_CHK:
2495 arg_idx = 1;
2496 type = 2;
2497 break;
2498 default:
2499 return NULL_TREE;
2502 if (arg_idx >= nargs)
2503 return NULL_TREE;
2505 /* Try to use the dataflow information gathered by the CCP process. */
2506 visited = BITMAP_ALLOC (NULL);
2507 bitmap_clear (visited);
2509 memset (val, 0, sizeof (val));
2510 a = gimple_call_arg (stmt, arg_idx);
2511 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2512 val[arg_idx] = NULL_TREE;
2514 BITMAP_FREE (visited);
2516 result = NULL_TREE;
2517 switch (DECL_FUNCTION_CODE (callee))
2519 case BUILT_IN_STRLEN:
2520 if (val[0] && nargs == 1)
2522 tree new_val =
2523 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2525 /* If the result is not a valid gimple value, or not a cast
2526 of a valid gimple value, then we cannot use the result. */
2527 if (is_gimple_val (new_val)
2528 || (is_gimple_cast (new_val)
2529 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2530 return new_val;
2532 break;
2534 case BUILT_IN_STRCPY:
2535 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2536 result = fold_builtin_strcpy (callee,
2537 gimple_call_arg (stmt, 0),
2538 gimple_call_arg (stmt, 1),
2539 val[1]);
2540 break;
2542 case BUILT_IN_STRNCPY:
2543 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2544 result = fold_builtin_strncpy (callee,
2545 gimple_call_arg (stmt, 0),
2546 gimple_call_arg (stmt, 1),
2547 gimple_call_arg (stmt, 2),
2548 val[1]);
2549 break;
2551 case BUILT_IN_FPUTS:
2552 if (nargs == 2)
2553 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2554 gimple_call_arg (stmt, 1),
2555 ignore, false, val[0]);
2556 break;
2558 case BUILT_IN_FPUTS_UNLOCKED:
2559 if (nargs == 2)
2560 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2561 gimple_call_arg (stmt, 1),
2562 ignore, true, val[0]);
2563 break;
2565 case BUILT_IN_MEMCPY_CHK:
2566 case BUILT_IN_MEMPCPY_CHK:
2567 case BUILT_IN_MEMMOVE_CHK:
2568 case BUILT_IN_MEMSET_CHK:
2569 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2570 result = fold_builtin_memory_chk (callee,
2571 gimple_call_arg (stmt, 0),
2572 gimple_call_arg (stmt, 1),
2573 gimple_call_arg (stmt, 2),
2574 gimple_call_arg (stmt, 3),
2575 val[2], ignore,
2576 DECL_FUNCTION_CODE (callee));
2577 break;
2579 case BUILT_IN_STRCPY_CHK:
2580 case BUILT_IN_STPCPY_CHK:
2581 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2582 result = fold_builtin_stxcpy_chk (callee,
2583 gimple_call_arg (stmt, 0),
2584 gimple_call_arg (stmt, 1),
2585 gimple_call_arg (stmt, 2),
2586 val[1], ignore,
2587 DECL_FUNCTION_CODE (callee));
2588 break;
2590 case BUILT_IN_STRNCPY_CHK:
2591 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2592 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2593 gimple_call_arg (stmt, 1),
2594 gimple_call_arg (stmt, 2),
2595 gimple_call_arg (stmt, 3),
2596 val[2]);
2597 break;
2599 case BUILT_IN_SNPRINTF_CHK:
2600 case BUILT_IN_VSNPRINTF_CHK:
2601 if (val[1] && is_gimple_val (val[1]))
2602 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2603 DECL_FUNCTION_CODE (callee));
2604 break;
2606 default:
2607 gcc_unreachable ();
2610 if (result && ignore)
2611 result = fold_ignored_result (result);
2612 return result;
2615 /* Attempt to fold the assignment statement pointed to by SI. Returns a
2616 replacement RHS for the statement or NULL_TREE if no simplification
2617 could be made. It is assumed that the operands have been previously
2618 folded. */
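/* For example (a sketch): an RHS such as 1 < 2 ? a_1 : b_2 folds to
   a_1, and a vector CONSTRUCTOR whose elements are all constants is
   collapsed into a VECTOR_CST. */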
2620 static tree
2621 fold_gimple_assign (gimple_stmt_iterator *si)
2623 gimple stmt = gsi_stmt (*si);
2624 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2626 tree result = NULL_TREE;
2628 switch (get_gimple_rhs_class (subcode))
2630 case GIMPLE_SINGLE_RHS:
2632 tree rhs = gimple_assign_rhs1 (stmt);
2634 /* Try to fold a conditional expression. */
2635 if (TREE_CODE (rhs) == COND_EXPR)
2637 tree op0 = COND_EXPR_COND (rhs);
2638 tree tem;
2639 bool set = false;
2641 if (COMPARISON_CLASS_P (op0))
2643 fold_defer_overflow_warnings ();
2644 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2645 TREE_OPERAND (op0, 0),
2646 TREE_OPERAND (op0, 1));
2647 /* This is actually a conditional expression, not a GIMPLE
2648 conditional statement, however, the valid_gimple_rhs_p
2649 test still applies. */
2650 set = (tem && is_gimple_condexpr (tem)
2651 && valid_gimple_rhs_p (tem));
2652 fold_undefer_overflow_warnings (set, stmt, 0);
2654 else if (is_gimple_min_invariant (op0))
2656 tem = op0;
2657 set = true;
2659 else
2660 return NULL_TREE;
2662 if (set)
2663 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), tem,
2664 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2667 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2668 return maybe_fold_tmr (rhs);
2670 else if (REFERENCE_CLASS_P (rhs))
2671 return maybe_fold_reference (rhs, false);
2673 else if (TREE_CODE (rhs) == ADDR_EXPR)
2675 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2676 if (tem)
2677 result = fold_convert (TREE_TYPE (rhs),
2678 build_fold_addr_expr (tem));
2681 else if (TREE_CODE (rhs) == CONSTRUCTOR
2682 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2683 && (CONSTRUCTOR_NELTS (rhs)
2684 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2686 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
2687 unsigned i;
2688 tree val;
2690 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2691 if (TREE_CODE (val) != INTEGER_CST
2692 && TREE_CODE (val) != REAL_CST
2693 && TREE_CODE (val) != FIXED_CST)
2694 return NULL_TREE;
2696 return build_vector_from_ctor (TREE_TYPE (rhs),
2697 CONSTRUCTOR_ELTS (rhs));
2700 /* If we couldn't fold the RHS, hand over to the generic
2701 fold routines. */
2702 if (result == NULL_TREE)
2703 result = fold (rhs);
2705 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2706 that may have been added by fold, and "useless" type
2707 conversions that might now be apparent due to propagation. */
2708 STRIP_USELESS_TYPE_CONVERSION (result);
2710 if (result != rhs && valid_gimple_rhs_p (result))
2711 return result;
2713 return NULL_TREE;
2715 break;
2717 case GIMPLE_UNARY_RHS:
2719 tree rhs = gimple_assign_rhs1 (stmt);
2721 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2722 if (result)
2724 /* If the operation was a conversion do _not_ mark a
2725 resulting constant with TREE_OVERFLOW if the original
2726 constant was not. These conversions have implementation
2727 defined behavior and retaining the TREE_OVERFLOW flag
2728 here would confuse later passes such as VRP. */
2729 if (CONVERT_EXPR_CODE_P (subcode)
2730 && TREE_CODE (result) == INTEGER_CST
2731 && TREE_CODE (rhs) == INTEGER_CST)
2732 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2734 STRIP_USELESS_TYPE_CONVERSION (result);
2735 if (valid_gimple_rhs_p (result))
2736 return result;
2738 else if (CONVERT_EXPR_CODE_P (subcode)
2739 && POINTER_TYPE_P (gimple_expr_type (stmt))
2740 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2742 tree type = gimple_expr_type (stmt);
2743 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2744 integer_zero_node, type);
2745 if (t)
2746 return t;
2749 break;
2751 case GIMPLE_BINARY_RHS:
2752 /* Try to fold pointer addition. */
2753 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2755 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2756 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2758 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2759 if (!useless_type_conversion_p
2760 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2761 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2763 result = maybe_fold_stmt_addition (type,
2764 gimple_assign_rhs1 (stmt),
2765 gimple_assign_rhs2 (stmt));
2768 if (!result)
2769 result = fold_binary (subcode,
2770 TREE_TYPE (gimple_assign_lhs (stmt)),
2771 gimple_assign_rhs1 (stmt),
2772 gimple_assign_rhs2 (stmt));
2774 if (result)
2776 STRIP_USELESS_TYPE_CONVERSION (result);
2777 if (valid_gimple_rhs_p (result))
2778 return result;
2780 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2781 we lose canonicalization opportunities. Do not go again
2782 through fold here though, or the same non-GIMPLE will be
2783 produced. */
2784 if (commutative_tree_code (subcode)
2785 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2786 gimple_assign_rhs2 (stmt), false))
2787 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2788 gimple_assign_rhs2 (stmt),
2789 gimple_assign_rhs1 (stmt));
2791 break;
2793 case GIMPLE_INVALID_RHS:
2794 gcc_unreachable ();
2797 return NULL_TREE;
2800 /* Attempt to fold a conditional statement. Return true if any changes were
2801 made. We only attempt to fold the condition expression, and do not perform
2802 any transformation that would require alteration of the cfg. It is
2803 assumed that the operands have been previously folded. */
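/* E.g. (illustrative): once propagation turns the predicate into
   4 > 3, the condition folds to constant true; only the predicate of
   the GIMPLE_COND is rewritten, and its outgoing edges are left for
   later CFG cleanup. */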
2805 static bool
2806 fold_gimple_cond (gimple stmt)
2808 tree result = fold_binary (gimple_cond_code (stmt),
2809 boolean_type_node,
2810 gimple_cond_lhs (stmt),
2811 gimple_cond_rhs (stmt));
2813 if (result)
2815 STRIP_USELESS_TYPE_CONVERSION (result);
2816 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2818 gimple_cond_set_condition_from_tree (stmt, result);
2819 return true;
2823 return false;
2827 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2828 The statement may be replaced by another statement, e.g., if the call
2829 simplifies to a constant value. Return true if any changes were made.
2830 It is assumed that the operands have been previously folded. */
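/* For instance (assumed): a call through an OBJ_TYPE_REF whose
   object slot now holds the address of a known decl may be
   devirtualized into a direct call; known builtins are instead
   handed to ccp_fold_builtin. */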
2832 static bool
2833 fold_gimple_call (gimple_stmt_iterator *gsi)
2835 gimple stmt = gsi_stmt (*gsi);
2837 tree callee = gimple_call_fndecl (stmt);
2839 /* Check for builtins that CCP can handle using information not
2840 available in the generic fold routines. */
2841 if (callee && DECL_BUILT_IN (callee))
2843 tree result = ccp_fold_builtin (stmt);
2845 if (result)
2846 return update_call_from_tree (gsi, result);
2848 else
2850 /* Check for a resolvable OBJ_TYPE_REF. The only cases we can resolve
2851 here are those where we've propagated the address of a decl into the
2852 object slot. */
2853 /* ??? Should perhaps do this in fold proper. However, doing it
2854 there requires that we create a new CALL_EXPR, and that requires
2855 copying EH region info to the new node. Easier to just do it
2856 here where we can smash the call operand. */
2857 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2858 callee = gimple_call_fn (stmt);
2859 if (TREE_CODE (callee) == OBJ_TYPE_REF
2860 && lang_hooks.fold_obj_type_ref
2861 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2862 && DECL_P (TREE_OPERAND
2863 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2865 tree t;
2867 /* ??? Caution: Broken ADDR_EXPR semantics means that
2868 looking at the type of the operand of the addr_expr
2869 can yield an array type. See silly exception in
2870 check_pointer_types_r. */
2871 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2872 t = lang_hooks.fold_obj_type_ref (callee, t);
2873 if (t)
2875 gimple_call_set_fn (stmt, t);
2876 return true;
2881 return false;
2884 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2885 distinguishes both cases. */
2887 static bool
2888 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2890 bool changed = false;
2891 gimple stmt = gsi_stmt (*gsi);
2892 unsigned i;
2894 /* Fold the main computation performed by the statement. */
2895 switch (gimple_code (stmt))
2897 case GIMPLE_ASSIGN:
2899 unsigned old_num_ops = gimple_num_ops (stmt);
2900 tree new_rhs = fold_gimple_assign (gsi);
2901 if (new_rhs != NULL_TREE
2902 && (!inplace
2903 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2905 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2906 changed = true;
2908 break;
2911 case GIMPLE_COND:
2912 changed |= fold_gimple_cond (stmt);
2913 break;
2915 case GIMPLE_CALL:
2916 /* Fold *& in call arguments. */
2917 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2918 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2920 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2921 if (tmp)
2923 gimple_call_set_arg (stmt, i, tmp);
2924 changed = true;
2927 /* The entire statement may be replaced in this case. */
2928 if (!inplace)
2929 changed |= fold_gimple_call (gsi);
2930 break;
2932 case GIMPLE_ASM:
2933 /* Fold *& in asm operands. */
2934 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2936 tree link = gimple_asm_output_op (stmt, i);
2937 tree op = TREE_VALUE (link);
2938 if (REFERENCE_CLASS_P (op)
2939 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2941 TREE_VALUE (link) = op;
2942 changed = true;
2945 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2947 tree link = gimple_asm_input_op (stmt, i);
2948 tree op = TREE_VALUE (link);
2949 if (REFERENCE_CLASS_P (op)
2950 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
2952 TREE_VALUE (link) = op;
2953 changed = true;
2956 break;
2958 default:;
2961 stmt = gsi_stmt (*gsi);
2963 /* Fold *& on the lhs. */
2964 if (gimple_has_lhs (stmt))
2966 tree lhs = gimple_get_lhs (stmt);
2967 if (lhs && REFERENCE_CLASS_P (lhs))
2969 tree new_lhs = maybe_fold_reference (lhs, true);
2970 if (new_lhs)
2972 gimple_set_lhs (stmt, new_lhs);
2973 changed = true;
2978 return changed;
2981 /* Fold the statement pointed to by GSI. In some cases, this function may
2982 replace the whole statement with a new one. Returns true iff folding
2983 makes any changes.
2984 The statement pointed to by GSI should be in valid gimple form but may
2985 be in an unfolded state resulting from, for example, constant
2986 propagation, which can produce *&x = 0. */
2988 bool
2989 fold_stmt (gimple_stmt_iterator *gsi)
2991 return fold_stmt_1 (gsi, false);
2994 /* Perform the minimal folding on statement STMT. Only operations like
2995 *&x created by constant propagation are handled. The statement cannot
2996 be replaced with a new one. Return true if the statement was
2997 changed, false otherwise.
2998 The statement STMT should be in valid gimple form but may
2999 be in an unfolded state resulting from, for example, constant
3000 propagation, which can produce *&x = 0. */
3002 bool
3003 fold_stmt_inplace (gimple stmt)
3005 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3006 bool changed = fold_stmt_1 (&gsi, true);
3007 gcc_assert (gsi_stmt (gsi) == stmt);
3008 return changed;
3011 /* Try to optimize out __builtin_stack_restore. Optimize it out
3012 if there is another __builtin_stack_restore in the same basic
3013 block and no calls or ASM_EXPRs are in between, or if this block's
3014 only outgoing edge is to EXIT_BLOCK and there are no calls or
3015 ASM_EXPRs after this __builtin_stack_restore. */
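/* A sketch of the pattern handled here (assumed GIMPLE):

     p_1 = __builtin_stack_save ();
     ...                              <- no calls or asms
     __builtin_stack_restore (p_1);

   When nothing can reallocate stack between the two, the save is
   rewritten into a constant and the restore is deleted. */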
3017 static tree
3018 optimize_stack_restore (gimple_stmt_iterator i)
3020 tree callee, rhs;
3021 gimple stmt, stack_save;
3022 gimple_stmt_iterator stack_save_gsi;
3024 basic_block bb = gsi_bb (i);
3025 gimple call = gsi_stmt (i);
3027 if (gimple_code (call) != GIMPLE_CALL
3028 || gimple_call_num_args (call) != 1
3029 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3030 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
3031 return NULL_TREE;
3033 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
3035 stmt = gsi_stmt (i);
3036 if (gimple_code (stmt) == GIMPLE_ASM)
3037 return NULL_TREE;
3038 if (gimple_code (stmt) != GIMPLE_CALL)
3039 continue;
3041 callee = gimple_call_fndecl (stmt);
3042 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3043 return NULL_TREE;
3045 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3046 break;
3049 if (gsi_end_p (i)
3050 && (! single_succ_p (bb)
3051 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3052 return NULL_TREE;
3054 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3055 if (gimple_code (stack_save) != GIMPLE_CALL
3056 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3057 || stmt_could_throw_p (stack_save)
3058 || !has_single_use (gimple_call_arg (call, 0)))
3059 return NULL_TREE;
3061 callee = gimple_call_fndecl (stack_save);
3062 if (!callee
3063 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3064 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3065 || gimple_call_num_args (stack_save) != 0)
3066 return NULL_TREE;
3068 stack_save_gsi = gsi_for_stmt (stack_save);
3069 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3070 if (!update_call_from_tree (&stack_save_gsi, rhs))
3071 return NULL_TREE;
3073 /* No effect, so the statement will be deleted. */
3074 return integer_zero_node;
3077 /* If va_list type is a simple pointer and nothing special is needed,
3078 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3079 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
3080 pointer assignment. */
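/* An illustrative example (target-dependent): where va_list is a
   plain character pointer,

     __builtin_va_start (&ap, 0);

   becomes the simple assignment ap = __builtin_next_arg (0), and
   __builtin_va_end (&ap) is deleted outright. */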
3082 static tree
3083 optimize_stdarg_builtin (gimple call)
3085 tree callee, lhs, rhs, cfun_va_list;
3086 bool va_list_simple_ptr;
3088 if (gimple_code (call) != GIMPLE_CALL)
3089 return NULL_TREE;
3091 callee = gimple_call_fndecl (call);
3093 cfun_va_list = targetm.fn_abi_va_list (callee);
3094 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3095 && (TREE_TYPE (cfun_va_list) == void_type_node
3096 || TREE_TYPE (cfun_va_list) == char_type_node);
3098 switch (DECL_FUNCTION_CODE (callee))
3100 case BUILT_IN_VA_START:
3101 if (!va_list_simple_ptr
3102 || targetm.expand_builtin_va_start != NULL
3103 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3104 return NULL_TREE;
3106 if (gimple_call_num_args (call) != 2)
3107 return NULL_TREE;
3109 lhs = gimple_call_arg (call, 0);
3110 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3111 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3112 != TYPE_MAIN_VARIANT (cfun_va_list))
3113 return NULL_TREE;
3115 lhs = build_fold_indirect_ref (lhs);
3116 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3117 1, integer_zero_node);
3118 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3119 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3121 case BUILT_IN_VA_COPY:
3122 if (!va_list_simple_ptr)
3123 return NULL_TREE;
3125 if (gimple_call_num_args (call) != 2)
3126 return NULL_TREE;
3128 lhs = gimple_call_arg (call, 0);
3129 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3130 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3131 != TYPE_MAIN_VARIANT (cfun_va_list))
3132 return NULL_TREE;
3134 lhs = build_fold_indirect_ref (lhs);
3135 rhs = gimple_call_arg (call, 1);
3136 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3137 != TYPE_MAIN_VARIANT (cfun_va_list))
3138 return NULL_TREE;
3140 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3141 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3143 case BUILT_IN_VA_END:
3144 /* No effect, so the statement will be deleted. */
3145 return integer_zero_node;
3147 default:
3148 gcc_unreachable ();
3152 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3153 RHS of an assignment. Insert the necessary statements before
3154 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3155 is replaced. If the call is expected to produce a result, then it
3156 is replaced by an assignment of the new RHS to the result variable.
3157 If the result is to be ignored, then the call is replaced by a
3158 GIMPLE_NOP. */
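/* A sketch of a caller below: optimize_stdarg_builtin can return a
   MODIFY_EXPR such as ap = __builtin_next_arg (0), which is not a
   valid GIMPLE call replacement on its own; it is gimplified here
   into separate statements inserted before the call. */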
3160 static void
3161 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3163 tree lhs;
3164 tree tmp = NULL_TREE; /* Silence warning. */
3165 gimple stmt, new_stmt;
3166 gimple_stmt_iterator i;
3167 gimple_seq stmts = gimple_seq_alloc ();
3168 struct gimplify_ctx gctx;
3170 stmt = gsi_stmt (*si_p);
3172 gcc_assert (is_gimple_call (stmt));
3174 lhs = gimple_call_lhs (stmt);
3176 push_gimplify_context (&gctx);
3178 if (lhs == NULL_TREE)
3179 gimplify_and_add (expr, &stmts);
3180 else
3181 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3183 pop_gimplify_context (NULL);
3185 if (gimple_has_location (stmt))
3186 annotate_all_with_location (stmts, gimple_location (stmt));
3188 /* The replacement can expose previously unreferenced variables. */
3189 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3191 new_stmt = gsi_stmt (i);
3192 find_new_referenced_vars (new_stmt);
3193 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3194 mark_symbols_for_renaming (new_stmt);
3195 gsi_next (si_p);
3198 if (lhs == NULL_TREE)
3200 new_stmt = gimple_build_nop ();
3201 unlink_stmt_vdef (stmt);
3202 release_defs (stmt);
3204 else
3206 new_stmt = gimple_build_assign (lhs, tmp);
3207 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3208 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3209 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3212 gimple_set_location (new_stmt, gimple_location (stmt));
3213 gsi_replace (si_p, new_stmt, false);
3216 /* A simple pass that attempts to fold all builtin functions. This pass
3217 is run after we've propagated as many constants as we can. */
3219 static unsigned int
3220 execute_fold_all_builtins (void)
3222 bool cfg_changed = false;
3223 basic_block bb;
3224 unsigned int todoflags = 0;
3226 FOR_EACH_BB (bb)
3228 gimple_stmt_iterator i;
3229 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3231 gimple stmt, old_stmt;
3232 tree callee, result;
3233 enum built_in_function fcode;
3235 stmt = gsi_stmt (i);
3237 if (gimple_code (stmt) != GIMPLE_CALL)
3239 gsi_next (&i);
3240 continue;
3242 callee = gimple_call_fndecl (stmt);
3243 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3245 gsi_next (&i);
3246 continue;
3248 fcode = DECL_FUNCTION_CODE (callee);
3250 result = ccp_fold_builtin (stmt);
3252 if (result)
3253 gimple_remove_stmt_histograms (cfun, stmt);
3255 if (!result)
3256 switch (DECL_FUNCTION_CODE (callee))
3258 case BUILT_IN_CONSTANT_P:
3259 /* Resolve __builtin_constant_p. If it hasn't been
3260 folded to integer_one_node by now, it's fairly
3261 certain that the value simply isn't constant. */
3262 result = integer_zero_node;
3263 break;
3265 case BUILT_IN_STACK_RESTORE:
3266 result = optimize_stack_restore (i);
3267 if (result)
3268 break;
3269 gsi_next (&i);
3270 continue;
3272 case BUILT_IN_VA_START:
3273 case BUILT_IN_VA_END:
3274 case BUILT_IN_VA_COPY:
3275 /* These shouldn't be folded before pass_stdarg. */
3276 result = optimize_stdarg_builtin (stmt);
3277 if (result)
3278 break;
3279 /* FALLTHRU */
3281 default:
3282 gsi_next (&i);
3283 continue;
3286 if (dump_file && (dump_flags & TDF_DETAILS))
3288 fprintf (dump_file, "Simplified\n ");
3289 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3292 old_stmt = stmt;
3293 if (!update_call_from_tree (&i, result))
3295 gimplify_and_update_call_from_tree (&i, result);
3296 todoflags |= TODO_update_address_taken;
3299 stmt = gsi_stmt (i);
3300 update_stmt (stmt);
3302 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3303 && gimple_purge_dead_eh_edges (bb))
3304 cfg_changed = true;
3306 if (dump_file && (dump_flags & TDF_DETAILS))
3308 fprintf (dump_file, "to\n ");
3309 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3310 fprintf (dump_file, "\n");
3313 /* Retry the same statement if it changed into another
3314 builtin; there might be new opportunities now. */
3315 if (gimple_code (stmt) != GIMPLE_CALL)
3317 gsi_next (&i);
3318 continue;
3320 callee = gimple_call_fndecl (stmt);
3321 if (!callee
3322 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3323 || DECL_FUNCTION_CODE (callee) == fcode)
3324 gsi_next (&i);
3328 /* Delete unreachable blocks. */
3329 if (cfg_changed)
3330 todoflags |= TODO_cleanup_cfg;
3332 return todoflags;
3336 struct gimple_opt_pass pass_fold_builtins =
3339 GIMPLE_PASS,
3340 "fab", /* name */
3341 NULL, /* gate */
3342 execute_fold_all_builtins, /* execute */
3343 NULL, /* sub */
3344 NULL, /* next */
3345 0, /* static_pass_number */
3346 TV_NONE, /* tv_id */
3347 PROP_cfg | PROP_ssa, /* properties_required */
3348 0, /* properties_provided */
3349 0, /* properties_destroyed */
3350 0, /* todo_flags_start */
3351 TODO_dump_func
3352 | TODO_verify_ssa
3353 | TODO_update_ssa /* todo_flags_finish */