gcc/tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
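   As a small end-to-end illustration (the SSA numbering below is
   hypothetical, not taken from a real dump), consider:

	x_1 = 4;
	y_2 = x_1 + 1;
	if (y_2 > 3)
	  return y_2;

   Simulation gives x_1 the lattice value CONSTANT 4, which in turn
   makes y_2 CONSTANT 5 and folds the predicate to true; the final
   substitution can then rewrite the return into 'return 5;'.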
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
 85       that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
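   A sketch of this rule (again with hypothetical SSA numbering):

	int v;
	if (PRED)
	  v_5 = 7;
	v_6 = PHI (v_5, v_4)

   v_4 flows in from the path on which 'v' was never assigned, so it is
   UNDEFINED; the meet of v_5 and v_4 is therefore CONSTANT 7 and v_6
   may be treated as 7.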
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
113 For instance, consider the following code fragment:
115 struct A a;
116 const int B = 42;
118 void foo (int i)
120 if (i > 10)
121 a.a = 42;
122 else
124 a.b = 21;
125 a.a = a.b + 21;
128 if (a.a != B)
129 never_executed ();
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
139 # a_5 = VDEF <a_4>
140 a.a = 2;
142 # VUSE <a_5>
143 x_3 = a.b;
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
152 int A;
154 foo (int i)
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
160 # VUSE <A_5>
161 A.0_6 = A;
163 return A.0_6;
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
177 References:
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "flags.h"
194 #include "rtl.h"
195 #include "tm_p.h"
196 #include "ggc.h"
197 #include "basic-block.h"
198 #include "output.h"
199 #include "expr.h"
200 #include "function.h"
201 #include "diagnostic.h"
202 #include "timevar.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
209 #include "target.h"
210 #include "toplev.h"
211 #include "dbgcnt.h"
214 /* Possible lattice values. */
215 typedef enum
217 UNINITIALIZED,
218 UNDEFINED,
219 CONSTANT,
220 VARYING
221 } ccp_lattice_t;
223 /* Array of propagated constant values. After propagation,
224 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
225 the constant is held in an SSA name representing a memory store
226 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
227 memory reference used to store (i.e., the LHS of the assignment
228 doing the store). */
229 static prop_value_t *const_val;
231 static void canonicalize_float_value (prop_value_t *);
233 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
235 static void
236 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
238 switch (val.lattice_val)
240 case UNINITIALIZED:
241 fprintf (outf, "%sUNINITIALIZED", prefix);
242 break;
243 case UNDEFINED:
244 fprintf (outf, "%sUNDEFINED", prefix);
245 break;
246 case VARYING:
247 fprintf (outf, "%sVARYING", prefix);
248 break;
249 case CONSTANT:
250 fprintf (outf, "%sCONSTANT ", prefix);
251 print_generic_expr (outf, val.value, dump_flags);
252 break;
253 default:
254 gcc_unreachable ();
259 /* Print lattice value VAL to stderr. */
261 void debug_lattice_value (prop_value_t val);
263 void
264 debug_lattice_value (prop_value_t val)
266 dump_lattice_value (stderr, "", val);
267 fprintf (stderr, "\n");
272 /* If SYM is a constant variable with known value, return the value.
273 NULL_TREE is returned otherwise. */
275 tree
276 get_symbol_constant_value (tree sym)
278 if (TREE_STATIC (sym)
279 && TREE_READONLY (sym))
281 tree val = DECL_INITIAL (sym);
282 if (val)
284 STRIP_USELESS_TYPE_CONVERSION (val);
285 if (is_gimple_min_invariant (val))
287 if (TREE_CODE (val) == ADDR_EXPR)
289 tree base = get_base_address (TREE_OPERAND (val, 0));
290 if (base && TREE_CODE (base) == VAR_DECL)
291 add_referenced_var (base);
293 return val;
296 /* Variables declared 'const' without an initializer
 297      have zero as the initializer if they cannot be
298 overridden at link or run time. */
299 if (!val
300 && !DECL_EXTERNAL (sym)
301 && targetm.binds_local_p (sym)
302 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
303 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
304 return fold_convert (TREE_TYPE (sym), integer_zero_node);
307 return NULL_TREE;
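/* As an illustration of the cases above (the declarations are
   hypothetical, and which branch fires depends on how the front end
   sets DECL_INITIAL):

     static const int answer = 42;   returns the INTEGER_CST 42
     static const int zero;          const, not external, binds locally:
                                     returns 0
     extern const int other;         may be defined elsewhere:
                                     returns NULL_TREE  */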
310 /* Compute a default value for variable VAR and store it in the
311 CONST_VAL array. The following rules are used to get default
312 values:
314 1- Global and static variables that are declared constant are
315 considered CONSTANT.
317 2- Any other value is considered UNDEFINED. This is useful when
318 considering PHI nodes. PHI arguments that are undefined do not
319 change the constant value of the PHI node, which allows for more
320 constants to be propagated.
322 3- Variables defined by statements other than assignments and PHI
323 nodes are considered VARYING.
325 4- Initial values of variables that are not GIMPLE registers are
326 considered VARYING. */
328 static prop_value_t
329 get_default_value (tree var)
331 tree sym = SSA_NAME_VAR (var);
332 prop_value_t val = { UNINITIALIZED, NULL_TREE };
333 gimple stmt;
335 stmt = SSA_NAME_DEF_STMT (var);
337 if (gimple_nop_p (stmt))
339 /* Variables defined by an empty statement are those used
340 before being initialized. If VAR is a local variable, we
341 can assume initially that it is UNDEFINED, otherwise we must
342 consider it VARYING. */
343 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
344 val.lattice_val = UNDEFINED;
345 else
346 val.lattice_val = VARYING;
348 else if (is_gimple_assign (stmt)
349 /* Value-returning GIMPLE_CALL statements assign to
350 a variable, and are treated similarly to GIMPLE_ASSIGN. */
351 || (is_gimple_call (stmt)
352 && gimple_call_lhs (stmt) != NULL_TREE)
353 || gimple_code (stmt) == GIMPLE_PHI)
355 tree cst;
356 if (gimple_assign_single_p (stmt)
357 && DECL_P (gimple_assign_rhs1 (stmt))
358 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
360 val.lattice_val = CONSTANT;
361 val.value = cst;
363 else
364 /* Any other variable defined by an assignment or a PHI node
365 is considered UNDEFINED. */
366 val.lattice_val = UNDEFINED;
368 else
370 /* Otherwise, VAR will never take on a constant value. */
371 val.lattice_val = VARYING;
374 return val;
378 /* Get the constant value associated with variable VAR. */
380 static inline prop_value_t *
381 get_value (tree var)
383 prop_value_t *val;
385 if (const_val == NULL)
386 return NULL;
388 val = &const_val[SSA_NAME_VERSION (var)];
389 if (val->lattice_val == UNINITIALIZED)
390 *val = get_default_value (var);
392 canonicalize_float_value (val);
394 return val;
397 /* Sets the value associated with VAR to VARYING. */
399 static inline void
400 set_value_varying (tree var)
402 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
404 val->lattice_val = VARYING;
405 val->value = NULL_TREE;
408 /* For float types, modify the value of VAL to make ccp work correctly
409 for non-standard values (-0, NaN):
411 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
412 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
413 This is to fix the following problem (see PR 29921): Suppose we have
415 x = 0.0 * y
417 and we set value of y to NaN. This causes value of x to be set to NaN.
418 When we later determine that y is in fact VARYING, fold uses the fact
419 that HONOR_NANS is false, and we try to change the value of x to 0,
420 causing an ICE. With HONOR_NANS being false, the real appearance of
421 NaN would cause undefined behavior, though, so claiming that y (and x)
422 are UNDEFINED initially is correct. */
424 static void
425 canonicalize_float_value (prop_value_t *val)
427 enum machine_mode mode;
428 tree type;
429 REAL_VALUE_TYPE d;
431 if (val->lattice_val != CONSTANT
432 || TREE_CODE (val->value) != REAL_CST)
433 return;
435 d = TREE_REAL_CST (val->value);
436 type = TREE_TYPE (val->value);
437 mode = TYPE_MODE (type);
439 if (!HONOR_SIGNED_ZEROS (mode)
440 && REAL_VALUE_MINUS_ZERO (d))
442 val->value = build_real (type, dconst0);
443 return;
446 if (!HONOR_NANS (mode)
447 && REAL_VALUE_ISNAN (d))
449 val->lattice_val = UNDEFINED;
450 val->value = NULL;
451 return;
455 /* Set the value for variable VAR to NEW_VAL. Return true if the new
456 value is different from VAR's previous value. */
458 static bool
459 set_lattice_value (tree var, prop_value_t new_val)
461 prop_value_t *old_val = get_value (var);
463 canonicalize_float_value (&new_val);
465 /* Lattice transitions must always be monotonically increasing in
466 value. If *OLD_VAL and NEW_VAL are the same, return false to
467 inform the caller that this was a non-transition. */
469 gcc_assert (old_val->lattice_val < new_val.lattice_val
470 || (old_val->lattice_val == new_val.lattice_val
471 && ((!old_val->value && !new_val.value)
472 || operand_equal_p (old_val->value, new_val.value, 0))));
474 if (old_val->lattice_val != new_val.lattice_val)
476 if (dump_file && (dump_flags & TDF_DETAILS))
478 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
479 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
482 *old_val = new_val;
484 gcc_assert (new_val.lattice_val != UNDEFINED);
485 return true;
488 return false;
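/* For example (values purely illustrative), a name may legally move
   UNDEFINED -> CONSTANT 5 -> VARYING as simulation learns more, but a
   transition such as CONSTANT 5 -> CONSTANT 7 would trip the assertion
   above: the lattice value stays CONSTANT while the associated constant
   changes.  */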
492 /* Return the likely CCP lattice value for STMT.
494 If STMT has no operands, then return CONSTANT.
 496    Else if undefinedness of operands of STMT causes its value to be
497 undefined, then return UNDEFINED.
499 Else if any operands of STMT are constants, then return CONSTANT.
501 Else return VARYING. */
503 static ccp_lattice_t
504 likely_value (gimple stmt)
506 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
507 tree use;
508 ssa_op_iter iter;
509 unsigned i;
511 enum gimple_code code = gimple_code (stmt);
513 /* This function appears to be called only for assignments, calls,
514 conditionals, and switches, due to the logic in visit_stmt. */
515 gcc_assert (code == GIMPLE_ASSIGN
516 || code == GIMPLE_CALL
517 || code == GIMPLE_COND
518 || code == GIMPLE_SWITCH);
520 /* If the statement has volatile operands, it won't fold to a
521 constant value. */
522 if (gimple_has_volatile_ops (stmt))
523 return VARYING;
525 /* Arrive here for more complex cases. */
526 has_constant_operand = false;
527 has_undefined_operand = false;
528 all_undefined_operands = true;
529 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
531 prop_value_t *val = get_value (use);
533 if (val->lattice_val == UNDEFINED)
534 has_undefined_operand = true;
535 else
536 all_undefined_operands = false;
538 if (val->lattice_val == CONSTANT)
539 has_constant_operand = true;
542 /* There may be constants in regular rhs operands. For calls we
543 have to ignore lhs, fndecl and static chain, otherwise only
544 the lhs. */
545 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
546 i < gimple_num_ops (stmt); ++i)
548 tree op = gimple_op (stmt, i);
549 if (!op || TREE_CODE (op) == SSA_NAME)
550 continue;
551 if (is_gimple_min_invariant (op))
552 has_constant_operand = true;
555 /* If the operation combines operands like COMPLEX_EXPR make sure to
556 not mark the result UNDEFINED if only one part of the result is
557 undefined. */
558 if (has_undefined_operand && all_undefined_operands)
559 return UNDEFINED;
560 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
562 switch (gimple_assign_rhs_code (stmt))
564 /* Unary operators are handled with all_undefined_operands. */
565 case PLUS_EXPR:
566 case MINUS_EXPR:
567 case POINTER_PLUS_EXPR:
568 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
569 Not bitwise operators, one VARYING operand may specify the
570 result completely. Not logical operators for the same reason.
571 Not COMPLEX_EXPR as one VARYING operand makes the result partly
572 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
573 the undefined operand may be promoted. */
574 return UNDEFINED;
576 default:
580 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
581 fall back to VARYING even if there were CONSTANT operands. */
582 if (has_undefined_operand)
583 return VARYING;
585 /* We do not consider virtual operands here -- load from read-only
586 memory may have only VARYING virtual operands, but still be
587 constant. */
588 if (has_constant_operand
589 || gimple_references_memory_p (stmt))
590 return CONSTANT;
592 return VARYING;
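/* For example (illustrative): in x_3 = y_1 + 4 the literal 4, or a
   CONSTANT lattice value for y_1, makes the statement likely CONSTANT;
   if y_1 is still UNDEFINED the whole statement is UNDEFINED; volatile
   operands force VARYING.  */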
595 /* Returns true if STMT cannot be constant. */
597 static bool
598 surely_varying_stmt_p (gimple stmt)
600 /* If the statement has operands that we cannot handle, it cannot be
601 constant. */
602 if (gimple_has_volatile_ops (stmt))
603 return true;
 605   /* If it is a call that does not return a value, or a direct call
 606      to a function that is not a builtin, it is varying.  */
607 if (is_gimple_call (stmt))
609 tree fndecl;
610 if (!gimple_call_lhs (stmt)
611 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
612 && !DECL_BUILT_IN (fndecl)))
613 return true;
616 /* Any other store operation is not interesting. */
617 else if (gimple_vdef (stmt))
618 return true;
620 /* Anything other than assignments and conditional jumps are not
621 interesting for CCP. */
622 if (gimple_code (stmt) != GIMPLE_ASSIGN
623 && gimple_code (stmt) != GIMPLE_COND
624 && gimple_code (stmt) != GIMPLE_SWITCH
625 && gimple_code (stmt) != GIMPLE_CALL)
626 return true;
628 return false;
631 /* Initialize local data structures for CCP. */
633 static void
634 ccp_initialize (void)
636 basic_block bb;
638 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
640 /* Initialize simulation flags for PHI nodes and statements. */
641 FOR_EACH_BB (bb)
643 gimple_stmt_iterator i;
645 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
647 gimple stmt = gsi_stmt (i);
648 bool is_varying = surely_varying_stmt_p (stmt);
650 if (is_varying)
652 tree def;
653 ssa_op_iter iter;
655 /* If the statement will not produce a constant, mark
656 all its outputs VARYING. */
657 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
658 set_value_varying (def);
660 prop_set_simulate_again (stmt, !is_varying);
664 /* Now process PHI nodes. We never clear the simulate_again flag on
665 phi nodes, since we do not know which edges are executable yet,
666 except for phi nodes for virtual operands when we do not do store ccp. */
667 FOR_EACH_BB (bb)
669 gimple_stmt_iterator i;
671 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
673 gimple phi = gsi_stmt (i);
675 if (!is_gimple_reg (gimple_phi_result (phi)))
676 prop_set_simulate_again (phi, false);
677 else
678 prop_set_simulate_again (phi, true);
 683 /* Debug count support. Reset the values of ssa names
 684    to VARYING when the total number of ssa names analyzed is
 685    beyond the debug count specified.  */
687 static void
688 do_dbg_cnt (void)
690 unsigned i;
691 for (i = 0; i < num_ssa_names; i++)
693 if (!dbg_cnt (ccp))
695 const_val[i].lattice_val = VARYING;
696 const_val[i].value = NULL_TREE;
702 /* Do final substitution of propagated values, cleanup the flowgraph and
703 free allocated storage.
705 Return TRUE when something was optimized. */
707 static bool
708 ccp_finalize (void)
710 bool something_changed;
712 do_dbg_cnt ();
713 /* Perform substitutions based on the known constant values. */
714 something_changed = substitute_and_fold (const_val, false);
716 free (const_val);
717 const_val = NULL;
 718   return something_changed;
722 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
723 in VAL1.
725 any M UNDEFINED = any
726 any M VARYING = VARYING
727 Ci M Cj = Ci if (i == j)
728 Ci M Cj = VARYING if (i != j)
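/* For instance (illustrative): CONSTANT 3 M UNDEFINED = CONSTANT 3,
   CONSTANT 3 M CONSTANT 3 = CONSTANT 3, and
   CONSTANT 3 M CONSTANT 4 = VARYING.  */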
731 static void
732 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
734 if (val1->lattice_val == UNDEFINED)
736 /* UNDEFINED M any = any */
737 *val1 = *val2;
739 else if (val2->lattice_val == UNDEFINED)
741 /* any M UNDEFINED = any
742 Nothing to do. VAL1 already contains the value we want. */
745 else if (val1->lattice_val == VARYING
746 || val2->lattice_val == VARYING)
748 /* any M VARYING = VARYING. */
749 val1->lattice_val = VARYING;
750 val1->value = NULL_TREE;
752 else if (val1->lattice_val == CONSTANT
753 && val2->lattice_val == CONSTANT
754 && simple_cst_equal (val1->value, val2->value) == 1)
756 /* Ci M Cj = Ci if (i == j)
757 Ci M Cj = VARYING if (i != j)
759 If these two values come from memory stores, make sure that
760 they come from the same memory reference. */
761 val1->lattice_val = CONSTANT;
762 val1->value = val1->value;
764 else
766 /* Any other combination is VARYING. */
767 val1->lattice_val = VARYING;
768 val1->value = NULL_TREE;
773 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
774 lattice values to determine PHI_NODE's lattice value. The value of a
775 PHI node is determined calling ccp_lattice_meet with all the arguments
776 of the PHI node that are incoming via executable edges. */
778 static enum ssa_prop_result
779 ccp_visit_phi_node (gimple phi)
781 unsigned i;
782 prop_value_t *old_val, new_val;
784 if (dump_file && (dump_flags & TDF_DETAILS))
786 fprintf (dump_file, "\nVisiting PHI node: ");
787 print_gimple_stmt (dump_file, phi, 0, dump_flags);
790 old_val = get_value (gimple_phi_result (phi));
791 switch (old_val->lattice_val)
793 case VARYING:
794 return SSA_PROP_VARYING;
796 case CONSTANT:
797 new_val = *old_val;
798 break;
800 case UNDEFINED:
801 new_val.lattice_val = UNDEFINED;
802 new_val.value = NULL_TREE;
803 break;
805 default:
806 gcc_unreachable ();
809 for (i = 0; i < gimple_phi_num_args (phi); i++)
811 /* Compute the meet operator over all the PHI arguments flowing
812 through executable edges. */
813 edge e = gimple_phi_arg_edge (phi, i);
815 if (dump_file && (dump_flags & TDF_DETAILS))
817 fprintf (dump_file,
818 "\n Argument #%d (%d -> %d %sexecutable)\n",
819 i, e->src->index, e->dest->index,
820 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
 823       /* If the incoming edge is executable, compute the meet operator for
824 the existing value of the PHI node and the current PHI argument. */
825 if (e->flags & EDGE_EXECUTABLE)
827 tree arg = gimple_phi_arg (phi, i)->def;
828 prop_value_t arg_val;
830 if (is_gimple_min_invariant (arg))
832 arg_val.lattice_val = CONSTANT;
833 arg_val.value = arg;
835 else
836 arg_val = *(get_value (arg));
838 ccp_lattice_meet (&new_val, &arg_val);
840 if (dump_file && (dump_flags & TDF_DETAILS))
842 fprintf (dump_file, "\t");
843 print_generic_expr (dump_file, arg, dump_flags);
844 dump_lattice_value (dump_file, "\tValue: ", arg_val);
845 fprintf (dump_file, "\n");
848 if (new_val.lattice_val == VARYING)
849 break;
853 if (dump_file && (dump_flags & TDF_DETAILS))
855 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
856 fprintf (dump_file, "\n\n");
859 /* Make the transition to the new value. */
860 if (set_lattice_value (gimple_phi_result (phi), new_val))
862 if (new_val.lattice_val == VARYING)
863 return SSA_PROP_VARYING;
864 else
865 return SSA_PROP_INTERESTING;
867 else
868 return SSA_PROP_NOT_INTERESTING;
871 /* Return true if we may propagate the address expression ADDR into the
872 dereference DEREF and cancel them. */
874 bool
875 may_propagate_address_into_dereference (tree addr, tree deref)
877 gcc_assert (INDIRECT_REF_P (deref)
878 && TREE_CODE (addr) == ADDR_EXPR);
880 /* Don't propagate if ADDR's operand has incomplete type. */
881 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
882 return false;
884 /* If the address is invariant then we do not need to preserve restrict
885 qualifications. But we do need to preserve volatile qualifiers until
886 we can annotate the folded dereference itself properly. */
887 if (is_gimple_min_invariant (addr)
888 && (!TREE_THIS_VOLATILE (deref)
889 || TYPE_VOLATILE (TREE_TYPE (addr))))
890 return useless_type_conversion_p (TREE_TYPE (deref),
891 TREE_TYPE (TREE_OPERAND (addr, 0)));
893 /* Else both the address substitution and the folding must result in
894 a valid useless type conversion sequence. */
895 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
896 TREE_TYPE (addr))
897 && useless_type_conversion_p (TREE_TYPE (deref),
898 TREE_TYPE (TREE_OPERAND (addr, 0))));
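/* For example (illustrative): given p_1 = &x and a later dereference
   *p_1, propagating the address lets *&x cancel to a plain use of 'x';
   the checks above only allow this when the type of 'x' is complete and
   the dereferenced type agrees with it up to useless conversions.  */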
901 /* CCP specific front-end to the non-destructive constant folding
902 routines.
904 Attempt to simplify the RHS of STMT knowing that one or more
905 operands are constants.
907 If simplification is possible, return the simplified RHS,
908 otherwise return the original RHS or NULL_TREE. */
910 static tree
911 ccp_fold (gimple stmt)
913 switch (gimple_code (stmt))
915 case GIMPLE_ASSIGN:
917 enum tree_code subcode = gimple_assign_rhs_code (stmt);
919 switch (get_gimple_rhs_class (subcode))
921 case GIMPLE_SINGLE_RHS:
923 tree rhs = gimple_assign_rhs1 (stmt);
924 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
926 if (TREE_CODE (rhs) == SSA_NAME)
928 /* If the RHS is an SSA_NAME, return its known constant value,
929 if any. */
930 return get_value (rhs)->value;
932 /* Handle propagating invariant addresses into address operations.
933 The folding we do here matches that in tree-ssa-forwprop.c. */
934 else if (TREE_CODE (rhs) == ADDR_EXPR)
936 tree *base;
937 base = &TREE_OPERAND (rhs, 0);
938 while (handled_component_p (*base))
939 base = &TREE_OPERAND (*base, 0);
940 if (TREE_CODE (*base) == INDIRECT_REF
941 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
943 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
944 if (val->lattice_val == CONSTANT
945 && TREE_CODE (val->value) == ADDR_EXPR
946 && may_propagate_address_into_dereference
947 (val->value, *base))
949 /* We need to return a new tree, not modify the IL
950 or share parts of it. So play some tricks to
951 avoid manually building it. */
952 tree ret, save = *base;
953 *base = TREE_OPERAND (val->value, 0);
954 ret = unshare_expr (rhs);
955 recompute_tree_invariant_for_addr_expr (ret);
956 *base = save;
957 return ret;
962 if (kind == tcc_reference)
964 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
965 || TREE_CODE (rhs) == REALPART_EXPR
966 || TREE_CODE (rhs) == IMAGPART_EXPR)
967 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
969 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
970 if (val->lattice_val == CONSTANT)
971 return fold_unary (TREE_CODE (rhs),
972 TREE_TYPE (rhs), val->value);
974 else if (TREE_CODE (rhs) == INDIRECT_REF
975 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
977 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
978 if (val->lattice_val == CONSTANT
979 && TREE_CODE (val->value) == ADDR_EXPR
980 && useless_type_conversion_p (TREE_TYPE (rhs),
981 TREE_TYPE (TREE_TYPE (val->value))))
982 rhs = TREE_OPERAND (val->value, 0);
984 return fold_const_aggregate_ref (rhs);
986 else if (kind == tcc_declaration)
987 return get_symbol_constant_value (rhs);
988 return rhs;
991 case GIMPLE_UNARY_RHS:
993 /* Handle unary operators that can appear in GIMPLE form.
994 Note that we know the single operand must be a constant,
995 so this should almost always return a simplified RHS. */
996 tree lhs = gimple_assign_lhs (stmt);
997 tree op0 = gimple_assign_rhs1 (stmt);
999 /* Simplify the operand down to a constant. */
1000 if (TREE_CODE (op0) == SSA_NAME)
1002 prop_value_t *val = get_value (op0);
1003 if (val->lattice_val == CONSTANT)
1004 op0 = get_value (op0)->value;
1007 /* Conversions are useless for CCP purposes if they are
1008 value-preserving. Thus the restrictions that
1009 useless_type_conversion_p places for pointer type conversions
1010 do not apply here. Substitution later will only substitute to
1011 allowed places. */
1012 if (CONVERT_EXPR_CODE_P (subcode)
1013 && POINTER_TYPE_P (TREE_TYPE (lhs))
1014 && POINTER_TYPE_P (TREE_TYPE (op0))
1015 /* Do not allow differences in volatile qualification
1016 as this might get us confused as to whether a
1017 propagation destination statement is volatile
1018 or not. See PR36988. */
1019 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1020 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1022 tree tem;
1023 /* Still try to generate a constant of correct type. */
1024 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1025 TREE_TYPE (op0))
1026 && ((tem = maybe_fold_offset_to_address
1027 (op0, integer_zero_node, TREE_TYPE (lhs)))
1028 != NULL_TREE))
1029 return tem;
1030 return op0;
1033 return fold_unary_ignore_overflow (subcode,
1034 gimple_expr_type (stmt), op0);
1037 case GIMPLE_BINARY_RHS:
1039 /* Handle binary operators that can appear in GIMPLE form. */
1040 tree op0 = gimple_assign_rhs1 (stmt);
1041 tree op1 = gimple_assign_rhs2 (stmt);
1043 /* Simplify the operands down to constants when appropriate. */
1044 if (TREE_CODE (op0) == SSA_NAME)
1046 prop_value_t *val = get_value (op0);
1047 if (val->lattice_val == CONSTANT)
1048 op0 = val->value;
1051 if (TREE_CODE (op1) == SSA_NAME)
1053 prop_value_t *val = get_value (op1);
1054 if (val->lattice_val == CONSTANT)
1055 op1 = val->value;
1058 /* Fold &foo + CST into an invariant reference if possible. */
1059 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1060 && TREE_CODE (op0) == ADDR_EXPR
1061 && TREE_CODE (op1) == INTEGER_CST)
1063 tree lhs = gimple_assign_lhs (stmt);
1064 tree tem = maybe_fold_offset_to_address (op0, op1,
1065 TREE_TYPE (lhs));
1066 if (tem != NULL_TREE)
1067 return tem;
1070 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1073 default:
1074 gcc_unreachable ();
1077 break;
1079 case GIMPLE_CALL:
1081 tree fn = gimple_call_fn (stmt);
1082 prop_value_t *val;
1084 if (TREE_CODE (fn) == SSA_NAME)
1086 val = get_value (fn);
1087 if (val->lattice_val == CONSTANT)
1088 fn = val->value;
1090 if (TREE_CODE (fn) == ADDR_EXPR
1091 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1092 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1094 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1095 tree call, retval;
1096 unsigned i;
1097 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1099 args[i] = gimple_call_arg (stmt, i);
1100 if (TREE_CODE (args[i]) == SSA_NAME)
1102 val = get_value (args[i]);
1103 if (val->lattice_val == CONSTANT)
1104 args[i] = val->value;
1107 call = build_call_array (gimple_call_return_type (stmt),
1108 fn, gimple_call_num_args (stmt), args);
1109 retval = fold_call_expr (call, false);
1110 if (retval)
1111 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1112 STRIP_NOPS (retval);
1113 return retval;
1115 return NULL_TREE;
1118 case GIMPLE_COND:
1120 /* Handle comparison operators that can appear in GIMPLE form. */
1121 tree op0 = gimple_cond_lhs (stmt);
1122 tree op1 = gimple_cond_rhs (stmt);
1123 enum tree_code code = gimple_cond_code (stmt);
1125 /* Simplify the operands down to constants when appropriate. */
1126 if (TREE_CODE (op0) == SSA_NAME)
1128 prop_value_t *val = get_value (op0);
1129 if (val->lattice_val == CONSTANT)
1130 op0 = val->value;
1133 if (TREE_CODE (op1) == SSA_NAME)
1135 prop_value_t *val = get_value (op1);
1136 if (val->lattice_val == CONSTANT)
1137 op1 = val->value;
1140 return fold_binary (code, boolean_type_node, op0, op1);
1143 case GIMPLE_SWITCH:
1145 tree rhs = gimple_switch_index (stmt);
1147 if (TREE_CODE (rhs) == SSA_NAME)
1149 /* If the RHS is an SSA_NAME, return its known constant value,
1150 if any. */
1151 return get_value (rhs)->value;
1154 return rhs;
1157 default:
1158 gcc_unreachable ();
1163 /* Return the tree representing the element referenced by T if T is an
1164 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1165 NULL_TREE otherwise. */
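/* As an illustration (hypothetical declaration):

     static const int tbl[3] = { 10, 20, 30 };

   a load from tbl[1], or from tbl[i_2] where i_2 has lattice value
   CONSTANT 1, folds to the INTEGER_CST 20.  */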
1167 tree
1168 fold_const_aggregate_ref (tree t)
1170 prop_value_t *value;
1171 tree base, ctor, idx, field;
1172 unsigned HOST_WIDE_INT cnt;
1173 tree cfield, cval;
1175 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1176 return get_symbol_constant_value (t);
1178 switch (TREE_CODE (t))
1180 case ARRAY_REF:
1181 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1182 DECL_INITIAL. If BASE is a nested reference into another
1183 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1184 the inner reference. */
1185 base = TREE_OPERAND (t, 0);
1186 switch (TREE_CODE (base))
1188 case VAR_DECL:
1189 if (!TREE_READONLY (base)
1190 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1191 || !targetm.binds_local_p (base))
1192 return NULL_TREE;
1194 ctor = DECL_INITIAL (base);
1195 break;
1197 case ARRAY_REF:
1198 case COMPONENT_REF:
1199 ctor = fold_const_aggregate_ref (base);
1200 break;
1202 case STRING_CST:
1203 case CONSTRUCTOR:
1204 ctor = base;
1205 break;
1207 default:
1208 return NULL_TREE;
1211 if (ctor == NULL_TREE
1212 || (TREE_CODE (ctor) != CONSTRUCTOR
1213 && TREE_CODE (ctor) != STRING_CST)
1214 || !TREE_STATIC (ctor))
1215 return NULL_TREE;
1217 /* Get the index. If we have an SSA_NAME, try to resolve it
1218 with the current lattice value for the SSA_NAME. */
1219 idx = TREE_OPERAND (t, 1);
1220 switch (TREE_CODE (idx))
1222 case SSA_NAME:
1223 if ((value = get_value (idx))
1224 && value->lattice_val == CONSTANT
1225 && TREE_CODE (value->value) == INTEGER_CST)
1226 idx = value->value;
1227 else
1228 return NULL_TREE;
1229 break;
1231 case INTEGER_CST:
1232 break;
1234 default:
1235 return NULL_TREE;
1238 /* Fold read from constant string. */
1239 if (TREE_CODE (ctor) == STRING_CST)
1241 if ((TYPE_MODE (TREE_TYPE (t))
1242 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1243 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1244 == MODE_INT)
1245 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1246 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1247 return build_int_cst_type (TREE_TYPE (t),
1248 (TREE_STRING_POINTER (ctor)
1249 [TREE_INT_CST_LOW (idx)]));
1250 return NULL_TREE;
1253 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1254 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1255 if (tree_int_cst_equal (cfield, idx))
1257 STRIP_USELESS_TYPE_CONVERSION (cval);
1258 if (TREE_CODE (cval) == ADDR_EXPR)
1260 tree base = get_base_address (TREE_OPERAND (cval, 0));
1261 if (base && TREE_CODE (base) == VAR_DECL)
1262 add_referenced_var (base);
1264 return cval;
1266 break;
1268 case COMPONENT_REF:
1269 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1270 DECL_INITIAL. If BASE is a nested reference into another
1271 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1272 the inner reference. */
1273 base = TREE_OPERAND (t, 0);
1274 switch (TREE_CODE (base))
1276 case VAR_DECL:
1277 if (!TREE_READONLY (base)
1278 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1279 || !targetm.binds_local_p (base))
1280 return NULL_TREE;
1282 ctor = DECL_INITIAL (base);
1283 break;
1285 case ARRAY_REF:
1286 case COMPONENT_REF:
1287 ctor = fold_const_aggregate_ref (base);
1288 break;
1290 default:
1291 return NULL_TREE;
1294 if (ctor == NULL_TREE
1295 || TREE_CODE (ctor) != CONSTRUCTOR
1296 || !TREE_STATIC (ctor))
1297 return NULL_TREE;
1299 field = TREE_OPERAND (t, 1);
1301 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1302 if (cfield == field
1303 /* FIXME: Handle bit-fields. */
1304 && ! DECL_BIT_FIELD (cfield))
1306 STRIP_USELESS_TYPE_CONVERSION (cval);
1307 if (TREE_CODE (cval) == ADDR_EXPR)
1309 tree base = get_base_address (TREE_OPERAND (cval, 0));
1310 if (base && TREE_CODE (base) == VAR_DECL)
1311 add_referenced_var (base);
1313 return cval;
1315 break;
1317 case REALPART_EXPR:
1318 case IMAGPART_EXPR:
1320 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1321 if (c && TREE_CODE (c) == COMPLEX_CST)
1322 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1323 break;
1326 case INDIRECT_REF:
1328 tree base = TREE_OPERAND (t, 0);
1329 if (TREE_CODE (base) == SSA_NAME
1330 && (value = get_value (base))
1331 && value->lattice_val == CONSTANT
1332 && TREE_CODE (value->value) == ADDR_EXPR)
1333 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1334 break;
1337 default:
1338 break;
1341 return NULL_TREE;
1344 /* Evaluate statement STMT.
1345 Valid only for assignments, calls, conditionals, and switches. */
1347 static prop_value_t
1348 evaluate_stmt (gimple stmt)
1350 prop_value_t val;
1351 tree simplified = NULL_TREE;
1352 ccp_lattice_t likelyvalue = likely_value (stmt);
1353 bool is_constant;
1355 fold_defer_overflow_warnings ();
1357 /* If the statement is likely to have a CONSTANT result, then try
1358 to fold the statement to determine the constant value. */
1359 /* FIXME. This is the only place that we call ccp_fold.
1360 Since likely_value never returns CONSTANT for calls, we will
1361 not attempt to fold them, including builtins that may profit. */
1362 if (likelyvalue == CONSTANT)
1363 simplified = ccp_fold (stmt);
1364 /* If the statement is likely to have a VARYING result, then do not
1365 bother folding the statement. */
1366 else if (likelyvalue == VARYING)
1368 enum gimple_code code = gimple_code (stmt);
1369 if (code == GIMPLE_ASSIGN)
1371 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1373 /* Other cases cannot satisfy is_gimple_min_invariant
1374 without folding. */
1375 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1376 simplified = gimple_assign_rhs1 (stmt);
1378 else if (code == GIMPLE_SWITCH)
1379 simplified = gimple_switch_index (stmt);
1380 else
1381 /* These cannot satisfy is_gimple_min_invariant without folding. */
1382 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1385 is_constant = simplified && is_gimple_min_invariant (simplified);
1387 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1389 if (dump_file && (dump_flags & TDF_DETAILS))
1391 fprintf (dump_file, "which is likely ");
1392 switch (likelyvalue)
1394 case CONSTANT:
1395 fprintf (dump_file, "CONSTANT");
1396 break;
1397 case UNDEFINED:
1398 fprintf (dump_file, "UNDEFINED");
1399 break;
1400 case VARYING:
1401 fprintf (dump_file, "VARYING");
1402 break;
1403 default:;
1405 fprintf (dump_file, "\n");
1408 if (is_constant)
1410 /* The statement produced a constant value. */
1411 val.lattice_val = CONSTANT;
1412 val.value = simplified;
1414 else
1416 /* The statement produced a nonconstant value. If the statement
1417 had UNDEFINED operands, then the result of the statement
1418 should be UNDEFINED. Otherwise, the statement is VARYING. */
1419 if (likelyvalue == UNDEFINED)
1420 val.lattice_val = likelyvalue;
1421 else
1422 val.lattice_val = VARYING;
1424 val.value = NULL_TREE;
1427 return val;
1430 /* Visit the assignment statement STMT. Set the value of its LHS to the
1431 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1432 creates virtual definitions, set the value of each new name to that
1433 of the RHS (if we can derive a constant out of the RHS).
1434 Value-returning call statements also perform an assignment, and
1435 are handled here. */
1437 static enum ssa_prop_result
1438 visit_assignment (gimple stmt, tree *output_p)
1440 prop_value_t val;
1441 enum ssa_prop_result retval;
1443 tree lhs = gimple_get_lhs (stmt);
1445 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1446 || gimple_call_lhs (stmt) != NULL_TREE);
1448 if (gimple_assign_copy_p (stmt))
1450 tree rhs = gimple_assign_rhs1 (stmt);
1452 if (TREE_CODE (rhs) == SSA_NAME)
1454 /* For a simple copy operation, we copy the lattice values. */
1455 prop_value_t *nval = get_value (rhs);
1456 val = *nval;
1458 else
1459 val = evaluate_stmt (stmt);
1461 else
1462 /* Evaluate the statement, which could be
1463 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1464 val = evaluate_stmt (stmt);
1466 retval = SSA_PROP_NOT_INTERESTING;
1468 /* Set the lattice value of the statement's output. */
1469 if (TREE_CODE (lhs) == SSA_NAME)
1471 /* If STMT is an assignment to an SSA_NAME, we only have one
1472 value to set. */
1473 if (set_lattice_value (lhs, val))
1475 *output_p = lhs;
1476 if (val.lattice_val == VARYING)
1477 retval = SSA_PROP_VARYING;
1478 else
1479 retval = SSA_PROP_INTERESTING;
1483 return retval;
1487 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1488 if it can determine which edge will be taken. Otherwise, return
1489 SSA_PROP_VARYING. */
1491 static enum ssa_prop_result
1492 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1494 prop_value_t val;
1495 basic_block block;
1497 block = gimple_bb (stmt);
1498 val = evaluate_stmt (stmt);
1500 /* Find which edge out of the conditional block will be taken and add it
1501 to the worklist. If no single edge can be determined statically,
1502 return SSA_PROP_VARYING to feed all the outgoing edges to the
1503 propagation engine. */
1504 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1505 if (*taken_edge_p)
1506 return SSA_PROP_INTERESTING;
1507 else
1508 return SSA_PROP_VARYING;
1512 /* Evaluate statement STMT. If the statement produces an output value and
1513 its evaluation changes the lattice value of its output, return
1514 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1515 output value.
1517 If STMT is a conditional branch and we can determine its truth
1518 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1519 value, return SSA_PROP_VARYING. */
1521 static enum ssa_prop_result
1522 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1524 tree def;
1525 ssa_op_iter iter;
1527 if (dump_file && (dump_flags & TDF_DETAILS))
1529 fprintf (dump_file, "\nVisiting statement:\n");
1530 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1533 switch (gimple_code (stmt))
1535 case GIMPLE_ASSIGN:
1536 /* If the statement is an assignment that produces a single
1537 output value, evaluate its RHS to see if the lattice value of
1538 its output has changed. */
1539 return visit_assignment (stmt, output_p);
1541 case GIMPLE_CALL:
1542 /* A value-returning call also performs an assignment. */
1543 if (gimple_call_lhs (stmt) != NULL_TREE)
1544 return visit_assignment (stmt, output_p);
1545 break;
1547 case GIMPLE_COND:
1548 case GIMPLE_SWITCH:
1549 /* If STMT is a conditional branch, see if we can determine
1550 which branch will be taken. */
1551 /* FIXME. It appears that we should be able to optimize
1552 computed GOTOs here as well. */
1553 return visit_cond_stmt (stmt, taken_edge_p);
1555 default:
1556 break;
1559 /* Any other kind of statement is not interesting for constant
1560 propagation and, therefore, not worth simulating. */
1561 if (dump_file && (dump_flags & TDF_DETAILS))
1562 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1564 /* Definitions made by statements other than assignments to
1565 SSA_NAMEs represent unknown modifications to their outputs.
1566 Mark them VARYING. */
1567 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1569 prop_value_t v = { VARYING, NULL_TREE };
1570 set_lattice_value (def, v);
1573 return SSA_PROP_VARYING;
1577 /* Main entry point for SSA Conditional Constant Propagation. */
1579 static unsigned int
1580 do_ssa_ccp (void)
1582 ccp_initialize ();
1583 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1584 if (ccp_finalize ())
1585 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1586 else
1587 return 0;
1591 static bool
1592 gate_ccp (void)
1594 return flag_tree_ccp != 0;
1598 struct gimple_opt_pass pass_ccp =
1601 GIMPLE_PASS,
1602 "ccp", /* name */
1603 gate_ccp, /* gate */
1604 do_ssa_ccp, /* execute */
1605 NULL, /* sub */
1606 NULL, /* next */
1607 0, /* static_pass_number */
1608 TV_TREE_CCP, /* tv_id */
1609 PROP_cfg | PROP_ssa, /* properties_required */
1610 0, /* properties_provided */
1611 0, /* properties_destroyed */
1612 0, /* todo_flags_start */
1613 TODO_dump_func | TODO_verify_ssa
1614 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
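/* Usage sketch (flag names are the standard GCC ones; exact dump contents
   vary by version): the pass is gated on flag_tree_ccp (-ftree-ccp,
   enabled by default at -O1 and above), and its per-statement lattice
   decisions can be inspected with

     gcc -O2 -fdump-tree-ccp-details foo.c

   which prints the TDF_DETAILS output produced above.  */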
1619 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
1620 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1621 is the desired result type. */
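/* A sketch of the intended folding (sizes illustrative, assuming a
   4-byte int):

     int a[10];
     ... *(int *)((char *)a + 8) ...    becomes    a[2]

   i.e. the byte OFFSET 8 is divided exactly by the element size to give
   the index 2; a non-exact division makes the routine give up.  */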
1623 static tree
1624 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1625 bool allow_negative_idx)
1627 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1628 tree array_type, elt_type, elt_size;
1629 tree domain_type;
1631 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
 1632    measured in units of the size of the element type) from that ARRAY_REF.
1633 We can't do anything if either is variable.
1635 The case we handle here is *(&A[N]+O). */
1636 if (TREE_CODE (base) == ARRAY_REF)
1638 tree low_bound = array_ref_low_bound (base);
1640 elt_offset = TREE_OPERAND (base, 1);
1641 if (TREE_CODE (low_bound) != INTEGER_CST
1642 || TREE_CODE (elt_offset) != INTEGER_CST)
1643 return NULL_TREE;
1645 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1646 base = TREE_OPERAND (base, 0);
1649 /* Ignore stupid user tricks of indexing non-array variables. */
1650 array_type = TREE_TYPE (base);
1651 if (TREE_CODE (array_type) != ARRAY_TYPE)
1652 return NULL_TREE;
1653 elt_type = TREE_TYPE (array_type);
1654 if (!useless_type_conversion_p (orig_type, elt_type))
1655 return NULL_TREE;
1657 /* Use signed size type for intermediate computation on the index. */
1658 idx_type = signed_type_for (size_type_node);
1660 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1661 element type (so we can use the alignment if it's not constant).
1662 Otherwise, compute the offset as an index by using a division. If the
1663 division isn't exact, then don't do anything. */
1664 elt_size = TYPE_SIZE_UNIT (elt_type);
1665 if (!elt_size)
1666 return NULL;
1667 if (integer_zerop (offset))
1669 if (TREE_CODE (elt_size) != INTEGER_CST)
1670 elt_size = size_int (TYPE_ALIGN (elt_type));
1672 idx = build_int_cst (idx_type, 0);
1674 else
1676 unsigned HOST_WIDE_INT lquo, lrem;
1677 HOST_WIDE_INT hquo, hrem;
1678 double_int soffset;
1680 /* The final array offset should be signed, so we need
1681 to sign-extend the (possibly pointer) offset here
1682 and use signed division. */
1683 soffset = double_int_sext (tree_to_double_int (offset),
1684 TYPE_PRECISION (TREE_TYPE (offset)));
1685 if (TREE_CODE (elt_size) != INTEGER_CST
1686 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1687 soffset.low, soffset.high,
1688 TREE_INT_CST_LOW (elt_size),
1689 TREE_INT_CST_HIGH (elt_size),
1690 &lquo, &hquo, &lrem, &hrem)
1691 || lrem || hrem)
1692 return NULL_TREE;
1694 idx = build_int_cst_wide (idx_type, lquo, hquo);
1697 /* Assume the low bound is zero. If there is a domain type, get the
1698 low bound, if any, convert the index into that type, and add the
1699 low bound. */
1700 min_idx = build_int_cst (idx_type, 0);
1701 domain_type = TYPE_DOMAIN (array_type);
1702 if (domain_type)
1704 idx_type = domain_type;
1705 if (TYPE_MIN_VALUE (idx_type))
1706 min_idx = TYPE_MIN_VALUE (idx_type);
1707 else
1708 min_idx = fold_convert (idx_type, min_idx);
1710 if (TREE_CODE (min_idx) != INTEGER_CST)
1711 return NULL_TREE;
1713 elt_offset = fold_convert (idx_type, elt_offset);
1716 if (!integer_zerop (min_idx))
1717 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1718 if (!integer_zerop (elt_offset))
1719 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1721 /* Make sure to possibly truncate late after offsetting. */
1722 idx = fold_convert (idx_type, idx);
1724 /* We don't want to construct access past array bounds. For example
1725 char *(c[4]);
1726 c[3][2];
1727 should not be simplified into (*c)[14] or tree-vrp will
1728 give false warnings. The same is true for
1729 struct A { long x; char d[0]; } *a;
1730 (char *)a - 4;
1731 which should be not folded to &a->d[-8]. */
1732 if (domain_type
1733 && TYPE_MAX_VALUE (domain_type)
1734 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1736 tree up_bound = TYPE_MAX_VALUE (domain_type);
1738 if (tree_int_cst_lt (up_bound, idx)
1739 /* Accesses after the end of arrays of size 0 (gcc
1740 extension) and 1 are likely intentional ("struct
1741 hack"). */
1742 && compare_tree_int (up_bound, 1) > 0)
1743 return NULL_TREE;
1745 if (domain_type
1746 && TYPE_MIN_VALUE (domain_type))
1748 if (!allow_negative_idx
1749 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1750 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1751 return NULL_TREE;
1753 else if (!allow_negative_idx
1754 && compare_tree_int (idx, 0) < 0)
1755 return NULL_TREE;
1757 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1761 /* Attempt to fold *(S+O) to S.X.
1762 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1763 is the desired result type. */
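/* A sketch of the intended folding (layout illustrative, assuming 4-byte
   ints and no padding):

     struct S { int a; int b; } s;
     ... *(int *)((char *)&s + 4) ...    becomes    s.b

   because byte offset 4 matches the byte_position of field 'b' and the
   requested type matches the field type.  */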
1765 static tree
1766 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1767 tree orig_type, bool base_is_ptr)
1769 tree f, t, field_type, tail_array_field, field_offset;
1770 tree ret;
1771 tree new_base;
1773 if (TREE_CODE (record_type) != RECORD_TYPE
1774 && TREE_CODE (record_type) != UNION_TYPE
1775 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1776 return NULL_TREE;
1778 /* Short-circuit silly cases. */
1779 if (useless_type_conversion_p (record_type, orig_type))
1780 return NULL_TREE;
1782 tail_array_field = NULL_TREE;
1783 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1785 int cmp;
1787 if (TREE_CODE (f) != FIELD_DECL)
1788 continue;
1789 if (DECL_BIT_FIELD (f))
1790 continue;
1792 if (!DECL_FIELD_OFFSET (f))
1793 continue;
1794 field_offset = byte_position (f);
1795 if (TREE_CODE (field_offset) != INTEGER_CST)
1796 continue;
1798 /* ??? Java creates "interesting" fields for representing base classes.
1799 They have no name, and have no context. With no context, we get into
1800 trouble with nonoverlapping_component_refs_p. Skip them. */
1801 if (!DECL_FIELD_CONTEXT (f))
1802 continue;
1804 /* The previous array field isn't at the end. */
1805 tail_array_field = NULL_TREE;
1807 /* Check to see if this offset overlaps with the field. */
1808 cmp = tree_int_cst_compare (field_offset, offset);
1809 if (cmp > 0)
1810 continue;
1812 field_type = TREE_TYPE (f);
1814 /* Here we exactly match the offset being checked. If the types match,
1815 then we can return that field. */
1816 if (cmp == 0
1817 && useless_type_conversion_p (orig_type, field_type))
1819 if (base_is_ptr)
1820 base = build1 (INDIRECT_REF, record_type, base);
1821 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1822 return t;
1825 /* Don't care about offsets into the middle of scalars. */
1826 if (!AGGREGATE_TYPE_P (field_type))
1827 continue;
1829 /* Check for array at the end of the struct. This is often
1830 used as for flexible array members. We should be able to
1831 turn this into an array access anyway. */
1832 if (TREE_CODE (field_type) == ARRAY_TYPE)
1833 tail_array_field = f;
1835 /* Check the end of the field against the offset. */
1836 if (!DECL_SIZE_UNIT (f)
1837 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1838 continue;
1839 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1840 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1841 continue;
1843 /* If we matched, then set offset to the displacement into
1844 this field. */
1845 if (base_is_ptr)
1846 new_base = build1 (INDIRECT_REF, record_type, base);
1847 else
1848 new_base = base;
1849 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1851 /* Recurse to possibly find the match. */
1852 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1853 f == TYPE_FIELDS (record_type));
1854 if (ret)
1855 return ret;
1856 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1857 orig_type, false);
1858 if (ret)
1859 return ret;
1862 if (!tail_array_field)
1863 return NULL_TREE;
1865 f = tail_array_field;
1866 field_type = TREE_TYPE (f);
1867 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1869 /* If we get here, we've got an aggregate field, and a possibly
1870 nonzero offset into them. Recurse and hope for a valid match. */
1871 if (base_is_ptr)
1872 base = build1 (INDIRECT_REF, record_type, base);
1873 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1875 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1876 f == TYPE_FIELDS (record_type));
1877 if (t)
1878 return t;
1879 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1880 orig_type, false);
1883 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1884 or BASE[index] or by combination of those.
1886 Before attempting the conversion strip off existing ADDR_EXPRs and
1887 handled component refs. */
1889 tree
1890 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1892 tree ret;
1893 tree type;
1894 bool base_is_ptr = true;
1896 STRIP_NOPS (base);
1897 if (TREE_CODE (base) == ADDR_EXPR)
1899 base_is_ptr = false;
1901 base = TREE_OPERAND (base, 0);
 1903   /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
 1904      field of a union, so it needs to be removed and a new COMPONENT_REF
 1905      constructed.  Such wrong COMPONENT_REFs are often constructed by folding
 1906      the (type *)&object within the expression (type *)&object+offset.  */
1907 if (handled_component_p (base))
1909 HOST_WIDE_INT sub_offset, size, maxsize;
1910 tree newbase;
1911 newbase = get_ref_base_and_extent (base, &sub_offset,
1912 &size, &maxsize);
1913 gcc_assert (newbase);
1914 if (size == maxsize
1915 && size != -1
1916 && !(sub_offset & (BITS_PER_UNIT - 1)))
1918 base = newbase;
1919 if (sub_offset)
1920 offset = int_const_binop (PLUS_EXPR, offset,
1921 build_int_cst (TREE_TYPE (offset),
1922 sub_offset / BITS_PER_UNIT), 1);
1925 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1926 && integer_zerop (offset))
1927 return base;
1928 type = TREE_TYPE (base);
1930 else
1932 base_is_ptr = true;
1933 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1934 return NULL_TREE;
1935 type = TREE_TYPE (TREE_TYPE (base));
1937 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1938 orig_type, base_is_ptr);
1939 if (!ret)
1941 if (base_is_ptr)
1942 base = build1 (INDIRECT_REF, type, base);
1943 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1945 return ret;
1948 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1949 or &BASE[index] or by combination of those.
1951 Before attempting the conversion strip off existing component refs. */
1953 tree
1954 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1956 tree t;
1958 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1959 && POINTER_TYPE_P (orig_type));
1961 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1962 if (t != NULL_TREE)
1964 tree orig = addr;
1965 tree ptr_type;
1967 /* For __builtin_object_size to function correctly we need to
1968 make sure not to fold address arithmetic so that we change
1969 reference from one array to another. This would happen for
1970 example for
1972 struct X { char s1[10]; char s2[10] } s;
1973 char *foo (void) { return &s.s2[-4]; }
1975 where we need to avoid generating &s.s1[6]. As the C and
1976 C++ frontends create different initial trees
1977 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1978 sophisticated comparisons here. Note that checking for the
1979 condition after the fact is easier than trying to avoid doing
1980 the folding. */
1981 STRIP_NOPS (orig);
1982 if (TREE_CODE (orig) == ADDR_EXPR)
1983 orig = TREE_OPERAND (orig, 0);
1984 if ((TREE_CODE (orig) == ARRAY_REF
1985 || (TREE_CODE (orig) == COMPONENT_REF
1986 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1987 && (TREE_CODE (t) == ARRAY_REF
1988 || TREE_CODE (t) == COMPONENT_REF)
1989 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1990 ? TREE_OPERAND (orig, 0) : orig,
1991 TREE_CODE (t) == ARRAY_REF
1992 ? TREE_OPERAND (t, 0) : t, 0))
1993 return NULL_TREE;
1995 ptr_type = build_pointer_type (TREE_TYPE (t));
1996 if (!useless_type_conversion_p (orig_type, ptr_type))
1997 return NULL_TREE;
1998 return build_fold_addr_expr_with_type (t, ptr_type);
2001 return NULL_TREE;
2004 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
2005 Return the simplified expression, or NULL if nothing could be done. */
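/* For example (illustrative, assuming a 4-byte int with no padding):
struct A { int x; int y; } a;
*("hi" + 1)                -->  'i'   (read from a string constant)
*(int *)((char *) &a + 4)  -->  a.y   (rebuilt via maybe_fold_offset_to_reference)  */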
2007 static tree
2008 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2010 tree t;
2011 bool volatile_p = TREE_THIS_VOLATILE (expr);
2013 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2014 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2015 are sometimes added. */
2016 base = fold (base);
2017 STRIP_TYPE_NOPS (base);
2018 TREE_OPERAND (expr, 0) = base;
2020 /* One possibility is that the address reduces to a string constant. */
2021 t = fold_read_from_constant_string (expr);
2022 if (t)
2023 return t;
2025 /* Add in any offset from a POINTER_PLUS_EXPR. */
2026 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
2028 tree offset2;
2030 offset2 = TREE_OPERAND (base, 1);
2031 if (TREE_CODE (offset2) != INTEGER_CST)
2032 return NULL_TREE;
2033 base = TREE_OPERAND (base, 0);
2035 offset = fold_convert (sizetype,
2036 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2039 if (TREE_CODE (base) == ADDR_EXPR)
2041 tree base_addr = base;
2043 /* Strip the ADDR_EXPR. */
2044 base = TREE_OPERAND (base, 0);
2046 /* Fold away CONST_DECL to its value, if the type is scalar. */
2047 if (TREE_CODE (base) == CONST_DECL
2048 && is_gimple_min_invariant (DECL_INITIAL (base)))
2049 return DECL_INITIAL (base);
2051 /* Try folding *(&B+O) to B.X. */
2052 t = maybe_fold_offset_to_reference (base_addr, offset,
2053 TREE_TYPE (expr));
2054 if (t)
2056 /* Preserve the volatility of the original expression.
2057 We can end up with a plain decl here, which is shared,
2058 and we shouldn't mess with its flags. */
2059 if (!SSA_VAR_P (t))
2060 TREE_THIS_VOLATILE (t) = volatile_p;
2061 return t;
2064 else
2066 /* We can get here for out-of-range string constant accesses,
2067 such as "_"[3]. Bail out of the entire substitution search
2068 and arrange for the entire statement to be replaced by a
2069 call to __builtin_trap. In all likelihood this will all be
2070 constant-folded away, but in the meantime we can't leave with
2071 something that get_expr_operands can't understand. */
2073 t = base;
2074 STRIP_NOPS (t);
2075 if (TREE_CODE (t) == ADDR_EXPR
2076 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2078 /* FIXME: Except that this causes problems elsewhere with dead
2079 code not being deleted, and we die in the rtl expanders
2080 because we failed to remove some ssa_name. In the meantime,
2081 just return zero. */
2082 /* FIXME2: This condition should be signaled by
2083 fold_read_from_constant_string directly, rather than
2084 re-checking for it here. */
2085 return integer_zero_node;
2088 /* Try folding *(B+O) to B->X. Still an improvement. */
2089 if (POINTER_TYPE_P (TREE_TYPE (base)))
2091 t = maybe_fold_offset_to_reference (base, offset,
2092 TREE_TYPE (expr));
2093 if (t)
2094 return t;
2098 /* Otherwise we had an offset that we could not simplify. */
2099 return NULL_TREE;
2103 /* A quaint feature extant in our address arithmetic is that there
2104 can be hidden type changes here. The type of the result need
2105 not be the same as the type of the input pointer.
2107 What we're after here is an expression of the form
2108 (T *)(&array + const)
2109 where array is OP0, const is OP1, RES_TYPE is T and
2110 the cast doesn't actually exist, but is implicit in the
2111 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2112 &array[x]
2113 which may be able to propagate further. */
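/* A concrete instance (illustrative, assuming a 4-byte int; i_1 is a
made-up SSA name):
int a[10];
(int *) &a p+ 8    -->  &a[2]      (constant byte offset)
&a[0] p+ i_1 * 4   -->  &a[i_1]    (offset scaled by the element size)  */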
2115 tree
2116 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2118 tree ptd_type;
2119 tree t;
2121 /* The first operand should be an ADDR_EXPR. */
2122 if (TREE_CODE (op0) != ADDR_EXPR)
2123 return NULL_TREE;
2124 op0 = TREE_OPERAND (op0, 0);
2126 /* It had better be a constant. */
2127 if (TREE_CODE (op1) != INTEGER_CST)
2129 /* Or op0 should now be A[0] and the non-constant offset defined
2130 via a multiplication by the array element size. */
2131 if (TREE_CODE (op0) == ARRAY_REF
2132 && integer_zerop (TREE_OPERAND (op0, 1))
2133 && TREE_CODE (op1) == SSA_NAME
2134 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2136 gimple offset_def = SSA_NAME_DEF_STMT (op1);
2137 if (!is_gimple_assign (offset_def))
2138 return NULL_TREE;
2140 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2141 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2142 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2143 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2144 return build1 (ADDR_EXPR, res_type,
2145 build4 (ARRAY_REF, TREE_TYPE (op0),
2146 TREE_OPERAND (op0, 0),
2147 gimple_assign_rhs1 (offset_def),
2148 TREE_OPERAND (op0, 2),
2149 TREE_OPERAND (op0, 3)));
2150 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2151 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2152 return build1 (ADDR_EXPR, res_type,
2153 build4 (ARRAY_REF, TREE_TYPE (op0),
2154 TREE_OPERAND (op0, 0),
2155 op1,
2156 TREE_OPERAND (op0, 2),
2157 TREE_OPERAND (op0, 3)));
2159 return NULL_TREE;
2162 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2163 the offset into it. */
2164 while (TREE_CODE (op0) == ARRAY_REF)
2166 tree array_obj = TREE_OPERAND (op0, 0);
2167 tree array_idx = TREE_OPERAND (op0, 1);
2168 tree elt_type = TREE_TYPE (op0);
2169 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2170 tree min_idx;
2172 if (TREE_CODE (array_idx) != INTEGER_CST)
2173 break;
2174 if (TREE_CODE (elt_size) != INTEGER_CST)
2175 break;
2177 /* Un-bias the index by the min index of the array type. */
2178 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2179 if (min_idx)
2181 min_idx = TYPE_MIN_VALUE (min_idx);
2182 if (min_idx)
2184 if (TREE_CODE (min_idx) != INTEGER_CST)
2185 break;
2187 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2188 if (!integer_zerop (min_idx))
2189 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2190 min_idx, 0);
2194 /* Convert the index to a byte offset. */
2195 array_idx = fold_convert (sizetype, array_idx);
2196 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2198 /* Update the operands for the next round, or for folding. */
2199 op1 = int_const_binop (PLUS_EXPR,
2200 array_idx, op1, 0);
2201 op0 = array_obj;
2204 ptd_type = TREE_TYPE (res_type);
2205 /* If we want a pointer to void, reconstruct the reference from the
2206 array element type. A pointer to that can be trivially converted
2207 to void *. This happens as we fold (void *)(ptr p+ off). */
2208 if (VOID_TYPE_P (ptd_type)
2209 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2210 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2212 /* At which point we can try some of the same things as for indirects. */
2213 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2214 if (!t)
2215 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2216 ptd_type, false);
2217 if (t)
2218 t = build1 (ADDR_EXPR, res_type, t);
2220 return t;
2223 /* Subroutine of fold_stmt. We perform several simplifications of the
2224 memory reference tree EXPR and make sure to re-gimplify them properly
2225 after propagation of constant addresses. IS_LHS is true if the
2226 reference is supposed to be an lvalue. */
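/* Typical simplifications (illustrative):
"abc"[1]  -->  'b'    (read from a constant string, when not a lhs)
*&x       -->  x      (after an address was propagated into the operand)  */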
2228 static tree
2229 maybe_fold_reference (tree expr, bool is_lhs)
2231 tree *t = &expr;
2233 if (TREE_CODE (expr) == ARRAY_REF
2234 && !is_lhs)
2236 tree tem = fold_read_from_constant_string (expr);
2237 if (tem)
2238 return tem;
2241 /* ??? We might want to open-code the relevant remaining cases
2242 to avoid using the generic fold. */
2243 if (handled_component_p (*t)
2244 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
2246 tree tem = fold (*t);
2247 if (tem != *t)
2248 return tem;
2251 while (handled_component_p (*t))
2252 t = &TREE_OPERAND (*t, 0);
2254 if (TREE_CODE (*t) == INDIRECT_REF)
2256 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2257 integer_zero_node);
2258 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2259 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2260 tem = NULL_TREE;
2261 if (!tem
2262 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
2263 /* If we had a good reason for propagating the address here,
2264 make sure we end up with valid gimple. See PR34989. */
2265 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2267 if (tem)
2269 *t = tem;
2270 tem = maybe_fold_reference (expr, is_lhs);
2271 if (tem)
2272 return tem;
2273 return expr;
2277 return NULL_TREE;
2281 /* Return in LENGTH the string length, maximum string length, or
2282 maximum value of ARG.
2283 If ARG is an SSA name variable, follow its use-def chains. Return
2284 false if LENGTH is not NULL and, for TYPE == 0, its value is not
2285 equal to the length we determine, or if we are unable to determine
2286 the length or value. VISITED is a bitmap of visited variables.
2287 TYPE is 0 if string length should be returned, 1 for maximum string
2288 length and 2 for maximum value ARG can have. */
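/* For example (illustrative GIMPLE with made-up SSA names):
p_1 = PHI <"foo", "bar">
x_2 = strlen (p_1)
Both PHI arguments have string length 3, so for TYPE == 0 a length of 3
is returned and the strlen call can later be folded to the constant 3.  */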
2290 static bool
2291 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2293 tree var, val;
2294 gimple def_stmt;
2296 if (TREE_CODE (arg) != SSA_NAME)
2298 if (TREE_CODE (arg) == COND_EXPR)
2299 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2300 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2301 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2302 else if (TREE_CODE (arg) == ADDR_EXPR
2303 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2304 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2306 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2307 if (TREE_CODE (aop0) == INDIRECT_REF
2308 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2309 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2310 length, visited, type);
2313 if (type == 2)
2315 val = arg;
2316 if (TREE_CODE (val) != INTEGER_CST
2317 || tree_int_cst_sgn (val) < 0)
2318 return false;
2320 else
2321 val = c_strlen (arg, 1);
2322 if (!val)
2323 return false;
2325 if (*length)
2327 if (type > 0)
2329 if (TREE_CODE (*length) != INTEGER_CST
2330 || TREE_CODE (val) != INTEGER_CST)
2331 return false;
2333 if (tree_int_cst_lt (*length, val))
2334 *length = val;
2335 return true;
2337 else if (simple_cst_equal (val, *length) != 1)
2338 return false;
2341 *length = val;
2342 return true;
2345 /* If we were already here, break the infinite cycle. */
2346 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2347 return true;
2348 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2350 var = arg;
2351 def_stmt = SSA_NAME_DEF_STMT (var);
2353 switch (gimple_code (def_stmt))
2355 case GIMPLE_ASSIGN:
2356 /* The RHS of the statement defining VAR must either have a
2357 constant length or come from another SSA_NAME with a constant
2358 length. */
2359 if (gimple_assign_single_p (def_stmt)
2360 || gimple_assign_unary_nop_p (def_stmt))
2362 tree rhs = gimple_assign_rhs1 (def_stmt);
2363 return get_maxval_strlen (rhs, length, visited, type);
2365 return false;
2367 case GIMPLE_PHI:
2369 /* All the arguments of the PHI node must have the same constant
2370 length. */
2371 unsigned i;
2373 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2375 tree arg = gimple_phi_arg (def_stmt, i)->def;
2377 /* If this PHI has itself as an argument, we cannot
2378 determine the string length of this argument. However,
2379 if we can find a constant string length for the other
2380 PHI args then we can still be sure that this is a
2381 constant string length. So be optimistic and just
2382 continue with the next argument. */
2383 if (arg == gimple_phi_result (def_stmt))
2384 continue;
2386 if (!get_maxval_strlen (arg, length, visited, type))
2387 return false;
2390 return true;
2392 default:
2393 return false;
2398 /* Fold builtin call in statement STMT. Returns a simplified tree.
2399 We may return a non-constant expression, including another call
2400 to a different function and with different arguments, e.g.,
2401 substituting memcpy for strcpy when the string length is known.
2402 Note that some builtins expand into inline code that may not
2403 be valid in GIMPLE. Callers must take care. */
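/* For instance (illustrative, with made-up SSA names): if the string
length of src_1 is known to be 3, fold_builtin_strcpy can rewrite
strcpy (dst_2, src_1)   -->   memcpy (dst_2, src_1, 4)
copying the three characters plus the terminating NUL.  */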
2405 static tree
2406 ccp_fold_builtin (gimple stmt)
2408 tree result, val[3];
2409 tree callee, a;
2410 int arg_idx, type;
2411 bitmap visited;
2412 bool ignore;
2413 int nargs;
2415 gcc_assert (is_gimple_call (stmt));
2417 ignore = (gimple_call_lhs (stmt) == NULL);
2419 /* First try the generic builtin folder. If that succeeds, return the
2420 result directly. */
2421 result = fold_call_stmt (stmt, ignore);
2422 if (result)
2424 if (ignore)
2425 STRIP_NOPS (result);
2426 return result;
2429 /* Ignore MD builtins. */
2430 callee = gimple_call_fndecl (stmt);
2431 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2432 return NULL_TREE;
2434 /* If the builtin could not be folded, and it has no argument list,
2435 we're done. */
2436 nargs = gimple_call_num_args (stmt);
2437 if (nargs == 0)
2438 return NULL_TREE;
2440 /* Limit the work only for builtins we know how to simplify. */
2441 switch (DECL_FUNCTION_CODE (callee))
2443 case BUILT_IN_STRLEN:
2444 case BUILT_IN_FPUTS:
2445 case BUILT_IN_FPUTS_UNLOCKED:
2446 arg_idx = 0;
2447 type = 0;
2448 break;
2449 case BUILT_IN_STRCPY:
2450 case BUILT_IN_STRNCPY:
2451 arg_idx = 1;
2452 type = 0;
2453 break;
2454 case BUILT_IN_MEMCPY_CHK:
2455 case BUILT_IN_MEMPCPY_CHK:
2456 case BUILT_IN_MEMMOVE_CHK:
2457 case BUILT_IN_MEMSET_CHK:
2458 case BUILT_IN_STRNCPY_CHK:
2459 arg_idx = 2;
2460 type = 2;
2461 break;
2462 case BUILT_IN_STRCPY_CHK:
2463 case BUILT_IN_STPCPY_CHK:
2464 arg_idx = 1;
2465 type = 1;
2466 break;
2467 case BUILT_IN_SNPRINTF_CHK:
2468 case BUILT_IN_VSNPRINTF_CHK:
2469 arg_idx = 1;
2470 type = 2;
2471 break;
2472 default:
2473 return NULL_TREE;
2476 if (arg_idx >= nargs)
2477 return NULL_TREE;
2479 /* Try to use the dataflow information gathered by the CCP process. */
2480 visited = BITMAP_ALLOC (NULL);
2481 bitmap_clear (visited);
2483 memset (val, 0, sizeof (val));
2484 a = gimple_call_arg (stmt, arg_idx);
2485 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2486 val[arg_idx] = NULL_TREE;
2488 BITMAP_FREE (visited);
2490 result = NULL_TREE;
2491 switch (DECL_FUNCTION_CODE (callee))
2493 case BUILT_IN_STRLEN:
2494 if (val[0] && nargs == 1)
2496 tree new_val =
2497 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2499 /* If the result is not a valid gimple value, or not a cast
2500 of a valid gimple value, then we cannot use the result. */
2501 if (is_gimple_val (new_val)
2502 || (is_gimple_cast (new_val)
2503 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2504 return new_val;
2506 break;
2508 case BUILT_IN_STRCPY:
2509 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2510 result = fold_builtin_strcpy (callee,
2511 gimple_call_arg (stmt, 0),
2512 gimple_call_arg (stmt, 1),
2513 val[1]);
2514 break;
2516 case BUILT_IN_STRNCPY:
2517 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2518 result = fold_builtin_strncpy (callee,
2519 gimple_call_arg (stmt, 0),
2520 gimple_call_arg (stmt, 1),
2521 gimple_call_arg (stmt, 2),
2522 val[1]);
2523 break;
2525 case BUILT_IN_FPUTS:
2526 if (nargs == 2)
2527 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2528 gimple_call_arg (stmt, 1),
2529 ignore, false, val[0]);
2530 break;
2532 case BUILT_IN_FPUTS_UNLOCKED:
2533 if (nargs == 2)
2534 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1),
2536 ignore, true, val[0]);
2537 break;
2539 case BUILT_IN_MEMCPY_CHK:
2540 case BUILT_IN_MEMPCPY_CHK:
2541 case BUILT_IN_MEMMOVE_CHK:
2542 case BUILT_IN_MEMSET_CHK:
2543 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2544 result = fold_builtin_memory_chk (callee,
2545 gimple_call_arg (stmt, 0),
2546 gimple_call_arg (stmt, 1),
2547 gimple_call_arg (stmt, 2),
2548 gimple_call_arg (stmt, 3),
2549 val[2], ignore,
2550 DECL_FUNCTION_CODE (callee));
2551 break;
2553 case BUILT_IN_STRCPY_CHK:
2554 case BUILT_IN_STPCPY_CHK:
2555 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2556 result = fold_builtin_stxcpy_chk (callee,
2557 gimple_call_arg (stmt, 0),
2558 gimple_call_arg (stmt, 1),
2559 gimple_call_arg (stmt, 2),
2560 val[1], ignore,
2561 DECL_FUNCTION_CODE (callee));
2562 break;
2564 case BUILT_IN_STRNCPY_CHK:
2565 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2566 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2567 gimple_call_arg (stmt, 1),
2568 gimple_call_arg (stmt, 2),
2569 gimple_call_arg (stmt, 3),
2570 val[2]);
2571 break;
2573 case BUILT_IN_SNPRINTF_CHK:
2574 case BUILT_IN_VSNPRINTF_CHK:
2575 if (val[1] && is_gimple_val (val[1]))
2576 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2577 DECL_FUNCTION_CODE (callee));
2578 break;
2580 default:
2581 gcc_unreachable ();
2584 if (result && ignore)
2585 result = fold_ignored_result (result);
2586 return result;
2589 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2590 replacement rhs for the statement or NULL_TREE if no simplification
2591 could be made. It is assumed that the operands have been previously
2592 folded. */
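/* For example (illustrative, with made-up SSA names): in the
GIMPLE_SINGLE_RHS case below,
x_1 = 2 > 1 ? a_2 : b_3;
folds the comparison to true and simplifies to
x_1 = a_2;
while the GIMPLE_BINARY_RHS case handles POINTER_PLUS_EXPR folding.  */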
2594 static tree
2595 fold_gimple_assign (gimple_stmt_iterator *si)
2597 gimple stmt = gsi_stmt (*si);
2598 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2600 tree result = NULL_TREE;
2602 switch (get_gimple_rhs_class (subcode))
2604 case GIMPLE_SINGLE_RHS:
2606 tree rhs = gimple_assign_rhs1 (stmt);
2608 /* Try to fold a conditional expression. */
2609 if (TREE_CODE (rhs) == COND_EXPR)
2611 tree op0 = COND_EXPR_COND (rhs);
2612 tree tem;
2613 bool set = false;
2615 if (COMPARISON_CLASS_P (op0))
2617 fold_defer_overflow_warnings ();
2618 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2619 TREE_OPERAND (op0, 0),
2620 TREE_OPERAND (op0, 1));
2621 /* This is actually a conditional expression, not a GIMPLE
2622 conditional statement; however, the valid_gimple_rhs_p
2623 test still applies. */
2624 set = (tem && is_gimple_condexpr (tem)
2625 && valid_gimple_rhs_p (tem));
2626 fold_undefer_overflow_warnings (set, stmt, 0);
2628 else if (is_gimple_min_invariant (op0))
2630 tem = op0;
2631 set = true;
2633 else
2634 return NULL_TREE;
2636 if (set)
2637 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), tem,
2638 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2641 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2642 return maybe_fold_tmr (rhs);
2644 else if (REFERENCE_CLASS_P (rhs))
2645 return maybe_fold_reference (rhs, false);
2647 else if (TREE_CODE (rhs) == ADDR_EXPR)
2649 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2650 if (tem)
2651 result = fold_convert (TREE_TYPE (rhs),
2652 build_fold_addr_expr (tem));
2655 /* If we couldn't fold the RHS, hand over to the generic
2656 fold routines. */
2657 if (result == NULL_TREE)
2658 result = fold (rhs);
2660 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2661 that may have been added by fold, and "useless" type
2662 conversions that might now be apparent due to propagation. */
2663 STRIP_USELESS_TYPE_CONVERSION (result);
2665 if (result != rhs && valid_gimple_rhs_p (result))
2666 return result;
2668 return NULL_TREE;
2670 break;
2672 case GIMPLE_UNARY_RHS:
2674 tree rhs = gimple_assign_rhs1 (stmt);
2676 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2677 if (result)
2679 /* If the operation was a conversion do _not_ mark a
2680 resulting constant with TREE_OVERFLOW if the original
2681 constant was not. These conversions have implementation
2682 defined behavior and retaining the TREE_OVERFLOW flag
2683 here would confuse later passes such as VRP. */
2684 if (CONVERT_EXPR_CODE_P (subcode)
2685 && TREE_CODE (result) == INTEGER_CST
2686 && TREE_CODE (rhs) == INTEGER_CST)
2687 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2689 STRIP_USELESS_TYPE_CONVERSION (result);
2690 if (valid_gimple_rhs_p (result))
2691 return result;
2693 else if (CONVERT_EXPR_CODE_P (subcode)
2694 && POINTER_TYPE_P (gimple_expr_type (stmt))
2695 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2697 tree type = gimple_expr_type (stmt);
2698 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2699 integer_zero_node, type);
2700 if (t)
2701 return t;
2704 break;
2706 case GIMPLE_BINARY_RHS:
2707 /* Try to fold pointer addition. */
2708 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2710 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2711 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2713 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2714 if (!useless_type_conversion_p
2715 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2716 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2718 result = maybe_fold_stmt_addition (type,
2719 gimple_assign_rhs1 (stmt),
2720 gimple_assign_rhs2 (stmt));
2723 if (!result)
2724 result = fold_binary (subcode,
2725 TREE_TYPE (gimple_assign_lhs (stmt)),
2726 gimple_assign_rhs1 (stmt),
2727 gimple_assign_rhs2 (stmt));
2729 if (result)
2731 STRIP_USELESS_TYPE_CONVERSION (result);
2732 if (valid_gimple_rhs_p (result))
2733 return result;
2735 /* Fold might have produced non-GIMPLE, so if we trusted it blindly
2736 we would lose canonicalization opportunities. Do not go through
2737 fold again here, though, or the same non-GIMPLE will be
2738 produced. */
2739 if (commutative_tree_code (subcode)
2740 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2741 gimple_assign_rhs2 (stmt), false))
2742 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2743 gimple_assign_rhs2 (stmt),
2744 gimple_assign_rhs1 (stmt));
2746 break;
2748 case GIMPLE_INVALID_RHS:
2749 gcc_unreachable ();
2752 return NULL_TREE;
2755 /* Attempt to fold a conditional statement. Return true if any changes were
2756 made. We only attempt to fold the condition expression, and do not perform
2757 any transformation that would require alteration of the cfg. It is
2758 assumed that the operands have been previously folded. */
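/* For example (illustrative): once propagation has made both operands
constant,
if (2 > 1)
is reduced by fold_binary to a true condition and the GIMPLE_COND is
rewritten in place; the cfg itself is not changed here.  */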
2760 static bool
2761 fold_gimple_cond (gimple stmt)
2763 tree result = fold_binary (gimple_cond_code (stmt),
2764 boolean_type_node,
2765 gimple_cond_lhs (stmt),
2766 gimple_cond_rhs (stmt));
2768 if (result)
2770 STRIP_USELESS_TYPE_CONVERSION (result);
2771 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2773 gimple_cond_set_condition_from_tree (stmt, result);
2774 return true;
2778 return false;
2782 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2783 The statement may be replaced by another statement, e.g., if the call
2784 simplifies to a constant value. Return true if any changes were made.
2785 It is assumed that the operands have been previously folded. */
2787 static bool
2788 fold_gimple_call (gimple_stmt_iterator *gsi)
2790 gimple stmt = gsi_stmt (*gsi);
2792 tree callee = gimple_call_fndecl (stmt);
2794 /* Check for builtins that CCP can handle using information not
2795 available in the generic fold routines. */
2796 if (callee && DECL_BUILT_IN (callee))
2798 tree result = ccp_fold_builtin (stmt);
2800 if (result)
2801 return update_call_from_tree (gsi, result);
2803 else
2805 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2806 here are when we've propagated the address of a decl into the
2807 object slot. */
2808 /* ??? Should perhaps do this in fold proper. However, doing it
2809 there requires that we create a new CALL_EXPR, and that requires
2810 copying EH region info to the new node. Easier to just do it
2811 here where we can just smash the call operand. */
2812 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2813 callee = gimple_call_fn (stmt);
2814 if (TREE_CODE (callee) == OBJ_TYPE_REF
2815 && lang_hooks.fold_obj_type_ref
2816 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2817 && DECL_P (TREE_OPERAND
2818 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2820 tree t;
2822 /* ??? Caution: Broken ADDR_EXPR semantics means that
2823 looking at the type of the operand of the addr_expr
2824 can yield an array type. See silly exception in
2825 check_pointer_types_r. */
2826 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2827 t = lang_hooks.fold_obj_type_ref (callee, t);
2828 if (t)
2830 gimple_call_set_fn (stmt, t);
2831 return true;
2836 return false;
2839 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2840 distinguishes the two cases. */
2842 static bool
2843 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2845 bool changed = false;
2846 gimple stmt = gsi_stmt (*gsi);
2847 unsigned i;
2849 /* Fold the main computation performed by the statement. */
2850 switch (gimple_code (stmt))
2852 case GIMPLE_ASSIGN:
2854 unsigned old_num_ops = gimple_num_ops (stmt);
2855 tree new_rhs = fold_gimple_assign (gsi);
2856 if (new_rhs != NULL_TREE
2857 && (!inplace
2858 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2860 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2861 changed = true;
2863 break;
2866 case GIMPLE_COND:
2867 changed |= fold_gimple_cond (stmt);
2868 break;
2870 case GIMPLE_CALL:
2871 /* Fold *& in call arguments. */
2872 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2873 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2875 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2876 if (tmp)
2878 gimple_call_set_arg (stmt, i, tmp);
2879 changed = true;
2882 /* The entire statement may be replaced in this case. */
2883 if (!inplace)
2884 changed |= fold_gimple_call (gsi);
2885 break;
2887 case GIMPLE_ASM:
2888 /* Fold *& in asm operands. */
2889 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2891 tree link = gimple_asm_output_op (stmt, i);
2892 tree op = TREE_VALUE (link);
2893 if (REFERENCE_CLASS_P (op)
2894 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2896 TREE_VALUE (link) = op;
2897 changed = true;
2900 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2902 tree link = gimple_asm_input_op (stmt, i);
2903 tree op = TREE_VALUE (link);
2904 if (REFERENCE_CLASS_P (op)
2905 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
2907 TREE_VALUE (link) = op;
2908 changed = true;
2911 break;
2913 default:;
2916 stmt = gsi_stmt (*gsi);
2918 /* Fold *& on the lhs. */
2919 if (gimple_has_lhs (stmt))
2921 tree lhs = gimple_get_lhs (stmt);
2922 if (lhs && REFERENCE_CLASS_P (lhs))
2924 tree new_lhs = maybe_fold_reference (lhs, true);
2925 if (new_lhs)
2927 gimple_set_lhs (stmt, new_lhs);
2928 changed = true;
2933 return changed;
2936 /* Fold the statement pointed to by GSI. In some cases, this function may
2937 replace the whole statement with a new one. Returns true iff folding
2938 makes any changes.
2939 The statement pointed to by GSI should be in valid gimple form but may
2940 be in unfolded state as resulting from for example constant propagation
2941 which can produce *&x = 0. */
2943 bool
2944 fold_stmt (gimple_stmt_iterator *gsi)
2946 return fold_stmt_1 (gsi, false);
2949 /* Perform the minimal folding on statement STMT. Only operations like
2950 *&x created by constant propagation are handled. The statement cannot
2951 be replaced with a new one. Return true if the statement was
2952 changed, false otherwise.
2953 The statement STMT should be in valid gimple form but may
2954 be in unfolded state as resulting from for example constant propagation
2955 which can produce *&x = 0. */
2957 bool
2958 fold_stmt_inplace (gimple stmt)
2960 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2961 bool changed = fold_stmt_1 (&gsi, true);
2962 gcc_assert (gsi_stmt (gsi) == stmt);
2963 return changed;
2966 /* Try to optimize out __builtin_stack_restore. Optimize it out
2967 if there is another __builtin_stack_restore in the same basic
2968 block and no calls or ASM_EXPRs are in between, or if this block's
2969 only outgoing edge is to EXIT_BLOCK and there are no calls or
2970 ASM_EXPRs after this __builtin_stack_restore. */
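/* Illustrative shape of the transformation (made-up SSA names):
p_1 = __builtin_stack_save ();
...                               (no calls or asm)
__builtin_stack_restore (p_1);    <-- removed as redundant
...                               (no calls or asm)
__builtin_stack_restore (q_2);
The removed restore's matching __builtin_stack_save is then replaced by a
constant assignment, provided its result has a single use.  */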
2972 static tree
2973 optimize_stack_restore (gimple_stmt_iterator i)
2975 tree callee, rhs;
2976 gimple stmt, stack_save;
2977 gimple_stmt_iterator stack_save_gsi;
2979 basic_block bb = gsi_bb (i);
2980 gimple call = gsi_stmt (i);
2982 if (gimple_code (call) != GIMPLE_CALL
2983 || gimple_call_num_args (call) != 1
2984 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2985 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2986 return NULL_TREE;
2988 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2990 stmt = gsi_stmt (i);
2991 if (gimple_code (stmt) == GIMPLE_ASM)
2992 return NULL_TREE;
2993 if (gimple_code (stmt) != GIMPLE_CALL)
2994 continue;
2996 callee = gimple_call_fndecl (stmt);
2997 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2998 return NULL_TREE;
3000 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3001 break;
3004 if (gsi_end_p (i)
3005 && (! single_succ_p (bb)
3006 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3007 return NULL_TREE;
3009 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3010 if (gimple_code (stack_save) != GIMPLE_CALL
3011 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3012 || stmt_could_throw_p (stack_save)
3013 || !has_single_use (gimple_call_arg (call, 0)))
3014 return NULL_TREE;
3016 callee = gimple_call_fndecl (stack_save);
3017 if (!callee
3018 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3019 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3020 || gimple_call_num_args (stack_save) != 0)
3021 return NULL_TREE;
3023 stack_save_gsi = gsi_for_stmt (stack_save);
3024 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3025 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3026 if (!update_call_from_tree (&stack_save_gsi, rhs))
3028 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3029 return NULL_TREE;
3031 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3033 /* No effect, so the statement will be deleted. */
3034 return integer_zero_node;
3037 /* If the va_list type is a simple pointer and nothing special is needed,
3038 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3039 __builtin_va_end (&ap) into a NOP, and __builtin_va_copy into a simple
3040 pointer assignment. */
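/* For example (illustrative; only on targets whose va_list really is a
plain char * or void * pointer):
__builtin_va_start (&ap, 0)    -->  ap = __builtin_next_arg (0)
__builtin_va_copy (&dst, src)  -->  dst = src
__builtin_va_end (&ap)         -->  (deleted)
On targets with a structured va_list (e.g. x86-64) the va_start and
va_copy cases return NULL_TREE; va_end is a no-op either way.  */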
3042 static tree
3043 optimize_stdarg_builtin (gimple call)
3045 tree callee, lhs, rhs, cfun_va_list;
3046 bool va_list_simple_ptr;
3048 if (gimple_code (call) != GIMPLE_CALL)
3049 return NULL_TREE;
3051 callee = gimple_call_fndecl (call);
3053 cfun_va_list = targetm.fn_abi_va_list (callee);
3054 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3055 && (TREE_TYPE (cfun_va_list) == void_type_node
3056 || TREE_TYPE (cfun_va_list) == char_type_node);
3058 switch (DECL_FUNCTION_CODE (callee))
3060 case BUILT_IN_VA_START:
3061 if (!va_list_simple_ptr
3062 || targetm.expand_builtin_va_start != NULL
3063 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3064 return NULL_TREE;
3066 if (gimple_call_num_args (call) != 2)
3067 return NULL_TREE;
3069 lhs = gimple_call_arg (call, 0);
3070 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3071 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3072 != TYPE_MAIN_VARIANT (cfun_va_list))
3073 return NULL_TREE;
3075 lhs = build_fold_indirect_ref (lhs);
3076 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3077 1, integer_zero_node);
3078 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3079 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3081 case BUILT_IN_VA_COPY:
3082 if (!va_list_simple_ptr)
3083 return NULL_TREE;
3085 if (gimple_call_num_args (call) != 2)
3086 return NULL_TREE;
3088 lhs = gimple_call_arg (call, 0);
3089 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3090 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3091 != TYPE_MAIN_VARIANT (cfun_va_list))
3092 return NULL_TREE;
3094 lhs = build_fold_indirect_ref (lhs);
3095 rhs = gimple_call_arg (call, 1);
3096 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3097 != TYPE_MAIN_VARIANT (cfun_va_list))
3098 return NULL_TREE;
3100 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3101 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3103 case BUILT_IN_VA_END:
3104 /* No effect, so the statement will be deleted. */
3105 return integer_zero_node;
3107 default:
3108 gcc_unreachable ();
3112 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3113 RHS of an assignment. Insert the necessary statements before
3114 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3115 is replaced. If the call is expected to produce a result, then it
3116 is replaced by an assignment of the new RHS to the result variable.
3117 If the result is to be ignored, then the call is replaced by a
3118 GIMPLE_NOP. */
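/* For instance (illustrative; __builtin_foo is a placeholder name): if
lhs_1 = __builtin_foo (...);
folds to an expression too complex for a single GIMPLE rhs, that
expression is gimplified into statements inserted before the call, and
the call itself becomes either lhs_1 = tmp or a GIMPLE_NOP when there is
no lhs.  */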
3120 static void
3121 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3123 tree lhs;
3124 tree tmp = NULL_TREE; /* Silence warning. */
3125 gimple stmt, new_stmt;
3126 gimple_stmt_iterator i;
3127 gimple_seq stmts = gimple_seq_alloc();
3128 struct gimplify_ctx gctx;
3130 stmt = gsi_stmt (*si_p);
3132 gcc_assert (is_gimple_call (stmt));
3134 lhs = gimple_call_lhs (stmt);
3136 push_gimplify_context (&gctx);
3138 if (lhs == NULL_TREE)
3139 gimplify_and_add (expr, &stmts);
3140 else
3141 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3143 pop_gimplify_context (NULL);
3145 if (gimple_has_location (stmt))
3146 annotate_all_with_location (stmts, gimple_location (stmt));
3148 /* The replacement can expose previously unreferenced variables. */
3149 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3151 new_stmt = gsi_stmt (i);
3152 find_new_referenced_vars (new_stmt);
3153 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3154 mark_symbols_for_renaming (new_stmt);
3155 gsi_next (si_p);
3158 if (lhs == NULL_TREE)
3160 new_stmt = gimple_build_nop ();
3161 unlink_stmt_vdef (stmt);
3162 release_defs (stmt);
3164 else
3166 new_stmt = gimple_build_assign (lhs, tmp);
3167 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3168 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3169 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3172 gimple_set_location (new_stmt, gimple_location (stmt));
3173 gsi_replace (si_p, new_stmt, false);
3176 /* A simple pass that attempts to fold all builtin functions. This pass
3177 is run after we've propagated as many constants as we can. */
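/* For example (illustrative, with made-up SSA names): a leftover
t_1 = __builtin_constant_p (n_2);
that was not folded to 1 during propagation is resolved to
t_1 = 0;
and redundant __builtin_stack_restore and stdarg builtins are cleaned up
via the helpers above.  */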
3179 static unsigned int
3180 execute_fold_all_builtins (void)
3182 bool cfg_changed = false;
3183 basic_block bb;
3184 unsigned int todoflags = 0;
3186 FOR_EACH_BB (bb)
3188 gimple_stmt_iterator i;
3189 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3191 gimple stmt, old_stmt;
3192 tree callee, result;
3193 enum built_in_function fcode;
3195 stmt = gsi_stmt (i);
3197 if (gimple_code (stmt) != GIMPLE_CALL)
3199 gsi_next (&i);
3200 continue;
3202 callee = gimple_call_fndecl (stmt);
3203 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3205 gsi_next (&i);
3206 continue;
3208 fcode = DECL_FUNCTION_CODE (callee);
3210 result = ccp_fold_builtin (stmt);
3212 if (result)
3213 gimple_remove_stmt_histograms (cfun, stmt);
3215 if (!result)
3216 switch (DECL_FUNCTION_CODE (callee))
3218 case BUILT_IN_CONSTANT_P:
3219 /* Resolve __builtin_constant_p. If it hasn't been
3220 folded to integer_one_node by now, it's fairly
3221 certain that the value simply isn't constant. */
3222 result = integer_zero_node;
3223 break;
3225 case BUILT_IN_STACK_RESTORE:
3226 result = optimize_stack_restore (i);
3227 if (result)
3228 break;
3229 gsi_next (&i);
3230 continue;
3232 case BUILT_IN_VA_START:
3233 case BUILT_IN_VA_END:
3234 case BUILT_IN_VA_COPY:
3235 /* These shouldn't be folded before pass_stdarg. */
3236 result = optimize_stdarg_builtin (stmt);
3237 if (result)
3238 break;
3239 /* FALLTHRU */
3241 default:
3242 gsi_next (&i);
3243 continue;
3246 if (dump_file && (dump_flags & TDF_DETAILS))
3248 fprintf (dump_file, "Simplified\n ");
3249 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3252 old_stmt = stmt;
3253 push_stmt_changes (gsi_stmt_ptr (&i));
3255 if (!update_call_from_tree (&i, result))
3257 gimplify_and_update_call_from_tree (&i, result);
3258 todoflags |= TODO_update_address_taken;
3261 stmt = gsi_stmt (i);
3262 pop_stmt_changes (gsi_stmt_ptr (&i));
3264 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3265 && gimple_purge_dead_eh_edges (bb))
3266 cfg_changed = true;
3268 if (dump_file && (dump_flags & TDF_DETAILS))
3270 fprintf (dump_file, "to\n ");
3271 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3272 fprintf (dump_file, "\n");
3275 /* Retry the same statement if it changed into another
3276 builtin; there might be new opportunities now. */
3277 if (gimple_code (stmt) != GIMPLE_CALL)
3279 gsi_next (&i);
3280 continue;
3282 callee = gimple_call_fndecl (stmt);
3283 if (!callee
3284 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3285 || DECL_FUNCTION_CODE (callee) == fcode)
3286 gsi_next (&i);
3290 /* Delete unreachable blocks. */
3291 if (cfg_changed)
3292 todoflags |= TODO_cleanup_cfg;
3294 return todoflags;
3298 struct gimple_opt_pass pass_fold_builtins =
3301 GIMPLE_PASS,
3302 "fab", /* name */
3303 NULL, /* gate */
3304 execute_fold_all_builtins, /* execute */
3305 NULL, /* sub */
3306 NULL, /* next */
3307 0, /* static_pass_number */
3308 TV_NONE, /* tv_id */
3309 PROP_cfg | PROP_ssa, /* properties_required */
3310 0, /* properties_provided */
3311 0, /* properties_destroyed */
3312 0, /* todo_flags_start */
3313 TODO_dump_func
3314 | TODO_verify_ssa
3315 | TODO_update_ssa /* todo_flags_finish */