/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
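
/* A minimal standalone model of the four-point lattice meet described
   above; illustrative only and compiled out.  It works on plain int
   constants instead of trees (the real implementation is in
   ccp_lattice_meet below); the names `lat' and `latval' are invented
   for the example.  */
#if 0
#include <stdio.h>

enum lat { LAT_UNDEFINED, LAT_CONSTANT, LAT_VARYING };

struct latval
{
  enum lat kind;
  int cst;		/* Meaningful only when kind == LAT_CONSTANT.  */
};

/* Meet V1 and V2:  any M UNDEFINED = any,  any M VARYING = VARYING,
   Ci M Cj = Ci if i == j and VARYING otherwise.  */
static struct latval
meet (struct latval v1, struct latval v2)
{
  struct latval varying = { LAT_VARYING, 0 };

  if (v1.kind == LAT_UNDEFINED)
    return v2;
  if (v2.kind == LAT_UNDEFINED)
    return v1;
  if (v1.kind == LAT_CONSTANT && v2.kind == LAT_CONSTANT
      && v1.cst == v2.cst)
    return v1;
  return varying;
}

int
main (void)
{
  /* Model a_11 = PHI (a_9, a_10) from the comment above, where only
     the edge carrying a_10 = 100 is executable: the meet starts from
     UNDEFINED and therefore resolves to CONSTANT 100.  */
  struct latval phi = { LAT_UNDEFINED, 0 };
  struct latval a_10 = { LAT_CONSTANT, 100 };

  phi = meet (phi, a_10);
  printf ("kind=%d cst=%d\n", phi.kind, phi.cst);  /* kind=1 (CONSTANT), 100.  */
  return 0;
}
#endif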
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
#include "dbgcnt.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
	  || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    {
		      TREE_ADDRESSABLE (base) = 1;
		      if (gimple_referenced_vars (cfun))
			add_referenced_var (base);
		    }
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
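
/* A minimal sketch of the kinds of declarations handled by
   get_symbol_constant_value above; illustrative only and compiled
   out.  All three declarations are invented for the example.  */
#if 0
static const int folded = 42;	/* TREE_STATIC and TREE_READONLY with an
				   initializer: loads fold to 42.  */
extern const int not_folded;	/* DECL_EXTERNAL: the initializer may live
				   in another object file, so no folding.  */
static const int implicit_zero;	/* const, locally bound, no initializer:
				   folds to zero per the comment above.  */
#endif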

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
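
/* A standalone illustration of why the two canonicalizations above are
   needed; compiled out, and assuming IEEE semantics for `double'.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double minus_zero = -0.0;
  volatile double zero = 0.0;	/* volatile avoids a compile-time 0/0.  */
  double nan_val = zero / zero;	/* Produces a NaN at run time.  */

  /* -0.0 compares equal to 0.0, so when signed zeros need not be
     honored the constant can safely be canonicalized to 0.0.  */
  printf ("-0.0 == 0.0: %d\n", minus_zero == 0.0);	/* 1 */

  /* A NaN is not equal even to itself, so treating it as an ordinary
     CONSTANT (constants are compared for equality, e.g. in the meet
     operation) would misbehave; hence it is demoted to UNDEFINED.  */
  printf ("nan == nan: %d\n", nan_val == nan_val);	/* 0 */
  return 0;
}
#endif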

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must make sure it
	     is simulated at least once; otherwise the edges it exposes
	     will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      if (TREE_CODE (val) == SSA_NAME
			  && get_value (val)->lattice_val == CONSTANT)
			val = get_value (val)->value;
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p (TREE_TYPE (rhs),
							TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree tem = maybe_fold_offset_to_address
		    (loc, op0, op1, TREE_TYPE (op0));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR
	    && useless_type_conversion_p (TREE_TYPE (t),
					  TREE_TYPE (TREE_TYPE (value->value))))
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
				tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
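
/* A standalone model of the index arithmetic performed above for the
   *(&A[N] + O) case; illustrative only and compiled out.  The element
   size of 4 bytes and the concrete values are assumptions made for
   the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long elt_size = 4;	/* e.g. sizeof (int).  */
  long n = 1;		/* The N in &A[N] ...  */
  long offset = 8;	/* ... plus a byte displacement O.  */

  /* The byte offset must be an exact multiple of the element size,
     mirroring the "division isn't exact" bail-out above.  */
  if (offset % elt_size != 0)
    return 1;

  /* Otherwise *(&A[N] + O) becomes A[N + O / elt_size], here A[3].  */
  printf ("A[%ld]\n", n + offset / elt_size);
  return 0;
}
#endif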

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
				    tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      protected_set_expr_location (new_base, loc);
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
      protected_set_expr_location (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    {
      base = build1 (INDIRECT_REF, record_type, base);
      SET_EXPR_LOCATION (base, loc);
    }
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
					     orig_type, false);
}

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks e.g. the
	 wrong field of a union, so it needs to be removed and a new
	 COMPONENT_REF constructed.  Such wrong COMPONENT_REFs are often
	 constructed by folding the (type *)&object within the
	 expression (type *)&object+offset.  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && size != -1
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	{
	  base = build1 (INDIRECT_REF, type, base);
	  SET_EXPR_LOCATION (base, loc);
	}
      ret = maybe_fold_offset_to_array_ref (loc,
					    base, offset, orig_type, true);
    }
  return ret;
}
2016 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
2017 or &BASE[index] or by a combination of those.
2019 LOC is the location of the original expression.
2021 Before attempting the conversion, strip off existing component refs. */
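/* For example (illustrative, assuming a 4-byte int): for

     int a[10];

   ADDR == &a with a byte OFFSET of 4 and ORIG_TYPE int * can be
   expressed as &a[1].  */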
2023 tree
2024 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
2025 tree orig_type)
2027 tree t;
2029 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
2030 && POINTER_TYPE_P (orig_type));
2032 t = maybe_fold_offset_to_reference (loc, addr, offset,
2033 TREE_TYPE (orig_type));
2034 if (t != NULL_TREE)
2036 tree orig = addr;
2037 tree ptr_type;
2039 /* For __builtin_object_size to function correctly we need to
2040 make sure not to fold address arithmetic in a way that changes
2041 the reference from one array to another. This would happen for
2042 example for
2044 struct X { char s1[10]; char s2[10]; } s;
2045 char *foo (void) { return &s.s2[-4]; }
2047 where we need to avoid generating &s.s1[6]. As the C and
2048 C++ frontends create different initial trees,
2049 (char *) &s.s2 + -4 vs. &s.s2[-4], we have to do some
2050 sophisticated comparisons here. Note that checking for the
2051 condition after the fact is easier than trying to avoid doing
2052 the folding. */
2053 STRIP_NOPS (orig);
2054 if (TREE_CODE (orig) == ADDR_EXPR)
2055 orig = TREE_OPERAND (orig, 0);
2056 if ((TREE_CODE (orig) == ARRAY_REF
2057 || (TREE_CODE (orig) == COMPONENT_REF
2058 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
2059 && (TREE_CODE (t) == ARRAY_REF
2060 || TREE_CODE (t) == COMPONENT_REF)
2061 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
2062 ? TREE_OPERAND (orig, 0) : orig,
2063 TREE_CODE (t) == ARRAY_REF
2064 ? TREE_OPERAND (t, 0) : t, 0))
2065 return NULL_TREE;
2067 ptr_type = build_pointer_type (TREE_TYPE (t));
2068 if (!useless_type_conversion_p (orig_type, ptr_type))
2069 return NULL_TREE;
2070 return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
2073 return NULL_TREE;
2076 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
2077 Return the simplified expression, or NULL if nothing could be done. */
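/* For instance (illustrative only): *(&b + 4) may become b.f when F
   is the field of B at byte offset 4, and *("abc" + 1) may reduce to
   the character constant 'b'.  */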
2079 static tree
2080 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2082 tree t;
2083 bool volatile_p = TREE_THIS_VOLATILE (expr);
2084 location_t loc = EXPR_LOCATION (expr);
2086 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2087 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2088 are sometimes added. */
2089 base = fold (base);
2090 STRIP_TYPE_NOPS (base);
2091 TREE_OPERAND (expr, 0) = base;
2093 /* One possibility is that the address reduces to a string constant. */
2094 t = fold_read_from_constant_string (expr);
2095 if (t)
2096 return t;
2098 /* Add in any offset from a POINTER_PLUS_EXPR. */
2099 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
2101 tree offset2;
2103 offset2 = TREE_OPERAND (base, 1);
2104 if (TREE_CODE (offset2) != INTEGER_CST)
2105 return NULL_TREE;
2106 base = TREE_OPERAND (base, 0);
2108 offset = fold_convert (sizetype,
2109 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2112 if (TREE_CODE (base) == ADDR_EXPR)
2114 tree base_addr = base;
2116 /* Strip the ADDR_EXPR. */
2117 base = TREE_OPERAND (base, 0);
2119 /* Fold away CONST_DECL to its value, if the type is scalar. */
2120 if (TREE_CODE (base) == CONST_DECL
2121 && is_gimple_min_invariant (DECL_INITIAL (base)))
2122 return DECL_INITIAL (base);
2124 /* Try folding *(&B+O) to B.X. */
2125 t = maybe_fold_offset_to_reference (loc, base_addr, offset,
2126 TREE_TYPE (expr));
2127 if (t)
2129 /* Preserve the volatility of the original expression.
2130 We can end up with a plain decl here which is shared,
2131 and we shouldn't mess with its flags. */
2132 if (!SSA_VAR_P (t))
2133 TREE_THIS_VOLATILE (t) = volatile_p;
2134 return t;
2137 else
2139 /* We can get here for out-of-range string constant accesses,
2140 such as "_"[3]. Bail out of the entire substitution search
2141 and arrange for the entire statement to be replaced by a
2142 call to __builtin_trap. In all likelihood this will all be
2143 constant-folded away, but in the meantime we can't be left with
2144 something that get_expr_operands can't understand. */
2146 t = base;
2147 STRIP_NOPS (t);
2148 if (TREE_CODE (t) == ADDR_EXPR
2149 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2151 /* FIXME: Except that this causes problems elsewhere with dead
2152 code not being deleted, and we die in the rtl expanders
2153 because we failed to remove some ssa_name. In the meantime,
2154 just return zero. */
2155 /* FIXME2: This condition should be signaled by
2156 fold_read_from_constant_string directly, rather than
2157 re-checking for it here. */
2158 return integer_zero_node;
2161 /* Try folding *(B+O) to B->X. Still an improvement. */
2162 if (POINTER_TYPE_P (TREE_TYPE (base)))
2164 t = maybe_fold_offset_to_reference (loc, base, offset,
2165 TREE_TYPE (expr));
2166 if (t)
2167 return t;
2171 /* Otherwise we had an offset that we could not simplify. */
2172 return NULL_TREE;
2176 /* A quaint feature extant in our address arithmetic is that there
2177 can be hidden type changes here. The type of the result need
2178 not be the same as the type of the input pointer.
2180 What we're after here is an expression of the form
2181 (T *)(&array + const)
2182 where array is OP0, const is OP1, RES_TYPE is T and
2183 the cast doesn't actually exist, but is implicit in the
2184 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2185 &array[x]
2186 which may be able to propagate further. */
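/* Concretely (an illustrative case, assuming a 2-byte short): for

     short a[10];

   the sum &a + 6 with T == short * becomes &a[3].  */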
2188 tree
2189 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
2191 tree ptd_type;
2192 tree t;
2194 /* The first operand should be an ADDR_EXPR. */
2195 if (TREE_CODE (op0) != ADDR_EXPR)
2196 return NULL_TREE;
2197 op0 = TREE_OPERAND (op0, 0);
2199 /* It had better be a constant. */
2200 if (TREE_CODE (op1) != INTEGER_CST)
2202 /* Or op0 should now be A[0] and the non-constant offset defined
2203 via a multiplication by the array element size. */
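/* E.g. (a sketch): &a[0] p+ j_2, where j_2 = i_1 * 4 and the array
   elements are 4 bytes wide, can be rewritten as &a[i_1].  */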
2204 if (TREE_CODE (op0) == ARRAY_REF
2205 && integer_zerop (TREE_OPERAND (op0, 1))
2206 && TREE_CODE (op1) == SSA_NAME
2207 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2209 gimple offset_def = SSA_NAME_DEF_STMT (op1);
2210 if (!is_gimple_assign (offset_def))
2211 return NULL_TREE;
2213 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2214 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2215 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2216 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2217 return build1 (ADDR_EXPR, res_type,
2218 build4 (ARRAY_REF, TREE_TYPE (op0),
2219 TREE_OPERAND (op0, 0),
2220 gimple_assign_rhs1 (offset_def),
2221 TREE_OPERAND (op0, 2),
2222 TREE_OPERAND (op0, 3)));
2223 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2224 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2225 return build1 (ADDR_EXPR, res_type,
2226 build4 (ARRAY_REF, TREE_TYPE (op0),
2227 TREE_OPERAND (op0, 0),
2228 op1,
2229 TREE_OPERAND (op0, 2),
2230 TREE_OPERAND (op0, 3)));
2232 return NULL_TREE;
2235 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2236 the offset into it. */
2237 while (TREE_CODE (op0) == ARRAY_REF)
2239 tree array_obj = TREE_OPERAND (op0, 0);
2240 tree array_idx = TREE_OPERAND (op0, 1);
2241 tree elt_type = TREE_TYPE (op0);
2242 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2243 tree min_idx;
2245 if (TREE_CODE (array_idx) != INTEGER_CST)
2246 break;
2247 if (TREE_CODE (elt_size) != INTEGER_CST)
2248 break;
2250 /* Un-bias the index by the min index of the array type. */
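/* E.g. (illustrative; C arrays are 0-based, but other languages'
   need not be): with a domain of [1, 10], index 3 contributes
   (3 - 1) * elt_size bytes.  */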
2251 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2252 if (min_idx)
2254 min_idx = TYPE_MIN_VALUE (min_idx);
2255 if (min_idx)
2257 if (TREE_CODE (min_idx) != INTEGER_CST)
2258 break;
2260 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2261 if (!integer_zerop (min_idx))
2262 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2263 min_idx, 0);
2267 /* Convert the index to a byte offset. */
2268 array_idx = fold_convert (sizetype, array_idx);
2269 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2271 /* Update the operands for the next round, or for folding. */
2272 op1 = int_const_binop (PLUS_EXPR,
2273 array_idx, op1, 0);
2274 op0 = array_obj;
2277 ptd_type = TREE_TYPE (res_type);
2278 /* If we want a pointer to void, reconstruct the reference from the
2279 array element type. A pointer to that can be trivially converted
2280 to void *. This happens as we fold (void *)(ptr p+ off). */
2281 if (VOID_TYPE_P (ptd_type)
2282 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2283 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2285 /* At which point we can try some of the same things as for indirects. */
2286 t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
2287 if (!t)
2288 t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
2289 ptd_type, false);
2290 if (t)
2292 t = build1 (ADDR_EXPR, res_type, t);
2293 SET_EXPR_LOCATION (t, loc);
2296 return t;
2299 /* Subroutine of fold_stmt. We perform several simplifications of the
2300 memory reference tree EXPR and make sure to re-gimplify them properly
2301 after propagation of constant addresses. IS_LHS is true if the
2302 reference is supposed to be an lvalue. */
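/* Examples (illustrative): on a RHS, "abc"[1] folds to 'b'; on
   either side, *&x folds back to plain x so that we end up with
   valid gimple.  */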
2304 static tree
2305 maybe_fold_reference (tree expr, bool is_lhs)
2307 tree *t = &expr;
2309 if (TREE_CODE (expr) == ARRAY_REF
2310 && !is_lhs)
2312 tree tem = fold_read_from_constant_string (expr);
2313 if (tem)
2314 return tem;
2317 /* ??? We might want to open-code the relevant remaining cases
2318 to avoid using the generic fold. */
2319 if (handled_component_p (*t)
2320 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
2322 tree tem = fold (*t);
2323 if (tem != *t)
2324 return tem;
2327 while (handled_component_p (*t))
2328 t = &TREE_OPERAND (*t, 0);
2330 if (TREE_CODE (*t) == INDIRECT_REF)
2332 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2333 integer_zero_node);
2334 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2335 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2336 tem = NULL_TREE;
2337 if (!tem
2338 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
2339 /* If we had a good reason for propagating the address here,
2340 make sure we end up with valid gimple. See PR34989. */
2341 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2343 if (tem)
2345 *t = tem;
2346 tem = maybe_fold_reference (expr, is_lhs);
2347 if (tem)
2348 return tem;
2349 return expr;
2352 else if (!is_lhs
2353 && DECL_P (*t))
2355 tree tem = get_symbol_constant_value (*t);
2356 if (tem)
2358 *t = tem;
2359 tem = maybe_fold_reference (expr, is_lhs);
2360 if (tem)
2361 return tem;
2362 return expr;
2366 return NULL_TREE;
2370 /* Return the string length, maximum string length or maximum value of
2371 ARG in LENGTH.
2372 If ARG is an SSA name variable, follow its use-def chains. Return
2373 false if we are unable to determine the length or value, or if, for
2374 TYPE == 0, *LENGTH is not NULL and its value is not equal to the
2375 length we determine. VISITED is a bitmap of visited variables.
2376 TYPE is 0 if the string length should be returned, 1 for the maximum
2377 string length and 2 for the maximum value ARG can have. */
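/* For example (a sketch): if ARG is a PHI of pointers to "foo" and
   "barbaz", TYPE == 0 fails because the lengths 3 and 6 differ,
   while TYPE == 1 returns the maximum, 6.  */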
2379 static bool
2380 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2382 tree var, val;
2383 gimple def_stmt;
2385 if (TREE_CODE (arg) != SSA_NAME)
2387 if (TREE_CODE (arg) == COND_EXPR)
2388 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2389 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2390 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2391 else if (TREE_CODE (arg) == ADDR_EXPR
2392 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2393 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2395 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2396 if (TREE_CODE (aop0) == INDIRECT_REF
2397 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2398 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2399 length, visited, type);
2402 if (type == 2)
2404 val = arg;
2405 if (TREE_CODE (val) != INTEGER_CST
2406 || tree_int_cst_sgn (val) < 0)
2407 return false;
2409 else
2410 val = c_strlen (arg, 1);
2411 if (!val)
2412 return false;
2414 if (*length)
2416 if (type > 0)
2418 if (TREE_CODE (*length) != INTEGER_CST
2419 || TREE_CODE (val) != INTEGER_CST)
2420 return false;
2422 if (tree_int_cst_lt (*length, val))
2423 *length = val;
2424 return true;
2426 else if (simple_cst_equal (val, *length) != 1)
2427 return false;
2430 *length = val;
2431 return true;
2434 /* If we were already here, break the infinite cycle. */
2435 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2436 return true;
2437 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2439 var = arg;
2440 def_stmt = SSA_NAME_DEF_STMT (var);
2442 switch (gimple_code (def_stmt))
2444 case GIMPLE_ASSIGN:
2445 /* The RHS of the statement defining VAR must either have a
2446 constant length or come from another SSA_NAME with a constant
2447 length. */
2448 if (gimple_assign_single_p (def_stmt)
2449 || gimple_assign_unary_nop_p (def_stmt))
2451 tree rhs = gimple_assign_rhs1 (def_stmt);
2452 return get_maxval_strlen (rhs, length, visited, type);
2454 return false;
2456 case GIMPLE_PHI:
2458 /* All the arguments of the PHI node must have the same constant
2459 length. */
2460 unsigned i;
2462 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2464 tree arg = gimple_phi_arg (def_stmt, i)->def;
2466 /* If this PHI has itself as an argument, we cannot
2467 determine the string length of this argument. However,
2468 if we can find a constant string length for the other
2469 PHI args then we can still be sure that this is a
2470 constant string length. So be optimistic and just
2471 continue with the next argument. */
2472 if (arg == gimple_phi_result (def_stmt))
2473 continue;
2475 if (!get_maxval_strlen (arg, length, visited, type))
2476 return false;
2479 return true;
2481 default:
2482 return false;
2487 /* Fold a builtin call in statement STMT. Returns a simplified tree.
2488 We may return a non-constant expression, including another call
2489 to a different function and with different arguments, e.g.,
2490 substituting memcpy for strcpy when the string length is known.
2491 Note that some builtins expand into inline code that may not
2492 be valid in GIMPLE. Callers must take care. */
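/* For instance (illustrative): a call to strlen whose argument CCP
   has resolved to point to "hello" folds to the constant 5.  */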
2494 static tree
2495 ccp_fold_builtin (gimple stmt)
2497 tree result, val[3];
2498 tree callee, a;
2499 int arg_idx, type;
2500 bitmap visited;
2501 bool ignore;
2502 int nargs;
2503 location_t loc = gimple_location (stmt);
2505 gcc_assert (is_gimple_call (stmt));
2507 ignore = (gimple_call_lhs (stmt) == NULL);
2509 /* First try the generic builtin folder. If that succeeds, return the
2510 result directly. */
2511 result = fold_call_stmt (stmt, ignore);
2512 if (result)
2514 if (ignore)
2515 STRIP_NOPS (result);
2516 return result;
2519 /* Ignore MD builtins. */
2520 callee = gimple_call_fndecl (stmt);
2521 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2522 return NULL_TREE;
2524 /* If the builtin could not be folded, and it has no argument list,
2525 we're done. */
2526 nargs = gimple_call_num_args (stmt);
2527 if (nargs == 0)
2528 return NULL_TREE;
2530 /* Limit the work to builtins we know how to simplify. */
2531 switch (DECL_FUNCTION_CODE (callee))
2533 case BUILT_IN_STRLEN:
2534 case BUILT_IN_FPUTS:
2535 case BUILT_IN_FPUTS_UNLOCKED:
2536 arg_idx = 0;
2537 type = 0;
2538 break;
2539 case BUILT_IN_STRCPY:
2540 case BUILT_IN_STRNCPY:
2541 arg_idx = 1;
2542 type = 0;
2543 break;
2544 case BUILT_IN_MEMCPY_CHK:
2545 case BUILT_IN_MEMPCPY_CHK:
2546 case BUILT_IN_MEMMOVE_CHK:
2547 case BUILT_IN_MEMSET_CHK:
2548 case BUILT_IN_STRNCPY_CHK:
2549 arg_idx = 2;
2550 type = 2;
2551 break;
2552 case BUILT_IN_STRCPY_CHK:
2553 case BUILT_IN_STPCPY_CHK:
2554 arg_idx = 1;
2555 type = 1;
2556 break;
2557 case BUILT_IN_SNPRINTF_CHK:
2558 case BUILT_IN_VSNPRINTF_CHK:
2559 arg_idx = 1;
2560 type = 2;
2561 break;
2562 default:
2563 return NULL_TREE;
2566 if (arg_idx >= nargs)
2567 return NULL_TREE;
2569 /* Try to use the dataflow information gathered by the CCP process. */
2570 visited = BITMAP_ALLOC (NULL);
2571 bitmap_clear (visited);
2573 memset (val, 0, sizeof (val));
2574 a = gimple_call_arg (stmt, arg_idx);
2575 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2576 val[arg_idx] = NULL_TREE;
2578 BITMAP_FREE (visited);
2580 result = NULL_TREE;
2581 switch (DECL_FUNCTION_CODE (callee))
2583 case BUILT_IN_STRLEN:
2584 if (val[0] && nargs == 1)
2586 tree new_val =
2587 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2589 /* If the result is not a valid gimple value, or not a cast
2590 of a valid gimple value, then we cannot use the result. */
2591 if (is_gimple_val (new_val)
2592 || (is_gimple_cast (new_val)
2593 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2594 return new_val;
2596 break;
2598 case BUILT_IN_STRCPY:
2599 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2600 result = fold_builtin_strcpy (loc, callee,
2601 gimple_call_arg (stmt, 0),
2602 gimple_call_arg (stmt, 1),
2603 val[1]);
2604 break;
2606 case BUILT_IN_STRNCPY:
2607 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2608 result = fold_builtin_strncpy (loc, callee,
2609 gimple_call_arg (stmt, 0),
2610 gimple_call_arg (stmt, 1),
2611 gimple_call_arg (stmt, 2),
2612 val[1]);
2613 break;
2615 case BUILT_IN_FPUTS:
2616 if (nargs == 2)
2617 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
2618 gimple_call_arg (stmt, 1),
2619 ignore, false, val[0]);
2620 break;
2622 case BUILT_IN_FPUTS_UNLOCKED:
2623 if (nargs == 2)
2624 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
2625 gimple_call_arg (stmt, 1),
2626 ignore, true, val[0]);
2627 break;
2629 case BUILT_IN_MEMCPY_CHK:
2630 case BUILT_IN_MEMPCPY_CHK:
2631 case BUILT_IN_MEMMOVE_CHK:
2632 case BUILT_IN_MEMSET_CHK:
2633 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2634 result = fold_builtin_memory_chk (loc, callee,
2635 gimple_call_arg (stmt, 0),
2636 gimple_call_arg (stmt, 1),
2637 gimple_call_arg (stmt, 2),
2638 gimple_call_arg (stmt, 3),
2639 val[2], ignore,
2640 DECL_FUNCTION_CODE (callee));
2641 break;
2643 case BUILT_IN_STRCPY_CHK:
2644 case BUILT_IN_STPCPY_CHK:
2645 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2646 result = fold_builtin_stxcpy_chk (loc, callee,
2647 gimple_call_arg (stmt, 0),
2648 gimple_call_arg (stmt, 1),
2649 gimple_call_arg (stmt, 2),
2650 val[1], ignore,
2651 DECL_FUNCTION_CODE (callee));
2652 break;
2654 case BUILT_IN_STRNCPY_CHK:
2655 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2656 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
2657 gimple_call_arg (stmt, 1),
2658 gimple_call_arg (stmt, 2),
2659 gimple_call_arg (stmt, 3),
2660 val[2]);
2661 break;
2663 case BUILT_IN_SNPRINTF_CHK:
2664 case BUILT_IN_VSNPRINTF_CHK:
2665 if (val[1] && is_gimple_val (val[1]))
2666 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2667 DECL_FUNCTION_CODE (callee));
2668 break;
2670 default:
2671 gcc_unreachable ();
2674 if (result && ignore)
2675 result = fold_ignored_result (result);
2676 return result;
2679 /* Attempt to fold an assignment statement pointed to by SI. Returns a
2680 replacement rhs for the statement or NULL_TREE if no simplification
2681 could be made. It is assumed that the operands have been previously
2682 folded. */
2684 static tree
2685 fold_gimple_assign (gimple_stmt_iterator *si)
2687 gimple stmt = gsi_stmt (*si);
2688 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2689 location_t loc = gimple_location (stmt);
2691 tree result = NULL_TREE;
2693 switch (get_gimple_rhs_class (subcode))
2695 case GIMPLE_SINGLE_RHS:
2697 tree rhs = gimple_assign_rhs1 (stmt);
2699 /* Try to fold a conditional expression. */
2700 if (TREE_CODE (rhs) == COND_EXPR)
2702 tree op0 = COND_EXPR_COND (rhs);
2703 tree tem;
2704 bool set = false;
2705 location_t cond_loc = EXPR_LOCATION (rhs);
2707 if (COMPARISON_CLASS_P (op0))
2709 fold_defer_overflow_warnings ();
2710 tem = fold_binary_loc (cond_loc,
2711 TREE_CODE (op0), TREE_TYPE (op0),
2712 TREE_OPERAND (op0, 0),
2713 TREE_OPERAND (op0, 1));
2714 /* This is actually a conditional expression, not a GIMPLE
2715 conditional statement; however, the valid_gimple_rhs_p
2716 test still applies. */
2717 set = (tem && is_gimple_condexpr (tem)
2718 && valid_gimple_rhs_p (tem));
2719 fold_undefer_overflow_warnings (set, stmt, 0);
2721 else if (is_gimple_min_invariant (op0))
2723 tem = op0;
2724 set = true;
2726 else
2727 return NULL_TREE;
2729 if (set)
2730 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
2731 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2734 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2735 return maybe_fold_tmr (rhs);
2737 else if (REFERENCE_CLASS_P (rhs))
2738 return maybe_fold_reference (rhs, false);
2740 else if (TREE_CODE (rhs) == ADDR_EXPR)
2742 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2743 if (tem)
2744 result = fold_convert (TREE_TYPE (rhs),
2745 build_fold_addr_expr_loc (loc, tem));
2748 else if (TREE_CODE (rhs) == CONSTRUCTOR
2749 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2750 && (CONSTRUCTOR_NELTS (rhs)
2751 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2753 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
2754 unsigned i;
2755 tree val;
2757 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2758 if (TREE_CODE (val) != INTEGER_CST
2759 && TREE_CODE (val) != REAL_CST
2760 && TREE_CODE (val) != FIXED_CST)
2761 return NULL_TREE;
2763 return build_vector_from_ctor (TREE_TYPE (rhs),
2764 CONSTRUCTOR_ELTS (rhs));
2767 else if (DECL_P (rhs))
2768 return get_symbol_constant_value (rhs);
2770 /* If we couldn't fold the RHS, hand over to the generic
2771 fold routines. */
2772 if (result == NULL_TREE)
2773 result = fold (rhs);
2775 /* Strip away useless type conversions: both the NON_LVALUE_EXPRs
2776 that may have been added by fold, and "useless" type
2777 conversions that might now be apparent due to propagation. */
2778 STRIP_USELESS_TYPE_CONVERSION (result);
2780 if (result != rhs && valid_gimple_rhs_p (result))
2781 return result;
2783 return NULL_TREE;
2785 break;
2787 case GIMPLE_UNARY_RHS:
2789 tree rhs = gimple_assign_rhs1 (stmt);
2791 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
2792 if (result)
2794 /* If the operation was a conversion, do _not_ mark a
2795 resulting constant with TREE_OVERFLOW if the original
2796 constant was not. These conversions have implementation-
2797 defined behavior, and retaining the TREE_OVERFLOW flag
2798 here would confuse later passes such as VRP. */
2799 if (CONVERT_EXPR_CODE_P (subcode)
2800 && TREE_CODE (result) == INTEGER_CST
2801 && TREE_CODE (rhs) == INTEGER_CST)
2802 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2804 STRIP_USELESS_TYPE_CONVERSION (result);
2805 if (valid_gimple_rhs_p (result))
2806 return result;
2808 else if (CONVERT_EXPR_CODE_P (subcode)
2809 && POINTER_TYPE_P (gimple_expr_type (stmt))
2810 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2812 tree type = gimple_expr_type (stmt);
2813 tree t = maybe_fold_offset_to_address (loc,
2814 gimple_assign_rhs1 (stmt),
2815 integer_zero_node, type);
2816 if (t)
2817 return t;
2820 break;
2822 case GIMPLE_BINARY_RHS:
2823 /* Try to fold pointer addition. */
2824 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2826 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2827 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2829 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2830 if (!useless_type_conversion_p
2831 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2832 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2834 result = maybe_fold_stmt_addition (gimple_location (stmt),
2835 type,
2836 gimple_assign_rhs1 (stmt),
2837 gimple_assign_rhs2 (stmt));
2840 if (!result)
2841 result = fold_binary_loc (loc, subcode,
2842 TREE_TYPE (gimple_assign_lhs (stmt)),
2843 gimple_assign_rhs1 (stmt),
2844 gimple_assign_rhs2 (stmt));
2846 if (result)
2848 STRIP_USELESS_TYPE_CONVERSION (result);
2849 if (valid_gimple_rhs_p (result))
2850 return result;
2852 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2853 we lose canonicalization opportunities. Do not go through
2854 fold again here, though, or the same non-GIMPLE will be
2855 produced. */
2856 if (commutative_tree_code (subcode)
2857 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2858 gimple_assign_rhs2 (stmt), false))
2859 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2860 gimple_assign_rhs2 (stmt),
2861 gimple_assign_rhs1 (stmt));
2863 break;
2865 case GIMPLE_INVALID_RHS:
2866 gcc_unreachable ();
2869 return NULL_TREE;
2872 /* Attempt to fold a conditional statement. Return true if any changes were
2873 made. We only attempt to fold the condition expression, and do not perform
2874 any transformation that would require alteration of the cfg. It is
2875 assumed that the operands have been previously folded. */
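/* E.g. (illustrative): a comparison of constants such as 4 > 3 folds
   to true and the condition is replaced accordingly; removing the
   now-dead outgoing edge is left to later cfg cleanup.  */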
2877 static bool
2878 fold_gimple_cond (gimple stmt)
2880 tree result = fold_binary_loc (gimple_location (stmt),
2881 gimple_cond_code (stmt),
2882 boolean_type_node,
2883 gimple_cond_lhs (stmt),
2884 gimple_cond_rhs (stmt));
2886 if (result)
2888 STRIP_USELESS_TYPE_CONVERSION (result);
2889 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2891 gimple_cond_set_condition_from_tree (stmt, result);
2892 return true;
2896 return false;
2900 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2901 The statement may be replaced by another statement, e.g., if the call
2902 simplifies to a constant value. Return true if any changes were made.
2903 It is assumed that the operands have been previously folded. */
2905 static bool
2906 fold_gimple_call (gimple_stmt_iterator *gsi)
2908 gimple stmt = gsi_stmt (*gsi);
2910 tree callee = gimple_call_fndecl (stmt);
2912 /* Check for builtins that CCP can handle using information not
2913 available in the generic fold routines. */
2914 if (callee && DECL_BUILT_IN (callee))
2916 tree result = ccp_fold_builtin (stmt);
2918 if (result)
2919 return update_call_from_tree (gsi, result);
2921 else
2923 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2924 here are those where we've propagated the address of a decl into the
2925 object slot. */
2926 /* ??? Should perhaps do this in fold proper. However, doing it
2927 there requires that we create a new CALL_EXPR, and that requires
2928 copying EH region info to the new node. Easier to just do it
2929 here where we can just smash the call operand. */
2930 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2931 callee = gimple_call_fn (stmt);
2932 if (TREE_CODE (callee) == OBJ_TYPE_REF
2933 && lang_hooks.fold_obj_type_ref
2934 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2935 && DECL_P (TREE_OPERAND
2936 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2938 tree t;
2940 /* ??? Caution: Broken ADDR_EXPR semantics means that
2941 looking at the type of the operand of the addr_expr
2942 can yield an array type. See silly exception in
2943 check_pointer_types_r. */
2944 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2945 t = lang_hooks.fold_obj_type_ref (callee, t);
2946 if (t)
2948 gimple_call_set_fn (stmt, t);
2949 return true;
2954 return false;
2957 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2958 distinguishes the two cases. */
2960 static bool
2961 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2963 bool changed = false;
2964 gimple stmt = gsi_stmt (*gsi);
2965 unsigned i;
2967 /* Fold the main computation performed by the statement. */
2968 switch (gimple_code (stmt))
2970 case GIMPLE_ASSIGN:
2972 unsigned old_num_ops = gimple_num_ops (stmt);
2973 tree new_rhs = fold_gimple_assign (gsi);
2974 if (new_rhs != NULL_TREE
2975 && (!inplace
2976 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2978 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2979 changed = true;
2981 break;
2984 case GIMPLE_COND:
2985 changed |= fold_gimple_cond (stmt);
2986 break;
2988 case GIMPLE_CALL:
2989 /* Fold *& in call arguments. */
2990 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2991 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2993 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2994 if (tmp)
2996 gimple_call_set_arg (stmt, i, tmp);
2997 changed = true;
3000 /* The entire statement may be replaced in this case. */
3001 if (!inplace)
3002 changed |= fold_gimple_call (gsi);
3003 break;
3005 case GIMPLE_ASM:
3006 /* Fold *& in asm operands. */
3007 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3009 tree link = gimple_asm_output_op (stmt, i);
3010 tree op = TREE_VALUE (link);
3011 if (REFERENCE_CLASS_P (op)
3012 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3014 TREE_VALUE (link) = op;
3015 changed = true;
3018 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3020 tree link = gimple_asm_input_op (stmt, i);
3021 tree op = TREE_VALUE (link);
3022 if (REFERENCE_CLASS_P (op)
3023 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
3025 TREE_VALUE (link) = op;
3026 changed = true;
3029 break;
3031 default:;
3034 stmt = gsi_stmt (*gsi);
3036 /* Fold *& on the lhs. */
3037 if (gimple_has_lhs (stmt))
3039 tree lhs = gimple_get_lhs (stmt);
3040 if (lhs && REFERENCE_CLASS_P (lhs))
3042 tree new_lhs = maybe_fold_reference (lhs, true);
3043 if (new_lhs)
3045 gimple_set_lhs (stmt, new_lhs);
3046 changed = true;
3051 return changed;
3054 /* Fold the statement pointed to by GSI. In some cases, this function may
3055 replace the whole statement with a new one. Returns true iff folding
3056 makes any changes.
3057 The statement pointed to by GSI should be in valid gimple form but may
3058 be in an unfolded state resulting from, for example, constant
3059 propagation, which can produce *&x = 0. */
3061 bool
3062 fold_stmt (gimple_stmt_iterator *gsi)
3064 return fold_stmt_1 (gsi, false);
3067 /* Perform the minimal folding on statement STMT. Only operations like
3068 *&x created by constant propagation are handled. The statement cannot
3069 be replaced with a new one. Return true if the statement was
3070 changed, false otherwise.
3071 The statement STMT should be in valid gimple form but may
3072 be in an unfolded state resulting from, for example, constant
3073 propagation, which can produce *&x = 0. */
3075 bool
3076 fold_stmt_inplace (gimple stmt)
3078 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3079 bool changed = fold_stmt_1 (&gsi, true);
3080 gcc_assert (gsi_stmt (gsi) == stmt);
3081 return changed;
3084 /* Try to optimize out __builtin_stack_restore. We can do so
3085 if there is another __builtin_stack_restore in the same basic
3086 block and no calls or ASM_EXPRs are in between, or if this block's
3087 only outgoing edge is to EXIT_BLOCK and there are no calls or
3088 ASM_EXPRs after this __builtin_stack_restore. */
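/* For example (a sketch in C): in

     void f (int n) { int vla[n]; vla[0] = 1; }

   the restore emitted when VLA's scope ends falls through straight
   to the function's exit, so it can be optimized away.  */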
3090 static tree
3091 optimize_stack_restore (gimple_stmt_iterator i)
3093 tree callee, rhs;
3094 gimple stmt, stack_save;
3095 gimple_stmt_iterator stack_save_gsi;
3097 basic_block bb = gsi_bb (i);
3098 gimple call = gsi_stmt (i);
3100 if (gimple_code (call) != GIMPLE_CALL
3101 || gimple_call_num_args (call) != 1
3102 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3103 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
3104 return NULL_TREE;
3106 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
3108 stmt = gsi_stmt (i);
3109 if (gimple_code (stmt) == GIMPLE_ASM)
3110 return NULL_TREE;
3111 if (gimple_code (stmt) != GIMPLE_CALL)
3112 continue;
3114 callee = gimple_call_fndecl (stmt);
3115 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3116 return NULL_TREE;
3118 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3119 break;
3122 if (gsi_end_p (i)
3123 && (! single_succ_p (bb)
3124 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3125 return NULL_TREE;
3127 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3128 if (gimple_code (stack_save) != GIMPLE_CALL
3129 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3130 || stmt_could_throw_p (stack_save)
3131 || !has_single_use (gimple_call_arg (call, 0)))
3132 return NULL_TREE;
3134 callee = gimple_call_fndecl (stack_save);
3135 if (!callee
3136 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3137 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3138 || gimple_call_num_args (stack_save) != 0)
3139 return NULL_TREE;
3141 stack_save_gsi = gsi_for_stmt (stack_save);
3142 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3143 if (!update_call_from_tree (&stack_save_gsi, rhs))
3144 return NULL_TREE;
3146 /* No effect, so the statement will be deleted. */
3147 return integer_zero_node;
3150 /* If the va_list type is a simple pointer and nothing special is needed,
3151 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3152 optimize __builtin_va_end (&ap) out as a NOP, and turn
3153 __builtin_va_copy into a simple pointer assignment. */
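/* E.g. (illustrative): when va_list is a plain char *, a call
   __builtin_va_copy (&dst, src) becomes the assignment dst = src.  */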
3155 static tree
3156 optimize_stdarg_builtin (gimple call)
3158 tree callee, lhs, rhs, cfun_va_list;
3159 bool va_list_simple_ptr;
3160 location_t loc = gimple_location (call);
3162 if (gimple_code (call) != GIMPLE_CALL)
3163 return NULL_TREE;
3165 callee = gimple_call_fndecl (call);
3167 cfun_va_list = targetm.fn_abi_va_list (callee);
3168 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3169 && (TREE_TYPE (cfun_va_list) == void_type_node
3170 || TREE_TYPE (cfun_va_list) == char_type_node);
3172 switch (DECL_FUNCTION_CODE (callee))
3174 case BUILT_IN_VA_START:
3175 if (!va_list_simple_ptr
3176 || targetm.expand_builtin_va_start != NULL
3177 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3178 return NULL_TREE;
3180 if (gimple_call_num_args (call) != 2)
3181 return NULL_TREE;
3183 lhs = gimple_call_arg (call, 0);
3184 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3185 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3186 != TYPE_MAIN_VARIANT (cfun_va_list))
3187 return NULL_TREE;
3189 lhs = build_fold_indirect_ref_loc (loc, lhs);
3190 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
3191 1, integer_zero_node);
3192 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
3193 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3195 case BUILT_IN_VA_COPY:
3196 if (!va_list_simple_ptr)
3197 return NULL_TREE;
3199 if (gimple_call_num_args (call) != 2)
3200 return NULL_TREE;
3202 lhs = gimple_call_arg (call, 0);
3203 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3204 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3205 != TYPE_MAIN_VARIANT (cfun_va_list))
3206 return NULL_TREE;
3208 lhs = build_fold_indirect_ref_loc (loc, lhs);
3209 rhs = gimple_call_arg (call, 1);
3210 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3211 != TYPE_MAIN_VARIANT (cfun_va_list))
3212 return NULL_TREE;
3214 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
3215 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3217 case BUILT_IN_VA_END:
3218 /* No effect, so the statement will be deleted. */
3219 return integer_zero_node;
3221 default:
3222 gcc_unreachable ();
3226 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3227 RHS of an assignment. Insert the necessary statements before
3228 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3229 is replaced. If the call is expected to produce a result, then it
3230 is replaced by an assignment of the new RHS to the result variable.
3231 If the result is to be ignored, then the call is replaced by a
3232 GIMPLE_NOP. */
3234 static void
3235 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3237 tree lhs;
3238 tree tmp = NULL_TREE; /* Silence warning. */
3239 gimple stmt, new_stmt;
3240 gimple_stmt_iterator i;
3241 gimple_seq stmts = gimple_seq_alloc();
3242 struct gimplify_ctx gctx;
3244 stmt = gsi_stmt (*si_p);
3246 gcc_assert (is_gimple_call (stmt));
3248 lhs = gimple_call_lhs (stmt);
3250 push_gimplify_context (&gctx);
3252 if (lhs == NULL_TREE)
3253 gimplify_and_add (expr, &stmts);
3254 else
3255 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3257 pop_gimplify_context (NULL);
3259 if (gimple_has_location (stmt))
3260 annotate_all_with_location (stmts, gimple_location (stmt));
3262 /* The replacement can expose previously unreferenced variables. */
3263 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3265 new_stmt = gsi_stmt (i);
3266 find_new_referenced_vars (new_stmt);
3267 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3268 mark_symbols_for_renaming (new_stmt);
3269 gsi_next (si_p);
3272 if (lhs == NULL_TREE)
3274 new_stmt = gimple_build_nop ();
3275 unlink_stmt_vdef (stmt);
3276 release_defs (stmt);
3278 else
3280 new_stmt = gimple_build_assign (lhs, tmp);
3281 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3282 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3283 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3286 gimple_set_location (new_stmt, gimple_location (stmt));
3287 gsi_replace (si_p, new_stmt, false);
3290 /* A simple pass that attempts to fold all builtin functions. This pass
3291 is run after we've propagated as many constants as we can. */
3293 static unsigned int
3294 execute_fold_all_builtins (void)
3296 bool cfg_changed = false;
3297 basic_block bb;
3298 unsigned int todoflags = 0;
3300 FOR_EACH_BB (bb)
3302 gimple_stmt_iterator i;
3303 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3305 gimple stmt, old_stmt;
3306 tree callee, result;
3307 enum built_in_function fcode;
3309 stmt = gsi_stmt (i);
3311 if (gimple_code (stmt) != GIMPLE_CALL)
3313 gsi_next (&i);
3314 continue;
3316 callee = gimple_call_fndecl (stmt);
3317 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3319 gsi_next (&i);
3320 continue;
3322 fcode = DECL_FUNCTION_CODE (callee);
3324 result = ccp_fold_builtin (stmt);
3326 if (result)
3327 gimple_remove_stmt_histograms (cfun, stmt);
3329 if (!result)
3330 switch (DECL_FUNCTION_CODE (callee))
3332 case BUILT_IN_CONSTANT_P:
3333 /* Resolve __builtin_constant_p. If it hasn't been
3334 folded to integer_one_node by now, it's fairly
3335 certain that the value simply isn't constant. */
3336 result = integer_zero_node;
3337 break;
3339 case BUILT_IN_STACK_RESTORE:
3340 result = optimize_stack_restore (i);
3341 if (result)
3342 break;
3343 gsi_next (&i);
3344 continue;
3346 case BUILT_IN_VA_START:
3347 case BUILT_IN_VA_END:
3348 case BUILT_IN_VA_COPY:
3349 /* These shouldn't be folded before pass_stdarg. */
3350 result = optimize_stdarg_builtin (stmt);
3351 if (result)
3352 break;
3353 /* FALLTHRU */
3355 default:
3356 gsi_next (&i);
3357 continue;
3360 if (dump_file && (dump_flags & TDF_DETAILS))
3362 fprintf (dump_file, "Simplified\n ");
3363 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3366 old_stmt = stmt;
3367 if (!update_call_from_tree (&i, result))
3369 gimplify_and_update_call_from_tree (&i, result);
3370 todoflags |= TODO_update_address_taken;
3373 stmt = gsi_stmt (i);
3374 update_stmt (stmt);
3376 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3377 && gimple_purge_dead_eh_edges (bb))
3378 cfg_changed = true;
3380 if (dump_file && (dump_flags & TDF_DETAILS))
3382 fprintf (dump_file, "to\n ");
3383 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3384 fprintf (dump_file, "\n");
3387 /* Retry the same statement if it changed into another
3388 builtin; there might be new opportunities now. */
3389 if (gimple_code (stmt) != GIMPLE_CALL)
3391 gsi_next (&i);
3392 continue;
3394 callee = gimple_call_fndecl (stmt);
3395 if (!callee
3396 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3397 || DECL_FUNCTION_CODE (callee) == fcode)
3398 gsi_next (&i);
3402 /* Delete unreachable blocks. */
3403 if (cfg_changed)
3404 todoflags |= TODO_cleanup_cfg;
3406 return todoflags;
3410 struct gimple_opt_pass pass_fold_builtins =
3413 GIMPLE_PASS,
3414 "fab", /* name */
3415 NULL, /* gate */
3416 execute_fold_all_builtins, /* execute */
3417 NULL, /* sub */
3418 NULL, /* next */
3419 0, /* static_pass_number */
3420 TV_NONE, /* tv_id */
3421 PROP_cfg | PROP_ssa, /* properties_required */
3422 0, /* properties_provided */
3423 0, /* properties_destroyed */
3424 0, /* todo_flags_start */
3425 TODO_dump_func
3426 | TODO_verify_ssa
3427 | TODO_update_ssa /* todo_flags_finish */