gcc/tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
 85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
102 References:
104 Constant propagation with conditional branches,
105 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
107 Building an Optimizing Compiler,
108 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
110 Advanced Compiler Design and Implementation,
111 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
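/* As a small editorial illustration of the scheme described above (not
   part of the pass itself), consider the fragment

       x_1 = 4;
       y_2 = x_1 + 5;
       if (y_2 > 10)
         ...

   Visiting the first statement sets x_1 to CONSTANT 4; visiting the
   second then sets y_2 to CONSTANT 9; the conditional folds to false,
   so only the else edge is marked executable.  substitute_and_fold
   later rewrites the uses accordingly.  */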
113 #include "config.h"
114 #include "system.h"
115 #include "coretypes.h"
116 #include "tm.h"
117 #include "tree.h"
118 #include "flags.h"
119 #include "tm_p.h"
120 #include "basic-block.h"
121 #include "output.h"
122 #include "function.h"
123 #include "tree-pretty-print.h"
124 #include "gimple-pretty-print.h"
125 #include "timevar.h"
126 #include "tree-dump.h"
127 #include "tree-flow.h"
128 #include "tree-pass.h"
129 #include "tree-ssa-propagate.h"
130 #include "value-prof.h"
131 #include "langhooks.h"
132 #include "target.h"
133 #include "diagnostic-core.h"
134 #include "dbgcnt.h"
137 /* Possible lattice values. */
138 typedef enum
140 UNINITIALIZED,
141 UNDEFINED,
142 CONSTANT,
143 VARYING
144 } ccp_lattice_t;
146 struct prop_value_d {
147 /* Lattice value. */
148 ccp_lattice_t lattice_val;
150 /* Propagated value. */
151 tree value;
153 /* Mask that applies to the propagated value during CCP. For
154 X with a CONSTANT lattice value X & ~mask == value & ~mask. */
155 double_int mask;
158 typedef struct prop_value_d prop_value_t;
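/* Editorial sketch, not used by the pass: a CONSTANT lattice entry
   (VALUE, MASK) describes every X that agrees with VALUE on the bits
   MASK leaves known, i.e. X & ~MASK == VALUE & ~MASK.  For example,
   VALUE 0x10 with MASK 0x3 describes 0x10, 0x11, 0x12 and 0x13.  The
   hypothetical helper below states that invariant with plain unsigned
   arithmetic in place of double_int.  */

static inline bool
ccp_mask_describes_p (unsigned HOST_WIDE_INT x,
                      unsigned HOST_WIDE_INT value,
                      unsigned HOST_WIDE_INT mask)
{
  /* Bits set in MASK are unknown; all remaining bits must match VALUE.  */
  return (x & ~mask) == (value & ~mask);
}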
160 /* Array of propagated constant values. After propagation,
 161 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). */
166 static prop_value_t *const_val;
168 static void canonicalize_float_value (prop_value_t *);
169 static bool ccp_fold_stmt (gimple_stmt_iterator *);
170 static tree fold_ctor_reference (tree type, tree ctor,
171 unsigned HOST_WIDE_INT offset,
172 unsigned HOST_WIDE_INT size);
174 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
176 static void
177 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
179 switch (val.lattice_val)
181 case UNINITIALIZED:
182 fprintf (outf, "%sUNINITIALIZED", prefix);
183 break;
184 case UNDEFINED:
185 fprintf (outf, "%sUNDEFINED", prefix);
186 break;
187 case VARYING:
188 fprintf (outf, "%sVARYING", prefix);
189 break;
190 case CONSTANT:
191 fprintf (outf, "%sCONSTANT ", prefix);
192 if (TREE_CODE (val.value) != INTEGER_CST
193 || double_int_zero_p (val.mask))
194 print_generic_expr (outf, val.value, dump_flags);
195 else
197 double_int cval = double_int_and_not (tree_to_double_int (val.value),
198 val.mask);
 199 fprintf (outf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
 200 cval.high, cval.low);
201 fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
202 val.mask.high, val.mask.low);
204 break;
205 default:
206 gcc_unreachable ();
211 /* Print lattice value VAL to stderr. */
213 void debug_lattice_value (prop_value_t val);
215 DEBUG_FUNCTION void
216 debug_lattice_value (prop_value_t val)
218 dump_lattice_value (stderr, "", val);
219 fprintf (stderr, "\n");
223 /* Compute a default value for variable VAR and store it in the
224 CONST_VAL array. The following rules are used to get default
225 values:
227 1- Global and static variables that are declared constant are
228 considered CONSTANT.
230 2- Any other value is considered UNDEFINED. This is useful when
231 considering PHI nodes. PHI arguments that are undefined do not
232 change the constant value of the PHI node, which allows for more
233 constants to be propagated.
235 3- Variables defined by statements other than assignments and PHI
236 nodes are considered VARYING.
238 4- Initial values of variables that are not GIMPLE registers are
239 considered VARYING. */
241 static prop_value_t
242 get_default_value (tree var)
244 tree sym = SSA_NAME_VAR (var);
245 prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
246 gimple stmt;
248 stmt = SSA_NAME_DEF_STMT (var);
250 if (gimple_nop_p (stmt))
252 /* Variables defined by an empty statement are those used
253 before being initialized. If VAR is a local variable, we
254 can assume initially that it is UNDEFINED, otherwise we must
255 consider it VARYING. */
256 if (is_gimple_reg (sym)
257 && TREE_CODE (sym) == VAR_DECL)
258 val.lattice_val = UNDEFINED;
259 else
261 val.lattice_val = VARYING;
262 val.mask = double_int_minus_one;
265 else if (is_gimple_assign (stmt)
266 /* Value-returning GIMPLE_CALL statements assign to
267 a variable, and are treated similarly to GIMPLE_ASSIGN. */
268 || (is_gimple_call (stmt)
269 && gimple_call_lhs (stmt) != NULL_TREE)
270 || gimple_code (stmt) == GIMPLE_PHI)
272 tree cst;
273 if (gimple_assign_single_p (stmt)
274 && DECL_P (gimple_assign_rhs1 (stmt))
275 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
277 val.lattice_val = CONSTANT;
278 val.value = cst;
280 else
281 /* Any other variable defined by an assignment or a PHI node
282 is considered UNDEFINED. */
283 val.lattice_val = UNDEFINED;
285 else
287 /* Otherwise, VAR will never take on a constant value. */
288 val.lattice_val = VARYING;
289 val.mask = double_int_minus_one;
292 return val;
296 /* Get the constant value associated with variable VAR. */
298 static inline prop_value_t *
299 get_value (tree var)
301 prop_value_t *val;
303 if (const_val == NULL)
304 return NULL;
306 val = &const_val[SSA_NAME_VERSION (var)];
307 if (val->lattice_val == UNINITIALIZED)
308 *val = get_default_value (var);
310 canonicalize_float_value (val);
312 return val;
315 /* Return the constant tree value associated with VAR. */
317 static inline tree
318 get_constant_value (tree var)
320 prop_value_t *val;
321 if (TREE_CODE (var) != SSA_NAME)
323 if (is_gimple_min_invariant (var))
324 return var;
325 return NULL_TREE;
327 val = get_value (var);
328 if (val
329 && val->lattice_val == CONSTANT
330 && (TREE_CODE (val->value) != INTEGER_CST
331 || double_int_zero_p (val->mask)))
332 return val->value;
333 return NULL_TREE;
336 /* Sets the value associated with VAR to VARYING. */
338 static inline void
339 set_value_varying (tree var)
341 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
343 val->lattice_val = VARYING;
344 val->value = NULL_TREE;
345 val->mask = double_int_minus_one;
348 /* For float types, modify the value of VAL to make ccp work correctly
349 for non-standard values (-0, NaN):
351 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
352 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
353 This is to fix the following problem (see PR 29921): Suppose we have
355 x = 0.0 * y
357 and we set value of y to NaN. This causes value of x to be set to NaN.
358 When we later determine that y is in fact VARYING, fold uses the fact
359 that HONOR_NANS is false, and we try to change the value of x to 0,
360 causing an ICE. With HONOR_NANS being false, the real appearance of
361 NaN would cause undefined behavior, though, so claiming that y (and x)
362 are UNDEFINED initially is correct. */
364 static void
365 canonicalize_float_value (prop_value_t *val)
367 enum machine_mode mode;
368 tree type;
369 REAL_VALUE_TYPE d;
371 if (val->lattice_val != CONSTANT
372 || TREE_CODE (val->value) != REAL_CST)
373 return;
375 d = TREE_REAL_CST (val->value);
376 type = TREE_TYPE (val->value);
377 mode = TYPE_MODE (type);
379 if (!HONOR_SIGNED_ZEROS (mode)
380 && REAL_VALUE_MINUS_ZERO (d))
382 val->value = build_real (type, dconst0);
383 return;
386 if (!HONOR_NANS (mode)
387 && REAL_VALUE_ISNAN (d))
389 val->lattice_val = UNDEFINED;
390 val->value = NULL;
391 return;
395 /* Return whether the lattice transition is valid. */
397 static bool
398 valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
400 /* Lattice transitions must always be monotonically increasing in
401 value. */
402 if (old_val.lattice_val < new_val.lattice_val)
403 return true;
405 if (old_val.lattice_val != new_val.lattice_val)
406 return false;
408 if (!old_val.value && !new_val.value)
409 return true;
411 /* Now both lattice values are CONSTANT. */
413 /* Allow transitioning from &x to &x & ~3. */
414 if (TREE_CODE (old_val.value) != INTEGER_CST
415 && TREE_CODE (new_val.value) == INTEGER_CST)
416 return true;
418 /* Bit-lattices have to agree in the still valid bits. */
419 if (TREE_CODE (old_val.value) == INTEGER_CST
420 && TREE_CODE (new_val.value) == INTEGER_CST)
421 return double_int_equal_p
422 (double_int_and_not (tree_to_double_int (old_val.value),
423 new_val.mask),
424 double_int_and_not (tree_to_double_int (new_val.value),
425 new_val.mask));
427 /* Otherwise constant values have to agree. */
428 return operand_equal_p (old_val.value, new_val.value, 0);
431 /* Set the value for variable VAR to NEW_VAL. Return true if the new
432 value is different from VAR's previous value. */
434 static bool
435 set_lattice_value (tree var, prop_value_t new_val)
437 /* We can deal with old UNINITIALIZED values just fine here. */
438 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
440 canonicalize_float_value (&new_val);
442 /* We have to be careful to not go up the bitwise lattice
443 represented by the mask.
444 ??? This doesn't seem to be the best place to enforce this. */
445 if (new_val.lattice_val == CONSTANT
446 && old_val->lattice_val == CONSTANT
447 && TREE_CODE (new_val.value) == INTEGER_CST
448 && TREE_CODE (old_val->value) == INTEGER_CST)
450 double_int diff;
451 diff = double_int_xor (tree_to_double_int (new_val.value),
452 tree_to_double_int (old_val->value));
453 new_val.mask = double_int_ior (new_val.mask,
454 double_int_ior (old_val->mask, diff));
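/* Worked example (editorial): if the old value was CONSTANT 0x12 with
   mask 0x1 and the new value is CONSTANT 0x16 with mask 0x1, then
   DIFF is 0x04 and the adjusted mask becomes 0x1 | 0x1 | 0x4 = 0x5,
   i.e. the changed bit is demoted to unknown so the transition stays
   monotonic.  */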
457 gcc_assert (valid_lattice_transition (*old_val, new_val));
459 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
460 caller that this was a non-transition. */
461 if (old_val->lattice_val != new_val.lattice_val
462 || (new_val.lattice_val == CONSTANT
463 && TREE_CODE (new_val.value) == INTEGER_CST
464 && (TREE_CODE (old_val->value) != INTEGER_CST
465 || !double_int_equal_p (new_val.mask, old_val->mask))))
467 /* ??? We would like to delay creation of INTEGER_CSTs from
 468 partially constant values here. */
470 if (dump_file && (dump_flags & TDF_DETAILS))
472 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
473 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
476 *old_val = new_val;
478 gcc_assert (new_val.lattice_val != UNINITIALIZED);
479 return true;
482 return false;
485 static prop_value_t get_value_for_expr (tree, bool);
486 static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
487 static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
488 tree, double_int, double_int,
489 tree, double_int, double_int);
491 /* Return a double_int that can be used for bitwise simplifications
492 from VAL. */
494 static double_int
495 value_to_double_int (prop_value_t val)
497 if (val.value
498 && TREE_CODE (val.value) == INTEGER_CST)
499 return tree_to_double_int (val.value);
500 else
501 return double_int_zero;
504 /* Return the value for the address expression EXPR based on alignment
505 information. */
507 static prop_value_t
508 get_value_from_alignment (tree expr)
510 prop_value_t val;
511 HOST_WIDE_INT bitsize, bitpos;
512 tree base, offset;
513 enum machine_mode mode;
514 int align;
516 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
518 base = get_inner_reference (TREE_OPERAND (expr, 0),
519 &bitsize, &bitpos, &offset,
520 &mode, &align, &align, false);
521 if (TREE_CODE (base) == MEM_REF)
522 val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
523 TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
524 else if (base
525 && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
526 > BITS_PER_UNIT))
528 val.lattice_val = CONSTANT;
529 /* We assume pointers are zero-extended. */
530 val.mask = double_int_and_not
531 (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
532 uhwi_to_double_int (align / BITS_PER_UNIT - 1));
533 val.value = build_int_cst (TREE_TYPE (expr), 0);
535 else
537 val.lattice_val = VARYING;
538 val.mask = double_int_minus_one;
539 val.value = NULL_TREE;
541 if (bitpos != 0)
543 double_int value, mask;
544 bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
545 TREE_TYPE (expr), value_to_double_int (val), val.mask,
546 TREE_TYPE (expr),
547 shwi_to_double_int (bitpos / BITS_PER_UNIT),
548 double_int_zero);
549 val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
550 val.mask = mask;
551 if (val.lattice_val == CONSTANT)
552 val.value = double_int_to_tree (TREE_TYPE (expr), value);
553 else
554 val.value = NULL_TREE;
556 /* ??? We should handle i * 4 and more complex expressions from
557 the offset, possibly by just expanding get_value_for_expr. */
558 if (offset != NULL_TREE)
560 double_int value, mask;
561 prop_value_t oval = get_value_for_expr (offset, true);
562 bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
563 TREE_TYPE (expr), value_to_double_int (val), val.mask,
564 TREE_TYPE (expr), value_to_double_int (oval),
565 oval.mask);
566 val.mask = mask;
567 if (double_int_minus_one_p (mask))
569 val.lattice_val = VARYING;
570 val.value = NULL_TREE;
572 else
574 val.lattice_val = CONSTANT;
575 val.value = double_int_to_tree (TREE_TYPE (expr), value);
579 return val;
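/* Worked example (editorial): for the address of a 16-byte-aligned
   object, get_object_alignment returns 128 bits, so the lattice value
   above becomes CONSTANT 0 with a mask whose low four bits are clear:
   the pointer's low four bits are known to be zero and all remaining
   bits within the pointer's precision are unknown.  */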
582 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
583 return constant bits extracted from alignment information for
584 invariant addresses. */
586 static prop_value_t
587 get_value_for_expr (tree expr, bool for_bits_p)
589 prop_value_t val;
591 if (TREE_CODE (expr) == SSA_NAME)
593 val = *get_value (expr);
594 if (for_bits_p
595 && val.lattice_val == CONSTANT
596 && TREE_CODE (val.value) == ADDR_EXPR)
597 val = get_value_from_alignment (val.value);
599 else if (is_gimple_min_invariant (expr)
600 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
602 val.lattice_val = CONSTANT;
603 val.value = expr;
604 val.mask = double_int_zero;
605 canonicalize_float_value (&val);
607 else if (TREE_CODE (expr) == ADDR_EXPR)
608 val = get_value_from_alignment (expr);
609 else
611 val.lattice_val = VARYING;
612 val.mask = double_int_minus_one;
613 val.value = NULL_TREE;
615 return val;
618 /* Return the likely CCP lattice value for STMT.
620 If STMT has no operands, then return CONSTANT.
622 Else if undefinedness of operands of STMT cause its value to be
623 undefined, then return UNDEFINED.
625 Else if any operands of STMT are constants, then return CONSTANT.
627 Else return VARYING. */
629 static ccp_lattice_t
630 likely_value (gimple stmt)
632 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
633 tree use;
634 ssa_op_iter iter;
635 unsigned i;
637 enum gimple_code code = gimple_code (stmt);
639 /* This function appears to be called only for assignments, calls,
640 conditionals, and switches, due to the logic in visit_stmt. */
641 gcc_assert (code == GIMPLE_ASSIGN
642 || code == GIMPLE_CALL
643 || code == GIMPLE_COND
644 || code == GIMPLE_SWITCH);
646 /* If the statement has volatile operands, it won't fold to a
647 constant value. */
648 if (gimple_has_volatile_ops (stmt))
649 return VARYING;
651 /* Arrive here for more complex cases. */
652 has_constant_operand = false;
653 has_undefined_operand = false;
654 all_undefined_operands = true;
655 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
657 prop_value_t *val = get_value (use);
659 if (val->lattice_val == UNDEFINED)
660 has_undefined_operand = true;
661 else
662 all_undefined_operands = false;
664 if (val->lattice_val == CONSTANT)
665 has_constant_operand = true;
668 /* There may be constants in regular rhs operands. For calls we
669 have to ignore lhs, fndecl and static chain, otherwise only
670 the lhs. */
671 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
672 i < gimple_num_ops (stmt); ++i)
674 tree op = gimple_op (stmt, i);
675 if (!op || TREE_CODE (op) == SSA_NAME)
676 continue;
677 if (is_gimple_min_invariant (op))
678 has_constant_operand = true;
681 if (has_constant_operand)
682 all_undefined_operands = false;
684 /* If the operation combines operands like COMPLEX_EXPR make sure to
685 not mark the result UNDEFINED if only one part of the result is
686 undefined. */
687 if (has_undefined_operand && all_undefined_operands)
688 return UNDEFINED;
689 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
691 switch (gimple_assign_rhs_code (stmt))
693 /* Unary operators are handled with all_undefined_operands. */
694 case PLUS_EXPR:
695 case MINUS_EXPR:
696 case POINTER_PLUS_EXPR:
697 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
698 Not bitwise operators, one VARYING operand may specify the
699 result completely. Not logical operators for the same reason.
700 Not COMPLEX_EXPR as one VARYING operand makes the result partly
701 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
702 the undefined operand may be promoted. */
703 return UNDEFINED;
705 default:
 709 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
710 fall back to VARYING even if there were CONSTANT operands. */
711 if (has_undefined_operand)
712 return VARYING;
714 /* We do not consider virtual operands here -- load from read-only
715 memory may have only VARYING virtual operands, but still be
716 constant. */
717 if (has_constant_operand
718 || gimple_references_memory_p (stmt))
719 return CONSTANT;
721 return VARYING;
724 /* Returns true if STMT cannot be constant. */
726 static bool
727 surely_varying_stmt_p (gimple stmt)
729 /* If the statement has operands that we cannot handle, it cannot be
730 constant. */
731 if (gimple_has_volatile_ops (stmt))
732 return true;
 734 /* If it is a call that does not return a value, or is a direct
 735 call to a function that is not a builtin, it is varying. */
736 if (is_gimple_call (stmt))
738 tree fndecl;
739 if (!gimple_call_lhs (stmt)
740 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
741 && !DECL_BUILT_IN (fndecl)))
742 return true;
745 /* Any other store operation is not interesting. */
746 else if (gimple_vdef (stmt))
747 return true;
749 /* Anything other than assignments and conditional jumps are not
750 interesting for CCP. */
751 if (gimple_code (stmt) != GIMPLE_ASSIGN
752 && gimple_code (stmt) != GIMPLE_COND
753 && gimple_code (stmt) != GIMPLE_SWITCH
754 && gimple_code (stmt) != GIMPLE_CALL)
755 return true;
757 return false;
760 /* Initialize local data structures for CCP. */
762 static void
763 ccp_initialize (void)
765 basic_block bb;
767 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
769 /* Initialize simulation flags for PHI nodes and statements. */
770 FOR_EACH_BB (bb)
772 gimple_stmt_iterator i;
774 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
776 gimple stmt = gsi_stmt (i);
777 bool is_varying;
 779 /* If the statement is a control insn, then we must simulate
 780 it at least once; otherwise its outgoing edges would never
 781 get added. */
782 if (stmt_ends_bb_p (stmt))
783 is_varying = false;
784 else
785 is_varying = surely_varying_stmt_p (stmt);
787 if (is_varying)
789 tree def;
790 ssa_op_iter iter;
792 /* If the statement will not produce a constant, mark
793 all its outputs VARYING. */
794 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
795 set_value_varying (def);
797 prop_set_simulate_again (stmt, !is_varying);
801 /* Now process PHI nodes. We never clear the simulate_again flag on
802 phi nodes, since we do not know which edges are executable yet,
803 except for phi nodes for virtual operands when we do not do store ccp. */
804 FOR_EACH_BB (bb)
806 gimple_stmt_iterator i;
808 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
810 gimple phi = gsi_stmt (i);
812 if (!is_gimple_reg (gimple_phi_result (phi)))
813 prop_set_simulate_again (phi, false);
814 else
815 prop_set_simulate_again (phi, true);
 820 /* Debug count support. Reset the lattice values of SSA names to
 821 VARYING once the total number of SSA names analyzed exceeds
 822 the specified debug count. */
824 static void
825 do_dbg_cnt (void)
827 unsigned i;
828 for (i = 0; i < num_ssa_names; i++)
830 if (!dbg_cnt (ccp))
832 const_val[i].lattice_val = VARYING;
833 const_val[i].mask = double_int_minus_one;
834 const_val[i].value = NULL_TREE;
840 /* Do final substitution of propagated values, cleanup the flowgraph and
841 free allocated storage.
843 Return TRUE when something was optimized. */
845 static bool
846 ccp_finalize (void)
848 bool something_changed;
849 unsigned i;
851 do_dbg_cnt ();
853 /* Derive alignment and misalignment information from partially
854 constant pointers in the lattice. */
855 for (i = 1; i < num_ssa_names; ++i)
857 tree name = ssa_name (i);
858 prop_value_t *val;
859 struct ptr_info_def *pi;
860 unsigned int tem, align;
862 if (!name
863 || !POINTER_TYPE_P (TREE_TYPE (name)))
864 continue;
866 val = get_value (name);
867 if (val->lattice_val != CONSTANT
868 || TREE_CODE (val->value) != INTEGER_CST)
869 continue;
871 /* Trailing constant bits specify the alignment, trailing value
872 bits the misalignment. */
873 tem = val->mask.low;
874 align = (tem & -tem);
875 if (align == 1)
876 continue;
878 pi = get_ptr_info (name);
879 pi->align = align;
880 pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
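/* Worked example (editorial): a pointer lattice value whose
   mask.low is 0xfffffff8 and whose value has low bits 100 in binary
   has its low three bits known; tem & -tem isolates the lowest
   unknown bit, giving align == 8 and misalign == 4, i.e. the pointer
   is known to be 4 mod 8.  */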
883 /* Perform substitutions based on the known constant values. */
884 something_changed = substitute_and_fold (get_constant_value,
885 ccp_fold_stmt, true);
887 free (const_val);
888 const_val = NULL;
 889 return something_changed;
893 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
894 in VAL1.
896 any M UNDEFINED = any
897 any M VARYING = VARYING
898 Ci M Cj = Ci if (i == j)
899 Ci M Cj = VARYING if (i != j)
902 static void
903 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
905 if (val1->lattice_val == UNDEFINED)
907 /* UNDEFINED M any = any */
908 *val1 = *val2;
910 else if (val2->lattice_val == UNDEFINED)
912 /* any M UNDEFINED = any
913 Nothing to do. VAL1 already contains the value we want. */
916 else if (val1->lattice_val == VARYING
917 || val2->lattice_val == VARYING)
919 /* any M VARYING = VARYING. */
920 val1->lattice_val = VARYING;
921 val1->mask = double_int_minus_one;
922 val1->value = NULL_TREE;
924 else if (val1->lattice_val == CONSTANT
925 && val2->lattice_val == CONSTANT
926 && TREE_CODE (val1->value) == INTEGER_CST
927 && TREE_CODE (val2->value) == INTEGER_CST)
929 /* Ci M Cj = Ci if (i == j)
930 Ci M Cj = VARYING if (i != j)
932 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
933 drop to varying. */
934 val1->mask
935 = double_int_ior (double_int_ior (val1->mask,
936 val2->mask),
937 double_int_xor (tree_to_double_int (val1->value),
938 tree_to_double_int (val2->value)));
939 if (double_int_minus_one_p (val1->mask))
941 val1->lattice_val = VARYING;
942 val1->value = NULL_TREE;
945 else if (val1->lattice_val == CONSTANT
946 && val2->lattice_val == CONSTANT
947 && simple_cst_equal (val1->value, val2->value) == 1)
949 /* Ci M Cj = Ci if (i == j)
950 Ci M Cj = VARYING if (i != j)
952 VAL1 already contains the value we want for equivalent values. */
954 else if (val1->lattice_val == CONSTANT
955 && val2->lattice_val == CONSTANT
956 && (TREE_CODE (val1->value) == ADDR_EXPR
957 || TREE_CODE (val2->value) == ADDR_EXPR))
959 /* When not equal addresses are involved try meeting for
960 alignment. */
961 prop_value_t tem = *val2;
962 if (TREE_CODE (val1->value) == ADDR_EXPR)
963 *val1 = get_value_for_expr (val1->value, true);
964 if (TREE_CODE (val2->value) == ADDR_EXPR)
965 tem = get_value_for_expr (val2->value, true);
966 ccp_lattice_meet (val1, &tem);
968 else
970 /* Any other combination is VARYING. */
971 val1->lattice_val = VARYING;
972 val1->mask = double_int_minus_one;
973 val1->value = NULL_TREE;
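/* Editorial sketch, not used by the pass: for the INTEGER_CST case
   above, a bit stays known only if it is known on both sides and the
   two values agree on it, so with plain unsigned arithmetic in place
   of double_int the resulting mask is m1 | m2 | (v1 ^ v2).  Meeting
   the constants 0x12 and 0x16 (both with mask 0), for instance,
   yields value 0x12 with mask 0x04: only bit 2 becomes unknown.  */

static inline unsigned HOST_WIDE_INT
ccp_meet_mask_sketch (unsigned HOST_WIDE_INT v1, unsigned HOST_WIDE_INT m1,
                      unsigned HOST_WIDE_INT v2, unsigned HOST_WIDE_INT m2)
{
  /* Unknown in either input, or disagreeing between the inputs,
     means unknown in the result.  */
  return m1 | m2 | (v1 ^ v2);
}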
978 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
979 lattice values to determine PHI_NODE's lattice value. The value of a
980 PHI node is determined calling ccp_lattice_meet with all the arguments
981 of the PHI node that are incoming via executable edges. */
983 static enum ssa_prop_result
984 ccp_visit_phi_node (gimple phi)
986 unsigned i;
987 prop_value_t *old_val, new_val;
989 if (dump_file && (dump_flags & TDF_DETAILS))
991 fprintf (dump_file, "\nVisiting PHI node: ");
992 print_gimple_stmt (dump_file, phi, 0, dump_flags);
995 old_val = get_value (gimple_phi_result (phi));
996 switch (old_val->lattice_val)
998 case VARYING:
999 return SSA_PROP_VARYING;
1001 case CONSTANT:
1002 new_val = *old_val;
1003 break;
1005 case UNDEFINED:
1006 new_val.lattice_val = UNDEFINED;
1007 new_val.value = NULL_TREE;
1008 break;
1010 default:
1011 gcc_unreachable ();
1014 for (i = 0; i < gimple_phi_num_args (phi); i++)
1016 /* Compute the meet operator over all the PHI arguments flowing
1017 through executable edges. */
1018 edge e = gimple_phi_arg_edge (phi, i);
1020 if (dump_file && (dump_flags & TDF_DETAILS))
1022 fprintf (dump_file,
1023 "\n Argument #%d (%d -> %d %sexecutable)\n",
1024 i, e->src->index, e->dest->index,
1025 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
 1028 /* If the incoming edge is executable, compute the meet operator for
1029 the existing value of the PHI node and the current PHI argument. */
1030 if (e->flags & EDGE_EXECUTABLE)
1032 tree arg = gimple_phi_arg (phi, i)->def;
1033 prop_value_t arg_val = get_value_for_expr (arg, false);
1035 ccp_lattice_meet (&new_val, &arg_val);
1037 if (dump_file && (dump_flags & TDF_DETAILS))
1039 fprintf (dump_file, "\t");
1040 print_generic_expr (dump_file, arg, dump_flags);
1041 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1042 fprintf (dump_file, "\n");
1045 if (new_val.lattice_val == VARYING)
1046 break;
1050 if (dump_file && (dump_flags & TDF_DETAILS))
1052 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1053 fprintf (dump_file, "\n\n");
1056 /* Make the transition to the new value. */
1057 if (set_lattice_value (gimple_phi_result (phi), new_val))
1059 if (new_val.lattice_val == VARYING)
1060 return SSA_PROP_VARYING;
1061 else
1062 return SSA_PROP_INTERESTING;
1064 else
1065 return SSA_PROP_NOT_INTERESTING;
1068 /* Return the constant value for OP or OP otherwise. */
1070 static tree
1071 valueize_op (tree op)
1073 if (TREE_CODE (op) == SSA_NAME)
1075 tree tem = get_constant_value (op);
1076 if (tem)
1077 return tem;
1079 return op;
1082 /* CCP specific front-end to the non-destructive constant folding
1083 routines.
1085 Attempt to simplify the RHS of STMT knowing that one or more
1086 operands are constants.
1088 If simplification is possible, return the simplified RHS,
1089 otherwise return the original RHS or NULL_TREE. */
1091 static tree
1092 ccp_fold (gimple stmt)
1094 location_t loc = gimple_location (stmt);
1095 switch (gimple_code (stmt))
1097 case GIMPLE_ASSIGN:
1099 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1101 switch (get_gimple_rhs_class (subcode))
1103 case GIMPLE_SINGLE_RHS:
1105 tree rhs = gimple_assign_rhs1 (stmt);
1106 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
1108 if (TREE_CODE (rhs) == SSA_NAME)
1110 /* If the RHS is an SSA_NAME, return its known constant value,
1111 if any. */
1112 return get_constant_value (rhs);
1114 /* Handle propagating invariant addresses into address operations.
1115 The folding we do here matches that in tree-ssa-forwprop.c. */
1116 else if (TREE_CODE (rhs) == ADDR_EXPR)
1118 tree *base;
1119 base = &TREE_OPERAND (rhs, 0);
1120 while (handled_component_p (*base))
1121 base = &TREE_OPERAND (*base, 0);
1122 if (TREE_CODE (*base) == MEM_REF
1123 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
1125 tree val = get_constant_value (TREE_OPERAND (*base, 0));
1126 if (val
1127 && TREE_CODE (val) == ADDR_EXPR)
1129 tree ret, save = *base;
1130 tree new_base;
1131 new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
1132 unshare_expr (val),
1133 TREE_OPERAND (*base, 1));
1134 /* We need to return a new tree, not modify the IL
1135 or share parts of it. So play some tricks to
1136 avoid manually building it. */
1137 *base = new_base;
1138 ret = unshare_expr (rhs);
1139 recompute_tree_invariant_for_addr_expr (ret);
1140 *base = save;
1141 return ret;
1145 else if (TREE_CODE (rhs) == CONSTRUCTOR
1146 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
1147 && (CONSTRUCTOR_NELTS (rhs)
1148 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
1150 unsigned i;
1151 tree val, list;
1153 list = NULL_TREE;
1154 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
1156 val = valueize_op (val);
1157 if (TREE_CODE (val) == INTEGER_CST
1158 || TREE_CODE (val) == REAL_CST
1159 || TREE_CODE (val) == FIXED_CST)
1160 list = tree_cons (NULL_TREE, val, list);
1161 else
1162 return NULL_TREE;
1165 return build_vector (TREE_TYPE (rhs), nreverse (list));
1168 if (kind == tcc_reference)
1170 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
1171 || TREE_CODE (rhs) == REALPART_EXPR
1172 || TREE_CODE (rhs) == IMAGPART_EXPR)
1173 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1175 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
1176 if (val)
1177 return fold_unary_loc (EXPR_LOCATION (rhs),
1178 TREE_CODE (rhs),
1179 TREE_TYPE (rhs), val);
1181 else if (TREE_CODE (rhs) == MEM_REF
1182 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1184 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
1185 if (val
1186 && TREE_CODE (val) == ADDR_EXPR)
1188 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
1189 unshare_expr (val),
1190 TREE_OPERAND (rhs, 1));
1191 if (tem)
1192 rhs = tem;
1195 return fold_const_aggregate_ref (rhs);
1197 else if (kind == tcc_declaration)
1198 return get_symbol_constant_value (rhs);
1199 return rhs;
1202 case GIMPLE_UNARY_RHS:
1204 /* Handle unary operators that can appear in GIMPLE form.
1205 Note that we know the single operand must be a constant,
1206 so this should almost always return a simplified RHS. */
1207 tree lhs = gimple_assign_lhs (stmt);
1208 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1210 /* Conversions are useless for CCP purposes if they are
1211 value-preserving. Thus the restrictions that
1212 useless_type_conversion_p places for pointer type conversions
1213 do not apply here. Substitution later will only substitute to
1214 allowed places. */
1215 if (CONVERT_EXPR_CODE_P (subcode)
1216 && POINTER_TYPE_P (TREE_TYPE (lhs))
1217 && POINTER_TYPE_P (TREE_TYPE (op0)))
1219 tree tem;
1220 /* Try to re-construct array references on-the-fly. */
1221 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1222 TREE_TYPE (op0))
1223 && ((tem = maybe_fold_offset_to_address
1224 (loc,
1225 op0, integer_zero_node, TREE_TYPE (lhs)))
1226 != NULL_TREE))
1227 return tem;
1228 return op0;
1231 return
1232 fold_unary_ignore_overflow_loc (loc, subcode,
1233 gimple_expr_type (stmt), op0);
1236 case GIMPLE_BINARY_RHS:
1238 /* Handle binary operators that can appear in GIMPLE form. */
1239 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1240 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
1242 /* Translate &x + CST into an invariant form suitable for
1243 further propagation. */
1244 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1245 && TREE_CODE (op0) == ADDR_EXPR
1246 && TREE_CODE (op1) == INTEGER_CST)
1248 tree off = fold_convert (ptr_type_node, op1);
1249 return build_fold_addr_expr
1250 (fold_build2 (MEM_REF,
1251 TREE_TYPE (TREE_TYPE (op0)),
1252 unshare_expr (op0), off));
1255 return fold_binary_loc (loc, subcode,
1256 gimple_expr_type (stmt), op0, op1);
1259 case GIMPLE_TERNARY_RHS:
1261 /* Handle ternary operators that can appear in GIMPLE form. */
1262 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1263 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
1264 tree op2 = valueize_op (gimple_assign_rhs3 (stmt));
1266 return fold_ternary_loc (loc, subcode,
1267 gimple_expr_type (stmt), op0, op1, op2);
1270 default:
1271 gcc_unreachable ();
1274 break;
1276 case GIMPLE_CALL:
1278 tree fn = valueize_op (gimple_call_fn (stmt));
1279 if (TREE_CODE (fn) == ADDR_EXPR
1280 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1281 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1283 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1284 tree call, retval;
1285 unsigned i;
1286 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1287 args[i] = valueize_op (gimple_call_arg (stmt, i));
1288 call = build_call_array_loc (loc,
1289 gimple_call_return_type (stmt),
1290 fn, gimple_call_num_args (stmt), args);
1291 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
1292 if (retval)
1293 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1294 STRIP_NOPS (retval);
1295 return retval;
1297 return NULL_TREE;
1300 case GIMPLE_COND:
1302 /* Handle comparison operators that can appear in GIMPLE form. */
1303 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1304 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1305 enum tree_code code = gimple_cond_code (stmt);
1306 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1309 case GIMPLE_SWITCH:
1311 /* Return the constant switch index. */
1312 return valueize_op (gimple_switch_index (stmt));
1315 default:
1316 gcc_unreachable ();
 1320 /* See if we can find a constructor defining the value of BASE.
 1321 When we know the constructor at a constant offset (such as when
 1322 BASE is array[40] and we know the constructor of array), then
 1323 BIT_OFFSET is adjusted accordingly.
1325 As a special case, return error_mark_node when constructor
1326 is not explicitly available, but it is known to be zero
1327 such as 'static const int a;'. */
1328 static tree
1329 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset)
1331 HOST_WIDE_INT bit_offset2, size, max_size;
1332 if (TREE_CODE (base) == MEM_REF)
1334 if (!integer_zerop (TREE_OPERAND (base, 1)))
1336 if (!host_integerp (TREE_OPERAND (base, 1), 0))
1337 return NULL_TREE;
1338 *bit_offset += (mem_ref_offset (base).low
1339 * BITS_PER_UNIT);
1342 base = get_constant_value (TREE_OPERAND (base, 0));
1343 if (!base || TREE_CODE (base) != ADDR_EXPR)
1344 return NULL_TREE;
1345 base = TREE_OPERAND (base, 0);
1348 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1349 DECL_INITIAL. If BASE is a nested reference into another
1350 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1351 the inner reference. */
1352 switch (TREE_CODE (base))
1354 case VAR_DECL:
1355 if (!const_value_known_p (base))
1356 return NULL_TREE;
1358 /* Fallthru. */
1359 case CONST_DECL:
1360 if (!DECL_INITIAL (base)
1361 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1362 return error_mark_node;
1363 return DECL_INITIAL (base);
1365 case ARRAY_REF:
1366 case COMPONENT_REF:
1367 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
1368 if (max_size == -1 || size != max_size)
1369 return NULL_TREE;
1370 *bit_offset += bit_offset2;
1371 return get_base_constructor (base, bit_offset);
1373 case STRING_CST:
1374 case CONSTRUCTOR:
1375 return base;
1377 default:
1378 return NULL_TREE;
1382 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
1383 to the memory at bit OFFSET.
 1385 We only do the simple job of folding byte accesses. */
1387 static tree
1388 fold_string_cst_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
1389 unsigned HOST_WIDE_INT size)
1391 if (INTEGRAL_TYPE_P (type)
1392 && (TYPE_MODE (type)
1393 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1394 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1395 == MODE_INT)
1396 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1397 && size == BITS_PER_UNIT
1398 && !(offset % BITS_PER_UNIT))
1400 offset /= BITS_PER_UNIT;
1401 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
1402 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
1403 [offset]));
1404 /* Folding
1405 const char a[20]="hello";
1406 return a[10];
 1408 might lead to an offset greater than the string length. In this case
 1409 we know the value is either initialized to 0 or out of bounds. Return 0
1410 in both cases. */
1411 return build_zero_cst (type);
1413 return NULL_TREE;
1416 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
1417 SIZE to the memory at bit OFFSET. */
1419 static tree
1420 fold_array_ctor_reference (tree type, tree ctor,
1421 unsigned HOST_WIDE_INT offset,
1422 unsigned HOST_WIDE_INT size)
1424 unsigned HOST_WIDE_INT cnt;
1425 tree cfield, cval;
1426 double_int low_bound, elt_size;
1427 double_int index, max_index;
1428 double_int access_index;
1429 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
1430 HOST_WIDE_INT inner_offset;
1432 /* Compute low bound and elt size. */
1433 if (domain_type && TYPE_MIN_VALUE (domain_type))
 1435 /* Static constructors for variably sized objects make no sense. */
1436 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
1437 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
1439 else
1440 low_bound = double_int_zero;
 1441 /* Static constructors for variably sized objects make no sense. */
 1442 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
1443 == INTEGER_CST);
1444 elt_size =
1445 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
1448 /* We can handle only constantly sized accesses that are known to not
 1449 be larger than the size of the array element. */
1450 if (!TYPE_SIZE_UNIT (type)
1451 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1452 || double_int_cmp (elt_size,
1453 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
1454 return NULL_TREE;
1456 /* Compute the array index we look for. */
1457 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
1458 elt_size, TRUNC_DIV_EXPR);
1459 access_index = double_int_add (access_index, low_bound);
1461 /* And offset within the access. */
1462 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
 1464 /* See if the array field is large enough to span the whole access. We do
 1465 not care to fold accesses spanning multiple array indexes. */
1466 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
1467 return NULL_TREE;
1469 index = double_int_sub (low_bound, double_int_one);
1470 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
 1472 /* An array constructor might explicitly set the index, specify a range,
 1473 or leave the index NULL, meaning that it is the next index after the
 1474 previous one. */
1475 if (cfield)
1477 if (TREE_CODE (cfield) == INTEGER_CST)
1478 max_index = index = tree_to_double_int (cfield);
1479 else
1481 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
1482 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
1483 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
1486 else
1487 max_index = index = double_int_add (index, double_int_one);
 1489 /* Do we have a match? */
1490 if (double_int_cmp (access_index, index, 1) >= 0
1491 && double_int_cmp (access_index, max_index, 1) <= 0)
1492 return fold_ctor_reference (type, cval, inner_offset, size);
 1494 /* When memory is not explicitly mentioned in the constructor,
1495 it is 0 (or out of range). */
1496 return build_zero_cst (type);
1499 /* CTOR is CONSTRUCTOR of an aggregate or vector.
1500 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
1502 static tree
1503 fold_nonarray_ctor_reference (tree type, tree ctor,
1504 unsigned HOST_WIDE_INT offset,
1505 unsigned HOST_WIDE_INT size)
1507 unsigned HOST_WIDE_INT cnt;
1508 tree cfield, cval;
1510 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
1511 cval)
1513 tree byte_offset = DECL_FIELD_OFFSET (cfield);
1514 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
1515 tree field_size = DECL_SIZE (cfield);
1516 double_int bitoffset;
1517 double_int byte_offset_cst = tree_to_double_int (byte_offset);
1518 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
1519 double_int bitoffset_end;
 1521 /* Variable sized objects in static constructors make no sense,
1522 but field_size can be NULL for flexible array members. */
1523 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
1524 && TREE_CODE (byte_offset) == INTEGER_CST
1525 && (field_size != NULL_TREE
1526 ? TREE_CODE (field_size) == INTEGER_CST
1527 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
1529 /* Compute bit offset of the field. */
1530 bitoffset = double_int_add (tree_to_double_int (field_offset),
1531 double_int_mul (byte_offset_cst,
1532 bits_per_unit_cst));
1533 /* Compute bit offset where the field ends. */
1534 if (field_size != NULL_TREE)
1535 bitoffset_end = double_int_add (bitoffset,
1536 tree_to_double_int (field_size));
1537 else
1538 bitoffset_end = double_int_zero;
 1540 /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)? */
1541 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
1542 && (field_size == NULL_TREE
1543 || double_int_cmp (uhwi_to_double_int (offset),
1544 bitoffset_end, 0) < 0))
1546 double_int access_end = double_int_add (uhwi_to_double_int (offset),
1547 uhwi_to_double_int (size));
1548 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
1549 bitoffset);
1550 /* We do have overlap. Now see if field is large enough to
1551 cover the access. Give up for accesses spanning multiple
1552 fields. */
1553 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
1554 return NULL_TREE;
1555 return fold_ctor_reference (type, cval,
1556 double_int_to_uhwi (inner_offset), size);
 1559 /* When memory is not explicitly mentioned in the constructor, it is 0. */
1560 return build_zero_cst (type);
1563 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
1564 to the memory at bit OFFSET. */
1566 static tree
1567 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
1568 unsigned HOST_WIDE_INT size)
1570 tree ret;
1572 /* We found the field with exact match. */
1573 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
1574 && !offset)
1575 return canonicalize_constructor_val (ctor);
 1577 /* We are at the end of the walk; see if we can view-convert the
1578 result. */
1579 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
1580 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
1581 && operand_equal_p (TYPE_SIZE (type),
1582 TYPE_SIZE (TREE_TYPE (ctor)), 0))
1584 ret = canonicalize_constructor_val (ctor);
1585 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
1586 if (ret)
1587 STRIP_NOPS (ret);
1588 return ret;
1590 if (TREE_CODE (ctor) == STRING_CST)
1591 return fold_string_cst_ctor_reference (type, ctor, offset, size);
1592 if (TREE_CODE (ctor) == CONSTRUCTOR)
1595 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
1596 return fold_array_ctor_reference (type, ctor, offset, size);
1597 else
1598 return fold_nonarray_ctor_reference (type, ctor, offset, size);
1601 return NULL_TREE;
1604 /* Return the tree representing the element referenced by T if T is an
1605 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1606 NULL_TREE otherwise. */
1608 tree
1609 fold_const_aggregate_ref (tree t)
1611 tree ctor, idx, base;
1612 HOST_WIDE_INT offset, size, max_size;
1613 tree tem;
1615 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1616 return get_symbol_constant_value (t);
1618 tem = fold_read_from_constant_string (t);
1619 if (tem)
1620 return tem;
1622 switch (TREE_CODE (t))
1624 case ARRAY_REF:
1625 case ARRAY_RANGE_REF:
1626 /* Constant indexes are handled well by get_base_constructor.
1627 Only special case variable offsets.
1628 FIXME: This code can't handle nested references with variable indexes
1629 (they will be handled only by iteration of ccp). Perhaps we can bring
1630 get_ref_base_and_extent here and make it use get_constant_value. */
1631 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
1632 && (idx = get_constant_value (TREE_OPERAND (t, 1)))
1633 && host_integerp (idx, 0))
1635 tree low_bound, unit_size;
1637 /* If the resulting bit-offset is constant, track it. */
1638 if ((low_bound = array_ref_low_bound (t),
1639 host_integerp (low_bound, 0))
1640 && (unit_size = array_ref_element_size (t),
1641 host_integerp (unit_size, 1)))
1643 offset = TREE_INT_CST_LOW (idx);
1644 offset -= TREE_INT_CST_LOW (low_bound);
1645 offset *= TREE_INT_CST_LOW (unit_size);
1646 offset *= BITS_PER_UNIT;
1648 base = TREE_OPERAND (t, 0);
1649 ctor = get_base_constructor (base, &offset);
1650 /* Empty constructor. Always fold to 0. */
1651 if (ctor == error_mark_node)
1652 return build_zero_cst (TREE_TYPE (t));
1653 /* Out of bound array access. Value is undefined, but don't fold. */
1654 if (offset < 0)
1655 return NULL_TREE;
1656 /* We can not determine ctor. */
1657 if (!ctor)
1658 return NULL_TREE;
1659 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
1660 TREE_INT_CST_LOW (unit_size)
1661 * BITS_PER_UNIT);
1664 /* Fallthru. */
1666 case COMPONENT_REF:
1667 case BIT_FIELD_REF:
1668 case TARGET_MEM_REF:
1669 case MEM_REF:
1670 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
1671 ctor = get_base_constructor (base, &offset);
1673 /* Empty constructor. Always fold to 0. */
1674 if (ctor == error_mark_node)
1675 return build_zero_cst (TREE_TYPE (t));
1676 /* We do not know precise address. */
1677 if (max_size == -1 || max_size != size)
1678 return NULL_TREE;
1679 /* We can not determine ctor. */
1680 if (!ctor)
1681 return NULL_TREE;
1683 /* Out of bound array access. Value is undefined, but don't fold. */
1684 if (offset < 0)
1685 return NULL_TREE;
1687 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
1689 case REALPART_EXPR:
1690 case IMAGPART_EXPR:
1692 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1693 if (c && TREE_CODE (c) == COMPLEX_CST)
1694 return fold_build1_loc (EXPR_LOCATION (t),
1695 TREE_CODE (t), TREE_TYPE (t), c);
1696 break;
1699 default:
1700 break;
1703 return NULL_TREE;
1706 /* Apply the operation CODE in type TYPE to the value, mask pair
1707 RVAL and RMASK representing a value of type RTYPE and set
1708 the value, mask pair *VAL and *MASK to the result. */
1710 static void
1711 bit_value_unop_1 (enum tree_code code, tree type,
1712 double_int *val, double_int *mask,
1713 tree rtype, double_int rval, double_int rmask)
1715 switch (code)
1717 case BIT_NOT_EXPR:
1718 *mask = rmask;
1719 *val = double_int_not (rval);
1720 break;
1722 case NEGATE_EXPR:
1724 double_int temv, temm;
1725 /* Return ~rval + 1. */
1726 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1727 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1728 type, temv, temm,
1729 type, double_int_one, double_int_zero);
1730 break;
1733 CASE_CONVERT:
1735 bool uns;
1737 /* First extend mask and value according to the original type. */
1738 uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
1739 ? 0 : TYPE_UNSIGNED (rtype));
1740 *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
1741 *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);
1743 /* Then extend mask and value according to the target type. */
1744 uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1745 ? 0 : TYPE_UNSIGNED (type));
1746 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1747 *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
1748 break;
1751 default:
1752 *mask = double_int_minus_one;
1753 break;
1757 /* Apply the operation CODE in type TYPE to the value, mask pairs
1758 R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
1759 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1761 static void
1762 bit_value_binop_1 (enum tree_code code, tree type,
1763 double_int *val, double_int *mask,
1764 tree r1type, double_int r1val, double_int r1mask,
1765 tree r2type, double_int r2val, double_int r2mask)
1767 bool uns = (TREE_CODE (type) == INTEGER_TYPE
1768 && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
 1769 /* Assume we'll get a constant result. Use an initial varying value;
1770 we fall back to varying in the end if necessary. */
1771 *mask = double_int_minus_one;
1772 switch (code)
1774 case BIT_AND_EXPR:
1775 /* The mask is constant where there is a known not
1776 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1777 *mask = double_int_and (double_int_ior (r1mask, r2mask),
1778 double_int_and (double_int_ior (r1val, r1mask),
1779 double_int_ior (r2val, r2mask)));
1780 *val = double_int_and (r1val, r2val);
1781 break;
1783 case BIT_IOR_EXPR:
1784 /* The mask is constant where there is a known
1785 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1786 *mask = double_int_and_not
1787 (double_int_ior (r1mask, r2mask),
1788 double_int_ior (double_int_and_not (r1val, r1mask),
1789 double_int_and_not (r2val, r2mask)));
1790 *val = double_int_ior (r1val, r2val);
1791 break;
1793 case BIT_XOR_EXPR:
1794 /* m1 | m2 */
1795 *mask = double_int_ior (r1mask, r2mask);
1796 *val = double_int_xor (r1val, r2val);
1797 break;
1799 case LROTATE_EXPR:
1800 case RROTATE_EXPR:
1801 if (double_int_zero_p (r2mask))
1803 HOST_WIDE_INT shift = r2val.low;
1804 if (code == RROTATE_EXPR)
1805 shift = -shift;
1806 *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
1807 *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
1809 break;
1811 case LSHIFT_EXPR:
1812 case RSHIFT_EXPR:
1813 /* ??? We can handle partially known shift counts if we know
 1814 their sign. That way we can tell that (x << (y | 8)) & 255
1815 is zero. */
1816 if (double_int_zero_p (r2mask))
1818 HOST_WIDE_INT shift = r2val.low;
1819 if (code == RSHIFT_EXPR)
1820 shift = -shift;
1821 /* We need to know if we are doing a left or a right shift
1822 to properly shift in zeros for left shift and unsigned
1823 right shifts and the sign bit for signed right shifts.
1824 For signed right shifts we shift in varying in case
1825 the sign bit was varying. */
1826 if (shift > 0)
1828 *mask = double_int_lshift (r1mask, shift,
1829 TYPE_PRECISION (type), false);
1830 *val = double_int_lshift (r1val, shift,
1831 TYPE_PRECISION (type), false);
1833 else if (shift < 0)
1835 shift = -shift;
1836 *mask = double_int_rshift (r1mask, shift,
1837 TYPE_PRECISION (type), !uns);
1838 *val = double_int_rshift (r1val, shift,
1839 TYPE_PRECISION (type), !uns);
1841 else
1843 *mask = r1mask;
1844 *val = r1val;
1847 break;
1849 case PLUS_EXPR:
1850 case POINTER_PLUS_EXPR:
1852 double_int lo, hi;
1853 /* Do the addition with unknown bits set to zero, to give carry-ins of
1854 zero wherever possible. */
1855 lo = double_int_add (double_int_and_not (r1val, r1mask),
1856 double_int_and_not (r2val, r2mask));
1857 lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
1858 /* Do the addition with unknown bits set to one, to give carry-ins of
1859 one wherever possible. */
1860 hi = double_int_add (double_int_ior (r1val, r1mask),
1861 double_int_ior (r2val, r2mask));
1862 hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
1863 /* Each bit in the result is known if (a) the corresponding bits in
1864 both inputs are known, and (b) the carry-in to that bit position
1865 is known. We can check condition (b) by seeing if we got the same
1866 result with minimised carries as with maximised carries. */
1867 *mask = double_int_ior (double_int_ior (r1mask, r2mask),
1868 double_int_xor (lo, hi));
1869 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1870 /* It shouldn't matter whether we choose lo or hi here. */
1871 *val = lo;
1872 break;
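/* Worked example (editorial): adding the constant 4 to a value known
   to be 0x8 with mask 0x3 (i.e. one of 8..11) gives lo = 0xc,
   hi = 0xf, lo ^ hi = 0x3, so the result is value 0xc with mask 0x3,
   i.e. one of 12..15: the possible carries stay confined to the
   already unknown bits.  */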
1875 case MINUS_EXPR:
1877 double_int temv, temm;
1878 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1879 r2type, r2val, r2mask);
1880 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1881 r1type, r1val, r1mask,
1882 r2type, temv, temm);
1883 break;
1886 case MULT_EXPR:
1888 /* Just track trailing zeros in both operands and transfer
1889 them to the other. */
1890 int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
1891 int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
1892 if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
1894 *mask = double_int_zero;
1895 *val = double_int_zero;
1897 else if (r1tz + r2tz > 0)
1899 *mask = double_int_not (double_int_mask (r1tz + r2tz));
1900 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1901 *val = double_int_zero;
1903 break;
1906 case EQ_EXPR:
1907 case NE_EXPR:
1909 double_int m = double_int_ior (r1mask, r2mask);
1910 if (!double_int_equal_p (double_int_and_not (r1val, m),
1911 double_int_and_not (r2val, m)))
1913 *mask = double_int_zero;
1914 *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
1916 else
1918 /* We know the result of a comparison is always one or zero. */
1919 *mask = double_int_one;
1920 *val = double_int_zero;
1922 break;
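/* For example (illustrative values): if r1 has val = 0x2, mask = 0xc
   and r2 is fully known as 0x1, the bits known in both operands
   already differ (0x2 vs. 0x1 outside the combined mask 0xc), so the
   operands cannot be equal: EQ_EXPR yields a known 0 and NE_EXPR a
   known 1.  */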
1925 case GE_EXPR:
1926 case GT_EXPR:
1928 double_int tem = r1val;
1929 r1val = r2val;
1930 r2val = tem;
1931 tem = r1mask;
1932 r1mask = r2mask;
1933 r2mask = tem;
1934 code = swap_tree_comparison (code);
1936 /* Fallthru. */
1937 case LT_EXPR:
1938 case LE_EXPR:
1940 int minmax, maxmin;
1941 /* If the most significant bits are not known we know nothing. */
1942 if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
1943 break;
1945 /* If we know the most significant bits we know the value
1946 ranges by means of treating varying bits as zero
1947 or one. Do a cross comparison of the max/min pairs. */
1948 maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
1949 double_int_and_not (r2val, r2mask), uns);
1950 minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
1951 double_int_ior (r2val, r2mask), uns);
1952 if (maxmin < 0) /* r1 is less than r2. */
1954 *mask = double_int_zero;
1955 *val = double_int_one;
1957 else if (minmax > 0) /* r1 is not less than or equal to r2. */
1959 *mask = double_int_zero;
1960 *val = double_int_zero;
1962 else if (maxmin == minmax) /* r1 and r2 are equal. */
1964 /* This should probably never happen, as we'd have
1965 folded the comparison during fully constant value folding. */
1966 *mask = double_int_zero;
1967 *val = (code == LE_EXPR ? double_int_one : double_int_zero);
1969 else
1971 /* We know the result of a comparison is always one or zero. */
1972 *mask = double_int_one;
1973 *val = double_int_zero;
1975 break;
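/* A small worked example (illustrative, 4-bit unsigned operands):
   r1val = 0x0, r1mask = 0x3 puts r1 in [0, 3], and r2val = 0x4,
   r2mask = 0x3 puts r2 in [4, 7].  The cross comparison finds
   maxmin < 0 (r1's maximum 3 is below r2's minimum 4), so for
   LT_EXPR and LE_EXPR the result is a known constant 1.  */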
1978 default:;
1982 /* Return the propagation value when applying the operation CODE to
1983 the value RHS yielding type TYPE. */
1985 static prop_value_t
1986 bit_value_unop (enum tree_code code, tree type, tree rhs)
1988 prop_value_t rval = get_value_for_expr (rhs, true);
1989 double_int value, mask;
1990 prop_value_t val;
1991 gcc_assert ((rval.lattice_val == CONSTANT
1992 && TREE_CODE (rval.value) == INTEGER_CST)
1993 || double_int_minus_one_p (rval.mask));
1994 bit_value_unop_1 (code, type, &value, &mask,
1995 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
1996 if (!double_int_minus_one_p (mask))
1998 val.lattice_val = CONSTANT;
1999 val.mask = mask;
2000 /* ??? Delay building trees here. */
2001 val.value = double_int_to_tree (type, value);
2003 else
2005 val.lattice_val = VARYING;
2006 val.value = NULL_TREE;
2007 val.mask = double_int_minus_one;
2009 return val;
2012 /* Return the propagation value when applying the operation CODE to
2013 the values RHS1 and RHS2 yielding type TYPE. */
2015 static prop_value_t
2016 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
2018 prop_value_t r1val = get_value_for_expr (rhs1, true);
2019 prop_value_t r2val = get_value_for_expr (rhs2, true);
2020 double_int value, mask;
2021 prop_value_t val;
2022 gcc_assert ((r1val.lattice_val == CONSTANT
2023 && TREE_CODE (r1val.value) == INTEGER_CST)
2024 || double_int_minus_one_p (r1val.mask));
2025 gcc_assert ((r2val.lattice_val == CONSTANT
2026 && TREE_CODE (r2val.value) == INTEGER_CST)
2027 || double_int_minus_one_p (r2val.mask));
2028 bit_value_binop_1 (code, type, &value, &mask,
2029 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
2030 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
2031 if (!double_int_minus_one_p (mask))
2033 val.lattice_val = CONSTANT;
2034 val.mask = mask;
2035 /* ??? Delay building trees here. */
2036 val.value = double_int_to_tree (type, value);
2038 else
2040 val.lattice_val = VARYING;
2041 val.value = NULL_TREE;
2042 val.mask = double_int_minus_one;
2044 return val;
2047 /* Evaluate statement STMT.
2048 Valid only for assignments, calls, conditionals, and switches. */
2050 static prop_value_t
2051 evaluate_stmt (gimple stmt)
2053 prop_value_t val;
2054 tree simplified = NULL_TREE;
2055 ccp_lattice_t likelyvalue = likely_value (stmt);
2056 bool is_constant = false;
2058 if (dump_file && (dump_flags & TDF_DETAILS))
2060 fprintf (dump_file, "which is likely ");
2061 switch (likelyvalue)
2063 case CONSTANT:
2064 fprintf (dump_file, "CONSTANT");
2065 break;
2066 case UNDEFINED:
2067 fprintf (dump_file, "UNDEFINED");
2068 break;
2069 case VARYING:
2070 fprintf (dump_file, "VARYING");
2071 break;
2072 default:;
2074 fprintf (dump_file, "\n");
2077 /* If the statement is likely to have a CONSTANT result, then try
2078 to fold the statement to determine the constant value. */
2079 /* FIXME. This is the only place that we call ccp_fold.
2080 Since likely_value never returns CONSTANT for calls, we will
2081 not attempt to fold them, including builtins that might profit from it. */
2082 if (likelyvalue == CONSTANT)
2084 fold_defer_overflow_warnings ();
2085 simplified = ccp_fold (stmt);
2086 is_constant = simplified && is_gimple_min_invariant (simplified);
2087 fold_undefer_overflow_warnings (is_constant, stmt, 0);
2088 if (is_constant)
2090 /* The statement produced a constant value. */
2091 val.lattice_val = CONSTANT;
2092 val.value = simplified;
2093 val.mask = double_int_zero;
2096 /* If the statement is likely to have a VARYING result, then do not
2097 bother folding the statement. */
2098 else if (likelyvalue == VARYING)
2100 enum gimple_code code = gimple_code (stmt);
2101 if (code == GIMPLE_ASSIGN)
2103 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2105 /* Other cases cannot satisfy is_gimple_min_invariant
2106 without folding. */
2107 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
2108 simplified = gimple_assign_rhs1 (stmt);
2110 else if (code == GIMPLE_SWITCH)
2111 simplified = gimple_switch_index (stmt);
2112 else
2113 /* These cannot satisfy is_gimple_min_invariant without folding. */
2114 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
2115 is_constant = simplified && is_gimple_min_invariant (simplified);
2116 if (is_constant)
2118 /* The statement produced a constant value. */
2119 val.lattice_val = CONSTANT;
2120 val.value = simplified;
2121 val.mask = double_int_zero;
2125 /* Resort to simplification for bitwise tracking. */
2126 if (flag_tree_bit_ccp
2127 && likelyvalue == CONSTANT
2128 && !is_constant)
2130 enum gimple_code code = gimple_code (stmt);
2131 tree fndecl;
2132 val.lattice_val = VARYING;
2133 val.value = NULL_TREE;
2134 val.mask = double_int_minus_one;
2135 if (code == GIMPLE_ASSIGN)
2137 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2138 tree rhs1 = gimple_assign_rhs1 (stmt);
2139 switch (get_gimple_rhs_class (subcode))
2141 case GIMPLE_SINGLE_RHS:
2142 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2143 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2144 val = get_value_for_expr (rhs1, true);
2145 break;
2147 case GIMPLE_UNARY_RHS:
2148 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2149 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2150 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
2151 || POINTER_TYPE_P (gimple_expr_type (stmt))))
2152 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
2153 break;
2155 case GIMPLE_BINARY_RHS:
2156 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2157 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2159 tree rhs2 = gimple_assign_rhs2 (stmt);
2160 val = bit_value_binop (subcode,
2161 TREE_TYPE (rhs1), rhs1, rhs2);
2163 break;
2165 default:;
2168 else if (code == GIMPLE_COND)
2170 enum tree_code code = gimple_cond_code (stmt);
2171 tree rhs1 = gimple_cond_lhs (stmt);
2172 tree rhs2 = gimple_cond_rhs (stmt);
2173 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2174 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2175 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
2177 else if (code == GIMPLE_CALL
2178 && (fndecl = gimple_call_fndecl (stmt))
2179 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2181 switch (DECL_FUNCTION_CODE (fndecl))
2183 case BUILT_IN_MALLOC:
2184 case BUILT_IN_REALLOC:
2185 case BUILT_IN_CALLOC:
2186 val.lattice_val = CONSTANT;
2187 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
2188 val.mask = shwi_to_double_int
2189 (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
2190 / BITS_PER_UNIT - 1));
2191 break;
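/* For instance, if MALLOC_ABI_ALIGNMENT were 128 bits the mask would be
   ~(16 - 1): the low four bits of the returned pointer are known to be
   zero while all higher bits remain unknown (the value itself is set to
   zero above).  */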
2193 case BUILT_IN_ALLOCA:
2194 val.lattice_val = CONSTANT;
2195 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
2196 val.mask = shwi_to_double_int
2197 (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
2198 / BITS_PER_UNIT - 1));
2199 break;
2201 default:;
2204 is_constant = (val.lattice_val == CONSTANT);
2207 if (!is_constant)
2209 /* The statement produced a nonconstant value. If the statement
2210 had UNDEFINED operands, then the result of the statement
2211 should be UNDEFINED. Otherwise, the statement is VARYING. */
2212 if (likelyvalue == UNDEFINED)
2214 val.lattice_val = likelyvalue;
2215 val.mask = double_int_zero;
2217 else
2219 val.lattice_val = VARYING;
2220 val.mask = double_int_minus_one;
2223 val.value = NULL_TREE;
2226 return val;
2229 /* Fold the stmt at *GSI with CCP specific information that propagating
2230 and regular folding does not catch. */
2232 static bool
2233 ccp_fold_stmt (gimple_stmt_iterator *gsi)
2235 gimple stmt = gsi_stmt (*gsi);
2237 switch (gimple_code (stmt))
2239 case GIMPLE_COND:
2241 prop_value_t val;
2242 /* Statement evaluation will handle type mismatches in constants
2243 more gracefully than the final propagation. This allows us to
2244 fold more conditionals here. */
2245 val = evaluate_stmt (stmt);
2246 if (val.lattice_val != CONSTANT
2247 || !double_int_zero_p (val.mask))
2248 return false;
2250 if (dump_file)
2252 fprintf (dump_file, "Folding predicate ");
2253 print_gimple_expr (dump_file, stmt, 0, 0);
2254 fprintf (dump_file, " to ");
2255 print_generic_expr (dump_file, val.value, 0);
2256 fprintf (dump_file, "\n");
2259 if (integer_zerop (val.value))
2260 gimple_cond_make_false (stmt);
2261 else
2262 gimple_cond_make_true (stmt);
2264 return true;
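/* As an illustrative example: for a condition like  if (x_1 > 2)  where
   the lattice proves x_1 has the constant value 5, evaluate_stmt returns
   CONSTANT 1 with a zero mask and the branch is rewritten with
   gimple_cond_make_true.  */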
2267 case GIMPLE_CALL:
2269 tree lhs = gimple_call_lhs (stmt);
2270 tree val;
2271 tree argt;
2272 tree callee;
2273 bool changed = false;
2274 unsigned i;
2276 /* If the call was folded into a constant make sure it goes
2277 away even if we cannot propagate into all uses because of
2278 type issues. */
2279 if (lhs
2280 && TREE_CODE (lhs) == SSA_NAME
2281 && (val = get_constant_value (lhs)))
2283 tree new_rhs = unshare_expr (val);
2284 bool res;
2285 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2286 TREE_TYPE (new_rhs)))
2287 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2288 res = update_call_from_tree (gsi, new_rhs);
2289 gcc_assert (res);
2290 return true;
2293 /* Propagate into the call arguments. Compared to replace_uses_in,
2294 this can use the argument slot types for type verification
2295 instead of the current argument type. We can also safely
2296 drop qualifiers here, as we are dealing with constants anyway. */
2297 argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
2298 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2299 ++i, argt = TREE_CHAIN (argt))
2301 tree arg = gimple_call_arg (stmt, i);
2302 if (TREE_CODE (arg) == SSA_NAME
2303 && (val = get_constant_value (arg))
2304 && useless_type_conversion_p
2305 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2306 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2308 gimple_call_set_arg (stmt, i, unshare_expr (val));
2309 changed = true;
2313 callee = gimple_call_fn (stmt);
2314 if (TREE_CODE (callee) == OBJ_TYPE_REF
2315 && TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == SSA_NAME)
2317 tree expr = OBJ_TYPE_REF_EXPR (callee);
2318 OBJ_TYPE_REF_EXPR (callee) = valueize_op (expr);
2319 if (TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == ADDR_EXPR)
2321 tree t;
2322 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
2323 if (t)
2325 gimple_call_set_fn (stmt, t);
2326 changed = true;
2329 OBJ_TYPE_REF_EXPR (callee) = expr;
2332 return changed;
2335 case GIMPLE_ASSIGN:
2337 tree lhs = gimple_assign_lhs (stmt);
2338 tree val;
2340 /* If we have a load that turned out to be constant, replace it,
2341 as we cannot propagate into all uses in all cases. */
2342 if (gimple_assign_single_p (stmt)
2343 && TREE_CODE (lhs) == SSA_NAME
2344 && (val = get_constant_value (lhs)))
2346 tree rhs = unshare_expr (val);
2347 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2348 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2349 gimple_assign_set_rhs_from_tree (gsi, rhs);
2350 return true;
2353 return false;
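/* Illustrative example: a load such as  tmp_2 = *p_1  whose lattice
   value is the constant 4 is rewritten here as  tmp_2 = 4; if the
   load's type and the constant's type are not trivially compatible,
   the constant is wrapped in a VIEW_CONVERT_EXPR first.  */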
2356 default:
2357 return false;
2361 /* Visit the assignment statement STMT. Set the value of its LHS to the
2362 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2363 creates virtual definitions, set the value of each new name to that
2364 of the RHS (if we can derive a constant out of the RHS).
2365 Value-returning call statements also perform an assignment, and
2366 are handled here. */
2368 static enum ssa_prop_result
2369 visit_assignment (gimple stmt, tree *output_p)
2371 prop_value_t val;
2372 enum ssa_prop_result retval;
2374 tree lhs = gimple_get_lhs (stmt);
2376 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2377 || gimple_call_lhs (stmt) != NULL_TREE);
2379 if (gimple_assign_single_p (stmt)
2380 && gimple_assign_rhs_code (stmt) == SSA_NAME)
2381 /* For a simple copy operation, we copy the lattice values. */
2382 val = *get_value (gimple_assign_rhs1 (stmt));
2383 else
2384 /* Evaluate the statement, which could be
2385 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2386 val = evaluate_stmt (stmt);
2388 retval = SSA_PROP_NOT_INTERESTING;
2390 /* Set the lattice value of the statement's output. */
2391 if (TREE_CODE (lhs) == SSA_NAME)
2393 /* If STMT is an assignment to an SSA_NAME, we only have one
2394 value to set. */
2395 if (set_lattice_value (lhs, val))
2397 *output_p = lhs;
2398 if (val.lattice_val == VARYING)
2399 retval = SSA_PROP_VARYING;
2400 else
2401 retval = SSA_PROP_INTERESTING;
2405 return retval;
2409 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2410 if it can determine which edge will be taken. Otherwise, return
2411 SSA_PROP_VARYING. */
2413 static enum ssa_prop_result
2414 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
2416 prop_value_t val;
2417 basic_block block;
2419 block = gimple_bb (stmt);
2420 val = evaluate_stmt (stmt);
2421 if (val.lattice_val != CONSTANT
2422 || !double_int_zero_p (val.mask))
2423 return SSA_PROP_VARYING;
2425 /* Find which edge out of the conditional block will be taken and add it
2426 to the worklist. If no single edge can be determined statically,
2427 return SSA_PROP_VARYING to feed all the outgoing edges to the
2428 propagation engine. */
2429 *taken_edge_p = find_taken_edge (block, val.value);
2430 if (*taken_edge_p)
2431 return SSA_PROP_INTERESTING;
2432 else
2433 return SSA_PROP_VARYING;
2437 /* Evaluate statement STMT. If the statement produces an output value and
2438 its evaluation changes the lattice value of its output, return
2439 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2440 output value.
2442 If STMT is a conditional branch and we can determine its truth
2443 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2444 value, return SSA_PROP_VARYING. */
2446 static enum ssa_prop_result
2447 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
2449 tree def;
2450 ssa_op_iter iter;
2452 if (dump_file && (dump_flags & TDF_DETAILS))
2454 fprintf (dump_file, "\nVisiting statement:\n");
2455 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2458 switch (gimple_code (stmt))
2460 case GIMPLE_ASSIGN:
2461 /* If the statement is an assignment that produces a single
2462 output value, evaluate its RHS to see if the lattice value of
2463 its output has changed. */
2464 return visit_assignment (stmt, output_p);
2466 case GIMPLE_CALL:
2467 /* A value-returning call also performs an assignment. */
2468 if (gimple_call_lhs (stmt) != NULL_TREE)
2469 return visit_assignment (stmt, output_p);
2470 break;
2472 case GIMPLE_COND:
2473 case GIMPLE_SWITCH:
2474 /* If STMT is a conditional branch, see if we can determine
2475 which branch will be taken. */
2476 /* FIXME. It appears that we should be able to optimize
2477 computed GOTOs here as well. */
2478 return visit_cond_stmt (stmt, taken_edge_p);
2480 default:
2481 break;
2484 /* Any other kind of statement is not interesting for constant
2485 propagation and, therefore, not worth simulating. */
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2489 /* Definitions made by statements other than assignments to
2490 SSA_NAMEs represent unknown modifications to their outputs.
2491 Mark them VARYING. */
2492 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2494 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
2495 set_lattice_value (def, v);
2498 return SSA_PROP_VARYING;
2502 /* Main entry point for SSA Conditional Constant Propagation. */
2504 static unsigned int
2505 do_ssa_ccp (void)
2507 ccp_initialize ();
2508 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
2509 if (ccp_finalize ())
2510 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
2511 else
2512 return 0;
2516 static bool
2517 gate_ccp (void)
2519 return flag_tree_ccp != 0;
2523 struct gimple_opt_pass pass_ccp =
2526 GIMPLE_PASS,
2527 "ccp", /* name */
2528 gate_ccp, /* gate */
2529 do_ssa_ccp, /* execute */
2530 NULL, /* sub */
2531 NULL, /* next */
2532 0, /* static_pass_number */
2533 TV_TREE_CCP, /* tv_id */
2534 PROP_cfg | PROP_ssa, /* properties_required */
2535 0, /* properties_provided */
2536 0, /* properties_destroyed */
2537 0, /* todo_flags_start */
2538 TODO_dump_func | TODO_verify_ssa
2539 | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
2545 /* Try to optimize out __builtin_stack_restore. Optimize it out
2546 if there is another __builtin_stack_restore in the same basic
2547 block and no calls or ASM_EXPRs are in between, or if this block's
2548 only outgoing edge is to EXIT_BLOCK and there are no calls or
2549 ASM_EXPRs after this __builtin_stack_restore. */
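/* For illustration, in a (hypothetical) block of the form

     t_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (t_1);
     ... no calls or asm ...
     __builtin_stack_restore (t_2);

   the first restore has no observable effect and can be removed.  */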
2551 static tree
2552 optimize_stack_restore (gimple_stmt_iterator i)
2554 tree callee;
2555 gimple stmt;
2557 basic_block bb = gsi_bb (i);
2558 gimple call = gsi_stmt (i);
2560 if (gimple_code (call) != GIMPLE_CALL
2561 || gimple_call_num_args (call) != 1
2562 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2563 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2564 return NULL_TREE;
2566 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2568 stmt = gsi_stmt (i);
2569 if (gimple_code (stmt) == GIMPLE_ASM)
2570 return NULL_TREE;
2571 if (gimple_code (stmt) != GIMPLE_CALL)
2572 continue;
2574 callee = gimple_call_fndecl (stmt);
2575 if (!callee
2576 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2577 /* All regular builtins are ok, just obviously not alloca. */
2578 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
2579 return NULL_TREE;
2581 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2582 goto second_stack_restore;
2585 if (!gsi_end_p (i))
2586 return NULL_TREE;
2588 /* Allow a single successor that is the exit block, or no successors at all. */
2589 switch (EDGE_COUNT (bb->succs))
2591 case 0:
2592 break;
2593 case 1:
2594 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2595 return NULL_TREE;
2596 break;
2597 default:
2598 return NULL_TREE;
2600 second_stack_restore:
2602 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2603 If there are multiple uses, then the last one should remove the call.
2604 In any case, whether the call to __builtin_stack_save can be removed
2605 or not is irrelevant to removing the call to __builtin_stack_restore. */
2606 if (has_single_use (gimple_call_arg (call, 0)))
2608 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2609 if (is_gimple_call (stack_save))
2611 callee = gimple_call_fndecl (stack_save);
2612 if (callee
2613 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2614 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2616 gimple_stmt_iterator stack_save_gsi;
2617 tree rhs;
2619 stack_save_gsi = gsi_for_stmt (stack_save);
2620 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2621 update_call_from_tree (&stack_save_gsi, rhs);
2626 /* No effect, so the statement will be deleted. */
2627 return integer_zero_node;
2630 /* If the va_list type is a simple pointer and nothing special is needed,
2631 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2632 remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
2633 into a simple pointer assignment. */
2635 static tree
2636 optimize_stdarg_builtin (gimple call)
2638 tree callee, lhs, rhs, cfun_va_list;
2639 bool va_list_simple_ptr;
2640 location_t loc = gimple_location (call);
2642 if (gimple_code (call) != GIMPLE_CALL)
2643 return NULL_TREE;
2645 callee = gimple_call_fndecl (call);
2647 cfun_va_list = targetm.fn_abi_va_list (callee);
2648 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2649 && (TREE_TYPE (cfun_va_list) == void_type_node
2650 || TREE_TYPE (cfun_va_list) == char_type_node);
2652 switch (DECL_FUNCTION_CODE (callee))
2654 case BUILT_IN_VA_START:
2655 if (!va_list_simple_ptr
2656 || targetm.expand_builtin_va_start != NULL
2657 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
2658 return NULL_TREE;
2660 if (gimple_call_num_args (call) != 2)
2661 return NULL_TREE;
2663 lhs = gimple_call_arg (call, 0);
2664 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2665 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2666 != TYPE_MAIN_VARIANT (cfun_va_list))
2667 return NULL_TREE;
2669 lhs = build_fold_indirect_ref_loc (loc, lhs);
2670 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
2671 1, integer_zero_node);
2672 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2673 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2675 case BUILT_IN_VA_COPY:
2676 if (!va_list_simple_ptr)
2677 return NULL_TREE;
2679 if (gimple_call_num_args (call) != 2)
2680 return NULL_TREE;
2682 lhs = gimple_call_arg (call, 0);
2683 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2684 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2685 != TYPE_MAIN_VARIANT (cfun_va_list))
2686 return NULL_TREE;
2688 lhs = build_fold_indirect_ref_loc (loc, lhs);
2689 rhs = gimple_call_arg (call, 1);
2690 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2691 != TYPE_MAIN_VARIANT (cfun_va_list))
2692 return NULL_TREE;
2694 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2695 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2697 case BUILT_IN_VA_END:
2698 /* No effect, so the statement will be deleted. */
2699 return integer_zero_node;
2701 default:
2702 gcc_unreachable ();
2706 /* A simple pass that attempts to fold all builtin functions. This pass
2707 is run after we've propagated as many constants as we can. */
2709 static unsigned int
2710 execute_fold_all_builtins (void)
2712 bool cfg_changed = false;
2713 basic_block bb;
2714 unsigned int todoflags = 0;
2716 FOR_EACH_BB (bb)
2718 gimple_stmt_iterator i;
2719 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
2721 gimple stmt, old_stmt;
2722 tree callee, result;
2723 enum built_in_function fcode;
2725 stmt = gsi_stmt (i);
2727 if (gimple_code (stmt) != GIMPLE_CALL)
2729 gsi_next (&i);
2730 continue;
2732 callee = gimple_call_fndecl (stmt);
2733 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2735 gsi_next (&i);
2736 continue;
2738 fcode = DECL_FUNCTION_CODE (callee);
2740 result = gimple_fold_builtin (stmt);
2742 if (result)
2743 gimple_remove_stmt_histograms (cfun, stmt);
2745 if (!result)
2746 switch (DECL_FUNCTION_CODE (callee))
2748 case BUILT_IN_CONSTANT_P:
2749 /* Resolve __builtin_constant_p. If it hasn't been
2750 folded to integer_one_node by now, it's fairly
2751 certain that the value simply isn't constant. */
2752 result = integer_zero_node;
2753 break;
2755 case BUILT_IN_STACK_RESTORE:
2756 result = optimize_stack_restore (i);
2757 if (result)
2758 break;
2759 gsi_next (&i);
2760 continue;
2762 case BUILT_IN_VA_START:
2763 case BUILT_IN_VA_END:
2764 case BUILT_IN_VA_COPY:
2765 /* These shouldn't be folded before pass_stdarg. */
2766 result = optimize_stdarg_builtin (stmt);
2767 if (result)
2768 break;
2769 /* FALLTHRU */
2771 default:
2772 gsi_next (&i);
2773 continue;
2776 if (dump_file && (dump_flags & TDF_DETAILS))
2778 fprintf (dump_file, "Simplified\n ");
2779 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2782 old_stmt = stmt;
2783 if (!update_call_from_tree (&i, result))
2785 gimplify_and_update_call_from_tree (&i, result);
2786 todoflags |= TODO_update_address_taken;
2789 stmt = gsi_stmt (i);
2790 update_stmt (stmt);
2792 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2793 && gimple_purge_dead_eh_edges (bb))
2794 cfg_changed = true;
2796 if (dump_file && (dump_flags & TDF_DETAILS))
2798 fprintf (dump_file, "to\n ");
2799 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2800 fprintf (dump_file, "\n");
2803 /* Retry the same statement if it changed into another
2804 builtin; there might be new opportunities now. */
2805 if (gimple_code (stmt) != GIMPLE_CALL)
2807 gsi_next (&i);
2808 continue;
2810 callee = gimple_call_fndecl (stmt);
2811 if (!callee
2812 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2813 || DECL_FUNCTION_CODE (callee) == fcode)
2814 gsi_next (&i);
2818 /* Delete unreachable blocks. */
2819 if (cfg_changed)
2820 todoflags |= TODO_cleanup_cfg;
2822 return todoflags;
2826 struct gimple_opt_pass pass_fold_builtins =
2829 GIMPLE_PASS,
2830 "fab", /* name */
2831 NULL, /* gate */
2832 execute_fold_all_builtins, /* execute */
2833 NULL, /* sub */
2834 NULL, /* next */
2835 0, /* static_pass_number */
2836 TV_NONE, /* tv_id */
2837 PROP_cfg | PROP_ssa, /* properties_required */
2838 0, /* properties_provided */
2839 0, /* properties_destroyed */
2840 0, /* todo_flags_start */
2841 TODO_dump_func
2842 | TODO_verify_ssa
2843 | TODO_update_ssa /* todo_flags_finish */