/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2016 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-propagate.h"
#include "dbgcnt.h"
#include "params.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "cfgloop.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};
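/* For example, a CONSTANT lattice value with value == 0x10 and
   mask == 0x3 stands for any of 0x10, 0x11, 0x12 or 0x13: bits 0 and 1
   are unknown while all higher bits are known to match 0x10.  */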
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
					     val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
	  | widest_int::from (nonzero_bits, UNSIGNED));
}
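/* For example, for an 8-bit variable whose known nonzero bits are 0x07,
   extend_mask yields ...ffffff07: bits 0-2 are unknown, bits 3-7 are
   known to be zero, and everything above the precision is treated as
   unknown.  */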
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple *stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      wide_int nonzero_bits = get_nonzero_bits (var);
	      if (nonzero_bits != -1)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  val.mask = extend_mask (nonzero_bits);
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}
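/* For example, the default definition of an uninitialized local
   variable starts out UNDEFINED, whereas the default definition of a
   parameter or of a virtual operand starts out VARYING, unless bit-CCP
   can seed it from its recorded nonzero bits.  */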
/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}
/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
			      TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
	return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
	if (!REAL_VALUE_ISNAN
	       (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
	    && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
				 VECTOR_CST_ELT (new_val.value, i), 0))
	  return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
	  && !operand_equal_p (TREE_REALPART (old_val.value),
			       TREE_REALPART (new_val.value), 0))
	return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
	  && !operand_equal_p (TREE_IMAGPART (old_val.value),
			       TREE_IMAGPART (new_val.value), 0))
	return false;
      return true;
    }
  return false;
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t *new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.  Instead of dropping to VARYING
     use the meet operator to retain a conservative value.
     Missed optimizations like PR65851 make this necessary.
     It also ensures we converge to a stable lattice solution.  */
  if (new_val->lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val->value) != SSA_NAME)
    ccp_lattice_meet (new_val, old_val);

  gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val->lattice_val
      || (new_val->lattice_val == CONSTANT
	  && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
	      || (TREE_CODE (new_val->value) == INTEGER_CST
		  && (new_val->mask != old_val->mask
		      || (wi::bit_and_not (wi::to_widest (old_val->value),
					   new_val->mask)
			  != wi::bit_and_not (wi::to_widest (new_val->value),
					      new_val->mask))))
	      || (TREE_CODE (new_val->value) != INTEGER_CST
		  && !operand_equal_p (new_val->value, old_val->value, 0)))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constant values here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = *new_val;

      gcc_assert (new_val->lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
			       tree, const widest_int &, const widest_int &,
			       tree, const widest_int &, const widest_int &);

/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
	      : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
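/* For example, the address of a variable with 8-byte alignment and no
   misalignment yields value == 0 with the low three mask bits clear:
   the low three bits of the address are known to be zero while all
   higher bits are unknown.  */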
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
      /* Fall back to a copy value.  */
      if (!for_bits_p
	  && val.lattice_val == VARYING
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
	{
	  val.lattice_val = CONSTANT;
	  val.value = expr;
	  val.mask = -1;
	}
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }

  if (val.lattice_val == VARYING
      && TYPE_UNSIGNED (TREE_TYPE (expr)))
    val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));

  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple *stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  bool has_nsa_operand;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  has_nsa_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      ccp_prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;

      if (SSA_NAME_IS_DEFAULT_DEF (use)
	  || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
	has_nsa_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      /* These 3 builtins use the first argument just as a magic
	 way to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  Also we can combine the stmt with definitions from
     operands whose definitions are not simulated again.  */
  if (has_constant_operand
      || has_nsa_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple *stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to a function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)
	      && !lookup_attribute ("assume_aligned",
				    TYPE_ATTRIBUTES (fntype))
	      && !lookup_attribute ("alloc_align",
				    TYPE_ATTRIBUTES (fntype))))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must simulate the
	     statement at least once.  Failure to do so means that the
	     outgoing edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gphi *phi = i.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of SSA names to VARYING when
   the total number of SSA names analyzed exceeds the specified debug
   count.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = -1;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.  If NONZERO_P, record nonzero bits.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (bool nonzero_p)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      ccp_prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || !nonzero_p)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
	  wide_int nonzero_bits = wide_int::from (val->mask, precision,
						  UNSIGNED) | val->value;
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }
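  /* For example, a pointer lattice value of value == 0x4 with
     mask == ...fff8 yields tem == ...fff8 and align == 8: the pointer
     is known to be 8-byte aligned with a misalignment of
     0x4 & 7 == 4 bytes.  */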
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

	any M UNDEFINED = any
	any M VARYING   = VARYING
	Ci  M Cj        = Ci       if (i == j)
	Ci  M Cj        = VARYING  if (i != j)  */
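/* For example, meeting the INTEGER_CST lattice values 0x10 (mask 0)
   and 0x12 (mask 0) does not drop straight to VARYING: the two values
   differ only in bit 1, so the result is the partial constant 0x10
   with mask 0x2.  */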
static void
ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always use the SSA name as the
	 result because its definition may not dominate the PHI node.
	 Doing optimistic copy propagation also causes a lot of
	 gcc.dg/uninit-pred*.c FAILs.  */
      && (val2->lattice_val != CONSTANT
	  || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
	   /* See above.  */
	   && (val1->lattice_val != CONSTANT
	       || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci       if (i == j)
	 Ci M Cj = VARYING  if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = (val1->mask | val2->mask
		    | (wi::to_widest (val1->value)
		       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && operand_equal_p (val1->value, val2->value, 0))
    {
      /* Ci M Cj = Ci       if (i == j)
	 Ci M Cj = VARYING  if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gphi *phi)
{
  unsigned i;
  ccp_prop_value_t new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = 0;

  bool first = true;
  bool non_exec_edge = false;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);

	  if (first)
	    {
	      new_val = arg_val;
	      first = false;
	    }
	  else
	    ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
      else
	non_exec_edge = true;
    }

  /* In case there were non-executable edges and the value is a copy
     make sure its definition dominates the PHI node.  */
  if (non_exec_edge
      && new_val.lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == SSA_NAME
      && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
      && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
			   gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
    {
      new_val.lattice_val = VARYING;
      new_val.value = NULL_TREE;
      new_val.mask = -1;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), &new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* Return the constant value for OP, but signal to not follow SSA
   edges if the definition may be simulated again.  */

static tree
valueize_op_1 (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      /* If the definition may be simulated again we cannot follow
	 this SSA edge as the SSA propagator does not necessarily
	 re-visit the use.  */
      gimple *def_stmt = SSA_NAME_DEF_STMT (op);
      if (!gimple_nop_p (def_stmt)
	  && prop_simulate_again_p (def_stmt))
	return NULL_TREE;
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt,
					     valueize_op, valueize_op_1);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  widest_int *val, widest_int *mask,
		  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	widest_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm, type, 1, 0);
	break;
      }

    CASE_CONVERT:
      {
	signop sgn;

	/* First extend mask and value according to the original type.  */
	sgn = TYPE_SIGN (rtype);
	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

	/* Then extend mask and value according to the target type.  */
	sgn = TYPE_SIGN (type);
	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
	break;
      }
    default:
      *mask = -1;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   widest_int *val, widest_int *mask,
		   tree r1type, const widest_int &r1val,
		   const widest_int &r1mask, tree r2type,
		   const widest_int &r2val, const widest_int &r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);
  bool swap_p = false;

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)).  */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;
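      /* For example, ANDing {value 0x0, mask 0xf} (low nibble unknown)
	 with the constant 0xf0 gives mask
	 (0xf | 0) & (0x0 | 0xf) & (0xf0 | 0) == 0 and value 0: every
	 result bit is known zero because each bit is known zero on at
	 least one side.  */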
    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RROTATE_EXPR)
		    code = LROTATE_EXPR;
		  else
		    code = RROTATE_EXPR;
		}
	      if (code == RROTATE_EXPR)
		{
		  *mask = wi::rrotate (r1mask, shift, width);
		  *val = wi::rrotate (r1val, shift, width);
		}
	      else
		{
		  *mask = wi::lrotate (r1mask, shift, width);
		  *val = wi::lrotate (r1val, shift, width);
		}
	    }
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RSHIFT_EXPR)
		    code = LSHIFT_EXPR;
		  else
		    code = RSHIFT_EXPR;
		}
	      if (code == RSHIFT_EXPR)
		{
		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
		}
	      else
		{
		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
		}
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = wi::ext (lo, width, sgn);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
	hi = wi::ext (hi, width, sgn);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
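      /* For example, adding the constant 1 to {value 0x0, mask 0x1}
	 gives lo = 1 + 0 = 1 and hi = 1 + 1 = 2, so
	 mask = 0 | 0x1 | (1 ^ 2) = 0x3: the unknown low bit may carry
	 into bit 1, leaving only the higher bits known.  */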
    case MINUS_EXPR:
      {
	widest_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = wi::ctz (r1val | r1mask);
	int r2tz = wi::ctz (r2val | r2mask);
	if (r1tz + r2tz >= width)
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
			     width, sgn);
	    *val = 0;
	  }
	break;
      }
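      /* For example, if both operands are known to have at least two
	 trailing zero bits (r1tz == r2tz == 2), the product has at
	 least four: the result is value 0 with mask ~0xf within the
	 precision.  */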
    case EQ_EXPR:
    case NE_EXPR:
      {
	widest_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = 0;
	    *val = ((code == EQ_EXPR) ? 0 : 1);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }
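      /* For example, comparing {value 0x10, mask 0x3} with
	 {value 0x20, mask 0x3} for equality: the values differ in bits
	 outside m == 0x3, so EQ_EXPR is known false (value 0, mask 0).  */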
    case GE_EXPR:
    case GT_EXPR:
      swap_p = true;
      code = swap_tree_comparison (code);
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;

	const widest_int &o1val = swap_p ? r2val : r1val;
	const widest_int &o1mask = swap_p ? r2mask : r1mask;
	const widest_int &o2val = swap_p ? r1val : r2val;
	const widest_int &o2mask = swap_p ? r1mask : r2mask;

	/* If the most significant bits are not known we know nothing.  */
	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	sgn = TYPE_SIGN (r1type);

	/* If we know the most significant bits we know the values'
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
	if (maxmin < 0)  /* o1 is less than o2.  */
	  {
	    *mask = 0;
	    *val = 1;
	  }
	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = 0;
	    *val = (code == LE_EXPR ? 1 : 0);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }
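      /* For example, with o1 = {value 0x0, mask 0x3} and the constant
	 o2 = 0x8, unsigned: the maximum o1 can be (0x3) is still below
	 the minimum of o2 (0x8), so maxmin < 0 and o1 < o2 is known to
	 be true.  */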
    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || wi::sext (r1val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || wi::sext (r2val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value for __builtin_assume_aligned
   and functions with assume_aligned or alloc_aligned attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for alloc_aligned attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */

static ccp_prop_value_t
bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
			  bool alloc_aligned)
{
  tree align, misalign = NULL_TREE, type;
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  ccp_prop_value_t alignval;
  widest_int value, mask;
  ccp_prop_value_t val;

  if (attr == NULL_TREE)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      type = TREE_TYPE (ptr);
      ptrval = get_value_for_expr (ptr, true);
    }
  else
    {
      tree lhs = gimple_call_lhs (stmt);
      type = TREE_TYPE (lhs);
    }

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
  if (attr == NULL_TREE)
    {
      /* Get aligni and misaligni from __builtin_assume_aligned.  */
      align = gimple_call_arg (stmt, 1);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (gimple_call_num_args (stmt) > 2)
	{
	  misalign = gimple_call_arg (stmt, 2);
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  else
    {
      /* Get aligni and misaligni from assume_aligned or
	 alloc_align attributes.  */
      if (TREE_VALUE (attr) == NULL_TREE)
	return ptrval;
      attr = TREE_VALUE (attr);
      align = TREE_VALUE (attr);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (alloc_aligned)
	{
	  if (aligni == 0 || aligni > gimple_call_num_args (stmt))
	    return ptrval;
	  align = gimple_call_arg (stmt, aligni - 1);
	  if (!tree_fits_uhwi_p (align))
	    return ptrval;
	  aligni = tree_to_uhwi (align);
	}
      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
	{
	  misalign = TREE_VALUE (TREE_CHAIN (attr));
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
    return ptrval;

  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_wide_int (ptrval), ptrval.mask,
		     type, value_to_wide_int (alignval), alignval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
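/* For example, __builtin_assume_aligned (p, 16, 4) produces a lattice
   value whose low four mask bits are clear and whose value is 4: the
   pointer is known to be congruent to 4 modulo 16.  */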
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static ccp_prop_value_t
evaluate_stmt (gimple *stmt)
{
  ccp_prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      if (simplified
	  && TREE_CODE (simplified) == SSA_NAME
	  /* We may not use values of something that may be simulated again,
	     see valueize_op_1.  */
	  && (SSA_NAME_IS_DEFAULT_DEF (simplified)
	      || ! prop_simulate_again_p (SSA_NAME_DEF_STMT (simplified))))
	{
	  val = *get_value (simplified);
	  if (val.lattice_val != VARYING)
	    {
	      fold_undefer_overflow_warnings (true, stmt, 0);
	      return val;
	    }
	}
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	  return val;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (as_a <gswitch *> (stmt));
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement result is likely UNDEFINED, make it so.  */
  else if (likelyvalue == UNDEFINED)
    {
      val.lattice_val = UNDEFINED;
      val.value = NULL_TREE;
      val.mask = 0;
      return val;
    }
  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt)
	  || (gimple_assign_single_p (stmt)
	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || POINTER_TYPE_P (TREE_TYPE (lhs)))
	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1))))
	    switch (get_gimple_rhs_class (subcode))
	      {
	      case GIMPLE_SINGLE_RHS:
		val = get_value_for_expr (rhs1, true);
		break;

	      case GIMPLE_UNARY_RHS:
		val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
		break;

	      case GIMPLE_BINARY_RHS:
		val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
				       gimple_assign_rhs2 (stmt));
		break;

	      default:;
	      }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
			   / BITS_PER_UNIT - 1);
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
	      break;

	    case BUILT_IN_ALIGNED_ALLOC:
	      {
		tree align = get_constant_value (gimple_call_arg (stmt, 0));
		if (align
		    && tree_fits_uhwi_p (align))
		  {
		    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
		    if (aligni > 1
			/* align must be power-of-two */
			&& (aligni & (aligni - 1)) == 0)
		      {
			val.lattice_val = CONSTANT;
			val.value = build_int_cst (ptr_type_node, 0);
			val.mask = -aligni;
		      }
		  }
		break;
	      }
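	    /* For example, a call to aligned_alloc (16, n) with the
	       constant alignment 16 yields value 0 and mask -16: the
	       returned pointer is known to be 16-byte aligned.  */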
	    default:;
	    }
	}
      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
	{
	  tree fntype = gimple_call_fntype (stmt);
	  if (fntype)
	    {
	      tree attrs = lookup_attribute ("assume_aligned",
					     TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, false);
	      attrs = lookup_attribute ("alloc_align",
					TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, true);
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (flag_tree_bit_ccp
      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
	  || !is_constant)
      && gimple_get_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
    {
      tree lhs = gimple_get_lhs (stmt);
      wide_int nonzero_bits = get_nonzero_bits (lhs);
      if (nonzero_bits != -1)
	{
	  if (!is_constant)
	    {
	      val.lattice_val = CONSTANT;
	      val.value = build_zero_cst (TREE_TYPE (lhs));
	      val.mask = extend_mask (nonzero_bits);
	      is_constant = true;
	    }
	  else
	    {
	      if (wi::bit_and_not (val.value, nonzero_bits) != 0)
		val.value = wide_int_to_tree (TREE_TYPE (lhs),
					      nonzero_bits & val.value);
	      if (nonzero_bits == 0)
		val.mask = 0;
	      else
		val.mask = val.mask & extend_mask (nonzero_bits);
	    }
	}
    }

  /* The statement produced a nonconstant value.  */
  if (!is_constant)
    {
      /* The statement produced a copy.  */
      if (simplified && TREE_CODE (simplified) == SSA_NAME
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
	{
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = -1;
	}
      /* The statement is VARYING.  */
      else
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	  val.mask = -1;
	}
    }

  return val;
}
typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;

/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
				     gimple_htab **visited)
{
  gimple *stmt;
  gassign *clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple **slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var),
				     NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (!*visited)
	  *visited = new gimple_htab (10);

	slot = (*visited)->find_slot (stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else if (gimple_assign_ssa_name_copy_p (stmt))
      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
					   visited);
    else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
      continue;
    else
      gcc_assert (is_gimple_debug (stmt));
}

/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	return;

      *i = gsi_last_bb (dom);
    }
}

/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator
   when a previous pass (such as DOM) duplicated it along multiple paths
   to a BB.  In that case the function gives up without inserting the
   clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple *stmt;
  tree saved_val;
  gimple_htab *visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  delete visited;
}

/* Detect a __builtin_alloca_with_align call with a constant size argument.
   If one is found, declare a fixed-size array in its place and return the
   address of the array; otherwise return NULL_TREE.  */
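
/* For example (an illustration added here, not part of the original
   sources), with a constant size of 16 bytes and a 256-bit alignment
   argument,

     p_1 = __builtin_alloca_with_align (16, 256);

   is folded to roughly

     unsigned char D.1234[16];   -- with DECL_ALIGN (D.1234) == 256
     p_1 = &D.1234;

   where D.1234 is a fresh fixed-size array declared by this function.  */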

static tree
fold_builtin_alloca_with_align (gimple *stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !tree_fits_uhwi_p (arg))
    return NULL_TREE;

  size = tree_to_uhwi (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& block
	&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}

/* Fold the stmt at *GSI with CCP specific information that propagation
   and regular folding do not catch.  */
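
/* For example (an illustration added here, not part of the original
   sources), if evaluate_stmt determines that the predicate of

     if (x_1 > 3)

   is the constant 1 with mask 0 (i.e. every bit of the result is known),
   the condition is rewritten to an always-true one via
   gimple_cond_make_true, even when type mismatches would have kept the
   final propagation from substituting the constant directly.  */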

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	ccp_prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || val.mask != 0)
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (cond_stmt);
	else
	  gimple_cond_make_true (cond_stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple *stmt, tree *output_p)
{
  ccp_prop_value_t val;
  enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;

  tree lhs = gimple_get_lhs (stmt);
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Evaluate the statement, which could be
	 either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
      val = evaluate_stmt (stmt);

      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, &val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
  ccp_prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || val.mask != 0)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    set_value_varying (def);

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  If NONZERO_P,
   record nonzero bits.  */

static unsigned int
do_ssa_ccp (bool nonzero_p)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);

  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize (nonzero_p))
    {
      todo = (TODO_cleanup_cfg | TODO_update_ssa);

      /* ccp_finalize does not preserve loop-closed ssa.  */
      loops_state_clear (LOOP_CLOSED_SSA);
    }

  free_dominance_info (CDI_DOMINATORS);
  return todo;
}

namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      nonzero_p = param;
    }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); }

private:
  /* Determines whether the pass instance records nonzero bits.  */
  bool nonzero_p;
}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
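
/* For example (an illustration added here, not part of the original
   sources):

     t_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (t_1);   <-- removable
     ...                              <-- no calls or ASM_EXPRs
     __builtin_stack_restore (t_1);

   The first restore is redundant, because the later restore of the same
   saved stack pointer makes it a no-op.  */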

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple *stmt;

  basic_block bb = gsi_bb (i);
  gimple *call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow either no successors, or a single successor that is the
     exit block.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

/* If the va_list type is a plain pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   optimize __builtin_va_end (&ap) out as a no-op, and turn
   __builtin_va_copy into a simple pointer assignment.  */
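
/* For example (an illustration added here, not part of the original
   sources), on a target whose va_list is a plain char pointer:

     __builtin_va_start (&ap, 0);   becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     becomes   d = s;
     __builtin_va_end (&ap);        is deleted outright.  */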

static tree
optimize_stdarg_builtin (gimple *call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}

/* Attempt to make the block containing the __builtin_unreachable at I
   unreachable by changing the incoming jumps.  Return true if at least
   one jump was changed.  */
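
/* For example (an illustration added here, not part of the original
   sources), given

     <bb 2>:
     if (x_1 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     __builtin_unreachable ();

   the condition in bb 2 is rewritten with gimple_cond_make_false so the
   edge into bb 3 can never be taken, and CFG cleanup can then delete
   bb 3 entirely.  */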

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (label_stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (cond_stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (cond_stmt);
	}
      else
	{
	  /* Todo: handle other cases, f.i. switch statement.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins

unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *stmt, *old_stmt;
	  tree callee;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts; after the last
		 GIMPLE DSE they aren't needed and might unnecessarily keep
		 the SSA_NAMEs live.  */
	      if (gimple_clobber_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  if (TREE_CODE (lhs) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
		    {
		      unlink_stmt_vdef (stmt);
		      gsi_remove (&i, true);
		      release_defs (stmt);
		      continue;
		    }
		}
	      gsi_next (&i);
	      continue;
	    }

	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);
	  if (fold_stmt (&i))
	    ;
	  else
	    {
	      tree result = NULL_TREE;
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_CONSTANT_P:
		  /* Resolve __builtin_constant_p.  If it hasn't been
		     folded to integer_one_node by now, it's fairly
		     certain that the value simply isn't constant.  */
		  result = integer_zero_node;
		  break;

		case BUILT_IN_ASSUME_ALIGNED:
		  /* Remove __builtin_assume_aligned.  */
		  result = gimple_call_arg (stmt, 0);
		  break;

		case BUILT_IN_STACK_RESTORE:
		  result = optimize_stack_restore (i);
		  if (result)
		    break;
		  gsi_next (&i);
		  continue;

		case BUILT_IN_UNREACHABLE:
		  if (optimize_unreachable (i))
		    cfg_changed = true;
		  break;

		case BUILT_IN_VA_START:
		case BUILT_IN_VA_END:
		case BUILT_IN_VA_COPY:
		  /* These shouldn't be folded before pass_stdarg.  */
		  result = optimize_stdarg_builtin (stmt);
		  if (result)
		    break;
		  /* FALLTHRU */

		default:;
		}

	      if (!result)
		{
		  gsi_next (&i);
		  continue;
		}

	      if (!update_call_from_tree (&i, result))
		gimplify_and_update_call_from_tree (&i, result);
	    }

	  todoflags |= TODO_update_address_taken;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}