/* Support for fully folding sub-trees of an expression for C compiler.
   Copyright (C) 1992-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "bitmap.h"
#include "c-tree.h"
#include "intl.h"
#include "gimplify.h"
30 static tree
c_fully_fold_internal (tree expr
, bool, bool *, bool *, bool);
32 /* If DISABLE is true, stop issuing warnings. This is used when
33 parsing code that we know will not be executed. This function may
34 be called multiple times, and works as a stack. */
37 c_disable_warnings (bool disable
)
41 ++c_inhibit_evaluation_warnings
;
42 fold_defer_overflow_warnings ();
46 /* If ENABLE is true, reenable issuing warnings. */
49 c_enable_warnings (bool enable
)
53 --c_inhibit_evaluation_warnings
;
54 fold_undefer_and_ignore_overflow_warnings ();
58 /* Fully fold EXPR, an expression that was not folded (beyond integer
59 constant expressions and null pointer constants) when being built
60 up. If IN_INIT, this is in a static initializer and certain
61 changes are made to the folding done. Clear *MAYBE_CONST if
62 MAYBE_CONST is not NULL and EXPR is definitely not a constant
63 expression because it contains an evaluated operator (in C99) or an
64 operator outside of sizeof returning an integer constant (in C90)
65 not permitted in constant expressions, or because it contains an
66 evaluated arithmetic overflow. (*MAYBE_CONST should typically be
67 set to true by callers before calling this function.) Return the
68 folded expression. Function arguments have already been folded
69 before calling this function, as have the contents of SAVE_EXPR,
70 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
71 C_MAYBE_CONST_EXPR. */
74 c_fully_fold (tree expr
, bool in_init
, bool *maybe_const
)
77 tree eptype
= NULL_TREE
;
79 bool maybe_const_itself
= true;
80 location_t loc
= EXPR_LOCATION (expr
);
84 if (TREE_CODE (expr
) == EXCESS_PRECISION_EXPR
)
86 eptype
= TREE_TYPE (expr
);
87 expr
= TREE_OPERAND (expr
, 0);
89 ret
= c_fully_fold_internal (expr
, in_init
, maybe_const
,
90 &maybe_const_itself
, false);
92 ret
= fold_convert_loc (loc
, eptype
, ret
);
93 *maybe_const
&= maybe_const_itself
;
97 /* Internal helper for c_fully_fold. EXPR and IN_INIT are as for
98 c_fully_fold. *MAYBE_CONST_OPERANDS is cleared because of operands
99 not permitted, while *MAYBE_CONST_ITSELF is cleared because of
100 arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
101 both evaluated and unevaluated subexpressions while
102 *MAYBE_CONST_ITSELF is carried from only evaluated
103 subexpressions). FOR_INT_CONST indicates if EXPR is an expression
104 with integer constant operands, and if any of the operands doesn't
105 get folded to an integer constant, don't fold the expression itself. */
108 c_fully_fold_internal (tree expr
, bool in_init
, bool *maybe_const_operands
,
109 bool *maybe_const_itself
, bool for_int_const
)
112 enum tree_code code
= TREE_CODE (expr
);
113 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
114 location_t loc
= EXPR_LOCATION (expr
);
115 tree op0
, op1
, op2
, op3
;
116 tree orig_op0
, orig_op1
, orig_op2
;
117 bool op0_const
= true, op1_const
= true, op2_const
= true;
118 bool op0_const_self
= true, op1_const_self
= true, op2_const_self
= true;
119 bool nowarning
= TREE_NO_WARNING (expr
);
121 source_range old_range
;
123 /* Constants, declarations, statements, errors, and anything else not
124 counted as an expression cannot usefully be folded further at this
126 if (!IS_EXPR_CODE_CLASS (kind
)
127 || kind
== tcc_statement
)
130 if (IS_EXPR_CODE_CLASS (kind
))
131 old_range
= EXPR_LOCATION_RANGE (expr
);
133 /* Operands of variable-length expressions (function calls) have
134 already been folded, as have __builtin_* function calls, and such
135 expressions cannot occur in constant expressions. */
136 if (kind
== tcc_vl_exp
)
138 *maybe_const_operands
= false;
143 if (code
== C_MAYBE_CONST_EXPR
)
145 tree pre
= C_MAYBE_CONST_EXPR_PRE (expr
);
146 tree inner
= C_MAYBE_CONST_EXPR_EXPR (expr
);
147 if (C_MAYBE_CONST_EXPR_NON_CONST (expr
))
148 *maybe_const_operands
= false;
149 if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr
))
151 *maybe_const_itself
= false;
152 inner
= c_fully_fold_internal (inner
, in_init
, maybe_const_operands
,
153 maybe_const_itself
, true);
156 ret
= build2 (COMPOUND_EXPR
, TREE_TYPE (expr
), pre
, inner
);
162 /* Assignment, increment, decrement, function call and comma
163 operators, and statement expressions, cannot occur in constant
164 expressions if evaluated / outside of sizeof. (Function calls
165 were handled above, though VA_ARG_EXPR is treated like a function
166 call here, and statement expressions are handled through
167 C_MAYBE_CONST_EXPR to avoid folding inside them.) */
171 case PREDECREMENT_EXPR
:
172 case PREINCREMENT_EXPR
:
173 case POSTDECREMENT_EXPR
:
174 case POSTINCREMENT_EXPR
:
176 *maybe_const_operands
= false;
183 *maybe_const_operands
= false;
191 /* Fold individual tree codes as appropriate. */
194 case COMPOUND_LITERAL_EXPR
:
195 /* Any non-constancy will have been marked in a containing
196 C_MAYBE_CONST_EXPR; there is no more folding to do here. */
200 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
201 op1
= TREE_OPERAND (expr
, 1);
202 op2
= TREE_OPERAND (expr
, 2);
203 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
204 maybe_const_itself
, for_int_const
);
205 STRIP_TYPE_NOPS (op0
);
207 ret
= build3 (COMPONENT_REF
, TREE_TYPE (expr
), op0
, op1
, op2
);
210 TREE_READONLY (ret
) = TREE_READONLY (expr
);
211 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (expr
);
216 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
217 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
218 op2
= TREE_OPERAND (expr
, 2);
219 op3
= TREE_OPERAND (expr
, 3);
220 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
221 maybe_const_itself
, for_int_const
);
222 STRIP_TYPE_NOPS (op0
);
223 op1
= c_fully_fold_internal (op1
, in_init
, maybe_const_operands
,
224 maybe_const_itself
, for_int_const
);
225 STRIP_TYPE_NOPS (op1
);
226 op1
= decl_constant_value_for_optimization (op1
);
227 if (op0
!= orig_op0
|| op1
!= orig_op1
)
228 ret
= build4 (ARRAY_REF
, TREE_TYPE (expr
), op0
, op1
, op2
, op3
);
231 TREE_READONLY (ret
) = TREE_READONLY (expr
);
232 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (expr
);
233 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (expr
);
240 case PREDECREMENT_EXPR
:
241 case PREINCREMENT_EXPR
:
242 case POSTDECREMENT_EXPR
:
243 case POSTINCREMENT_EXPR
:
247 case POINTER_PLUS_EXPR
:
276 /* Binary operations evaluating both arguments (increment and
277 decrement are binary internally in GCC). */
278 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
279 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
280 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
281 maybe_const_itself
, for_int_const
);
282 STRIP_TYPE_NOPS (op0
);
283 if (code
!= MODIFY_EXPR
284 && code
!= PREDECREMENT_EXPR
285 && code
!= PREINCREMENT_EXPR
286 && code
!= POSTDECREMENT_EXPR
287 && code
!= POSTINCREMENT_EXPR
)
288 op0
= decl_constant_value_for_optimization (op0
);
289 /* The RHS of a MODIFY_EXPR was fully folded when building that
290 expression for the sake of conversion warnings. */
291 if (code
!= MODIFY_EXPR
)
292 op1
= c_fully_fold_internal (op1
, in_init
, maybe_const_operands
,
293 maybe_const_itself
, for_int_const
);
294 STRIP_TYPE_NOPS (op1
);
295 op1
= decl_constant_value_for_optimization (op1
);
297 if (for_int_const
&& (TREE_CODE (op0
) != INTEGER_CST
298 || TREE_CODE (op1
) != INTEGER_CST
))
301 if (op0
!= orig_op0
|| op1
!= orig_op1
|| in_init
)
303 ? fold_build2_initializer_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
)
304 : fold_build2_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
);
307 if (TREE_OVERFLOW_P (ret
)
308 && !TREE_OVERFLOW_P (op0
)
309 && !TREE_OVERFLOW_P (op1
))
310 overflow_warning (EXPR_LOC_OR_LOC (expr
, input_location
), ret
, expr
);
311 if (code
== LSHIFT_EXPR
312 && TREE_CODE (orig_op0
) != INTEGER_CST
313 && TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
314 && TREE_CODE (op0
) == INTEGER_CST
315 && c_inhibit_evaluation_warnings
== 0
316 && tree_int_cst_sgn (op0
) < 0)
317 warning_at (loc
, OPT_Wshift_negative_value
,
318 "left shift of negative value");
319 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
320 && TREE_CODE (orig_op1
) != INTEGER_CST
321 && TREE_CODE (op1
) == INTEGER_CST
322 && TREE_CODE (TREE_TYPE (orig_op1
)) == INTEGER_TYPE
323 && c_inhibit_evaluation_warnings
== 0)
325 if (tree_int_cst_sgn (op1
) < 0)
326 warning_at (loc
, OPT_Wshift_count_negative
,
328 ? G_("left shift count is negative")
329 : G_("right shift count is negative")));
330 else if ((TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
331 || TREE_CODE (TREE_TYPE (orig_op0
)) == FIXED_POINT_TYPE
)
332 && compare_tree_int (op1
,
333 TYPE_PRECISION (TREE_TYPE (orig_op0
)))
335 warning_at (loc
, OPT_Wshift_count_overflow
,
337 ? G_("left shift count >= width of type")
338 : G_("right shift count >= width of type")));
339 else if (TREE_CODE (TREE_TYPE (orig_op0
)) == VECTOR_TYPE
340 && compare_tree_int (op1
,
341 TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0
))))
343 warning_at (loc
, OPT_Wshift_count_overflow
,
345 ? G_("left shift count >= width of vector element")
346 : G_("right shift count >= width of vector element"));
348 if (code
== LSHIFT_EXPR
349 /* If either OP0 has been folded to INTEGER_CST... */
350 && ((TREE_CODE (orig_op0
) != INTEGER_CST
351 && TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
352 && TREE_CODE (op0
) == INTEGER_CST
)
353 /* ...or if OP1 has been folded to INTEGER_CST... */
354 || (TREE_CODE (orig_op1
) != INTEGER_CST
355 && TREE_CODE (TREE_TYPE (orig_op1
)) == INTEGER_TYPE
356 && TREE_CODE (op1
) == INTEGER_CST
))
357 && c_inhibit_evaluation_warnings
== 0)
358 /* ...then maybe we can detect an overflow. */
359 maybe_warn_shift_overflow (loc
, op0
, op1
);
360 if ((code
== TRUNC_DIV_EXPR
361 || code
== CEIL_DIV_EXPR
362 || code
== FLOOR_DIV_EXPR
363 || code
== EXACT_DIV_EXPR
364 || code
== TRUNC_MOD_EXPR
)
365 && TREE_CODE (orig_op1
) != INTEGER_CST
366 && TREE_CODE (op1
) == INTEGER_CST
367 && (TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
368 || TREE_CODE (TREE_TYPE (orig_op0
)) == FIXED_POINT_TYPE
)
369 && TREE_CODE (TREE_TYPE (orig_op1
)) == INTEGER_TYPE
)
370 warn_for_div_by_zero (loc
, op1
);
377 case ADDR_SPACE_CONVERT_EXPR
:
378 case VIEW_CONVERT_EXPR
:
379 case NON_LVALUE_EXPR
:
387 /* Unary operations. */
388 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
389 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
390 maybe_const_itself
, for_int_const
);
391 STRIP_TYPE_NOPS (op0
);
392 if (code
!= ADDR_EXPR
&& code
!= REALPART_EXPR
&& code
!= IMAGPART_EXPR
)
393 op0
= decl_constant_value_for_optimization (op0
);
395 if (for_int_const
&& TREE_CODE (op0
) != INTEGER_CST
)
398 /* ??? Cope with user tricks that amount to offsetof. The middle-end is
399 not prepared to deal with them if they occur in initializers. */
402 && (op1
= get_base_address (op0
)) != NULL_TREE
403 && INDIRECT_REF_P (op1
)
404 && TREE_CONSTANT (TREE_OPERAND (op1
, 0)))
405 ret
= fold_convert_loc (loc
, TREE_TYPE (expr
), fold_offsetof_1 (op0
));
406 else if (op0
!= orig_op0
|| in_init
)
408 ? fold_build1_initializer_loc (loc
, code
, TREE_TYPE (expr
), op0
)
409 : fold_build1_loc (loc
, code
, TREE_TYPE (expr
), op0
);
412 if (code
== INDIRECT_REF
414 && INDIRECT_REF_P (ret
))
416 TREE_READONLY (ret
) = TREE_READONLY (expr
);
417 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (expr
);
418 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (expr
);
425 /* Don't warn about explicit conversions. We will already
426 have warned about suspect implicit conversions. */
430 if (TREE_OVERFLOW_P (ret
) && !TREE_OVERFLOW_P (op0
))
431 overflow_warning (EXPR_LOCATION (expr
), ret
, op0
);
436 case TRUTH_ANDIF_EXPR
:
437 case TRUTH_ORIF_EXPR
:
438 /* Binary operations not necessarily evaluating both
440 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
441 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
442 op0
= c_fully_fold_internal (op0
, in_init
, &op0_const
, &op0_const_self
,
444 STRIP_TYPE_NOPS (op0
);
446 unused_p
= (op0
== (code
== TRUTH_ANDIF_EXPR
447 ? truthvalue_false_node
448 : truthvalue_true_node
));
449 c_disable_warnings (unused_p
);
450 op1
= c_fully_fold_internal (op1
, in_init
, &op1_const
, &op1_const_self
,
452 STRIP_TYPE_NOPS (op1
);
453 c_enable_warnings (unused_p
);
456 && (TREE_CODE (op0
) != INTEGER_CST
457 /* Require OP1 be an INTEGER_CST only if it's evaluated. */
458 || (!unused_p
&& TREE_CODE (op1
) != INTEGER_CST
)))
461 if (op0
!= orig_op0
|| op1
!= orig_op1
|| in_init
)
463 ? fold_build2_initializer_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
)
464 : fold_build2_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
);
467 *maybe_const_operands
&= op0_const
;
468 *maybe_const_itself
&= op0_const_self
;
472 && (code
== TRUTH_ANDIF_EXPR
473 ? op0
== truthvalue_false_node
474 : op0
== truthvalue_true_node
)))
475 *maybe_const_operands
&= op1_const
;
478 && (code
== TRUTH_ANDIF_EXPR
479 ? op0
== truthvalue_false_node
480 : op0
== truthvalue_true_node
)))
481 *maybe_const_itself
&= op1_const_self
;
485 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
486 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
487 orig_op2
= op2
= TREE_OPERAND (expr
, 2);
488 op0
= c_fully_fold_internal (op0
, in_init
, &op0_const
, &op0_const_self
,
491 STRIP_TYPE_NOPS (op0
);
492 c_disable_warnings (op0
== truthvalue_false_node
);
493 op1
= c_fully_fold_internal (op1
, in_init
, &op1_const
, &op1_const_self
,
495 STRIP_TYPE_NOPS (op1
);
496 c_enable_warnings (op0
== truthvalue_false_node
);
498 c_disable_warnings (op0
== truthvalue_true_node
);
499 op2
= c_fully_fold_internal (op2
, in_init
, &op2_const
, &op2_const_self
,
501 STRIP_TYPE_NOPS (op2
);
502 c_enable_warnings (op0
== truthvalue_true_node
);
505 && (TREE_CODE (op0
) != INTEGER_CST
506 /* Only the evaluated operand must be an INTEGER_CST. */
507 || (op0
== truthvalue_true_node
508 ? TREE_CODE (op1
) != INTEGER_CST
509 : TREE_CODE (op2
) != INTEGER_CST
)))
512 if (op0
!= orig_op0
|| op1
!= orig_op1
|| op2
!= orig_op2
)
513 ret
= fold_build3_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
, op2
);
516 *maybe_const_operands
&= op0_const
;
517 *maybe_const_itself
&= op0_const_self
;
521 && op0
== truthvalue_false_node
))
522 *maybe_const_operands
&= op1_const
;
525 && op0
== truthvalue_false_node
))
526 *maybe_const_itself
&= op1_const_self
;
530 && op0
== truthvalue_true_node
))
531 *maybe_const_operands
&= op2_const
;
534 && op0
== truthvalue_true_node
))
535 *maybe_const_itself
&= op2_const_self
;
539 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
540 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
541 orig_op2
= op2
= TREE_OPERAND (expr
, 2);
542 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
543 maybe_const_itself
, for_int_const
);
544 STRIP_TYPE_NOPS (op0
);
545 op1
= c_fully_fold_internal (op1
, in_init
, maybe_const_operands
,
546 maybe_const_itself
, for_int_const
);
547 STRIP_TYPE_NOPS (op1
);
548 op2
= c_fully_fold_internal (op2
, in_init
, maybe_const_operands
,
549 maybe_const_itself
, for_int_const
);
550 STRIP_TYPE_NOPS (op2
);
552 if (op0
!= orig_op0
|| op1
!= orig_op1
|| op2
!= orig_op2
)
553 ret
= fold_build3_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
, op2
);
558 case EXCESS_PRECISION_EXPR
:
559 /* Each case where an operand with excess precision may be
560 encountered must remove the EXCESS_PRECISION_EXPR around
561 inner operands and possibly put one around the whole
562 expression or possibly convert to the semantic type (which
563 c_fully_fold does); we cannot tell at this stage which is
564 appropriate in any particular case. */
568 /* Make sure to fold the contents of a SAVE_EXPR exactly once. */
569 op0
= TREE_OPERAND (expr
, 0);
570 if (!SAVE_EXPR_FOLDED_P (expr
))
572 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
573 maybe_const_itself
, for_int_const
);
574 TREE_OPERAND (expr
, 0) = op0
;
575 SAVE_EXPR_FOLDED_P (expr
) = true;
577 /* Return the SAVE_EXPR operand if it is invariant. */
578 if (tree_invariant_p (op0
))
583 /* Various codes may appear through folding built-in functions
584 and their arguments. */
589 /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
590 have been done by this point, so remove them again. */
591 nowarning
|= TREE_NO_WARNING (ret
);
592 STRIP_TYPE_NOPS (ret
);
593 if (nowarning
&& !TREE_NO_WARNING (ret
))
595 if (!CAN_HAVE_LOCATION_P (ret
))
596 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
597 TREE_NO_WARNING (ret
) = 1;
601 protected_set_expr_location (ret
, loc
);
602 if (IS_EXPR_CODE_CLASS (kind
))
603 set_source_range (ret
, old_range
.m_start
, old_range
.m_finish
);
608 /* If not optimizing, EXP is not a VAR_DECL, or EXP has array type,
609 return EXP. Otherwise, return either EXP or its known constant
610 value (if it has one), but return EXP if EXP has mode BLKmode. ???
611 Is the BLKmode test appropriate? */
614 decl_constant_value_for_optimization (tree exp
)
620 || TREE_CODE (TREE_TYPE (exp
)) == ARRAY_TYPE
621 || DECL_MODE (exp
) == BLKmode
)
624 ret
= decl_constant_value (exp
);
625 /* Avoid unwanted tree sharing between the initializer and current
626 function's body where the tree can be modified e.g. by the
628 if (ret
!= exp
&& TREE_STATIC (exp
))
629 ret
= unshare_expr (ret
);