1 /* Support for fully folding sub-trees of an expression for C compiler.
2 Copyright (C) 1992-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 static tree
c_fully_fold_internal (tree expr
, bool, bool *, bool *, bool);
32 /* If DISABLE is true, stop issuing warnings. This is used when
33 parsing code that we know will not be executed. This function may
34 be called multiple times, and works as a stack. */
37 c_disable_warnings (bool disable
)
41 ++c_inhibit_evaluation_warnings
;
42 fold_defer_overflow_warnings ();
46 /* If ENABLE is true, reenable issuing warnings. */
49 c_enable_warnings (bool enable
)
53 --c_inhibit_evaluation_warnings
;
54 fold_undefer_and_ignore_overflow_warnings ();
58 /* Fully fold EXPR, an expression that was not folded (beyond integer
59 constant expressions and null pointer constants) when being built
60 up. If IN_INIT, this is in a static initializer and certain
61 changes are made to the folding done. Clear *MAYBE_CONST if
62 MAYBE_CONST is not NULL and EXPR is definitely not a constant
63 expression because it contains an evaluated operator (in C99) or an
64 operator outside of sizeof returning an integer constant (in C90)
65 not permitted in constant expressions, or because it contains an
66 evaluated arithmetic overflow. (*MAYBE_CONST should typically be
67 set to true by callers before calling this function.) Return the
68 folded expression. Function arguments have already been folded
69 before calling this function, as have the contents of SAVE_EXPR,
70 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
71 C_MAYBE_CONST_EXPR. */
74 c_fully_fold (tree expr
, bool in_init
, bool *maybe_const
)
77 tree eptype
= NULL_TREE
;
79 bool maybe_const_itself
= true;
80 location_t loc
= EXPR_LOCATION (expr
);
84 if (TREE_CODE (expr
) == EXCESS_PRECISION_EXPR
)
86 eptype
= TREE_TYPE (expr
);
87 expr
= TREE_OPERAND (expr
, 0);
89 ret
= c_fully_fold_internal (expr
, in_init
, maybe_const
,
90 &maybe_const_itself
, false);
92 ret
= fold_convert_loc (loc
, eptype
, ret
);
93 *maybe_const
&= maybe_const_itself
;
97 /* Internal helper for c_fully_fold. EXPR and IN_INIT are as for
98 c_fully_fold. *MAYBE_CONST_OPERANDS is cleared because of operands
99 not permitted, while *MAYBE_CONST_ITSELF is cleared because of
100 arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
101 both evaluated and unevaluated subexpressions while
102 *MAYBE_CONST_ITSELF is carried from only evaluated
103 subexpressions). FOR_INT_CONST indicates if EXPR is an expression
104 with integer constant operands, and if any of the operands doesn't
105 get folded to an integer constant, don't fold the expression itself. */
/* NOTE(review): this extract appears to be missing interior lines
   (switch headers, braces, and return statements are not visible);
   the comments added below annotate only the visible fragments.  */
108 c_fully_fold_internal (tree expr
, bool in_init
, bool *maybe_const_operands
,
109 bool *maybe_const_itself
, bool for_int_const
)
112 enum tree_code code
= TREE_CODE (expr
);
113 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
114 location_t loc
= EXPR_LOCATION (expr
);
115 tree op0
, op1
, op2
, op3
;
116 tree orig_op0
, orig_op1
, orig_op2
;
117 bool op0_const
= true, op1_const
= true, op2_const
= true;
118 bool op0_const_self
= true, op1_const_self
= true, op2_const_self
= true;
119 bool nowarning
= TREE_NO_WARNING (expr
);
121 source_range old_range
;
123 /* Constants, declarations, statements, errors, SAVE_EXPRs and
124 anything else not counted as an expression cannot usefully be
125 folded further at this point. */
126 if (!IS_EXPR_CODE_CLASS (kind
)
127 || kind
== tcc_statement
128 || code
== SAVE_EXPR
)
/* Remember the original source range so it can be reapplied to the
   folded result at the end of the function.  */
131 if (IS_EXPR_CODE_CLASS (kind
))
132 old_range
= EXPR_LOCATION_RANGE (expr
);
134 /* Operands of variable-length expressions (function calls) have
135 already been folded, as have __builtin_* function calls, and such
136 expressions cannot occur in constant expressions. */
137 if (kind
== tcc_vl_exp
)
139 *maybe_const_operands
= false;
/* C_MAYBE_CONST_EXPR: fold the wrapped expression, propagating the
   non-constancy flags recorded on the wrapper, then rebuild as a
   COMPOUND_EXPR of the pre part and the folded inner part.  */
144 if (code
== C_MAYBE_CONST_EXPR
)
146 tree pre
= C_MAYBE_CONST_EXPR_PRE (expr
);
147 tree inner
= C_MAYBE_CONST_EXPR_EXPR (expr
);
148 if (C_MAYBE_CONST_EXPR_NON_CONST (expr
))
149 *maybe_const_operands
= false;
150 if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr
))
152 *maybe_const_itself
= false;
153 inner
= c_fully_fold_internal (inner
, in_init
, maybe_const_operands
,
154 maybe_const_itself
, true);
157 ret
= build2 (COMPOUND_EXPR
, TREE_TYPE (expr
), pre
, inner
);
163 /* Assignment, increment, decrement, function call and comma
164 operators, and statement expressions, cannot occur in constant
165 expressions if evaluated / outside of sizeof. (Function calls
166 were handled above, though VA_ARG_EXPR is treated like a function
167 call here, and statement expressions are handled through
168 C_MAYBE_CONST_EXPR to avoid folding inside them.) */
172 case PREDECREMENT_EXPR
:
173 case PREINCREMENT_EXPR
:
174 case POSTDECREMENT_EXPR
:
175 case POSTINCREMENT_EXPR
:
177 *maybe_const_operands
= false;
184 *maybe_const_operands
= false;
192 /* Fold individual tree codes as appropriate. */
195 case COMPOUND_LITERAL_EXPR
:
196 /* Any non-constancy will have been marked in a containing
197 C_MAYBE_CONST_EXPR; there is no more folding to do here. */
/* COMPONENT_REF-style handling: only operand 0 (the object) is
   folded; the field and offset operands are left as-is, and the
   readonly/volatile flags are copied to the rebuilt node.  */
201 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
202 op1
= TREE_OPERAND (expr
, 1);
203 op2
= TREE_OPERAND (expr
, 2);
204 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
205 maybe_const_itself
, for_int_const
);
206 STRIP_TYPE_NOPS (op0
);
208 ret
= build3 (COMPONENT_REF
, TREE_TYPE (expr
), op0
, op1
, op2
);
211 TREE_READONLY (ret
) = TREE_READONLY (expr
);
212 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (expr
);
/* ARRAY_REF-style handling: fold the base and the index, try to
   replace the index with its known constant value, and rebuild only
   when something changed.  */
217 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
218 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
219 op2
= TREE_OPERAND (expr
, 2);
220 op3
= TREE_OPERAND (expr
, 3);
221 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
222 maybe_const_itself
, for_int_const
);
223 STRIP_TYPE_NOPS (op0
);
224 op1
= c_fully_fold_internal (op1
, in_init
, maybe_const_operands
,
225 maybe_const_itself
, for_int_const
);
226 STRIP_TYPE_NOPS (op1
);
227 op1
= decl_constant_value_for_optimization (op1
);
228 if (op0
!= orig_op0
|| op1
!= orig_op1
)
229 ret
= build4 (ARRAY_REF
, TREE_TYPE (expr
), op0
, op1
, op2
, op3
);
232 TREE_READONLY (ret
) = TREE_READONLY (expr
);
233 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (expr
);
234 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (expr
);
241 case PREDECREMENT_EXPR
:
242 case PREINCREMENT_EXPR
:
243 case POSTDECREMENT_EXPR
:
244 case POSTINCREMENT_EXPR
:
248 case POINTER_PLUS_EXPR
:
277 /* Binary operations evaluating both arguments (increment and
278 decrement are binary internally in GCC). */
279 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
280 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
281 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
282 maybe_const_itself
, for_int_const
);
283 STRIP_TYPE_NOPS (op0
);
/* Don't substitute a decl's constant value for the LHS of an
   assignment or the operand of ++/-- -- those operands are written,
   not merely read.  */
284 if (code
!= MODIFY_EXPR
285 && code
!= PREDECREMENT_EXPR
286 && code
!= PREINCREMENT_EXPR
287 && code
!= POSTDECREMENT_EXPR
288 && code
!= POSTINCREMENT_EXPR
)
289 op0
= decl_constant_value_for_optimization (op0
);
290 /* The RHS of a MODIFY_EXPR was fully folded when building that
291 expression for the sake of conversion warnings. */
292 if (code
!= MODIFY_EXPR
)
293 op1
= c_fully_fold_internal (op1
, in_init
, maybe_const_operands
,
294 maybe_const_itself
, for_int_const
);
295 STRIP_TYPE_NOPS (op1
);
296 op1
= decl_constant_value_for_optimization (op1
);
298 if (for_int_const
&& (TREE_CODE (op0
) != INTEGER_CST
299 || TREE_CODE (op1
) != INTEGER_CST
))
302 if (op0
!= orig_op0
|| op1
!= orig_op1
|| in_init
)
304 ? fold_build2_initializer_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
)
305 : fold_build2_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
);
/* Warn about overflow that the folding itself introduced (neither
   operand already carried the overflow flag).  */
308 if (TREE_OVERFLOW_P (ret
)
309 && !TREE_OVERFLOW_P (op0
)
310 && !TREE_OVERFLOW_P (op1
))
311 overflow_warning (EXPR_LOC_OR_LOC (expr
, input_location
), ret
);
/* Diagnose shifts that only became visible as suspicious after
   folding: a negative left-shift operand, and negative or
   oversized shift counts.  */
312 if (code
== LSHIFT_EXPR
313 && TREE_CODE (orig_op0
) != INTEGER_CST
314 && TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
315 && TREE_CODE (op0
) == INTEGER_CST
316 && c_inhibit_evaluation_warnings
== 0
317 && tree_int_cst_sgn (op0
) < 0)
318 warning_at (loc
, OPT_Wshift_negative_value
,
319 "left shift of negative value");
320 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
321 && TREE_CODE (orig_op1
) != INTEGER_CST
322 && TREE_CODE (op1
) == INTEGER_CST
323 && TREE_CODE (TREE_TYPE (orig_op1
)) == INTEGER_TYPE
324 && c_inhibit_evaluation_warnings
== 0)
326 if (tree_int_cst_sgn (op1
) < 0)
327 warning_at (loc
, OPT_Wshift_count_negative
,
329 ? G_("left shift count is negative")
330 : G_("right shift count is negative")));
331 else if ((TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
332 || TREE_CODE (TREE_TYPE (orig_op0
)) == FIXED_POINT_TYPE
)
333 && compare_tree_int (op1
,
334 TYPE_PRECISION (TREE_TYPE (orig_op0
)))
336 warning_at (loc
, OPT_Wshift_count_overflow
,
338 ? G_("left shift count >= width of type")
339 : G_("right shift count >= width of type")));
340 else if (TREE_CODE (TREE_TYPE (orig_op0
)) == VECTOR_TYPE
341 && compare_tree_int (op1
,
342 TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0
))))
344 warning_at (loc
, OPT_Wshift_count_overflow
,
346 ? G_("left shift count >= width of vector element")
347 : G_("right shift count >= width of vector element"));
349 if (code
== LSHIFT_EXPR
350 /* If either OP0 has been folded to INTEGER_CST... */
351 && ((TREE_CODE (orig_op0
) != INTEGER_CST
352 && TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
353 && TREE_CODE (op0
) == INTEGER_CST
)
354 /* ...or if OP1 has been folded to INTEGER_CST... */
355 || (TREE_CODE (orig_op1
) != INTEGER_CST
356 && TREE_CODE (TREE_TYPE (orig_op1
)) == INTEGER_TYPE
357 && TREE_CODE (op1
) == INTEGER_CST
))
358 && c_inhibit_evaluation_warnings
== 0)
359 /* ...then maybe we can detect an overflow. */
360 maybe_warn_shift_overflow (loc
, op0
, op1
);
/* Warn if folding exposed a constant zero divisor.  */
361 if ((code
== TRUNC_DIV_EXPR
362 || code
== CEIL_DIV_EXPR
363 || code
== FLOOR_DIV_EXPR
364 || code
== EXACT_DIV_EXPR
365 || code
== TRUNC_MOD_EXPR
)
366 && TREE_CODE (orig_op1
) != INTEGER_CST
367 && TREE_CODE (op1
) == INTEGER_CST
368 && (TREE_CODE (TREE_TYPE (orig_op0
)) == INTEGER_TYPE
369 || TREE_CODE (TREE_TYPE (orig_op0
)) == FIXED_POINT_TYPE
)
370 && TREE_CODE (TREE_TYPE (orig_op1
)) == INTEGER_TYPE
)
371 warn_for_div_by_zero (loc
, op1
);
378 case ADDR_SPACE_CONVERT_EXPR
:
379 case VIEW_CONVERT_EXPR
:
380 case NON_LVALUE_EXPR
:
388 /* Unary operations. */
389 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
390 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
391 maybe_const_itself
, for_int_const
);
392 STRIP_TYPE_NOPS (op0
);
/* Taking an address or extracting a complex part must see the decl
   itself, not its constant value.  */
393 if (code
!= ADDR_EXPR
&& code
!= REALPART_EXPR
&& code
!= IMAGPART_EXPR
)
394 op0
= decl_constant_value_for_optimization (op0
);
396 if (for_int_const
&& TREE_CODE (op0
) != INTEGER_CST
)
399 /* ??? Cope with user tricks that amount to offsetof. The middle-end is
400 not prepared to deal with them if they occur in initializers. */
403 && (op1
= get_base_address (op0
)) != NULL_TREE
404 && INDIRECT_REF_P (op1
)
405 && TREE_CONSTANT (TREE_OPERAND (op1
, 0)))
406 ret
= fold_convert_loc (loc
, TREE_TYPE (expr
), fold_offsetof_1 (op0
));
407 else if (op0
!= orig_op0
|| in_init
)
409 ? fold_build1_initializer_loc (loc
, code
, TREE_TYPE (expr
), op0
)
410 : fold_build1_loc (loc
, code
, TREE_TYPE (expr
), op0
);
413 if (code
== INDIRECT_REF
415 && INDIRECT_REF_P (ret
))
417 TREE_READONLY (ret
) = TREE_READONLY (expr
);
418 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (expr
);
419 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (expr
);
426 /* Don't warn about explicit conversions. We will already
427 have warned about suspect implicit conversions. */
431 if (TREE_OVERFLOW_P (ret
) && !TREE_OVERFLOW_P (op0
))
432 overflow_warning (EXPR_LOCATION (expr
), ret
);
437 case TRUTH_ANDIF_EXPR
:
438 case TRUTH_ORIF_EXPR
:
439 /* Binary operations not necessarily evaluating both
/* (i.e. && / ||: the second operand is folded with warnings
   suppressed -- via c_disable_warnings -- when the first operand
   statically decides the result, and its constancy flags are
   tracked separately in op1_const / op1_const_self.)  */
441 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
442 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
443 op0
= c_fully_fold_internal (op0
, in_init
, &op0_const
, &op0_const_self
,
445 STRIP_TYPE_NOPS (op0
);
447 unused_p
= (op0
== (code
== TRUTH_ANDIF_EXPR
448 ? truthvalue_false_node
449 : truthvalue_true_node
));
450 c_disable_warnings (unused_p
);
451 op1
= c_fully_fold_internal (op1
, in_init
, &op1_const
, &op1_const_self
,
453 STRIP_TYPE_NOPS (op1
);
454 c_enable_warnings (unused_p
);
457 && (TREE_CODE (op0
) != INTEGER_CST
458 /* Require OP1 be an INTEGER_CST only if it's evaluated. */
459 || (!unused_p
&& TREE_CODE (op1
) != INTEGER_CST
)))
462 if (op0
!= orig_op0
|| op1
!= orig_op1
|| in_init
)
464 ? fold_build2_initializer_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
)
465 : fold_build2_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
);
468 *maybe_const_operands
&= op0_const
;
469 *maybe_const_itself
&= op0_const_self
;
473 && (code
== TRUTH_ANDIF_EXPR
474 ? op0
== truthvalue_false_node
475 : op0
== truthvalue_true_node
)))
476 *maybe_const_operands
&= op1_const
;
479 && (code
== TRUTH_ANDIF_EXPR
480 ? op0
== truthvalue_false_node
481 : op0
== truthvalue_true_node
)))
482 *maybe_const_itself
&= op1_const_self
;
/* COND_EXPR: fold each arm with warnings disabled when the folded
   condition statically selects the other arm, tracking constancy
   per operand.  */
486 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
487 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
488 orig_op2
= op2
= TREE_OPERAND (expr
, 2);
489 op0
= c_fully_fold_internal (op0
, in_init
, &op0_const
, &op0_const_self
,
492 STRIP_TYPE_NOPS (op0
);
493 c_disable_warnings (op0
== truthvalue_false_node
);
494 op1
= c_fully_fold_internal (op1
, in_init
, &op1_const
, &op1_const_self
,
496 STRIP_TYPE_NOPS (op1
);
497 c_enable_warnings (op0
== truthvalue_false_node
);
499 c_disable_warnings (op0
== truthvalue_true_node
);
500 op2
= c_fully_fold_internal (op2
, in_init
, &op2_const
, &op2_const_self
,
502 STRIP_TYPE_NOPS (op2
);
503 c_enable_warnings (op0
== truthvalue_true_node
);
506 && (TREE_CODE (op0
) != INTEGER_CST
507 /* Only the evaluated operand must be an INTEGER_CST. */
508 || (op0
== truthvalue_true_node
509 ? TREE_CODE (op1
) != INTEGER_CST
510 : TREE_CODE (op2
) != INTEGER_CST
)))
513 if (op0
!= orig_op0
|| op1
!= orig_op1
|| op2
!= orig_op2
)
514 ret
= fold_build3_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
, op2
);
517 *maybe_const_operands
&= op0_const
;
518 *maybe_const_itself
&= op0_const_self
;
522 && op0
== truthvalue_false_node
))
523 *maybe_const_operands
&= op1_const
;
526 && op0
== truthvalue_false_node
))
527 *maybe_const_itself
&= op1_const_self
;
531 && op0
== truthvalue_true_node
))
532 *maybe_const_operands
&= op2_const
;
535 && op0
== truthvalue_true_node
))
536 *maybe_const_itself
&= op2_const_self
;
/* Three-operand codes folding all operands unconditionally
   (presumably e.g. BIT_FIELD_REF / VEC_COND_EXPR -- the case labels
   are not visible in this extract).  */
540 orig_op0
= op0
= TREE_OPERAND (expr
, 0);
541 orig_op1
= op1
= TREE_OPERAND (expr
, 1);
542 orig_op2
= op2
= TREE_OPERAND (expr
, 2);
543 op0
= c_fully_fold_internal (op0
, in_init
, maybe_const_operands
,
544 maybe_const_itself
, for_int_const
);
545 STRIP_TYPE_NOPS (op0
);
546 op1
= c_fully_fold_internal (op1
, in_init
, maybe_const_operands
,
547 maybe_const_itself
, for_int_const
);
548 STRIP_TYPE_NOPS (op1
);
549 op2
= c_fully_fold_internal (op2
, in_init
, maybe_const_operands
,
550 maybe_const_itself
, for_int_const
);
551 STRIP_TYPE_NOPS (op2
);
553 if (op0
!= orig_op0
|| op1
!= orig_op1
|| op2
!= orig_op2
)
554 ret
= fold_build3_loc (loc
, code
, TREE_TYPE (expr
), op0
, op1
, op2
);
559 case EXCESS_PRECISION_EXPR
:
560 /* Each case where an operand with excess precision may be
561 encountered must remove the EXCESS_PRECISION_EXPR around
562 inner operands and possibly put one around the whole
563 expression or possibly convert to the semantic type (which
564 c_fully_fold does); we cannot tell at this stage which is
565 appropriate in any particular case. */
569 /* Various codes may appear through folding built-in functions
570 and their arguments. */
575 /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
576 have been done by this point, so remove them again. */
577 nowarning
|= TREE_NO_WARNING (ret
);
578 STRIP_TYPE_NOPS (ret
);
/* Re-apply the no-warning flag if stripping lost it; wrap in a
   NOP_EXPR first when the node cannot carry flags/locations.  */
579 if (nowarning
&& !TREE_NO_WARNING (ret
))
581 if (!CAN_HAVE_LOCATION_P (ret
))
582 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
583 TREE_NO_WARNING (ret
) = 1;
/* Restore the original location and source range on the result.  */
587 protected_set_expr_location (ret
, loc
);
588 if (IS_EXPR_CODE_CLASS (kind
))
589 set_source_range (ret
, old_range
.m_start
, old_range
.m_finish
);
594 /* If not optimizing, EXP is not a VAR_DECL, or EXP has array type,
595 return EXP. Otherwise, return either EXP or its known constant
596 value (if it has one), but return EXP if EXP has mode BLKmode. ???
597 Is the BLKmode test appropriate? */
600 decl_constant_value_for_optimization (tree exp
)
606 || TREE_CODE (TREE_TYPE (exp
)) == ARRAY_TYPE
607 || DECL_MODE (exp
) == BLKmode
)
610 ret
= decl_constant_value (exp
);
611 /* Avoid unwanted tree sharing between the initializer and current
612 function's body where the tree can be modified e.g. by the
614 if (ret
!= exp
&& TREE_STATIC (exp
))
615 ret
= unshare_expr (ret
);