gcc/c/c-fold.c (official-gcc.git)
/* Support for fully folding sub-trees of an expression for C compiler.
   Copyright (C) 1992-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "bitmap.h"
#include "c-tree.h"
#include "intl.h"
#include "gimplify.h"

static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
                                   bool);
/* If DISABLE is true, stop issuing warnings.  This is used when
   parsing code that we know will not be executed.  This function may
   be called multiple times, and works as a stack.  */

static void
c_disable_warnings (bool disable)
{
  if (disable)
    {
      ++c_inhibit_evaluation_warnings;
      fold_defer_overflow_warnings ();
    }
}

/* If ENABLE is true, reenable issuing warnings.  */

static void
c_enable_warnings (bool enable)
{
  if (enable)
    {
      --c_inhibit_evaluation_warnings;
      fold_undefer_and_ignore_overflow_warnings ();
    }
}
/* Try to fold ARRAY_REF ary[index] if possible and not handled by
   normal fold, return NULL_TREE otherwise.  */
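/* For example, an element reference such as "foo"[2] appearing in an
   initializer folds to the character constant 'o' here; elements wider
   than one char are reconstructed from the string's bytes with
   native_interpret_expr.  */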
static tree
c_fold_array_ref (tree type, tree ary, tree index)
{
  if (TREE_CODE (ary) != STRING_CST
      || TREE_CODE (index) != INTEGER_CST
      || TREE_OVERFLOW (index)
      || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
      || !tree_fits_uhwi_p (index))
    return NULL_TREE;

  tree elem_type = TREE_TYPE (TREE_TYPE (ary));
  unsigned elem_nchars = (TYPE_PRECISION (elem_type)
                          / TYPE_PRECISION (char_type_node));
  unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
  tree nelts = array_type_nelts (TREE_TYPE (ary));
  bool dummy1 = true, dummy2 = true;
  nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
  unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
  if (!tree_int_cst_le (index, nelts)
      || i >= len
      || i + elem_nchars > len)
    return NULL_TREE;

  if (elem_nchars == 1)
    return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);

  const unsigned char *ptr
    = ((const unsigned char *) TREE_STRING_POINTER (ary) + i * elem_nchars);
  return native_interpret_expr (type, ptr, elem_nchars);
}
/* Fully fold EXPR, an expression that was not folded (beyond integer
   constant expressions and null pointer constants) when being built
   up.  If IN_INIT, this is in a static initializer and certain
   changes are made to the folding done.  Clear *MAYBE_CONST if
   MAYBE_CONST is not NULL and EXPR is definitely not a constant
   expression because it contains an evaluated operator (in C99) or an
   operator outside of sizeof returning an integer constant (in C90)
   not permitted in constant expressions, or because it contains an
   evaluated arithmetic overflow.  (*MAYBE_CONST should typically be
   set to true by callers before calling this function.)  Return the
   folded expression.  Function arguments have already been folded
   before calling this function, as have the contents of SAVE_EXPR,
   TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
   C_MAYBE_CONST_EXPR.  LVAL is true if it should be treated as an
   lvalue.  */
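/* For instance, an expression such as (0, 1) is still folded here, but
   *MAYBE_CONST is cleared because the comma operator is not permitted in
   constant expressions, letting callers diagnose it where a constant
   expression is required.  */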
tree
c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
{
  tree ret;
  tree eptype = NULL_TREE;
  bool dummy = true;
  bool maybe_const_itself = true;
  location_t loc = EXPR_LOCATION (expr);

  if (!maybe_const)
    maybe_const = &dummy;
  if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (expr);
      expr = TREE_OPERAND (expr, 0);
    }
  ret = c_fully_fold_internal (expr, in_init, maybe_const,
                               &maybe_const_itself, false, lval);
  if (eptype)
    ret = fold_convert_loc (loc, eptype, ret);
  *maybe_const &= maybe_const_itself;
  return ret;
}
/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.
   LVAL indicates folding of an lvalue, where we can't replace it with
   an rvalue.  */
static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
                       bool *maybe_const_itself, bool for_int_const, bool lval)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = warning_suppressed_p (expr, OPT_Woverflow);
  bool unused_p;
  bool op0_lval = false;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
    {
      /* Except for variables, which we can optimize to their initializers.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
        {
          if (in_init)
            ret = decl_constant_value_1 (expr, true);
          else
            {
              ret = decl_constant_value (expr);
              if (ret != expr
                  && (TYPE_MODE (TREE_TYPE (ret)) == BLKmode
                      || TREE_CODE (TREE_TYPE (ret)) == ARRAY_TYPE))
                return expr;
            }
          /* Avoid unwanted tree sharing between the initializer and current
             function's body where the tree can be modified e.g. by the
             gimplifier.  */
          if (ret != expr && TREE_STATIC (expr))
            ret = unshare_expr (ret);
          return ret;
        }
      return expr;
    }
  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }
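  /* A C_MAYBE_CONST_EXPR wraps, for example, a statement expression or a
     subexpression already known not to be a constant expression; the
     wrapper is stripped here and its flags are propagated into
     *MAYBE_CONST_OPERANDS and *MAYBE_CONST_ITSELF.  */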
  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
        *maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
        {
          *maybe_const_itself = false;
          inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
                                         maybe_const_itself, true, lval);
        }
      if (pre && !in_init)
        ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
        ret = inner;
      goto out;
    }
  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }
  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
         C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
        ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      if (!lval)
        ret = fold (ret);
      goto out;
    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
        {
          ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
          if (ret)
            goto out;
          ret = expr;
        }
      if (op0 != orig_op0 || op1 != orig_op1)
        ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      if (!lval)
        ret = fold (ret);
      goto out;
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      op0_lval = true;
      /* FALLTHRU */
    case COMPOUND_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case MEM_REF:
      /* Binary operations evaluating both arguments (increment and
         decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const,
                                   op0_lval);
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
         expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
        op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
                            || TREE_CODE (op1) != INTEGER_CST))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
        ret = in_init
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
        ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
          && !TREE_OVERFLOW_P (op0)
          && !(BINARY_CLASS_P (op0) && TREE_OVERFLOW_P (TREE_OPERAND (op0, 1)))
          && !TREE_OVERFLOW_P (op1))
        overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
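      /* The shift diagnostics below fire only when an operand became an
         INTEGER_CST through this folding (the ORIG_OP* were not constants),
         e.g. a const-qualified variable whose value was substituted above
         when optimizing.  */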
      if (code == LSHIFT_EXPR
          && TREE_CODE (orig_op0) != INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
          && TREE_CODE (op0) == INTEGER_CST
          && c_inhibit_evaluation_warnings == 0
          && tree_int_cst_sgn (op0) < 0)
        warning_at (loc, OPT_Wshift_negative_value,
                    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
          && c_inhibit_evaluation_warnings == 0)
        {
          if (tree_int_cst_sgn (op1) < 0)
            warning_at (loc, OPT_Wshift_count_negative,
                        (code == LSHIFT_EXPR
                         ? G_("left shift count is negative")
                         : G_("right shift count is negative")));
          else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
                    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
                   && compare_tree_int (op1,
                                        TYPE_PRECISION (TREE_TYPE (orig_op0)))
                      >= 0)
            warning_at (loc, OPT_Wshift_count_overflow,
                        (code == LSHIFT_EXPR
                         ? G_("left shift count >= width of type")
                         : G_("right shift count >= width of type")));
          else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
                   && compare_tree_int (op1,
                                        TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
                      >= 0)
            warning_at (loc, OPT_Wshift_count_overflow,
                        code == LSHIFT_EXPR
                        ? G_("left shift count >= width of vector element")
                        : G_("right shift count >= width of vector element"));
        }
      if (code == LSHIFT_EXPR
          /* If either OP0 has been folded to INTEGER_CST...  */
          && ((TREE_CODE (orig_op0) != INTEGER_CST
               && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
               && TREE_CODE (op0) == INTEGER_CST)
              /* ...or if OP1 has been folded to INTEGER_CST...  */
              || (TREE_CODE (orig_op1) != INTEGER_CST
                  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
                  && TREE_CODE (op1) == INTEGER_CST))
          && c_inhibit_evaluation_warnings == 0)
        /* ...then maybe we can detect an overflow.  */
        maybe_warn_shift_overflow (loc, op0, op1);
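      /* Likewise, a divisor that folds to zero only at this point (for
         instance a const-qualified variable initialized to 0 and
         substituted above) is diagnosed by warn_for_div_by_zero.  */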
      if ((code == TRUNC_DIV_EXPR
           || code == CEIL_DIV_EXPR
           || code == FLOOR_DIV_EXPR
           || code == EXACT_DIV_EXPR
           || code == TRUNC_MOD_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
              || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
        warn_for_div_by_zero (loc, op1);
      if (code == MEM_REF
          && ret != expr
          && TREE_CODE (ret) == MEM_REF)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      goto out;
    case ADDR_EXPR:
      op0_lval = true;
      goto unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      op0_lval = lval;
      /* FALLTHRU */
    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case CONJ_EXPR:
    unary:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const,
                                   op0_lval);
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
        goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
         not prepared to deal with them if they occur in initializers.  */
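      /* For instance, &((struct s *) 0)->f may be folded by fold_offsetof
         below into a constant whose value is the member's byte offset.  */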
      if (op0 != orig_op0
          && code == ADDR_EXPR
          && (op1 = get_base_address (op0)) != NULL_TREE
          && INDIRECT_REF_P (op1)
          && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
        ret = fold_offsetof (op0, TREE_TYPE (expr));
      else if (op0 != orig_op0 || in_init)
        ret = in_init
          ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
          : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
        ret = fold (expr);
      if (code == INDIRECT_REF
          && ret != expr
          && INDIRECT_REF_P (ret))
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      switch (code)
        {
        case FIX_TRUNC_EXPR:
        case FLOAT_EXPR:
        CASE_CONVERT:
          /* Don't warn about explicit conversions.  We will already
             have warned about suspect implicit conversions.  */
          break;

        default:
          if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
            overflow_warning (EXPR_LOCATION (expr), ret, op0);
          break;
        }
      goto out;
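    /* For a short-circuit operator whose result is already decided by the
       first operand, e.g. 0 && f (), warnings from folding the unevaluated
       second operand are suppressed via c_disable_warnings.  */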
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
         arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
                          ? truthvalue_false_node
                          : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
          && (TREE_CODE (op0) != INTEGER_CST
              /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
              || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
        ret = in_init
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
        ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && (code == TRUTH_ANDIF_EXPR
                ? op0 == truthvalue_false_node
                : op0 == truthvalue_true_node)))
        *maybe_const_operands &= op1_const;
      if (!(op0_const
            && op0_const_self
            && (code == TRUTH_ANDIF_EXPR
                ? op0 == truthvalue_false_node
                : op0 == truthvalue_true_node)))
        *maybe_const_itself &= op1_const_self;
      goto out;
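    /* For c ? a : b with a condition folding to a constant, warnings are
       suppressed while folding the arm that will not be evaluated, and (in
       C99 and later) that arm's non-constancy does not clear
       *MAYBE_CONST_OPERANDS.  */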
    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const, false);

      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
          && (TREE_CODE (op0) != INTEGER_CST
              /* Only the evaluated operand must be an INTEGER_CST.  */
              || (op0 == truthvalue_true_node
                  ? TREE_CODE (op1) != INTEGER_CST
                  : TREE_CODE (op2) != INTEGER_CST)))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
        ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && op0 == truthvalue_false_node))
        *maybe_const_operands &= op1_const;
      if (!(op0_const
            && op0_const_self
            && op0 == truthvalue_false_node))
        *maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && op0 == truthvalue_true_node))
        *maybe_const_operands &= op2_const;
      if (!(op0_const
            && op0_const_self
            && op0 == truthvalue_true_node))
        *maybe_const_itself &= op2_const_self;
      goto out;
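    /* Unlike COND_EXPR, a vector condition selects per element and both
       value operands are evaluated, so all three operands are folded with
       warnings left enabled.  */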
    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
        ret = fold (expr);
      goto out;
    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
         encountered must remove the EXCESS_PRECISION_EXPR around
         inner operands and possibly put one around the whole
         expression or possibly convert to the semantic type (which
         c_fully_fold does); we cannot tell at this stage which is
         appropriate in any particular case.  */
      gcc_unreachable ();
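    /* A SAVE_EXPR can be reached from more than one place in a tree, so
       SAVE_EXPR_FOLDED_P ensures its operand is folded only on the first
       visit.  */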
    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
        {
          op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                       maybe_const_itself, for_int_const,
                                       false);
          TREE_OPERAND (expr, 0) = op0;
          SAVE_EXPR_FOLDED_P (expr) = true;
        }
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
        ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
         and their arguments.  */
      goto out;
    }
 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= warning_suppressed_p (ret, OPT_Woverflow);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !warning_suppressed_p (ret, OPT_Woverflow))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
        ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      suppress_warning (ret, OPT_Woverflow);
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
        set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
/* Fold X for consideration by one of the warning functions when checking
   whether an expression has a constant value.  */

tree
fold_for_warn (tree x)
{
  /* The C front end has already folded X appropriately.  */
  return x;
}