/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
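
/* For illustration (not part of the original source): with the three
   helpers above, a "break" inside a lowered construct simply becomes
   "goto <blab>", where <blab> is the label pushed by begin_bc_block;
   finish_bc_block only emits the LABEL_EXPR for <blab> if get_bc_label
   was called, i.e. if some break/continue actually targeted it.  */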
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
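
/* For illustration (not from the original source): "if (1) f (); else
   g ();" comes out of this function as just "f ();", because the
   condition is a nonzero constant and the else arm has no side effects;
   for a non-constant condition a void COND_EXPR "cond ? f () : g ()"
   is built instead.  */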
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
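
/* For illustration (not from the original source): a while loop

     while (cond) body;

   comes out of this function roughly as

     LOOP_EXPR:
       if (cond) ; else goto break_lab;
       body;
       continue_lab:;
     break_lab:;

   i.e. the exit test leads the body (COND_IS_FIRST), and any break or
   continue in BODY has already been rewritten into a goto to one of the
   two labels while they were on the bc_label stack.  */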
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}
/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}
/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}
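
/* For illustration (not from the original source): "switch (c) { case 0:
   ...; break; }" becomes a SWITCH_EXPR over C whose body still contains
   the case labels, but with each "break" already lowered to a goto and
   the LABEL_EXPR for the break target appended after the body by
   finish_bc_block.  */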
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
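
/* For illustration (not from the original source): for a statement such
   as "x + 1;" under -Wunused-value, the expression has no side effects
   and a non-void type, so "statement with no effect" is diagnosed here,
   before gimplification can move side effects away and lose the
   information needed for the warning.  */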
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
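
/* For illustration (not from the original source): for "T t = T (args);"
   the initializer is an AGGR_INIT_EXPR whose slot is a temporary; the
   loop above rewrites that slot to "t" itself and replaces the INIT_EXPR
   with the initializer, so the constructor builds directly into the
   target rather than copying from a temporary.  */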
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
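
/* For illustration (not from the original source): MUST_NOT_THROW_EXPR
   (body) lowers to roughly

     try { body; } catch (...) { terminate (); }

   expressed as a GIMPLE_TRY whose handler is a GIMPLE_EH_MUST_NOT_THROW
   naming terminate_fn.  */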
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
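
/* For illustration (not from the original source): given
   "struct E {}; E a, b;", the copy "a = b" satisfies this predicate
   (the RHS is a plain lvalue and E is a really empty class), which lets
   cp_gimplify_expr drop the store entirely below.  */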
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
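
/* For illustration (not from the original source): for "a[f ()]" the
   ARRAY_REF index f () gives the lvalue itself a side effect, so this
   returns true; for "volatile int v;" the lvalue "v" is flagged with
   TREE_SIDE_EFFECTS but has none until actually read or written, so
   this returns false.  */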
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
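
/* For illustration (not from the original source): under strong
   evaluation order (the C++17 default, flag_strong_eval_order > 1), an
   assignment like "a[i ()] = f ();" takes the MODIFY_EXPR path above:
   the LHS a[i ()] has lvalue side effects and the RHS is a call, so the
   RHS is forced into a temporary in *PRE_P to keep it sequenced before
   the LHS, as P0145 requires.  */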
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
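
/* For illustration (not from the original source): for a class-typed
   local referenced inside "#pragma omp task" with no default clause,
   the walk above decides the variable is implicitly firstprivate and
   instantiates its copy constructor and destructor right away, while
   template instantiation is still possible.  */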
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle end.  For now most folding is done only on GENERIC in
   fold-const, so we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were already walked the
         first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}
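
/* For illustration (not from the original source): the hash_set check at
   the top is what keeps deeply shared trees linear: a node reached many
   times (e.g. a SAVE_EXPR referenced from several places) is folded and
   walked once, and every later visit finds it in the set and skips its
   subtrees, avoiding the exponential blowup the comment describes.  */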
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          TREE_USED (h->to) |= TREE_USED (stmt);
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* A private clause doesn't cause any references to the
                 var in outer contexts, so avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
                && warning_at (loc, OPT_Wterminate,
                               "throw will always call terminate()")
                && cxx_dialect >= cxx11
                && DECL_DESTRUCTOR_P (current_function_decl))
              inform (loc, "in C++11 destructors default to noexcept");
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this throw will terminate because "
                          "destructors default to noexcept");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
         improve any optimizations in that case, just break UB code.
         Don't add it with -fsanitize=unreachable -fno-sanitize=return
         either: UBSan covers this with ubsan_instrument_return above,
         where sufficient information is provided, while the
         __builtin_unreachable () below would just result in a
         hard-to-understand runtime error without a location.  */
      && (!optimize
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            while (!tsi_end_p (i))
              {
                tree p = tsi_stmt (i);
                if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
                  break;
                tsi_prev (&i);
              }
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
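
/* For illustration (not from the original source): for
   "int f (int x) { if (x) return 1; }" the scan above finds no trailing
   RETURN_EXPR, so with -fsanitize=return a runtime diagnostic is
   appended to the body, and otherwise (when optimizing) a
   __builtin_unreachable () marks falling off the end as undefined.  */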
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
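
/* For illustration (not from the original source): for "T arr[4]" and a
   two-argument FN (say a copy constructor), the array branch above
   emits roughly

     p1 = &arr[0];  p2 = &src[0];
     lab: FN (p1, p2);
     p1 += sizeof (T);  p2 += sizeof (T);
     if (p1 != end1) goto lab;

   i.e. a hand-built pointer loop applying FN to each element.  */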
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
          || is_invisiref_parm (decl));
}
/* Return true if DECL is a const-qualified variable with no mutable
   members.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
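/* Illustrative example (hypothetical user code):

       struct A { int i; };
       struct B { mutable int j; };
       const A a = { 1 };   // const, no mutable member: true
       const B b = { 2 };   // mutable member: false

   Only 'a' is a candidate for predetermined shared below.  */
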
/* True if the OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const-qualified variables with no mutable members are
     predetermined shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Likewise, but also include the artificial vars.  We don't want to
   disallow mentioning the artificial vars in explicit clauses, as we
   use artificial vars e.g. for loop constructs with random access
   iterators other than pointers, but during gimplification we want to
   treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values; those
     are usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
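/* Illustrative sketch (an assumed, typical case rather than one taken
   from this file): with the GNU variable-length-array extension,

       void f (int n)
       {
	 int a[n];
       #pragma omp parallel
	 ...
       }

   the saved array bound can become an artificial integral temporary;
   predetermining it as shared here means it needs no explicit
   data-sharing clause.  */
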
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
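/* Illustrative example (hypothetical user code): given

       const int n = 4;

   folding a use of 'n' as an rvalue first yields the VAR_DECL; the
   decl_constant_value step above then substitutes the INTEGER_CST 4
   and loops in case the result folds further.  */
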
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}
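/* Illustrative example (hypothetical user code): for an aggregate
   initializer such as

       struct A { int i, j; };
       constexpr A a = A{ 1, 2 };

   maybe_constant_value can hand back the constant CONSTRUCTOR wrapped
   in a TARGET_EXPR; the unwrapping above returns the bare CONSTRUCTOR
   to the caller instead.  */
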
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
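/* Note: FOLD_CACHE is GTY((deletable)), so the garbage collector may
   discard the whole map at a collection point; cp_fold below therefore
   recreates it lazily whenever it finds it NULL.  */
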
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;
  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
	     folding of the operand should be in the caches, and if
	     cp_fold_r sees it, it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
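      /* E.g. (illustrative user code, not from this file):
	     &((struct S *) 0)->field
	 reduces to a constant byte offset, which the fold_offsetof
	 call below computes.  */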
      if (op0 != error_mark_node
	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      if (TREE_NO_WARNING (org_x)
	  && warn_nonnull_compare
	  && COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    TREE_NO_WARNING (x) = 1;
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}

      break;
    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid a folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;
    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
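	/* Illustrative example (hypothetical user code): when folding
	       constexpr bool f (int i) { return __builtin_constant_p (i); }
	   at -O0, temporarily raising optimize below keeps the builtin
	   from being folded to 0 before constexpr evaluation gets a
	   chance to decide.  */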
	if (callee && DECL_BUILT_IN (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Prevent trying to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}
#include "gt-cp-cp-gimplify.h"