/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */
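
/* As an informal sketch (not literal tree-dump output), a while or for
   loop (COND_IS_FIRST set) becomes roughly

       loop:
         if (cond) ; else goto break_lab;
         body;
         continue_lab:
         incr;
         goto loop;
       break_lab:

   while a do-while loop places the exit test after the body instead.  */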
static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */
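/* The result is roughly "try { BODY } catch (...) { terminate (); }":
   a GIMPLE_TRY whose handler must-not-throw via terminate_fn.  */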
static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */
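
/* For example, in "struct E {}; e1 = e2;" no data actually moves, so
   the copy of E can be dropped entirely.  */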
static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't
   really have side-effects until there is a read or write through it.  */
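
/* E.g. merely naming a volatile variable does nothing; only a load or
   store through it does.  ARRAY_REFs such as "a[i++]" are the exception,
   since the index expression is evaluated even without an access.  */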
static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
              mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
              LHS, so for scalar assignment we need to preevaluate if the
              RHS could be affected by LHS side-effects even if it has no
              side-effects of its own.  We don't need this for classes because
              class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl
              && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
                                    BUILT_IN_FRONTEND))
            *expr_p = boolean_false_node;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
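
/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */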
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs of both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle end.  For now most folding is done only on GENERIC
   in fold-const, so we need to perform this before the
   transformation to GIMPLE form.  */
static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
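  /* For OpenMP loop constructs, fold only the operands of the controlling
     comparison and increment expressions, not those expressions themselves,
     so the canonical form the gimplifier expects (with the iteration
     variable at the top level) is not folded away.  */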
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          TREE_USED (h->to) |= TREE_USED (stmt);
          *walk_subtrees = 0;
          return NULL;
        }
    }
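
  /* A reference bound to a constant address can appear here as an
     INTEGER_CST with reference type; give -fsanitize=null and
     -fsanitize=alignment a chance to instrument it too.  */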
  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;
    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-null
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;
    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "throw will always call terminate()")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to noexcept");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this throw will terminate because "
                          "destructors default to noexcept");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }
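
  /* Remember this tree so it isn't genericized a second time.  */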
  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */
static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
         improve any optimizations in that case, and will just break code
         with undefined behavior.  Don't add it for
         -fsanitize=unreachable -fno-sanitize=return either: UBSan covers
         that case via ubsan_instrument_return, where sufficient location
         information is available, while a bare __builtin_unreachable ()
         with return sanitization disabled would just produce a hard to
         understand runtime error without a location.  */
      && (!optimize
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
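          /* Scan backwards past any trailing debug markers to find the
             last real statement of the list.  */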
          {
            tree_stmt_iterator i = tsi_last (t);
            while (!tsi_end_p (i))
              {
                tree p = tsi_stmt (i);
                if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
                  break;
                tsi_prev (&i);
              }
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */
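
/* For array operands this builds an explicit loop, roughly

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     lab: fn (p1, p2, <default args>);
          p1 += sizeof (element);  p2 += sizeof (element);
          if (p1 != end1) goto lab;

   an informal sketch of the code constructed below.  */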
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
          || is_invisiref_parm (decl));
}
1933 /* Return true if DECL is const qualified var having no mutable member. */
1934 bool
1935 cxx_omp_const_qual_no_mutable (tree decl)
1937 tree type = TREE_TYPE (decl);
1938 if (TYPE_REF_P (type))
1940 if (!is_invisiref_parm (decl))
1941 return false;
1942 type = TREE_TYPE (type);
1944 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1946 /* NVR doesn't preserve const qualification of the
1947 variable's type. */
1948 tree outer = outer_curly_brace_block (current_function_decl);
1949 tree var;
1951 if (outer)
1952 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1953 if (VAR_P (var)
1954 && DECL_NAME (decl) == DECL_NAME (var)
1955 && (TYPE_MAIN_VARIANT (type)
1956 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1958 if (TYPE_READONLY (TREE_TYPE (var)))
1959 type = TREE_TYPE (var);
1960 break;
1965 if (type == error_mark_node)
1966 return false;
1968 /* Variables with const-qualified type having no mutable member
1969 are predetermined shared. */
1970 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1971 return true;
1973 return false;
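/* As a hypothetical illustration of the mutable check above: a const
   object is only safely predetermined shared when no member can still
   be written through the const access path.

       struct A { int i; };
       struct B { mutable int i; };
       const A a = { 1 };   // predetermined shared
       const B b = { 1 };   // not predetermined: b.i remains writable  */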
1976 /* True if the OpenMP sharing attribute of DECL is predetermined. */
1978 enum omp_clause_default_kind
1979 cxx_omp_predetermined_sharing_1 (tree decl)
1981 /* Static data members are predetermined shared. */
1982 if (TREE_STATIC (decl))
1984 tree ctx = CP_DECL_CONTEXT (decl);
1985 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1986 return OMP_CLAUSE_DEFAULT_SHARED;
1989 /* Const qualified vars having no mutable member are predetermined
1990 shared. */
1991 if (cxx_omp_const_qual_no_mutable (decl))
1992 return OMP_CLAUSE_DEFAULT_SHARED;
1994 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1997 /* Likewise, but also include the artificial vars. We don't want to
1998 disallow artificial vars from being mentioned in explicit clauses,
1999 as we use them e.g. for loop constructs with random access
2000 iterators other than pointers, but during gimplification we want
2001 to treat them as predetermined. */
2003 enum omp_clause_default_kind
2004 cxx_omp_predetermined_sharing (tree decl)
2006 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2007 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2008 return ret;
2010 /* Predetermine artificial variables holding integral values, those
2011 are usually result of gimplify_one_sizepos or SAVE_EXPR
2012 gimplification. */
2013 if (VAR_P (decl)
2014 && DECL_ARTIFICIAL (decl)
2015 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2016 && !(DECL_LANG_SPECIFIC (decl)
2017 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2018 return OMP_CLAUSE_DEFAULT_SHARED;
2020 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
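/* The artificial integral vars predetermined above typically come from
   variably-modified types.  A hypothetical sketch:

       void f (int n)
       {
         int vla[n * 2];      // gimplify_one_sizepos introduces an
       #pragma omp parallel   // artificial var holding the array size;
         use (sizeof (vla));  // treating it as shared just works
       }
*/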
2023 /* Finalize an implicitly determined clause. */
2025 void
2026 cxx_omp_finish_clause (tree c, gimple_seq *)
2028 tree decl, inner_type;
2029 bool make_shared = false;
2031 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2032 return;
2034 decl = OMP_CLAUSE_DECL (c);
2035 decl = require_complete_type (decl);
2036 inner_type = TREE_TYPE (decl);
2037 if (decl == error_mark_node)
2038 make_shared = true;
2039 else if (TYPE_REF_P (TREE_TYPE (decl)))
2040 inner_type = TREE_TYPE (inner_type);
2042 /* We're interested in the base element, not arrays. */
2043 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2044 inner_type = TREE_TYPE (inner_type);
2046 /* Check for special function availability by building a call to one.
2047 Save the results, because later we won't be in the right context
2048 for making these queries. */
2049 if (!make_shared
2050 && CLASS_TYPE_P (inner_type)
2051 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2052 make_shared = true;
2054 if (make_shared)
2056 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2057 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2058 OMP_CLAUSE_SHARED_READONLY (c) = 0;
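/* A hypothetical case: with

       struct NC { NC (); NC (const NC &) = delete; int i; };
       NC nc;

   an implicitly firstprivate "nc" cannot be copy-constructed, so the
   clause-info check above fails and the clause is turned into shared.  */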
2062 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2063 disregarded in OpenMP construct, because it is going to be
2064 remapped during OpenMP lowering. SHARED is true if DECL
2065 is going to be shared, false if it is going to be privatized. */
2067 bool
2068 cxx_omp_disregard_value_expr (tree decl, bool shared)
2070 return !shared
2071 && VAR_P (decl)
2072 && DECL_HAS_VALUE_EXPR_P (decl)
2073 && DECL_ARTIFICIAL (decl)
2074 && DECL_LANG_SPECIFIC (decl)
2075 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2078 /* Fold expression X which is used as an rvalue if RVAL is true. */
2080 static tree
2081 cp_fold_maybe_rvalue (tree x, bool rval)
2083 while (true)
2085 x = cp_fold (x);
2086 if (rval && DECL_P (x)
2087 && !TYPE_REF_P (TREE_TYPE (x)))
2089 tree v = decl_constant_value (x);
2090 if (v != x && v != error_mark_node)
2092 x = v;
2093 continue;
2096 break;
2098 return x;
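/* For instance (hypothetical), given "const int n = 42;", folding the
   expression "n" as an rvalue lets the loop above substitute the
   initializer via decl_constant_value, yielding the constant 42;
   folding it as an lvalue (RVAL false) must preserve the DECL itself.  */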
2101 /* Fold expression X which is used as an rvalue. */
2103 static tree
2104 cp_fold_rvalue (tree x)
2106 return cp_fold_maybe_rvalue (x, true);
2109 /* Perform folding on expression X. */
2111 tree
2112 cp_fully_fold (tree x)
2114 if (processing_template_decl)
2115 return x;
2116 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2117 have to call both. */
2118 if (cxx_dialect >= cxx11)
2120 x = maybe_constant_value (x);
2121 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2122 a TARGET_EXPR; undo that here. */
2123 if (TREE_CODE (x) == TARGET_EXPR)
2124 x = TARGET_EXPR_INITIAL (x);
2125 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2126 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2127 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2128 x = TREE_OPERAND (x, 0);
2130 return cp_fold_rvalue (x);
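/* For instance (hypothetical), for

       struct P { int x, y; };
       constexpr P p = { 1, 2 };

   maybe_constant_value can hand back the { 1, 2 } CONSTRUCTOR wrapped
   in a TARGET_EXPR; the unwrapping above strips that so the final
   cp_fold_rvalue call sees the bare aggregate constant.  */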
2133 /* c-common interface to cp_fold. If IN_INIT, this is in a static
2134 initializer and certain changes are, or at least should be (FIXME),
2135 made to the folding done. We never touch maybe_const, as it is only
2136 used for the C front end's C_MAYBE_CONST_EXPR. */
2138 tree
2139 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2141 return cp_fold_maybe_rvalue (x, !lval);
2144 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2146 /* Dispose of the whole FOLD_CACHE. */
2148 void
2149 clear_fold_cache (void)
2151 if (fold_cache != NULL)
2152 fold_cache->empty ();
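/* The caching discipline used by cp_fold below follows the usual
   memoization pattern.  A minimal stand-alone sketch of the same idea
   (plain C++, hypothetical names, not GCC internals):

       #include <unordered_map>

       template <typename Node, typename Fold>
       const Node *
       memo_fold (const Node *n, Fold fold,
                  std::unordered_map<const Node *, const Node *> &cache)
       {
         auto it = cache.find (n);
         if (it != cache.end ())
           return it->second;        // previously folded
         const Node *r = fold (n);   // compute once
         cache.emplace (n, r);       // remember org -> folded
         if (r != n)
           cache.emplace (r, r);     // folded result is a fixpoint
         return r;
       }
*/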
2155 /* This function tries to fold an expression X.
2156 To avoid combinatorial explosion, folding results are kept in fold_cache.
2157 If X is invalid, we don't fold at all.
2158 For performance reasons we don't cache expressions representing a
2159 declaration or constant.
2160 The function returns X or its folded variant. */
2162 static tree
2163 cp_fold (tree x)
2165 tree op0, op1, op2, op3;
2166 tree org_x = x, r = NULL_TREE;
2167 enum tree_code code;
2168 location_t loc;
2169 bool rval_ops = true;
2171 if (!x || x == error_mark_node)
2172 return x;
2174 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2175 return x;
2177 /* Don't bother to cache DECLs or constants. */
2178 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2179 return x;
2181 if (fold_cache == NULL)
2182 fold_cache = hash_map<tree, tree>::create_ggc (101);
2184 if (tree *cached = fold_cache->get (x))
2185 return *cached;
2187 code = TREE_CODE (x);
2188 switch (code)
2190 case CLEANUP_POINT_EXPR:
2191 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2192 effects. */
2193 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2194 if (!TREE_SIDE_EFFECTS (r))
2195 x = r;
2196 break;
2198 case SIZEOF_EXPR:
2199 x = fold_sizeof_expr (x);
2200 break;
2202 case VIEW_CONVERT_EXPR:
2203 rval_ops = false;
2204 /* FALLTHRU */
2205 case CONVERT_EXPR:
2206 case NOP_EXPR:
2207 case NON_LVALUE_EXPR:
2209 if (VOID_TYPE_P (TREE_TYPE (x)))
2211 /* This is just to make sure we don't end up with casts to
2212 void from error_mark_node. If we just return x, then
2213 cp_fold_r might fold the operand into error_mark_node and
2214 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2215 during gimplification doesn't like such casts.
2216 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2217 folding of the operand should already be in the caches, and
2218 when called from cp_fold_r it will be modified in place. */
2219 op0 = cp_fold (TREE_OPERAND (x, 0));
2220 if (op0 == error_mark_node)
2221 x = error_mark_node;
2222 break;
2225 loc = EXPR_LOCATION (x);
2226 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2228 if (code == CONVERT_EXPR
2229 && SCALAR_TYPE_P (TREE_TYPE (x))
2230 && op0 != void_node)
2231 /* During parsing we used convert_to_*_nofold; re-convert now using the
2232 folding variants, since fold() doesn't do those transformations. */
2233 x = fold (convert (TREE_TYPE (x), op0));
2234 else if (op0 != TREE_OPERAND (x, 0))
2236 if (op0 == error_mark_node)
2237 x = error_mark_node;
2238 else
2239 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2241 else
2242 x = fold (x);
2244 /* Conversion of an out-of-range value has implementation-defined
2245 behavior; the language considers it different from arithmetic
2246 overflow, which is undefined. */
2247 if (TREE_CODE (op0) == INTEGER_CST
2248 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2249 TREE_OVERFLOW (x) = false;
2251 break;
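/* Concretely (hypothetical): folding (signed char) 300 sets
   TREE_OVERFLOW on the resulting INTEGER_CST, but because the operand
   was itself a constant without overflow and out-of-range conversion
   is only implementation-defined, the flag is cleared above.  */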
2253 case INDIRECT_REF:
2254 /* We don't need the decltype(auto) obfuscation anymore. */
2255 if (REF_PARENTHESIZED_P (x))
2257 tree p = maybe_undo_parenthesized_ref (x);
2258 return cp_fold (p);
2260 goto unary;
2262 case ADDR_EXPR:
2263 loc = EXPR_LOCATION (x);
2264 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2266 /* Cope with user tricks that amount to offsetof. */
2267 if (op0 != error_mark_node
2268 && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2269 && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2271 tree val = get_base_address (op0);
2272 if (val
2273 && INDIRECT_REF_P (val)
2274 && COMPLETE_TYPE_P (TREE_TYPE (val))
2275 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2277 val = TREE_OPERAND (val, 0);
2278 STRIP_NOPS (val);
2279 if (TREE_CODE (val) == INTEGER_CST)
2280 return fold_offsetof (op0, TREE_TYPE (x));
2283 goto finish_unary;
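/* The classic user spelling handled above is (hypothetical):

       #define my_offsetof(T, m) ((size_t) &((T *) 0)->m)

   where the address of a member off a constant base dereference is
   folded to a plain integer offset by fold_offsetof.  */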
2285 case REALPART_EXPR:
2286 case IMAGPART_EXPR:
2287 rval_ops = false;
2288 /* FALLTHRU */
2289 case CONJ_EXPR:
2290 case FIX_TRUNC_EXPR:
2291 case FLOAT_EXPR:
2292 case NEGATE_EXPR:
2293 case ABS_EXPR:
2294 case ABSU_EXPR:
2295 case BIT_NOT_EXPR:
2296 case TRUTH_NOT_EXPR:
2297 case FIXED_CONVERT_EXPR:
2298 unary:
2300 loc = EXPR_LOCATION (x);
2301 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2303 finish_unary:
2304 if (op0 != TREE_OPERAND (x, 0))
2306 if (op0 == error_mark_node)
2307 x = error_mark_node;
2308 else
2310 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2311 if (code == INDIRECT_REF
2312 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2314 TREE_READONLY (x) = TREE_READONLY (org_x);
2315 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2316 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2320 else
2321 x = fold (x);
2323 gcc_assert (TREE_CODE (x) != COND_EXPR
2324 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2325 break;
2327 case UNARY_PLUS_EXPR:
2328 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2329 if (op0 == error_mark_node)
2330 x = error_mark_node;
2331 else
2332 x = fold_convert (TREE_TYPE (x), op0);
2333 break;
2335 case POSTDECREMENT_EXPR:
2336 case POSTINCREMENT_EXPR:
2337 case INIT_EXPR:
2338 case PREDECREMENT_EXPR:
2339 case PREINCREMENT_EXPR:
2340 case COMPOUND_EXPR:
2341 case MODIFY_EXPR:
2342 rval_ops = false;
2343 /* FALLTHRU */
2344 case POINTER_PLUS_EXPR:
2345 case PLUS_EXPR:
2346 case POINTER_DIFF_EXPR:
2347 case MINUS_EXPR:
2348 case MULT_EXPR:
2349 case TRUNC_DIV_EXPR:
2350 case CEIL_DIV_EXPR:
2351 case FLOOR_DIV_EXPR:
2352 case ROUND_DIV_EXPR:
2353 case TRUNC_MOD_EXPR:
2354 case CEIL_MOD_EXPR:
2355 case ROUND_MOD_EXPR:
2356 case RDIV_EXPR:
2357 case EXACT_DIV_EXPR:
2358 case MIN_EXPR:
2359 case MAX_EXPR:
2360 case LSHIFT_EXPR:
2361 case RSHIFT_EXPR:
2362 case LROTATE_EXPR:
2363 case RROTATE_EXPR:
2364 case BIT_AND_EXPR:
2365 case BIT_IOR_EXPR:
2366 case BIT_XOR_EXPR:
2367 case TRUTH_AND_EXPR:
2368 case TRUTH_ANDIF_EXPR:
2369 case TRUTH_OR_EXPR:
2370 case TRUTH_ORIF_EXPR:
2371 case TRUTH_XOR_EXPR:
2372 case LT_EXPR: case LE_EXPR:
2373 case GT_EXPR: case GE_EXPR:
2374 case EQ_EXPR: case NE_EXPR:
2375 case UNORDERED_EXPR: case ORDERED_EXPR:
2376 case UNLT_EXPR: case UNLE_EXPR:
2377 case UNGT_EXPR: case UNGE_EXPR:
2378 case UNEQ_EXPR: case LTGT_EXPR:
2379 case RANGE_EXPR: case COMPLEX_EXPR:
2381 loc = EXPR_LOCATION (x);
2382 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2383 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2385 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2387 if (op0 == error_mark_node || op1 == error_mark_node)
2388 x = error_mark_node;
2389 else
2390 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2392 else
2393 x = fold (x);
2395 /* This is only needed for -Wnonnull-compare and only if
2396 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2397 generation, we do it always. */
2398 if (COMPARISON_CLASS_P (org_x))
2400 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2402 else if (COMPARISON_CLASS_P (x))
2404 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2405 TREE_NO_WARNING (x) = 1;
2407 /* Otherwise give up on optimizing these; let the GIMPLE folders
2408 optimize them later on. */
2409 else if (op0 != TREE_OPERAND (org_x, 0)
2410 || op1 != TREE_OPERAND (org_x, 1))
2412 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2413 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2414 TREE_NO_WARNING (x) = 1;
2416 else
2417 x = org_x;
2419 break;
2421 case VEC_COND_EXPR:
2422 case COND_EXPR:
2423 loc = EXPR_LOCATION (x);
2424 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2425 op1 = cp_fold (TREE_OPERAND (x, 1));
2426 op2 = cp_fold (TREE_OPERAND (x, 2));
2428 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2430 warning_sentinel s (warn_int_in_bool_context);
2431 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2432 op1 = cp_truthvalue_conversion (op1);
2433 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2434 op2 = cp_truthvalue_conversion (op2);
2436 else if (VOID_TYPE_P (TREE_TYPE (x)))
2438 if (TREE_CODE (op0) == INTEGER_CST)
2440 /* If the condition is constant, fold can fold away
2441 the COND_EXPR. Some statement-level uses of COND_EXPR
2442 have one of the branches NULL; avoid a folding crash. */
2443 if (!op1)
2444 op1 = build_empty_stmt (loc);
2445 if (!op2)
2446 op2 = build_empty_stmt (loc);
2448 else
2450 /* Otherwise, don't bother folding a void COND_EXPR, since
2451 it can't produce a constant value. */
2452 if (op0 != TREE_OPERAND (x, 0)
2453 || op1 != TREE_OPERAND (x, 1)
2454 || op2 != TREE_OPERAND (x, 2))
2455 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2456 break;
2460 if (op0 != TREE_OPERAND (x, 0)
2461 || op1 != TREE_OPERAND (x, 1)
2462 || op2 != TREE_OPERAND (x, 2))
2464 if (op0 == error_mark_node
2465 || op1 == error_mark_node
2466 || op2 == error_mark_node)
2467 x = error_mark_node;
2468 else
2469 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2471 else
2472 x = fold (x);
2474 /* A COND_EXPR might have incompatible types in branches if one or both
2475 arms are bitfields. If folding exposed such a branch, fix it up. */
2476 if (TREE_CODE (x) != code
2477 && x != error_mark_node
2478 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2479 x = fold_convert (TREE_TYPE (org_x), x);
2481 break;
2483 case CALL_EXPR:
2485 int i, m, sv = optimize, nw = sv, changed = 0;
2486 tree callee = get_callee_fndecl (x);
2488 /* Some built-in function calls will be evaluated at compile-time in
2489 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2490 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2491 if (callee && fndecl_built_in_p (callee) && !optimize
2492 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2493 && current_function_decl
2494 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2495 nw = 1;
2497 /* Defer folding __builtin_is_constant_evaluated. */
2498 if (callee
2499 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2500 BUILT_IN_FRONTEND))
2501 break;
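/* Folding __builtin_is_constant_evaluated here would pin down its answer
   before we know whether the enclosing context is manifestly
   constant-evaluated; e.g. (hypothetical) for

       constexpr int f () { return __builtin_is_constant_evaluated (); }

   "constexpr int y = f ();" must see 1 while a plain runtime call must
   see 0, so the decision is left to the constexpr machinery.  */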
2503 x = copy_node (x);
2505 m = call_expr_nargs (x);
2506 for (i = 0; i < m; i++)
2508 r = cp_fold (CALL_EXPR_ARG (x, i));
2509 if (r != CALL_EXPR_ARG (x, i))
2511 if (r == error_mark_node)
2513 x = error_mark_node;
2514 break;
2516 changed = 1;
2518 CALL_EXPR_ARG (x, i) = r;
2520 if (x == error_mark_node)
2521 break;
2523 optimize = nw;
2524 r = fold (x);
2525 optimize = sv;
2527 if (TREE_CODE (r) != CALL_EXPR)
2529 x = cp_fold (r);
2530 break;
2533 optimize = nw;
2535 /* Invoke maybe_constant_value for functions declared
2536 constexpr and not called with AGGR_INIT_EXPRs.
2537 TODO:
2538 Do constexpr expansion of expressions where the call itself is not
2539 constant, but the call followed by an INDIRECT_REF is. */
2540 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2541 && !flag_no_inline)
2542 r = maybe_constant_value (x);
2543 optimize = sv;
2545 if (TREE_CODE (r) != CALL_EXPR)
2547 if (DECL_CONSTRUCTOR_P (callee))
2549 loc = EXPR_LOCATION (x);
2550 tree s = build_fold_indirect_ref_loc (loc,
2551 CALL_EXPR_ARG (x, 0));
2552 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2554 x = r;
2555 break;
2558 if (!changed)
2559 x = org_x;
2560 break;
2563 case CONSTRUCTOR:
2565 unsigned i;
2566 constructor_elt *p;
2567 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2568 vec<constructor_elt, va_gc> *nelts = NULL;
2569 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2571 tree op = cp_fold (p->value);
2572 if (op != p->value)
2574 if (op == error_mark_node)
2576 x = error_mark_node;
2577 vec_free (nelts);
2578 break;
2580 if (nelts == NULL)
2581 nelts = elts->copy ();
2582 (*nelts)[i].value = op;
2585 if (nelts)
2587 x = build_constructor (TREE_TYPE (x), nelts);
2588 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2589 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2591 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2592 x = fold (x);
2593 break;
2595 case TREE_VEC:
2597 bool changed = false;
2598 vec<tree, va_gc> *vec = make_tree_vector ();
2599 int i, n = TREE_VEC_LENGTH (x);
2600 vec_safe_reserve (vec, n);
2602 for (i = 0; i < n; i++)
2604 tree op = cp_fold (TREE_VEC_ELT (x, i));
2605 vec->quick_push (op);
2606 if (op != TREE_VEC_ELT (x, i))
2607 changed = true;
2610 if (changed)
2612 r = copy_node (x);
2613 for (i = 0; i < n; i++)
2614 TREE_VEC_ELT (r, i) = (*vec)[i];
2615 x = r;
2618 release_tree_vector (vec);
2621 break;
2623 case ARRAY_REF:
2624 case ARRAY_RANGE_REF:
2626 loc = EXPR_LOCATION (x);
2627 op0 = cp_fold (TREE_OPERAND (x, 0));
2628 op1 = cp_fold (TREE_OPERAND (x, 1));
2629 op2 = cp_fold (TREE_OPERAND (x, 2));
2630 op3 = cp_fold (TREE_OPERAND (x, 3));
2632 if (op0 != TREE_OPERAND (x, 0)
2633 || op1 != TREE_OPERAND (x, 1)
2634 || op2 != TREE_OPERAND (x, 2)
2635 || op3 != TREE_OPERAND (x, 3))
2637 if (op0 == error_mark_node
2638 || op1 == error_mark_node
2639 || op2 == error_mark_node
2640 || op3 == error_mark_node)
2641 x = error_mark_node;
2642 else
2644 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2645 TREE_READONLY (x) = TREE_READONLY (org_x);
2646 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2647 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2651 x = fold (x);
2652 break;
2654 case SAVE_EXPR:
2655 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2656 folding, evaluates to an invariant. In that case no need to wrap
2657 this folded tree with a SAVE_EXPR. */
2658 r = cp_fold (TREE_OPERAND (x, 0));
2659 if (tree_invariant_p (r))
2660 x = r;
2661 break;
2663 default:
2664 return org_x;
2667 fold_cache->put (org_x, x);
2668 /* Make sure we don't try to fold an already folded result again. */
2669 if (x != org_x)
2670 fold_cache->put (x, x);
2672 return x;
2675 #include "gt-cp-cp-gimplify.h"