/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);
/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];
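
/* For illustration (not from the original sources): in nested loops such as

     while (a)        // pushes a break label and a continue label
       while (b)      // pushes its own pair of labels
         break;       // jumps to the innermost break label

   each begin_bc_block call pushes a fresh label whose DECL_CHAIN points at
   the enclosing one, so bc_label[bc] always names the innermost target, and
   finish_bc_block pops it again on the way out.  */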
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
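
/* Illustrative example (not from the original sources): for

     if (1) f (); else g ();

   the condition is a nonzero constant and the dead arm has no side
   effects, so the statement above is replaced with just "f ();" rather
   than a COND_EXPR.  */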
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
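
/* A sketch of the resulting GENERIC (illustrative, not from the original
   sources): "while (c) body;" comes out roughly as

     LOOP_EXPR
       if (c) ; else goto BREAK;
       body
       CONTINUE:;
     BREAK:;

   i.e. the exit test becomes a conditional goto at the top of the
   LOOP_EXPR body; for a do-while loop the same test sits at the bottom,
   and the labels are only emitted if a break or continue used them.  */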
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
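
/* Illustrative example (not from the original sources): for

     S f ();
     S s = f ();

   the RHS is a TARGET_EXPR whose AGGR_INIT_EXPR builds the result in a
   temporary slot; the code above rewires that slot to "s" itself, so f
   constructs directly into "s" and no extra copy survives.  */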
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
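
/* Illustrative example (not from the original sources): given

     struct E {};          // no data members
     E a, b;
     a = b;                // copying an empty class

   the assignment matches simple_empty_class_p, so cp_gimplify_expr
   below drops the copy entirely; only the operands' side effects (if
   any) survive.  */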
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
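
/* Illustrative examples (not from the original sources):

     a[i++]    // evaluating this lvalue runs i++: side effects
     *p        // computing the lvalue just reads p: none
     v         // merely naming a variable: none

   Only lvalues like the first force the P0145 pre-evaluation done in
   cp_gimplify_expr below.  */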
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
	{
	  if (cilk_cp_detect_spawn_and_unwrap (expr_p))
	    return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	  if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
	    return GS_ERROR;
	}

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_cp_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
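
	/* Illustrative example (not from the original sources): in

	     a[i++] = f ();

	   C++17 sequences the RHS before the LHS, so the call result is
	   stashed in a temporary above before the LHS (and its i++) is
	   gimplified.  */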
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
		  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_cp_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
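
/* Illustrative example (not from the original sources): for

     struct S { S (const S &); int i; };
     int f (S s) { return s.i; }

   S is not trivially copyable, so "s" is passed by invisible reference:
   its PARM_DECL has DECL_BY_REFERENCE set and reference type, and
   cp_genericize_r below rewrites uses of "s" into "*s".  */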
/* Return true if the UIDs of both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
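
/* Illustrative example (not from the original sources):

     template <typename T> struct S { S (const S &); ~S (); };
     void f (S<int> s)          // "use" below is a placeholder call
     {
       #pragma omp task
       use (s);
     }

   inside the task, "s" is implicitly determined firstprivate, so the
   code above instantiates S<int>'s copy constructor and destructor now;
   by gimplification time template instantiation is no longer possible.  */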
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-OpenMP cases is something to move into
     the middle end.  For now most folding is done only on GENERIC
     in fold-const, so we need to perform this before the transformation
     to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were walked the first time
	 cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree using_directive;
	    gcc_assert (TREE_OPERAND (stmt, 0));

	    using_directive = make_node (IMPORTED_DECL);
	    TREE_TYPE (using_directive) = void_type_node;

	    IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	      = TREE_OPERAND (stmt, 0);
	    DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	    BLOCK_VARS (block) = using_directive;
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	}
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree *t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
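
/* Illustrative example (not from the original sources): in

     int f (int x) { if (x) return 1; }

   control can flow off the end when x == 0, which is undefined behavior;
   with -fsanitize=return a runtime diagnostic is appended at the end of
   the body above, otherwise a __builtin_unreachable call is.  */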
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
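
/* A sketch of the code built above for an array operand (illustrative,
   not from the original sources): for "S a[4];" copied element-wise
   from src to dst, the generated GENERIC is roughly

     p1 = &dst[0]; p2 = &src[0]; end1 = p1 + sizeof (dst);
   lab:
     fn (p1, p2);                      // e.g. the copy constructor
     p1 += sizeof (S); p2 += sizeof (S);
     if (p1 != end1) goto lab;

   i.e. a flat pointer loop applying FN once per array element.  */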
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
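
/* Illustrative examples (not from the original sources):

     const int n = 10;              // const, no mutable member: true
     struct M { mutable int c; };
     const M m = M ();              // mutable member: false

   a const object with a mutable member can still be written through,
   so only the first declaration satisfies this predicate.  */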
1932 /* True if OpenMP sharing attribute of DECL is predetermined. */
1934 enum omp_clause_default_kind
1935 cxx_omp_predetermined_sharing (tree decl)
1937 /* Static data members are predetermined shared. */
1938 if (TREE_STATIC (decl))
1940 tree ctx = CP_DECL_CONTEXT (decl);
1941 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1942 return OMP_CLAUSE_DEFAULT_SHARED;
1945 /* Const qualified vars having no mutable member are predetermined
1946 shared. */
1947 if (cxx_omp_const_qual_no_mutable (decl))
1948 return OMP_CLAUSE_DEFAULT_SHARED;
1950 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
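
/* The downgrade to shared above triggers when cxx_omp_create_clause_info
   reports failure, i.e. when the copy constructor or destructor an
   implicit firstprivate would need cannot be used (for instance because
   it is deleted or inaccessible).  */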

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}

/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
          && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}
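
/* For example, given

     const int n = 4;

   an rvalue use of N is replaced by 4 via decl_constant_value and the
   result is re-folded; for an lvalue use such a replacement would be
   wrong, hence the RVAL guard above.  */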

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold_rvalue (x);
}
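
/* Here maybe_constant_value performs C++11 constant-expression evaluation
   (e.g. reducing a call to a constexpr function), while cp_fold_rvalue
   performs the generic tree foldings; per the FIXME above, neither yet
   subsumes the other, so both must run.  */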

/* c-common interface to cp_fold.  If IN_INIT, this is in a static
   initializer and certain changes are made to the folding done.  Or should
   be (FIXME).  We never touch maybe_const, as it is only used for the C
   front end's C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold () doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;
      break;
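
      /* E.g. for (signed char) 300 the out-of-range conversion is merely
         implementation-defined, which is why the TREE_OVERFLOW flag that
         fold sets on the resulting constant is cleared above.  */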

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these here; let the GIMPLE
             folders optimize them later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;
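
      /* The TREE_NO_WARNING bookkeeping above ensures that a comparison the
         front end already marked as "do not warn" (e.g. for
         -Wnonnull-compare) keeps that mark on its folded or rebuilt form,
         so the warning is not resurrected later.  */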

    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
          && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
        x = fold_convert (TREE_TYPE (org_x), x);

      break;
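
      /* E.g. if folding reduces the condition to a constant and returns a
         bit-field arm directly, the result's type can differ from the
         original COND_EXPR's; the fold_convert above restores it.  */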

    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }
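
      /* For example, a call sq (3) to

           constexpr int sq (int i) { return i * i; }

         is reduced to the constant 9 by maybe_constant_value above; if the
         result is still a CALL_EXPR and no argument changed, the original
         tree is returned so that tree sharing is preserved.  */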

    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }

    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
         folding, evaluates to an invariant.  In that case no need to wrap
         this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
        x = r;
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"