/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

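/* For illustration: a handler such as

     try { f (); } catch (E &e) { g (); }

   comes out of these two functions roughly as

     TRY_CATCH_EXPR <f ();, CATCH_EXPR <E, handler-body>>

   with the caught type recorded as the CATCH_EXPR's first operand.  */
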
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

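/* For illustration: "if (cond) f (); else g ();" becomes
   COND_EXPR <cond, f ();, g ();>.  A missing arm is replaced by an
   empty statement, and an if with a constant condition is reduced to
   the live arm when the dead arm has no side effects.  */
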
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

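/* For illustration: "while (cond) body;" comes out of
   genericize_cp_loop roughly as

     LOOP_EXPR <
       if (cond) ; else goto break_lab;
       body;
       continue_lab:>
     break_lab:

   where each label is emitted only if the body actually used "break"
   or "continue".  For a do-while loop the test is placed after the
   body instead.  */
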
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

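/* For illustration: "switch (c) { ... }" becomes a SWITCH_EXPR with the
   same condition and body, followed by the break label, so any
   BREAK_STMT inside the body can be lowered to a goto targeting that
   label.  */
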
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

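/* So "continue;" lowers to a goto to the continue label, preceded by a
   PREDICT_EXPR marking the branch as not taken, and "break;" lowers to
   a plain goto to the break label; both labels come from the enclosing
   genericize_cp_loop or genericize_switch_stmt.  */
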
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

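/* For illustration: given "T t = T (1);" arriving here as an INIT_EXPR
   whose RHS is a TARGET_EXPR wrapping an AGGR_INIT_EXPR, the
   AGGR_INIT_EXPR's slot (a compiler temporary) is rewritten to t and
   the INIT_EXPR is replaced by its RHS, so the constructor initializes
   t directly instead of going through the temporary.  */
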
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

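/* So MUST_NOT_THROW_EXPR <body> gimplifies to roughly the equivalent of

     try { body } catch (...) { std::terminate (); }

   expressed as a GIMPLE_TRY whose handler is a GIMPLE_EH_MUST_NOT_THROW
   carrying the terminate () decl.  */
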
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

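/* For illustration: with

     struct E { };
     E a, b;

   the assignment "a = b;" copies no bytes, so the MODIFY_EXPR handling
   in cp_gimplify_expr can drop the copy entirely while still
   evaluating the operands for side effects.  */
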
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't
   really have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

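/* For illustration: "a[i++]" has side effects as an lvalue because its
   index expression does, whereas merely naming a volatile variable does
   not; nothing happens until some read or write goes through the
   lvalue.  */
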
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
	{
	  if (cilk_cp_detect_spawn_and_unwrap (expr_p))
	    return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	  if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
	    return GS_ERROR;
	}

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_cp_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
		  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_cp_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

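/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference, i.e. DECL_BY_REFERENCE is set.  */
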
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

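/* For illustration: a parameter of class type with a nontrivial copy
   constructor or destructor, e.g.

     void f (std::string s);

   is passed by invisible reference, so inside f the parm actually has
   reference type; cp_genericize_r below rewrites each use of it into a
   dereference.  */
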
/* Return true if the UIDs of both int-tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  For now, since most folding is done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the
	 first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR
			|| TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT
			    | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree using_directive;
	    gcc_assert (TREE_OPERAND (stmt, 0));

	    using_directive = make_node (IMPORTED_DECL);
	    TREE_TYPE (using_directive) = void_type_node;

	    IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	      = TREE_OPERAND (stmt, 0);
	    DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	    BLOCK_VARS (block) = using_directive;
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	}
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it does
   return a value.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);
  t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
  append_to_statement_list (t, p);
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  if (sanitize_flags_p (SANITIZE_RETURN)
      && current_function_decl != NULL_TREE)
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

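/* Schematically, for an array operand the code built above amounts to

     p1 = &arg1[0]...[0];
     p2 = &arg2[0]...[0];   (only when ARG2 is supplied)
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element);
     p2 += sizeof (element);
     if (p1 != end1) goto lab;

   i.e. FN is applied once per innermost element.  */
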
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable with no mutable
   member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const-qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
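/* Sketch of the effect (hypothetical input): for an implicitly
   determined firstprivate (v) where V's class type has an inaccessible
   or deleted copy constructor, cxx_omp_create_clause_info signals
   failure, and the clause is downgraded to shared (v) as above.  */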
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
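/* Hypothetical example: privatizing a non-static data member inside a
   member function,

     #pragma omp parallel private (m)

   makes the front end create an artificial VAR_DECL whose
   DECL_VALUE_EXPR is this->m; that value expression must be ignored and
   the decl remapped during OpenMP lowering, which is what this
   predicate reports.  */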
1991 /* Fold expression X which is used as an rvalue if RVAL is true. */
1993 static tree
1994 cp_fold_maybe_rvalue (tree x, bool rval)
1996 while (true)
1998 x = cp_fold (x);
1999 if (rval && DECL_P (x)
2000 && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
2002 tree v = decl_constant_value (x);
2003 if (v != x && v != error_mark_node)
2005 x = v;
2006 continue;
2009 break;
2011 return x;
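/* For example (illustrative): with

     const int n = 4;

   folding N as an rvalue lets decl_constant_value replace it with the
   INTEGER_CST 4, and the loop re-folds in case the replacement enables
   further simplification.  */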
2014 /* Fold expression X which is used as an rvalue. */
2016 static tree
2017 cp_fold_rvalue (tree x)
2019 return cp_fold_maybe_rvalue (x, true);
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold_rvalue (x);
}
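/* A minimal sketch of the expected behavior (hypothetical trees):
   cp_fully_fold applied to the expression 1 + 2 * 3 yields the
   INTEGER_CST 7, while inside a template the expression is returned
   unchanged.  */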
/* c-common interface to cp_fold.  If IN_INIT, this is in a static
   initializer and certain changes should be made to the folding done
   (but currently are not; FIXME).  We never touch MAYBE_CONST, as it is
   only used for the C front end's C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in FOLD_CACHE.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */
static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;
  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;
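      /* E.g. a CLEANUP_POINT_EXPR wrapping just the constant 42 carries
         no side effects, so it folds to plain 42 (illustrative; the
         exact trees depend on the input).  */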
    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;
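      /* For instance (illustrative), folding (signed char) 300 can
         produce an INTEGER_CST with TREE_OVERFLOW set even though the
         operand 300 did not overflow; the flag is cleared above because
         such a conversion is merely implementation-defined.  */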
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;
    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;
    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these, let GIMPLE folders
             optimize those later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;
    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
          && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
        x = fold_convert (TREE_TYPE (org_x), x);

      break;
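      /* Illustrative example of the fix-up above (hypothetical input):
         in

           struct B { int b : 3; } s;
           ... cond ? s.b : 0 ...

         folding an arm can expose the bit-field's underlying type; the
         fold_convert restores the type of the original COND_EXPR.  */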
    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }
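      /* Hypothetical input for the `optimize' dance above:

           constexpr bool f (int i) { return __builtin_constant_p (i); }

         With optimize == 0, fold_builtin_1 would fold the call straight
         to 0; temporarily raising `optimize' keeps it alive so that
         later constexpr evaluation can still give a meaningful
         answer.  */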
    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }

      x = fold (x);
      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
         folding, evaluates to an invariant.  In that case there is no
         need to wrap the folded tree in a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
        x = r;
      break;
    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Make sure we don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"