/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
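
/* For example (a sketch of how the lowering routines below use this
   machinery): genericize_cp_loop brackets a loop with

     tree blab = begin_bc_block (bc_break, locus);
     ... walk the body; a "break" in it becomes
	 "goto get_bc_label (bc_break)" ...
     finish_bc_block (&stmt_list, bc_break, blab);

   so the label is emitted only if some break actually used it, and
   nesting works because begin/finish push and pop bc_label[].  */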

/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
					    gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
		     is_gimple_reg, fb_rvalue);
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
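
/* For example, the body of "void f () throw (int)" ends up wrapped,
   as a sketch (not exact tree-dump syntax), as

     TRY_CATCH_EXPR
       <function body>
     EH_FILTER_EXPR <allowed = int>
       <call the unexpected handler on the current exception pointer>

   which is the pair built by build_gimple_eh_filter_tree above.  */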

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
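
/* For example, "if (x) f (); else g ();" becomes

     COND_EXPR <x, f (), g ()>

   while "if (1) f (); else g ();" is reduced to plain "f ();",
   because the dropped arm has no side effects.  */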

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
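
/* So, as a sketch, "while (cond) body;" comes out as roughly

     LOOP_EXPR:
       COND_EXPR <cond, (void) 0, goto break_label>
       body
       continue_label:
     break_label:

   and a do-while loop instead emits the conditional exit after the
   body (cond_is_first is false).  */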

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
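
/* Thus "continue;" lowers to a PREDICT_EXPR hint followed by
   "goto <continue label>", and "break;" to a bare "goto <break
   label>"; the labels themselves are emitted by finish_bc_block.  */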

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
	/* Handle aggregate NSDMI.  */
	replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
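
/* For example (a sketch, not exact tree-dump syntax), "A a = f ();"
   with A returned by value arrives here as

     INIT_EXPR <a, TARGET_EXPR <D.1, AGGR_INIT_EXPR <f, slot D.1>>>

   and leaves with the AGGR_INIT_EXPR's slot rewritten to "a" itself,
   so f constructs directly into the variable and the temporary is
   dropped.  */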

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
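
/* For example, given "struct E {};" and E variables e1/e2, "e1 = e2"
   satisfies this predicate (an lvalue copied into a really-empty
   class), so the copy can be elided below; a call using the return
   slot optimization does not, since that call really stores into its
   destination.  */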

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
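
/* For example, evaluating "a[f ()]" as an lvalue runs f and so has
   side effects, whereas merely naming "*p" for a pointer-to-volatile
   p does not: the volatile access happens only on an actual read or
   write through the lvalue.  */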

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
	{
	  if (cilk_cp_detect_spawn_and_unwrap (expr_p))
	    {
	      cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
							  pre_p, post_p);
	      return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	    }
	  if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
	    return GS_ERROR;
	}

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_cp_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  {
	    cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	    return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	  }
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
		  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
	{
	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	}
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_cp_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	{
	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	}
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: Folding of non-OMP cases is something to move into the middle
   end eventually.  For now most folding is done only on GENERIC in
   fold-const, so we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the
	       var in outer contexts, avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	/* Don't dereference an invisiref in reduction clause's
	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	   still needs to be genericized.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			    cp_genericize_r, data, NULL);
	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						 : TRY_FINALLY_EXPR,
			  void_type_node,
			  CLEANUP_BODY (stmt),
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (flag_sanitize
	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its
	 BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
	{
	  int i;
	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (VAR_P (d))
	gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }

  else if (TREE_CODE (stmt) == OMP_PARALLEL
	   || TREE_CODE (stmt) == OMP_TASK
	   || TREE_CODE (stmt) == OMP_TASKLOOP)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	  default:
	    break;
	  }
      if (TREE_CODE (stmt) == OMP_TASKLOOP)
	genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      else
	cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }

  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
	/* Never mind.  */;
      else if (wtd->try_block)
	{
	  if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
	      && warning_at (loc, OPT_Wterminate,
			     "throw will always call terminate()")
	      && cxx_dialect >= cxx11
	      && DECL_DESTRUCTOR_P (current_function_decl))
	    inform (loc, "in C++11 destructors default to noexcept");
	}
      else
	{
	  if (warn_cxx11_compat && cxx_dialect < cxx11
	      && DECL_DESTRUCTOR_P (current_function_decl)
	      && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		  == NULL_TREE)
	      && (get_defaulted_eh_spec (current_function_decl)
		  == empty_except_spec))
	    warning_at (loc, OPT_Wc__11_compat,
			"in C++11 this throw will terminate because "
			"destructors default to noexcept");
	}
    }

  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
	   || TREE_CODE (stmt) == OMP_SIMD
	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == PTRMEM_CST)
    {
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
    }
  else if ((flag_sanitize
	    & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	   && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
	  && TREE_CODE (stmt) == NOP_EXPR
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	}
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify at runtime that it does return.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);
  t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
  append_to_statement_list (t, p);
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
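
/* For array operands the code above effectively emits (a sketch; the
   real trees are the MODIFY_EXPR/COND_EXPR nodes built above):

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
   lab:
     fn (p1, p2-if-present, default args...);
     p1 += element size;  likewise p2;
     if (p1 != end1) goto lab;

   i.e. a hand-built per-element loop over the flattened array.  */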

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}

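/* Sketch of the intended effect (an illustration, not taken from this
   file's comments): for an implicitly determined firstprivate of a
   class-type variable, cxx_omp_create_clause_info builds the copy
   constructor/destructor calls now; if that fails -- say the copy
   constructor is deleted or inaccessible -- the clause is downgraded to
   shared instead of emitting an invalid copy.  */
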
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold (x);
}

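/* A rough illustration (hypothetical input): given constexpr int f ()
   { return 41; }, for the expression f () + 1 maybe_constant_value
   evaluates the constexpr call to 41 and cp_fold then reduces 41 + 1 to
   42; as the FIXME notes, neither routine currently subsumes the other.  */
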
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
          && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}

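/* For instance (illustrative): given const int n = 42, folding a use of n
   as an rvalue lets decl_constant_value substitute 42, and the loop then
   re-folds in case the substitution enables further simplification.  */
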
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   The function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;

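      /* Example of the overflow cleanup above (hypothetical input): for
         (signed char) 300 the conversion produces an INTEGER_CST with
         TREE_OVERFLOW set; since an out-of-range conversion is only
         implementation-defined, the flag is cleared so the result isn't
         later diagnosed as undefined arithmetic overflow.  */
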
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these, let GIMPLE folders
             optimize those later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }

      break;

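      /* Note (a reading of the code above, not an authoritative claim):
         preserving TREE_NO_WARNING across the rebuild keeps a
         compiler-generated comparison of a parameter declared with
         attribute nonnull against NULL from triggering -Wnonnull-compare
         again when GIMPLE folders revisit the expression.  */
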
    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code)
        if (tree type = is_bitfield_expr_with_lowered_type (x))
          x = fold_convert (type, x);

      break;

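      /* For a boolean-typed conditional such as ok ? 1 : 2 (illustrative),
         the arms are run through cp_truthvalue_conversion above, and the
         warning_sentinel keeps -Wint-in-bool-context from warning about
         the intermediate integer operands.  */
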
    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }

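      /* The save/restore of the global `optimize' above implements the
         comment at the top of this case: e.g. in

           constexpr int f (int i) { return __builtin_constant_p (i); }

         compiled at -O0 (hypothetical input), temporarily setting optimize
         to 1 keeps fold_builtin_1 from folding __builtin_constant_p to 0
         before constexpr evaluation has had a chance to run.  */
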
    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }
      else
        x = fold (x);
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Make sure we don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"