/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

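/* An illustrative sketch of how the bc_label stack above is used (see the
   loop and switch genericization below for the real callers): each loop or
   switch brackets the lowering of its body with

     tree blab = begin_bc_block (bc_break, locus);
     ... lower the body, where a break becomes
         GOTO_EXPR <get_bc_label (bc_break)> ...
     finish_bc_block (&stmt_list, bc_break, blab);

   so nested loops simply push and pop their own labels.  */
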
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

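/* For example (an illustrative sketch, not a literal tree dump):
   "if (p) f (); else g ();" becomes COND_EXPR <p, f ();, g ();> of void
   type, while "if (1) f ();" with no else collapses to just "f ();"
   because the dead arm has no side-effects.  */
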
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

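/* As an illustrative sketch (not literal output), "while (c) body;" is
   lowered to roughly

     LOOP_EXPR
       if (c) ; else goto break_lab;
       body
       continue_lab:
     break_lab:

   whereas "do body; while (c);" places the conditional exit after the
   body, and "while (0) body;" drops the LOOP_EXPR entirely.  */
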
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

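/* So (sketch) "continue;" becomes a PREDICT_EXPR marking the branch as not
   taken followed by GOTO_EXPR <continue label>, and "break;" is just
   GOTO_EXPR <break label>, with both labels supplied by the enclosing
   begin_bc_block calls.  */
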
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

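/* For example (an illustrative sketch): for "A a = f ();" the initializer
   arrives here as TARGET_EXPR <temp, AGGR_INIT_EXPR <f, temp>>; we strip
   the TARGET_EXPR and rewrite the slot, so the whole INIT_EXPR becomes
   just AGGR_INIT_EXPR <f, a>, constructing directly into "a" with no
   intermediate temporary.  */
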
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

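/* The result is roughly (sketch) the GIMPLE

     try
       {
         body
       }
     catch
       {
         <<<eh_must_not_throw (terminate)>>>
       }

   i.e. any exception escaping BODY calls std::terminate.  */
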
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

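/* E.g. (sketch): given "struct E {};  E a, b;", the assignment "a = b"
   satisfies simple_empty_class_p and is elided in cp_gimplify_expr below,
   since an empty class has no data to copy.  */
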
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

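/* For instance (sketch), merely naming a volatile variable "v" as the
   target of an assignment has no lvalue side-effects even though v has
   TREE_SIDE_EFFECTS set, but "a[f ()]" does, because evaluating the index
   calls f.  */
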
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_cp_detect_spawn_and_unwrap (expr_p))
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_cp_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);
        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

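      /* Concretely (an illustrative sketch of the preevaluation above): for
         "a[f ()] = g ();" with -fstrong-eval-order, g () must be called
         before f (), so the RHS call is forced into a temporary here rather
         than being left for gimplify_modify_expr to evaluate after the
         LHS.  */
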
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
                  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_cp_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (POINTER_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OpenMP cases is something to move into
   the middle-end.  As most foldings are currently done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

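  /* E.g. (an illustrative sketch): a parm of class type T that is passed
     by invisible reference has had its type changed to a reference type
     in cp_genericize below, so each use of the parm is rewritten here via
     convert_from_reference as if the source had written "*parm".  */
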
  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree using_directive;
            gcc_assert (TREE_OPERAND (stmt, 0));

            using_directive = make_node (IMPORTED_DECL);
            TREE_TYPE (using_directive) = void_type_node;

            IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
              = TREE_OPERAND (stmt, 0);
            DECL_CHAIN (using_directive) = BLOCK_VARS (block);
            BLOCK_VARS (block) = using_directive;
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
                && warning_at (loc, OPT_Wterminate,
                               "throw will always call terminate()")
                && cxx_dialect >= cxx11
                && DECL_DESTRUCTOR_P (current_function_decl))
              inform (loc, "in C++11 destructors default to noexcept");
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this throw will terminate because "
                          "destructors default to noexcept");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it does
   return.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);
  t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
  append_to_statement_list (t, p);
}

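/* A sketch of the effect (assuming the usual ubsan runtime entry point):
   for "int f () { }" the appended instrumentation reports a missing
   return at f's location if execution falls off the end of the body,
   instead of silently returning garbage.  */
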
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  if (sanitize_flags_p (SANITIZE_RETURN))
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

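/* For the array case above, the generated code is roughly (sketch)

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     lab:
       fn (p1, p2);                  // plus any default arguments
       p1 += sizeof (element);       // and likewise p2, if present
       if (p1 != end1) goto lab;

   i.e. FN is applied to each element (pair) in turn.  */
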
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
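/* Illustrative sketch (an assumed example, not from the original source):
   inside a member function such as

     struct S
     {
       int x;
       void f ()
       {
     #pragma omp parallel private (x)
	 x = 0;
       }
     };

   the front end wraps the data member X in an artificial VAR_DECL whose
   DECL_VALUE_EXPR is this->x.  When X is privatized, that value-expr must
   be ignored so lowering can map X to a fresh per-thread variable.  */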
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
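/* Illustrative sketch (an assumed example, not from the original source):
   given

     const int n = 42;

   folding N as an rvalue substitutes its constant initializer via
   decl_constant_value, yielding 42, then loops so the substituted tree is
   itself folded; used as an lvalue (RVAL false), N is left alone.  */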
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold_rvalue (x);
}
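/* Illustrative sketch (an assumed example, not from the original source):
   given

     constexpr int sq (int i) { return i * i; }

   cp_fully_fold on the expression sq (4) + 2 lets maybe_constant_value
   evaluate the whole expression to 18 in C++11 and later; anything it
   leaves unevaluated falls through to cp_fold_rvalue.  */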
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front end's
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */
static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;
  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;
    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold () doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;
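      /* Illustrative sketch (an assumed example, not from the original
	 source): folding (signed char) 300 produces an INTEGER_CST with
	 TREE_OVERFLOW set even though the operand 300 had no overflow;
	 the flag is cleared above because such a narrowing conversion is
	 implementation-defined rather than undefined behavior.  */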
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;
    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      if (TREE_NO_WARNING (org_x)
	  && warn_nonnull_compare
	  && COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    TREE_NO_WARNING (x) = 1;
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}
      break;
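      /* Illustrative sketch (an assumed example, not from the original
	 source): for a parameter of a function declared with
	 __attribute__ ((nonnull)), once -Wnonnull-compare has dealt with
	 a comparison like p == NULL the front end sets TREE_NO_WARNING
	 on it; the code above carries that flag over to the rebuilt
	 comparison so the diagnostic is not emitted again later.  */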
    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
	 constant value.  Also, some statement-level uses of COND_EXPR leave
	 one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
	return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code)
	if (tree type = is_bitfield_expr_with_lowered_type (x))
	  x = fold_convert (type, x);

      break;
    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && DECL_BUILT_IN (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }
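    /* Illustrative sketch (an assumed example, not from the original
       source): a call such as sq (4) to a constexpr function sq is handed
       to maybe_constant_value above and, if evaluation succeeds, the
       INTEGER_CST 16 replaces the CALL_EXPR; for a constructor call the
       result is additionally wrapped in an INIT_EXPR storing into the
       object passed as the first (this) argument.  */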
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  x = build_constructor (TREE_TYPE (x), nelts);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case there is no need
	 to wrap the folded tree in a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;
    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Prevent us from trying to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}
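/* Illustrative note (an assumed example, not from the original source): if
   cp_fold reduces the MULT_EXPR 2 * sizeof (int) to the INTEGER_CST 8 on a
   target with 32-bit int, the cache records both the mapping from the
   original MULT_EXPR to 8 and from 8 to itself, so re-folding either tree
   is a single hash_map lookup.  */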
#include "gt-cp-cp-gimplify.h"