gcc/
[official-gcc.git] / gcc / cp / cp-gimplify.c
blobb95489e78162e4238c585c4ce06cce3559968b93
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "stor-layout.h"
30 #include "cp-tree.h"
31 #include "c-family/c-common.h"
32 #include "tree-iterator.h"
33 #include "predict.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "flags.h"
43 #include "splay-tree.h"
44 #include "target.h"
45 #include "c-family/c-ubsan.h"
46 #include "cilk.h"
47 #include "gimplify.h"
48 #include "gimple-expr.h"
50 /* Forward declarations. */
52 static tree cp_genericize_r (tree *, int *, void *);
53 static void cp_genericize_tree (tree*);
55 /* Local declarations. */
57 enum bc_t { bc_break = 0, bc_continue = 1 };
59 /* Stack of labels which are targets for "break" or "continue",
60 linked through TREE_CHAIN. */
61 static tree bc_label[2];
63 /* Begin a scope which can be exited by a break or continue statement. BC
64 indicates which.
66 Just creates a label with location LOCATION and pushes it into the current
67 context. */
69 static tree
70 begin_bc_block (enum bc_t bc, location_t location)
72 tree label = create_artificial_label (location);
73 DECL_CHAIN (label) = bc_label[bc];
74 bc_label[bc] = label;
75 if (bc == bc_break)
76 LABEL_DECL_BREAK (label) = true;
77 else
78 LABEL_DECL_CONTINUE (label) = true;
79 return label;
82 /* Finish a scope which can be exited by a break or continue statement.
83 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
84 an expression for the contents of the scope.
86 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
87 BLOCK. Otherwise, just forget the label. */
89 static void
90 finish_bc_block (tree *block, enum bc_t bc, tree label)
92 gcc_assert (label == bc_label[bc]);
94 if (TREE_USED (label))
95 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
96 block);
98 bc_label[bc] = DECL_CHAIN (label);
99 DECL_CHAIN (label) = NULL_TREE;
102 /* Get the LABEL_EXPR to represent a break or continue statement
103 in the current block scope. BC indicates which. */
105 static tree
106 get_bc_label (enum bc_t bc)
108 tree label = bc_label[bc];
110 /* Mark the label used for finish_bc_block. */
111 TREE_USED (label) = 1;
112 return label;
115 /* Genericize a TRY_BLOCK. */
117 static void
118 genericize_try_block (tree *stmt_p)
120 tree body = TRY_STMTS (*stmt_p);
121 tree cleanup = TRY_HANDLERS (*stmt_p);
123 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
126 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
128 static void
129 genericize_catch_block (tree *stmt_p)
131 tree type = HANDLER_TYPE (*stmt_p);
132 tree body = HANDLER_BODY (*stmt_p);
134 /* FIXME should the caught type go in TREE_TYPE? */
135 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
138 /* A terser interface for building a representation of an exception
139 specification. */
141 static tree
142 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
144 tree t;
146 /* FIXME should the allowed types go in TREE_TYPE? */
147 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
148 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
150 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
151 append_to_statement_list (body, &TREE_OPERAND (t, 0));
153 return t;
156 /* Genericize an EH_SPEC_BLOCK by converting it to a
157 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
159 static void
160 genericize_eh_spec_block (tree *stmt_p)
162 tree body = EH_SPEC_STMTS (*stmt_p);
163 tree allowed = EH_SPEC_RAISES (*stmt_p);
164 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
166 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
167 TREE_NO_WARNING (*stmt_p) = true;
168 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
171 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
173 static void
174 genericize_if_stmt (tree *stmt_p)
176 tree stmt, cond, then_, else_;
177 location_t locus = EXPR_LOCATION (*stmt_p);
179 stmt = *stmt_p;
180 cond = IF_COND (stmt);
181 then_ = THEN_CLAUSE (stmt);
182 else_ = ELSE_CLAUSE (stmt);
184 if (!then_)
185 then_ = build_empty_stmt (locus);
186 if (!else_)
187 else_ = build_empty_stmt (locus);
189 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
190 stmt = then_;
191 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
192 stmt = else_;
193 else
194 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
195 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
196 SET_EXPR_LOCATION (stmt, locus);
197 *stmt_p = stmt;
200 /* Build a generic representation of one of the C loop forms. COND is the
201 loop condition or NULL_TREE. BODY is the (possibly compound) statement
202 controlled by the loop. INCR is the increment expression of a for-loop,
203 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
204 evaluated before the loop body as in while and for loops, or after the
205 loop body as in do-while loops. */
207 static void
208 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
209 tree incr, bool cond_is_first, int *walk_subtrees,
210 void *data)
212 tree blab, clab;
213 tree exit = NULL;
214 tree stmt_list = NULL;
216 blab = begin_bc_block (bc_break, start_locus);
217 clab = begin_bc_block (bc_continue, start_locus);
219 if (incr && EXPR_P (incr))
220 SET_EXPR_LOCATION (incr, start_locus);
222 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
223 cp_walk_tree (&body, cp_genericize_r, data, NULL);
224 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
225 *walk_subtrees = 0;
227 if (cond && TREE_CODE (cond) != INTEGER_CST)
229 /* If COND is constant, don't bother building an exit. If it's false,
230 we won't build a loop. If it's true, any exits are in the body. */
231 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
232 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
233 get_bc_label (bc_break));
234 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
235 build_empty_stmt (cloc), exit);
238 if (exit && cond_is_first)
239 append_to_statement_list (exit, &stmt_list);
240 append_to_statement_list (body, &stmt_list);
241 finish_bc_block (&stmt_list, bc_continue, clab);
242 append_to_statement_list (incr, &stmt_list);
243 if (exit && !cond_is_first)
244 append_to_statement_list (exit, &stmt_list);
246 if (!stmt_list)
247 stmt_list = build_empty_stmt (start_locus);
249 tree loop;
250 if (cond && integer_zerop (cond))
252 if (cond_is_first)
253 loop = fold_build3_loc (start_locus, COND_EXPR,
254 void_type_node, cond, stmt_list,
255 build_empty_stmt (start_locus));
256 else
257 loop = stmt_list;
259 else
260 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
262 stmt_list = NULL;
263 append_to_statement_list (loop, &stmt_list);
264 finish_bc_block (&stmt_list, bc_break, blab);
265 if (!stmt_list)
266 stmt_list = build_empty_stmt (start_locus);
268 *stmt_p = stmt_list;
271 /* Genericize a FOR_STMT node *STMT_P. */
273 static void
274 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
276 tree stmt = *stmt_p;
277 tree expr = NULL;
278 tree loop;
279 tree init = FOR_INIT_STMT (stmt);
281 if (init)
283 cp_walk_tree (&init, cp_genericize_r, data, NULL);
284 append_to_statement_list (init, &expr);
287 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
288 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
289 append_to_statement_list (loop, &expr);
290 if (expr == NULL_TREE)
291 expr = loop;
292 *stmt_p = expr;
295 /* Genericize a WHILE_STMT node *STMT_P. */
297 static void
298 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
300 tree stmt = *stmt_p;
301 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
302 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
305 /* Genericize a DO_STMT node *STMT_P. */
307 static void
308 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
310 tree stmt = *stmt_p;
311 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
312 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
315 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
317 static void
318 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
320 tree stmt = *stmt_p;
321 tree break_block, body, cond, type;
322 location_t stmt_locus = EXPR_LOCATION (stmt);
324 break_block = begin_bc_block (bc_break, stmt_locus);
326 body = SWITCH_STMT_BODY (stmt);
327 if (!body)
328 body = build_empty_stmt (stmt_locus);
329 cond = SWITCH_STMT_COND (stmt);
330 type = SWITCH_STMT_TYPE (stmt);
332 cp_walk_tree (&body, cp_genericize_r, data, NULL);
333 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
334 cp_walk_tree (&type, cp_genericize_r, data, NULL);
335 *walk_subtrees = 0;
337 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
338 finish_bc_block (stmt_p, bc_break, break_block);
341 /* Genericize a CONTINUE_STMT node *STMT_P. */
343 static void
344 genericize_continue_stmt (tree *stmt_p)
346 tree stmt_list = NULL;
347 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
348 tree label = get_bc_label (bc_continue);
349 location_t location = EXPR_LOCATION (*stmt_p);
350 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
351 append_to_statement_list (pred, &stmt_list);
352 append_to_statement_list (jump, &stmt_list);
353 *stmt_p = stmt_list;
356 /* Genericize a BREAK_STMT node *STMT_P. */
358 static void
359 genericize_break_stmt (tree *stmt_p)
361 tree label = get_bc_label (bc_break);
362 location_t location = EXPR_LOCATION (*stmt_p);
363 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
366 /* Genericize a OMP_FOR node *STMT_P. */
368 static void
369 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
371 tree stmt = *stmt_p;
372 location_t locus = EXPR_LOCATION (stmt);
373 tree clab = begin_bc_block (bc_continue, locus);
375 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
377 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
378 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
379 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
380 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
381 *walk_subtrees = 0;
383 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
386 /* Hook into the middle of gimplifying an OMP_FOR node. */
388 static enum gimplify_status
389 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
391 tree for_stmt = *expr_p;
392 gimple_seq seq = NULL;
394 /* Protect ourselves from recursion. */
395 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
396 return GS_UNHANDLED;
397 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
399 gimplify_and_add (for_stmt, &seq);
400 gimple_seq_add_seq (pre_p, seq);
402 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
404 return GS_ALL_DONE;
407 /* Gimplify an EXPR_STMT node. */
409 static void
410 gimplify_expr_stmt (tree *stmt_p)
412 tree stmt = EXPR_STMT_EXPR (*stmt_p);
414 if (stmt == error_mark_node)
415 stmt = NULL;
417 /* Gimplification of a statement expression will nullify the
418 statement if all its side effects are moved to *PRE_P and *POST_P.
420 In this case we will not want to emit the gimplified statement.
421 However, we may still want to emit a warning, so we do that before
422 gimplification. */
423 if (stmt && warn_unused_value)
425 if (!TREE_SIDE_EFFECTS (stmt))
427 if (!IS_EMPTY_STMT (stmt)
428 && !VOID_TYPE_P (TREE_TYPE (stmt))
429 && !TREE_NO_WARNING (stmt))
430 warning (OPT_Wunused_value, "statement with no effect");
432 else
433 warn_if_unused_value (stmt, input_location);
436 if (stmt == NULL_TREE)
437 stmt = alloc_stmt_list ();
439 *stmt_p = stmt;
442 /* Gimplify initialization from an AGGR_INIT_EXPR. */
444 static void
445 cp_gimplify_init_expr (tree *expr_p)
447 tree from = TREE_OPERAND (*expr_p, 1);
448 tree to = TREE_OPERAND (*expr_p, 0);
449 tree t;
451 /* What about code that pulls out the temp and uses it elsewhere? I
452 think that such code never uses the TARGET_EXPR as an initializer. If
453 I'm wrong, we'll abort because the temp won't have any RTL. In that
454 case, I guess we'll need to replace references somehow. */
455 if (TREE_CODE (from) == TARGET_EXPR)
456 from = TARGET_EXPR_INITIAL (from);
458 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
459 inside the TARGET_EXPR. */
460 for (t = from; t; )
462 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
464 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
465 replace the slot operand with our target.
467 Should we add a target parm to gimplify_expr instead? No, as in this
468 case we want to replace the INIT_EXPR. */
469 if (TREE_CODE (sub) == AGGR_INIT_EXPR
470 || TREE_CODE (sub) == VEC_INIT_EXPR)
472 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
473 AGGR_INIT_EXPR_SLOT (sub) = to;
474 else
475 VEC_INIT_EXPR_SLOT (sub) = to;
476 *expr_p = from;
478 /* The initialization is now a side-effect, so the container can
479 become void. */
480 if (from != sub)
481 TREE_TYPE (from) = void_type_node;
484 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
485 /* Handle aggregate NSDMI. */
486 replace_placeholders (sub, to);
488 if (t == sub)
489 break;
490 else
491 t = TREE_OPERAND (t, 1);
496 /* Gimplify a MUST_NOT_THROW_EXPR. */
498 static enum gimplify_status
499 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
501 tree stmt = *expr_p;
502 tree temp = voidify_wrapper_expr (stmt, NULL);
503 tree body = TREE_OPERAND (stmt, 0);
504 gimple_seq try_ = NULL;
505 gimple_seq catch_ = NULL;
506 gimple mnt;
508 gimplify_and_add (body, &try_);
509 mnt = gimple_build_eh_must_not_throw (terminate_node);
510 gimple_seq_add_stmt_without_update (&catch_, mnt);
511 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
513 gimple_seq_add_stmt_without_update (pre_p, mnt);
514 if (temp)
516 *expr_p = temp;
517 return GS_OK;
520 *expr_p = NULL;
521 return GS_ALL_DONE;
524 /* Return TRUE if an operand (OP) of a given TYPE being copied is
525 really just an empty class copy.
527 Check that the operand has a simple form so that TARGET_EXPRs and
528 non-empty CONSTRUCTORs get reduced properly, and we leave the
529 return slot optimization alone because it isn't a copy. */
531 static bool
532 simple_empty_class_p (tree type, tree op)
534 return
535 ((TREE_CODE (op) == COMPOUND_EXPR
536 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
537 || is_gimple_lvalue (op)
538 || INDIRECT_REF_P (op)
539 || (TREE_CODE (op) == CONSTRUCTOR
540 && CONSTRUCTOR_NELTS (op) == 0
541 && !TREE_CLOBBER_P (op))
542 || (TREE_CODE (op) == CALL_EXPR
543 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
544 && is_really_empty_class (type);
547 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
550 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
552 int saved_stmts_are_full_exprs_p = 0;
553 enum tree_code code = TREE_CODE (*expr_p);
554 enum gimplify_status ret;
556 if (STATEMENT_CODE_P (code))
558 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
559 current_stmt_tree ()->stmts_are_full_exprs_p
560 = STMT_IS_FULL_EXPR_P (*expr_p);
563 switch (code)
565 case PTRMEM_CST:
566 *expr_p = cplus_expand_constant (*expr_p);
567 ret = GS_OK;
568 break;
570 case AGGR_INIT_EXPR:
571 simplify_aggr_init_expr (expr_p);
572 ret = GS_OK;
573 break;
575 case VEC_INIT_EXPR:
577 location_t loc = input_location;
578 tree init = VEC_INIT_EXPR_INIT (*expr_p);
579 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
580 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
581 input_location = EXPR_LOCATION (*expr_p);
582 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
583 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
584 from_array,
585 tf_warning_or_error);
586 cp_genericize_tree (expr_p);
587 ret = GS_OK;
588 input_location = loc;
590 break;
592 case THROW_EXPR:
593 /* FIXME communicate throw type to back end, probably by moving
594 THROW_EXPR into ../tree.def. */
595 *expr_p = TREE_OPERAND (*expr_p, 0);
596 ret = GS_OK;
597 break;
599 case MUST_NOT_THROW_EXPR:
600 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
601 break;
603 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
604 LHS of an assignment might also be involved in the RHS, as in bug
605 25979. */
606 case INIT_EXPR:
607 if (fn_contains_cilk_spawn_p (cfun)
608 && cilk_detect_spawn_and_unwrap (expr_p)
609 && !seen_error ())
610 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
611 cp_gimplify_init_expr (expr_p);
612 if (TREE_CODE (*expr_p) != INIT_EXPR)
613 return GS_OK;
614 /* Otherwise fall through. */
615 case MODIFY_EXPR:
616 modify_expr_case:
618 if (fn_contains_cilk_spawn_p (cfun)
619 && cilk_detect_spawn_and_unwrap (expr_p)
620 && !seen_error ())
621 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
623 /* If the back end isn't clever enough to know that the lhs and rhs
624 types are the same, add an explicit conversion. */
625 tree op0 = TREE_OPERAND (*expr_p, 0);
626 tree op1 = TREE_OPERAND (*expr_p, 1);
628 if (!error_operand_p (op0)
629 && !error_operand_p (op1)
630 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
631 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
632 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
633 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
634 TREE_TYPE (op0), op1);
636 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
638 /* Remove any copies of empty classes. Also drop volatile
639 variables on the RHS to avoid infinite recursion from
640 gimplify_expr trying to load the value. */
641 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
642 is_gimple_lvalue, fb_lvalue);
643 if (TREE_SIDE_EFFECTS (op1))
645 if (TREE_THIS_VOLATILE (op1)
646 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
647 op1 = build_fold_addr_expr (op1);
649 gimplify_and_add (op1, pre_p);
651 *expr_p = TREE_OPERAND (*expr_p, 0);
654 ret = GS_OK;
655 break;
657 case EMPTY_CLASS_EXPR:
658 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
659 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
660 ret = GS_OK;
661 break;
663 case BASELINK:
664 *expr_p = BASELINK_FUNCTIONS (*expr_p);
665 ret = GS_OK;
666 break;
668 case TRY_BLOCK:
669 genericize_try_block (expr_p);
670 ret = GS_OK;
671 break;
673 case HANDLER:
674 genericize_catch_block (expr_p);
675 ret = GS_OK;
676 break;
678 case EH_SPEC_BLOCK:
679 genericize_eh_spec_block (expr_p);
680 ret = GS_OK;
681 break;
683 case USING_STMT:
684 gcc_unreachable ();
686 case FOR_STMT:
687 case WHILE_STMT:
688 case DO_STMT:
689 case SWITCH_STMT:
690 case CONTINUE_STMT:
691 case BREAK_STMT:
692 gcc_unreachable ();
694 case OMP_FOR:
695 case OMP_SIMD:
696 case OMP_DISTRIBUTE:
697 ret = cp_gimplify_omp_for (expr_p, pre_p);
698 break;
700 case EXPR_STMT:
701 gimplify_expr_stmt (expr_p);
702 ret = GS_OK;
703 break;
705 case UNARY_PLUS_EXPR:
707 tree arg = TREE_OPERAND (*expr_p, 0);
708 tree type = TREE_TYPE (*expr_p);
709 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
710 : arg;
711 ret = GS_OK;
713 break;
715 case CILK_SPAWN_STMT:
716 gcc_assert
717 (fn_contains_cilk_spawn_p (cfun)
718 && cilk_detect_spawn_and_unwrap (expr_p));
720 /* If errors are seen, then just process it as a CALL_EXPR. */
721 if (!seen_error ())
722 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
724 case CALL_EXPR:
725 if (fn_contains_cilk_spawn_p (cfun)
726 && cilk_detect_spawn_and_unwrap (expr_p)
727 && !seen_error ())
728 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
730 /* DR 1030 says that we need to evaluate the elements of an
731 initializer-list in forward order even when it's used as arguments to
732 a constructor. So if the target wants to evaluate them in reverse
733 order and there's more than one argument other than 'this', gimplify
734 them in order. */
735 ret = GS_OK;
736 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
737 && call_expr_nargs (*expr_p) > 2)
739 int nargs = call_expr_nargs (*expr_p);
740 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
741 for (int i = 1; i < nargs; ++i)
743 enum gimplify_status t
744 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
745 if (t == GS_ERROR)
746 ret = GS_ERROR;
749 break;
751 case RETURN_EXPR:
752 if (TREE_OPERAND (*expr_p, 0)
753 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
754 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
756 expr_p = &TREE_OPERAND (*expr_p, 0);
757 code = TREE_CODE (*expr_p);
758 /* Avoid going through the INIT_EXPR case, which can
759 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
760 goto modify_expr_case;
762 /* Fall through. */
764 default:
765 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
766 break;
769 /* Restore saved state. */
770 if (STATEMENT_CODE_P (code))
771 current_stmt_tree ()->stmts_are_full_exprs_p
772 = saved_stmts_are_full_exprs_p;
774 return ret;
777 static inline bool
778 is_invisiref_parm (const_tree t)
780 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
781 && DECL_BY_REFERENCE (t));
784 /* Return true if the uid in both int tree maps are equal. */
786 bool
787 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
789 return (a->uid == b->uid);
792 /* Hash a UID in a cxx_int_tree_map. */
794 unsigned int
795 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
797 return item->uid;
800 /* A stable comparison routine for use with splay trees and DECLs. */
802 static int
803 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
805 tree a = (tree) xa;
806 tree b = (tree) xb;
808 return DECL_UID (a) - DECL_UID (b);
811 /* OpenMP context during genericization. */
813 struct cp_genericize_omp_taskreg
815 bool is_parallel;
816 bool default_shared;
817 struct cp_genericize_omp_taskreg *outer;
818 splay_tree variables;
821 /* Return true if genericization should try to determine if
822 DECL is firstprivate or shared within task regions. */
824 static bool
825 omp_var_to_track (tree decl)
827 tree type = TREE_TYPE (decl);
828 if (is_invisiref_parm (decl))
829 type = TREE_TYPE (type);
830 while (TREE_CODE (type) == ARRAY_TYPE)
831 type = TREE_TYPE (type);
832 if (type == error_mark_node || !CLASS_TYPE_P (type))
833 return false;
834 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
835 return false;
836 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
837 return false;
838 return true;
841 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
843 static void
844 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
846 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
847 (splay_tree_key) decl);
848 if (n == NULL)
850 int flags = OMP_CLAUSE_DEFAULT_SHARED;
851 if (omp_ctx->outer)
852 omp_cxx_notice_variable (omp_ctx->outer, decl);
853 if (!omp_ctx->default_shared)
855 struct cp_genericize_omp_taskreg *octx;
857 for (octx = omp_ctx->outer; octx; octx = octx->outer)
859 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
860 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
862 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
863 break;
865 if (octx->is_parallel)
866 break;
868 if (octx == NULL
869 && (TREE_CODE (decl) == PARM_DECL
870 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
871 && DECL_CONTEXT (decl) == current_function_decl)))
872 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
873 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
875 /* DECL is implicitly determined firstprivate in
876 the current task construct. Ensure copy ctor and
877 dtor are instantiated, because during gimplification
878 it will be already too late. */
879 tree type = TREE_TYPE (decl);
880 if (is_invisiref_parm (decl))
881 type = TREE_TYPE (type);
882 while (TREE_CODE (type) == ARRAY_TYPE)
883 type = TREE_TYPE (type);
884 get_copy_ctor (type, tf_none);
885 get_dtor (type, tf_none);
888 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
892 /* Genericization context. */
894 struct cp_genericize_data
896 hash_set<tree> *p_set;
897 vec<tree> bind_expr_stack;
898 struct cp_genericize_omp_taskreg *omp_ctx;
899 tree try_block;
900 bool no_sanitize_p;
903 /* Perform any pre-gimplification lowering of C++ front end trees to
904 GENERIC. */
906 static tree
907 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
909 tree stmt = *stmt_p;
910 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
911 hash_set<tree> *p_set = wtd->p_set;
913 /* If in an OpenMP context, note var uses. */
914 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
915 && (VAR_P (stmt)
916 || TREE_CODE (stmt) == PARM_DECL
917 || TREE_CODE (stmt) == RESULT_DECL)
918 && omp_var_to_track (stmt))
919 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
921 if (is_invisiref_parm (stmt)
922 /* Don't dereference parms in a thunk, pass the references through. */
923 && !(DECL_THUNK_P (current_function_decl)
924 && TREE_CODE (stmt) == PARM_DECL))
926 *stmt_p = convert_from_reference (stmt);
927 *walk_subtrees = 0;
928 return NULL;
931 /* Map block scope extern declarations to visible declarations with the
932 same name and type in outer scopes if any. */
933 if (cp_function_chain->extern_decl_map
934 && VAR_OR_FUNCTION_DECL_P (stmt)
935 && DECL_EXTERNAL (stmt))
937 struct cxx_int_tree_map *h, in;
938 in.uid = DECL_UID (stmt);
939 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
940 if (h)
942 *stmt_p = h->to;
943 *walk_subtrees = 0;
944 return NULL;
948 /* Other than invisiref parms, don't walk the same tree twice. */
949 if (p_set->contains (stmt))
951 *walk_subtrees = 0;
952 return NULL_TREE;
955 if (TREE_CODE (stmt) == ADDR_EXPR
956 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
958 /* If in an OpenMP context, note var uses. */
959 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
960 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
961 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
962 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
963 *walk_subtrees = 0;
965 else if (TREE_CODE (stmt) == RETURN_EXPR
966 && TREE_OPERAND (stmt, 0)
967 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
968 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
969 *walk_subtrees = 0;
970 else if (TREE_CODE (stmt) == OMP_CLAUSE)
971 switch (OMP_CLAUSE_CODE (stmt))
973 case OMP_CLAUSE_LASTPRIVATE:
974 /* Don't dereference an invisiref in OpenMP clauses. */
975 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
977 *walk_subtrees = 0;
978 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
979 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
980 cp_genericize_r, data, NULL);
982 break;
983 case OMP_CLAUSE_PRIVATE:
984 /* Don't dereference an invisiref in OpenMP clauses. */
985 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
986 *walk_subtrees = 0;
987 else if (wtd->omp_ctx != NULL)
989 /* Private clause doesn't cause any references to the
990 var in outer contexts, avoid calling
991 omp_cxx_notice_variable for it. */
992 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
993 wtd->omp_ctx = NULL;
994 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
995 data, NULL);
996 wtd->omp_ctx = old;
997 *walk_subtrees = 0;
999 break;
1000 case OMP_CLAUSE_SHARED:
1001 case OMP_CLAUSE_FIRSTPRIVATE:
1002 case OMP_CLAUSE_COPYIN:
1003 case OMP_CLAUSE_COPYPRIVATE:
1004 /* Don't dereference an invisiref in OpenMP clauses. */
1005 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1006 *walk_subtrees = 0;
1007 break;
1008 case OMP_CLAUSE_REDUCTION:
1009 /* Don't dereference an invisiref in reduction clause's
1010 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1011 still needs to be genericized. */
1012 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1014 *walk_subtrees = 0;
1015 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1016 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1017 cp_genericize_r, data, NULL);
1018 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1019 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1020 cp_genericize_r, data, NULL);
1022 break;
1023 default:
1024 break;
1026 else if (IS_TYPE_OR_DECL_P (stmt))
1027 *walk_subtrees = 0;
1029 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1030 to lower this construct before scanning it, so we need to lower these
1031 before doing anything else. */
1032 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1033 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1034 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1035 : TRY_FINALLY_EXPR,
1036 void_type_node,
1037 CLEANUP_BODY (stmt),
1038 CLEANUP_EXPR (stmt));
1040 else if (TREE_CODE (stmt) == IF_STMT)
1042 genericize_if_stmt (stmt_p);
1043 /* *stmt_p has changed, tail recurse to handle it again. */
1044 return cp_genericize_r (stmt_p, walk_subtrees, data);
1047 /* COND_EXPR might have incompatible types in branches if one or both
1048 arms are bitfields. Fix it up now. */
1049 else if (TREE_CODE (stmt) == COND_EXPR)
1051 tree type_left
1052 = (TREE_OPERAND (stmt, 1)
1053 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1054 : NULL_TREE);
1055 tree type_right
1056 = (TREE_OPERAND (stmt, 2)
1057 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1058 : NULL_TREE);
1059 if (type_left
1060 && !useless_type_conversion_p (TREE_TYPE (stmt),
1061 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1063 TREE_OPERAND (stmt, 1)
1064 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1065 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1066 type_left));
1068 if (type_right
1069 && !useless_type_conversion_p (TREE_TYPE (stmt),
1070 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1072 TREE_OPERAND (stmt, 2)
1073 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1074 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1075 type_right));
1079 else if (TREE_CODE (stmt) == BIND_EXPR)
1081 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1083 tree decl;
1084 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1085 if (VAR_P (decl)
1086 && !DECL_EXTERNAL (decl)
1087 && omp_var_to_track (decl))
1089 splay_tree_node n
1090 = splay_tree_lookup (wtd->omp_ctx->variables,
1091 (splay_tree_key) decl);
1092 if (n == NULL)
1093 splay_tree_insert (wtd->omp_ctx->variables,
1094 (splay_tree_key) decl,
1095 TREE_STATIC (decl)
1096 ? OMP_CLAUSE_DEFAULT_SHARED
1097 : OMP_CLAUSE_DEFAULT_PRIVATE);
1100 if (flag_sanitize
1101 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1103 /* The point here is to not sanitize static initializers. */
1104 bool no_sanitize_p = wtd->no_sanitize_p;
1105 wtd->no_sanitize_p = true;
1106 for (tree decl = BIND_EXPR_VARS (stmt);
1107 decl;
1108 decl = DECL_CHAIN (decl))
1109 if (VAR_P (decl)
1110 && TREE_STATIC (decl)
1111 && DECL_INITIAL (decl))
1112 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1113 wtd->no_sanitize_p = no_sanitize_p;
1115 wtd->bind_expr_stack.safe_push (stmt);
1116 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1117 cp_genericize_r, data, NULL);
1118 wtd->bind_expr_stack.pop ();
1121 else if (TREE_CODE (stmt) == USING_STMT)
1123 tree block = NULL_TREE;
1125 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1126 BLOCK, and append an IMPORTED_DECL to its
1127 BLOCK_VARS chained list. */
1128 if (wtd->bind_expr_stack.exists ())
1130 int i;
1131 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1132 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1133 break;
1135 if (block)
1137 tree using_directive;
1138 gcc_assert (TREE_OPERAND (stmt, 0));
1140 using_directive = make_node (IMPORTED_DECL);
1141 TREE_TYPE (using_directive) = void_type_node;
1143 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1144 = TREE_OPERAND (stmt, 0);
1145 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1146 BLOCK_VARS (block) = using_directive;
1148 /* The USING_STMT won't appear in GENERIC. */
1149 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1150 *walk_subtrees = 0;
1153 else if (TREE_CODE (stmt) == DECL_EXPR
1154 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1156 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1157 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1158 *walk_subtrees = 0;
1160 else if (TREE_CODE (stmt) == DECL_EXPR)
1162 tree d = DECL_EXPR_DECL (stmt);
1163 if (TREE_CODE (d) == VAR_DECL)
1164 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1166 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1168 struct cp_genericize_omp_taskreg omp_ctx;
1169 tree c, decl;
1170 splay_tree_node n;
1172 *walk_subtrees = 0;
1173 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1174 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1175 omp_ctx.default_shared = omp_ctx.is_parallel;
1176 omp_ctx.outer = wtd->omp_ctx;
1177 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1178 wtd->omp_ctx = &omp_ctx;
1179 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1180 switch (OMP_CLAUSE_CODE (c))
1182 case OMP_CLAUSE_SHARED:
1183 case OMP_CLAUSE_PRIVATE:
1184 case OMP_CLAUSE_FIRSTPRIVATE:
1185 case OMP_CLAUSE_LASTPRIVATE:
1186 decl = OMP_CLAUSE_DECL (c);
1187 if (decl == error_mark_node || !omp_var_to_track (decl))
1188 break;
1189 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1190 if (n != NULL)
1191 break;
1192 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1193 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1194 ? OMP_CLAUSE_DEFAULT_SHARED
1195 : OMP_CLAUSE_DEFAULT_PRIVATE);
1196 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1197 && omp_ctx.outer)
1198 omp_cxx_notice_variable (omp_ctx.outer, decl);
1199 break;
1200 case OMP_CLAUSE_DEFAULT:
1201 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1202 omp_ctx.default_shared = true;
1203 default:
1204 break;
1206 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1207 wtd->omp_ctx = omp_ctx.outer;
1208 splay_tree_delete (omp_ctx.variables);
1210 else if (TREE_CODE (stmt) == TRY_BLOCK)
1212 *walk_subtrees = 0;
1213 tree try_block = wtd->try_block;
1214 wtd->try_block = stmt;
1215 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1216 wtd->try_block = try_block;
1217 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1219 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1221 /* MUST_NOT_THROW_COND might be something else with TM. */
1222 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1224 *walk_subtrees = 0;
1225 tree try_block = wtd->try_block;
1226 wtd->try_block = stmt;
1227 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1228 wtd->try_block = try_block;
1231 else if (TREE_CODE (stmt) == THROW_EXPR)
1233 location_t loc = location_of (stmt);
1234 if (TREE_NO_WARNING (stmt))
1235 /* Never mind. */;
1236 else if (wtd->try_block)
1238 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1239 && warning_at (loc, OPT_Wterminate,
1240 "throw will always call terminate()")
1241 && cxx_dialect >= cxx11
1242 && DECL_DESTRUCTOR_P (current_function_decl))
1243 inform (loc, "in C++11 destructors default to noexcept");
1245 else
1247 if (warn_cxx11_compat && cxx_dialect < cxx11
1248 && DECL_DESTRUCTOR_P (current_function_decl)
1249 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1250 == NULL_TREE)
1251 && (get_defaulted_eh_spec (current_function_decl)
1252 == empty_except_spec))
1253 warning_at (loc, OPT_Wc__11_compat,
1254 "in C++11 this throw will terminate because "
1255 "destructors default to noexcept");
1258 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1259 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1260 else if (TREE_CODE (stmt) == FOR_STMT)
1261 genericize_for_stmt (stmt_p, walk_subtrees, data);
1262 else if (TREE_CODE (stmt) == WHILE_STMT)
1263 genericize_while_stmt (stmt_p, walk_subtrees, data);
1264 else if (TREE_CODE (stmt) == DO_STMT)
1265 genericize_do_stmt (stmt_p, walk_subtrees, data);
1266 else if (TREE_CODE (stmt) == SWITCH_STMT)
1267 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1268 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1269 genericize_continue_stmt (stmt_p);
1270 else if (TREE_CODE (stmt) == BREAK_STMT)
1271 genericize_break_stmt (stmt_p);
1272 else if (TREE_CODE (stmt) == OMP_FOR
1273 || TREE_CODE (stmt) == OMP_SIMD
1274 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1275 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1276 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1278 if (SIZEOF_EXPR_TYPE_P (stmt))
1279 *stmt_p
1280 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1281 SIZEOF_EXPR, false);
1282 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1283 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1284 SIZEOF_EXPR, false);
1285 else
1286 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1287 SIZEOF_EXPR, false);
1288 if (*stmt_p == error_mark_node)
1289 *stmt_p = size_one_node;
1290 return NULL;
1292 else if ((flag_sanitize
1293 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1294 && !wtd->no_sanitize_p)
1296 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1297 && TREE_CODE (stmt) == NOP_EXPR
1298 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1299 ubsan_maybe_instrument_reference (stmt);
1300 else if (TREE_CODE (stmt) == CALL_EXPR)
1302 tree fn = CALL_EXPR_FN (stmt);
1303 if (fn != NULL_TREE
1304 && !error_operand_p (fn)
1305 && POINTER_TYPE_P (TREE_TYPE (fn))
1306 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1308 bool is_ctor
1309 = TREE_CODE (fn) == ADDR_EXPR
1310 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1311 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1312 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1313 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1314 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1315 cp_ubsan_maybe_instrument_member_call (stmt);
1320 p_set->add (*stmt_p);
1322 return NULL;
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p)
{
  struct cp_genericize_data wtd;

  /* Set up the walk state shared by all cp_genericize_r invocations:
     P_SET records trees already visited (cp_genericize_r adds each
     handled statement), BIND_EXPR_STACK tracks enclosing BIND_EXPRs,
     and the remaining fields carry OpenMP / try-block / sanitizer
     context for the walk.  */
  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  /* With -fsanitize=vptr, instrument member accesses after the main
     lowering walk.  */
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Nothing to check for void functions, ctors/dtors, or when the
     target says a missing return should not be diagnosed.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk down to the last statement of the function body, looking
     through BIND_EXPRs, TRY_FINALLY_EXPRs and STATEMENT_LISTs.  If
     that statement is a RETURN_EXPR, the function obviously returns
     and no instrumentation is needed.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the runtime check after the last statement of the
     outermost BIND_EXPR's statement list.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
/* Genericize FNDECL: rewrite invisible-reference parameters and
   return value, then lower the C++ trees in its body to GENERIC.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* With -fsanitize=return, verify at runtime that a value-returning
     function does not fall off its end.  */
  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been consumed by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the `this' parameter (and the second object parameter when
     ARG2 is used) to reach FN's default arguments, if any.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit element-by-element loop that
	 applies FN to each (multidimensionally flattened) element.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend to the innermost element type, forming ARRAY_REFs to
	 the first element at each level.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call to FN on the object(s).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1581 /* Return code to initialize DECL with its default constructor, or
1582 NULL if there's nothing to do. */
1584 tree
1585 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1587 tree info = CP_OMP_CLAUSE_INFO (clause);
1588 tree ret = NULL;
1590 if (info)
1591 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1593 return ret;
1596 /* Return code to initialize DST with a copy constructor from SRC. */
1598 tree
1599 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1601 tree info = CP_OMP_CLAUSE_INFO (clause);
1602 tree ret = NULL;
1604 if (info)
1605 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1606 if (ret == NULL)
1607 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1609 return ret;
1612 /* Similarly, except use an assignment operator instead. */
1614 tree
1615 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1617 tree info = CP_OMP_CLAUSE_INFO (clause);
1618 tree ret = NULL;
1620 if (info)
1621 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1622 if (ret == NULL)
1623 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1625 return ret;
1628 /* Return code to destroy DECL. */
1630 tree
1631 cxx_omp_clause_dtor (tree clause, tree decl)
1633 tree info = CP_OMP_CLAUSE_INFO (clause);
1634 tree ret = NULL;
1636 if (info)
1637 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1639 return ret;
1642 /* True if OpenMP should privatize what this DECL points to rather
1643 than the DECL itself. */
1645 bool
1646 cxx_omp_privatize_by_reference (const_tree decl)
1648 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1649 || is_invisiref_parm (decl));
/* Return true if DECL is const qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only invisible-reference parameters look through the
	 reference; other references don't qualify.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Look up the original variable in the
	     outermost block and use its (possibly const) type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1694 /* True if OpenMP sharing attribute of DECL is predetermined. */
1696 enum omp_clause_default_kind
1697 cxx_omp_predetermined_sharing (tree decl)
1699 /* Static data members are predetermined shared. */
1700 if (TREE_STATIC (decl))
1702 tree ctx = CP_DECL_CONTEXT (decl);
1703 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1704 return OMP_CLAUSE_DEFAULT_SHARED;
1707 /* Const qualified vars having no mutable member are predetermined
1708 shared. */
1709 if (cxx_omp_const_qual_no_mutable (decl))
1710 return OMP_CLAUSE_DEFAULT_SHARED;
1712 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicitly determined firstprivate clauses need fixing up.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible-reference parameters act as their referent; any
	 other reference type is an error and is demoted to shared.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  /* Demote the clause to shared when firstprivate semantics cannot
     be honored.  */
  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}