* gcc.dg/vmx/unpack.c: Use dg-additional-options rather than
[official-gcc.git] / gcc / cp / cp-gimplify.c
blobc36d3399133d1f49241d9b5102f6346961f6fe8a
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "function.h"
27 #include "predict.h"
28 #include "basic-block.h"
29 #include "tree.h"
30 #include "cp-tree.h"
31 #include "gimple.h"
32 #include "hard-reg-set.h"
33 #include "alias.h"
34 #include "stor-layout.h"
35 #include "c-family/c-common.h"
36 #include "tree-iterator.h"
37 #include "internal-fn.h"
38 #include "gimplify.h"
39 #include "flags.h"
40 #include "splay-tree.h"
41 #include "target.h"
42 #include "c-family/c-ubsan.h"
43 #include "cilk.h"
44 #include "gimplify.h"
46 /* Forward declarations. */
48 static tree cp_genericize_r (tree *, int *, void *);
49 static void cp_genericize_tree (tree*);
51 /* Local declarations. */
/* Index into bc_label: the kind of jump target a label represents.  */
53 enum bc_t { bc_break = 0, bc_continue = 1 };
55 /* Stack of labels which are targets for "break" or "continue",
56 linked through TREE_CHAIN. */
/* One stack per bc_t kind; pushed by begin_bc_block, popped by
   finish_bc_block.  */
57 static tree bc_label[2];
59 /* Begin a scope which can be exited by a break or continue statement. BC
60 indicates which.
62 Just creates a label with location LOCATION and pushes it into the current
63 context. */
65 static tree
66 begin_bc_block (enum bc_t bc, location_t location)
68 tree label = create_artificial_label (location);
69 DECL_CHAIN (label) = bc_label[bc];
70 bc_label[bc] = label;
71 if (bc == bc_break)
72 LABEL_DECL_BREAK (label) = true;
73 else
74 LABEL_DECL_CONTINUE (label) = true;
75 return label;
78 /* Finish a scope which can be exited by a break or continue statement.
79 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
80 an expression for the contents of the scope.
82 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
83 BLOCK. Otherwise, just forget the label. */
85 static void
86 finish_bc_block (tree *block, enum bc_t bc, tree label)
88 gcc_assert (label == bc_label[bc]);
90 if (TREE_USED (label))
91 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
92 block);
94 bc_label[bc] = DECL_CHAIN (label);
95 DECL_CHAIN (label) = NULL_TREE;
98 /* Get the LABEL_EXPR to represent a break or continue statement
99 in the current block scope. BC indicates which. */
101 static tree
102 get_bc_label (enum bc_t bc)
104 tree label = bc_label[bc];
106 /* Mark the label used for finish_bc_block. */
107 TREE_USED (label) = 1;
108 return label;
111 /* Genericize a TRY_BLOCK. */
113 static void
114 genericize_try_block (tree *stmt_p)
116 tree body = TRY_STMTS (*stmt_p);
117 tree cleanup = TRY_HANDLERS (*stmt_p);
119 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
122 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
124 static void
125 genericize_catch_block (tree *stmt_p)
127 tree type = HANDLER_TYPE (*stmt_p);
128 tree body = HANDLER_BODY (*stmt_p);
130 /* FIXME should the caught type go in TREE_TYPE? */
131 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
134 /* A terser interface for building a representation of an exception
135 specification. */
137 static tree
138 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
140 tree t;
142 /* FIXME should the allowed types go in TREE_TYPE? */
143 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
144 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
146 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
147 append_to_statement_list (body, &TREE_OPERAND (t, 0));
149 return t;
152 /* Genericize an EH_SPEC_BLOCK by converting it to a
153 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
155 static void
156 genericize_eh_spec_block (tree *stmt_p)
158 tree body = EH_SPEC_STMTS (*stmt_p);
159 tree allowed = EH_SPEC_RAISES (*stmt_p);
160 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
162 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
163 TREE_NO_WARNING (*stmt_p) = true;
164 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
167 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
169 static void
170 genericize_if_stmt (tree *stmt_p)
172 tree stmt, cond, then_, else_;
173 location_t locus = EXPR_LOCATION (*stmt_p);
175 stmt = *stmt_p;
176 cond = IF_COND (stmt);
177 then_ = THEN_CLAUSE (stmt);
178 else_ = ELSE_CLAUSE (stmt);
180 if (!then_)
181 then_ = build_empty_stmt (locus);
182 if (!else_)
183 else_ = build_empty_stmt (locus);
185 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
186 stmt = then_;
187 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
188 stmt = else_;
189 else
190 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
191 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
192 SET_EXPR_LOCATION (stmt, locus);
193 *stmt_p = stmt;
196 /* Build a generic representation of one of the C loop forms. COND is the
197 loop condition or NULL_TREE. BODY is the (possibly compound) statement
198 controlled by the loop. INCR is the increment expression of a for-loop,
199 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
200 evaluated before the loop body as in while and for loops, or after the
201 loop body as in do-while loops. */
203 static void
204 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
205 tree incr, bool cond_is_first, int *walk_subtrees,
206 void *data)
208 tree blab, clab;
209 tree exit = NULL;
210 tree stmt_list = NULL;
/* Open both jump-target scopes before walking the sub-trees, so that
   break/continue inside the body resolve to this loop's labels.  */
212 blab = begin_bc_block (bc_break, start_locus);
213 clab = begin_bc_block (bc_continue, start_locus);
215 if (incr && EXPR_P (incr))
216 SET_EXPR_LOCATION (incr, start_locus);
/* Genericize the pieces ourselves and tell the caller's walker not to
   recurse into them again.  */
218 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
219 cp_walk_tree (&body, cp_genericize_r, data, NULL);
220 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
221 *walk_subtrees = 0;
223 if (cond && TREE_CODE (cond) != INTEGER_CST)
225 /* If COND is constant, don't bother building an exit. If it's false,
226 we won't build a loop. If it's true, any exits are in the body. */
227 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
228 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
229 get_bc_label (bc_break));
230 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
231 build_empty_stmt (cloc), exit);
/* Assemble the loop body in evaluation order: the exit test first for
   while/for loops, then the body, the continue label, the increment,
   and the exit test last for do-while loops.  */
234 if (exit && cond_is_first)
235 append_to_statement_list (exit, &stmt_list);
236 append_to_statement_list (body, &stmt_list);
237 finish_bc_block (&stmt_list, bc_continue, clab);
238 append_to_statement_list (incr, &stmt_list);
239 if (exit && !cond_is_first)
240 append_to_statement_list (exit, &stmt_list);
242 if (!stmt_list)
243 stmt_list = build_empty_stmt (start_locus);
245 tree loop;
/* A constant-false condition: no LOOP_EXPR is needed.  A do-while body
   still runs once; a while/for body is guarded by the (false) COND.  */
246 if (cond && integer_zerop (cond))
248 if (cond_is_first)
249 loop = fold_build3_loc (start_locus, COND_EXPR,
250 void_type_node, cond, stmt_list,
251 build_empty_stmt (start_locus));
252 else
253 loop = stmt_list;
255 else
256 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
/* The break label goes after the whole loop.  */
258 stmt_list = NULL;
259 append_to_statement_list (loop, &stmt_list);
260 finish_bc_block (&stmt_list, bc_break, blab);
261 if (!stmt_list)
262 stmt_list = build_empty_stmt (start_locus);
264 *stmt_p = stmt_list;
267 /* Genericize a FOR_STMT node *STMT_P. */
269 static void
270 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
272 tree stmt = *stmt_p;
273 tree expr = NULL;
274 tree loop;
275 tree init = FOR_INIT_STMT (stmt);
277 if (init)
279 cp_walk_tree (&init, cp_genericize_r, data, NULL);
280 append_to_statement_list (init, &expr);
283 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
284 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
285 append_to_statement_list (loop, &expr);
286 if (expr == NULL_TREE)
287 expr = loop;
288 *stmt_p = expr;
291 /* Genericize a WHILE_STMT node *STMT_P. */
293 static void
294 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
296 tree stmt = *stmt_p;
297 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
298 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
301 /* Genericize a DO_STMT node *STMT_P. */
303 static void
304 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
306 tree stmt = *stmt_p;
307 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
308 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
311 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
313 static void
314 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
316 tree stmt = *stmt_p;
317 tree break_block, body, cond, type;
318 location_t stmt_locus = EXPR_LOCATION (stmt);
320 break_block = begin_bc_block (bc_break, stmt_locus);
322 body = SWITCH_STMT_BODY (stmt);
323 if (!body)
324 body = build_empty_stmt (stmt_locus);
325 cond = SWITCH_STMT_COND (stmt);
326 type = SWITCH_STMT_TYPE (stmt);
328 cp_walk_tree (&body, cp_genericize_r, data, NULL);
329 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
330 cp_walk_tree (&type, cp_genericize_r, data, NULL);
331 *walk_subtrees = 0;
333 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
334 finish_bc_block (stmt_p, bc_break, break_block);
337 /* Genericize a CONTINUE_STMT node *STMT_P. */
339 static void
340 genericize_continue_stmt (tree *stmt_p)
342 tree stmt_list = NULL;
343 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
344 tree label = get_bc_label (bc_continue);
345 location_t location = EXPR_LOCATION (*stmt_p);
346 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
347 append_to_statement_list (pred, &stmt_list);
348 append_to_statement_list (jump, &stmt_list);
349 *stmt_p = stmt_list;
352 /* Genericize a BREAK_STMT node *STMT_P. */
354 static void
355 genericize_break_stmt (tree *stmt_p)
357 tree label = get_bc_label (bc_break);
358 location_t location = EXPR_LOCATION (*stmt_p);
359 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
362 /* Genericize a OMP_FOR node *STMT_P. */
364 static void
365 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
367 tree stmt = *stmt_p;
368 location_t locus = EXPR_LOCATION (stmt);
369 tree clab = begin_bc_block (bc_continue, locus);
371 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
372 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
373 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
374 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
375 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
377 *walk_subtrees = 0;
379 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
382 /* Hook into the middle of gimplifying an OMP_FOR node. */
384 static enum gimplify_status
385 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
387 tree for_stmt = *expr_p;
388 gimple_seq seq = NULL;
390 /* Protect ourselves from recursion. */
391 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
392 return GS_UNHANDLED;
393 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
395 gimplify_and_add (for_stmt, &seq);
396 gimple_seq_add_seq (pre_p, seq);
398 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
400 return GS_ALL_DONE;
403 /* Gimplify an EXPR_STMT node. */
405 static void
406 gimplify_expr_stmt (tree *stmt_p)
408 tree stmt = EXPR_STMT_EXPR (*stmt_p);
410 if (stmt == error_mark_node)
411 stmt = NULL;
413 /* Gimplification of a statement expression will nullify the
414 statement if all its side effects are moved to *PRE_P and *POST_P.
416 In this case we will not want to emit the gimplified statement.
417 However, we may still want to emit a warning, so we do that before
418 gimplification. */
419 if (stmt && warn_unused_value)
421 if (!TREE_SIDE_EFFECTS (stmt))
423 if (!IS_EMPTY_STMT (stmt)
424 && !VOID_TYPE_P (TREE_TYPE (stmt))
425 && !TREE_NO_WARNING (stmt))
426 warning (OPT_Wunused_value, "statement with no effect");
428 else
429 warn_if_unused_value (stmt, input_location);
432 if (stmt == NULL_TREE)
433 stmt = alloc_stmt_list ();
435 *stmt_p = stmt;
438 /* Gimplify initialization from an AGGR_INIT_EXPR. */
440 static void
441 cp_gimplify_init_expr (tree *expr_p)
443 tree from = TREE_OPERAND (*expr_p, 1);
444 tree to = TREE_OPERAND (*expr_p, 0);
445 tree t;
447 /* What about code that pulls out the temp and uses it elsewhere? I
448 think that such code never uses the TARGET_EXPR as an initializer. If
449 I'm wrong, we'll abort because the temp won't have any RTL. In that
450 case, I guess we'll need to replace references somehow. */
451 if (TREE_CODE (from) == TARGET_EXPR)
452 from = TARGET_EXPR_INITIAL (from);
454 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
455 inside the TARGET_EXPR. */
/* Walk down the right-hand chain of COMPOUND_EXPRs; SUB is the first
   non-COMPOUND_EXPR operand at each step.  */
456 for (t = from; t; )
458 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
460 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
461 replace the slot operand with our target.
463 Should we add a target parm to gimplify_expr instead? No, as in this
464 case we want to replace the INIT_EXPR. */
465 if (TREE_CODE (sub) == AGGR_INIT_EXPR
466 || TREE_CODE (sub) == VEC_INIT_EXPR)
468 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
469 AGGR_INIT_EXPR_SLOT (sub) = to;
470 else
471 VEC_INIT_EXPR_SLOT (sub) = to;
472 *expr_p = from;
474 /* The initialization is now a side-effect, so the container can
475 become void. */
476 if (from != sub)
477 TREE_TYPE (from) = void_type_node;
480 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
481 /* Handle aggregate NSDMI. */
482 replace_placeholders (sub, to);
/* Stop once we reach the tail of the COMPOUND_EXPR chain; otherwise
   continue with the second operand.  */
484 if (t == sub)
485 break;
486 else
487 t = TREE_OPERAND (t, 1);
492 /* Gimplify a MUST_NOT_THROW_EXPR. */
494 static enum gimplify_status
495 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
497 tree stmt = *expr_p;
498 tree temp = voidify_wrapper_expr (stmt, NULL);
499 tree body = TREE_OPERAND (stmt, 0);
500 gimple_seq try_ = NULL;
501 gimple_seq catch_ = NULL;
502 gimple mnt;
504 gimplify_and_add (body, &try_);
505 mnt = gimple_build_eh_must_not_throw (terminate_node);
506 gimple_seq_add_stmt_without_update (&catch_, mnt);
507 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
509 gimple_seq_add_stmt_without_update (pre_p, mnt);
510 if (temp)
512 *expr_p = temp;
513 return GS_OK;
516 *expr_p = NULL;
517 return GS_ALL_DONE;
520 /* Return TRUE if an operand (OP) of a given TYPE being copied is
521 really just an empty class copy.
523 Check that the operand has a simple form so that TARGET_EXPRs and
524 non-empty CONSTRUCTORs get reduced properly, and we leave the
525 return slot optimization alone because it isn't a copy. */
527 static bool
528 simple_empty_class_p (tree type, tree op)
530 return
531 ((TREE_CODE (op) == COMPOUND_EXPR
532 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
533 || is_gimple_lvalue (op)
534 || INDIRECT_REF_P (op)
535 || (TREE_CODE (op) == CONSTRUCTOR
536 && CONSTRUCTOR_NELTS (op) == 0
537 && !TREE_CLOBBER_P (op))
538 || (TREE_CODE (op) == CALL_EXPR
539 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
540 && is_really_empty_class (type);
543 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
546 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
548 int saved_stmts_are_full_exprs_p = 0;
549 enum tree_code code = TREE_CODE (*expr_p);
550 enum gimplify_status ret;
/* Statement codes carry their own full-expression flag; install it for
   the duration and restore the old value at the end.  */
552 if (STATEMENT_CODE_P (code))
554 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
555 current_stmt_tree ()->stmts_are_full_exprs_p
556 = STMT_IS_FULL_EXPR_P (*expr_p);
559 switch (code)
561 case PTRMEM_CST:
562 *expr_p = cplus_expand_constant (*expr_p);
563 ret = GS_OK;
564 break;
566 case AGGR_INIT_EXPR:
567 simplify_aggr_init_expr (expr_p);
568 ret = GS_OK;
569 break;
571 case VEC_INIT_EXPR:
573 location_t loc = input_location;
574 tree init = VEC_INIT_EXPR_INIT (*expr_p);
575 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
576 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
577 input_location = EXPR_LOCATION (*expr_p);
578 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
579 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
580 from_array,
581 tf_warning_or_error);
582 cp_genericize_tree (expr_p);
583 ret = GS_OK;
584 input_location = loc;
586 break;
588 case THROW_EXPR:
589 /* FIXME communicate throw type to back end, probably by moving
590 THROW_EXPR into ../tree.def. */
591 *expr_p = TREE_OPERAND (*expr_p, 0);
592 ret = GS_OK;
593 break;
595 case MUST_NOT_THROW_EXPR:
596 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
597 break;
599 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
600 LHS of an assignment might also be involved in the RHS, as in bug
601 25979. */
602 case INIT_EXPR:
603 if (fn_contains_cilk_spawn_p (cfun)
604 && cilk_detect_spawn_and_unwrap (expr_p)
605 && !seen_error ())
606 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
607 cp_gimplify_init_expr (expr_p);
608 if (TREE_CODE (*expr_p) != INIT_EXPR)
609 return GS_OK;
610 /* Otherwise fall through. */
611 case MODIFY_EXPR:
612 modify_expr_case:
614 if (fn_contains_cilk_spawn_p (cfun)
615 && cilk_detect_spawn_and_unwrap (expr_p)
616 && !seen_error ())
617 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
619 /* If the back end isn't clever enough to know that the lhs and rhs
620 types are the same, add an explicit conversion. */
621 tree op0 = TREE_OPERAND (*expr_p, 0);
622 tree op1 = TREE_OPERAND (*expr_p, 1);
624 if (!error_operand_p (op0)
625 && !error_operand_p (op1)
626 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
627 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
628 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
629 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
630 TREE_TYPE (op0), op1);
632 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
634 /* Remove any copies of empty classes. Also drop volatile
635 variables on the RHS to avoid infinite recursion from
636 gimplify_expr trying to load the value. */
637 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
638 is_gimple_lvalue, fb_lvalue);
639 if (TREE_SIDE_EFFECTS (op1))
641 if (TREE_THIS_VOLATILE (op1)
642 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
643 op1 = build_fold_addr_expr (op1);
645 gimplify_and_add (op1, pre_p);
647 *expr_p = TREE_OPERAND (*expr_p, 0);
650 ret = GS_OK;
651 break;
653 case EMPTY_CLASS_EXPR:
654 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
655 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
656 ret = GS_OK;
657 break;
659 case BASELINK:
660 *expr_p = BASELINK_FUNCTIONS (*expr_p);
661 ret = GS_OK;
662 break;
664 case TRY_BLOCK:
665 genericize_try_block (expr_p);
666 ret = GS_OK;
667 break;
669 case HANDLER:
670 genericize_catch_block (expr_p);
671 ret = GS_OK;
672 break;
674 case EH_SPEC_BLOCK:
675 genericize_eh_spec_block (expr_p);
676 ret = GS_OK;
677 break;
/* These statement forms are lowered during genericization and must
   never survive to gimplification.  */
679 case USING_STMT:
680 gcc_unreachable ();
682 case FOR_STMT:
683 case WHILE_STMT:
684 case DO_STMT:
685 case SWITCH_STMT:
686 case CONTINUE_STMT:
687 case BREAK_STMT:
688 gcc_unreachable ();
690 case OMP_FOR:
691 case OMP_SIMD:
692 case OMP_DISTRIBUTE:
693 ret = cp_gimplify_omp_for (expr_p, pre_p);
694 break;
696 case EXPR_STMT:
697 gimplify_expr_stmt (expr_p);
698 ret = GS_OK;
699 break;
701 case UNARY_PLUS_EXPR:
703 tree arg = TREE_OPERAND (*expr_p, 0);
704 tree type = TREE_TYPE (*expr_p);
705 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
706 : arg;
707 ret = GS_OK;
709 break;
711 case CILK_SPAWN_STMT:
712 gcc_assert
713 (fn_contains_cilk_spawn_p (cfun)
714 && cilk_detect_spawn_and_unwrap (expr_p));
716 /* If errors are seen, then just process it as a CALL_EXPR. */
717 if (!seen_error ())
718 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
720 case CALL_EXPR:
721 if (fn_contains_cilk_spawn_p (cfun)
722 && cilk_detect_spawn_and_unwrap (expr_p)
723 && !seen_error ())
724 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
726 /* DR 1030 says that we need to evaluate the elements of an
727 initializer-list in forward order even when it's used as arguments to
728 a constructor. So if the target wants to evaluate them in reverse
729 order and there's more than one argument other than 'this', gimplify
730 them in order. */
731 ret = GS_OK;
732 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
733 && call_expr_nargs (*expr_p) > 2)
735 int nargs = call_expr_nargs (*expr_p);
736 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
/* Arg 0 is 'this'; gimplify the rest left-to-right.  */
737 for (int i = 1; i < nargs; ++i)
739 enum gimplify_status t
740 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
741 if (t == GS_ERROR)
742 ret = GS_ERROR;
745 break;
747 case RETURN_EXPR:
748 if (TREE_OPERAND (*expr_p, 0)
749 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
750 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
752 expr_p = &TREE_OPERAND (*expr_p, 0);
753 code = TREE_CODE (*expr_p);
754 /* Avoid going through the INIT_EXPR case, which can
755 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
756 goto modify_expr_case;
758 /* Fall through. */
760 default:
761 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
762 break;
765 /* Restore saved state. */
766 if (STATEMENT_CODE_P (code))
767 current_stmt_tree ()->stmts_are_full_exprs_p
768 = saved_stmts_are_full_exprs_p;
770 return ret;
773 static inline bool
774 is_invisiref_parm (const_tree t)
776 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
777 && DECL_BY_REFERENCE (t));
780 /* Return true if the uid in both int tree maps are equal. */
782 bool
783 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
785 return (a->uid == b->uid);
788 /* Hash a UID in a cxx_int_tree_map. */
790 unsigned int
791 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
793 return item->uid;
796 /* A stable comparison routine for use with splay trees and DECLs. */
798 static int
799 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
801 tree a = (tree) xa;
802 tree b = (tree) xb;
804 return DECL_UID (a) - DECL_UID (b);
807 /* OpenMP context during genericization. */
809 struct cp_genericize_omp_taskreg
/* True for a parallel region, false for a task region.  */
811 bool is_parallel;
/* True when default(shared) semantics apply in this region.  */
812 bool default_shared;
/* Enclosing task/parallel region, or NULL at the outermost level.  */
813 struct cp_genericize_omp_taskreg *outer;
/* Map from DECL to its OMP_CLAUSE_DEFAULT_* sharing classification.  */
814 splay_tree variables;
817 /* Return true if genericization should try to determine if
818 DECL is firstprivate or shared within task regions. */
820 static bool
821 omp_var_to_track (tree decl)
823 tree type = TREE_TYPE (decl);
824 if (is_invisiref_parm (decl))
825 type = TREE_TYPE (type);
826 while (TREE_CODE (type) == ARRAY_TYPE)
827 type = TREE_TYPE (type);
828 if (type == error_mark_node || !CLASS_TYPE_P (type))
829 return false;
830 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
831 return false;
832 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
833 return false;
834 return true;
837 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
839 static void
840 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
842 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
843 (splay_tree_key) decl);
/* Only classify DECL on its first use in this region.  */
844 if (n == NULL)
846 int flags = OMP_CLAUSE_DEFAULT_SHARED;
847 if (omp_ctx->outer)
848 omp_cxx_notice_variable (omp_ctx->outer, decl);
849 if (!omp_ctx->default_shared)
851 struct cp_genericize_omp_taskreg *octx;
/* Scan enclosing regions: if DECL is non-shared somewhere outer,
   or if we hit a parallel region first, that decides the scan.  */
853 for (octx = omp_ctx->outer; octx; octx = octx->outer)
855 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
856 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
858 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
859 break;
861 if (octx->is_parallel)
862 break;
/* No enclosing region decided: parms and function-local autos
   default to firstprivate.  */
864 if (octx == NULL
865 && (TREE_CODE (decl) == PARM_DECL
866 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
867 && DECL_CONTEXT (decl) == current_function_decl)))
868 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
869 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
871 /* DECL is implicitly determined firstprivate in
872 the current task construct. Ensure copy ctor and
873 dtor are instantiated, because during gimplification
874 it will be already too late. */
875 tree type = TREE_TYPE (decl);
876 if (is_invisiref_parm (decl))
877 type = TREE_TYPE (type);
878 while (TREE_CODE (type) == ARRAY_TYPE)
879 type = TREE_TYPE (type);
880 get_copy_ctor (type, tf_none);
881 get_dtor (type, tf_none);
/* Record the classification for this region.  */
884 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
888 /* Genericization context. */
890 struct cp_genericize_data
/* Trees already visited; used to avoid walking a tree twice.  */
892 hash_set<tree> *p_set;
/* Stack of enclosing BIND_EXPRs, innermost last.  */
893 vec<tree> bind_expr_stack;
/* Innermost enclosing OpenMP task/parallel region, or NULL.  */
894 struct cp_genericize_omp_taskreg *omp_ctx;
/* Innermost enclosing try block, or NULL.  */
895 tree try_block;
/* When true, suppress sanitization (e.g. for static initializers).  */
896 bool no_sanitize_p;
899 /* Perform any pre-gimplification lowering of C++ front end trees to
900 GENERIC. */
902 static tree
903 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
905 tree stmt = *stmt_p;
906 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
907 hash_set<tree> *p_set = wtd->p_set;
909 /* If in an OpenMP context, note var uses. */
910 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
911 && (VAR_P (stmt)
912 || TREE_CODE (stmt) == PARM_DECL
913 || TREE_CODE (stmt) == RESULT_DECL)
914 && omp_var_to_track (stmt))
915 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
917 if (is_invisiref_parm (stmt)
918 /* Don't dereference parms in a thunk, pass the references through. */
919 && !(DECL_THUNK_P (current_function_decl)
920 && TREE_CODE (stmt) == PARM_DECL))
922 *stmt_p = convert_from_reference (stmt);
923 *walk_subtrees = 0;
924 return NULL;
927 /* Map block scope extern declarations to visible declarations with the
928 same name and type in outer scopes if any. */
929 if (cp_function_chain->extern_decl_map
930 && VAR_OR_FUNCTION_DECL_P (stmt)
931 && DECL_EXTERNAL (stmt))
933 struct cxx_int_tree_map *h, in;
934 in.uid = DECL_UID (stmt);
935 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
936 if (h)
938 *stmt_p = h->to;
939 *walk_subtrees = 0;
940 return NULL;
944 /* Other than invisiref parms, don't walk the same tree twice. */
945 if (p_set->contains (stmt))
947 *walk_subtrees = 0;
948 return NULL_TREE;
951 if (TREE_CODE (stmt) == ADDR_EXPR
952 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
954 /* If in an OpenMP context, note var uses. */
955 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
956 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
957 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
958 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
959 *walk_subtrees = 0;
961 else if (TREE_CODE (stmt) == RETURN_EXPR
962 && TREE_OPERAND (stmt, 0)
963 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
964 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
965 *walk_subtrees = 0;
966 else if (TREE_CODE (stmt) == OMP_CLAUSE)
967 switch (OMP_CLAUSE_CODE (stmt))
969 case OMP_CLAUSE_LASTPRIVATE:
970 /* Don't dereference an invisiref in OpenMP clauses. */
971 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
973 *walk_subtrees = 0;
974 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
975 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
976 cp_genericize_r, data, NULL);
978 break;
979 case OMP_CLAUSE_PRIVATE:
980 /* Don't dereference an invisiref in OpenMP clauses. */
981 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
982 *walk_subtrees = 0;
983 else if (wtd->omp_ctx != NULL)
985 /* Private clause doesn't cause any references to the
986 var in outer contexts, avoid calling
987 omp_cxx_notice_variable for it. */
988 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
989 wtd->omp_ctx = NULL;
990 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
991 data, NULL);
992 wtd->omp_ctx = old;
993 *walk_subtrees = 0;
995 break;
996 case OMP_CLAUSE_SHARED:
997 case OMP_CLAUSE_FIRSTPRIVATE:
998 case OMP_CLAUSE_COPYIN:
999 case OMP_CLAUSE_COPYPRIVATE:
1000 /* Don't dereference an invisiref in OpenMP clauses. */
1001 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1002 *walk_subtrees = 0;
1003 break;
1004 case OMP_CLAUSE_REDUCTION:
1005 /* Don't dereference an invisiref in reduction clause's
1006 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1007 still needs to be genericized. */
1008 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1010 *walk_subtrees = 0;
1011 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1012 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1013 cp_genericize_r, data, NULL);
1014 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1015 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1016 cp_genericize_r, data, NULL);
1018 break;
1019 default:
1020 break;
1022 else if (IS_TYPE_OR_DECL_P (stmt))
1023 *walk_subtrees = 0;
1025 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1026 to lower this construct before scanning it, so we need to lower these
1027 before doing anything else. */
1028 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1029 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1030 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1031 : TRY_FINALLY_EXPR,
1032 void_type_node,
1033 CLEANUP_BODY (stmt),
1034 CLEANUP_EXPR (stmt));
1036 else if (TREE_CODE (stmt) == IF_STMT)
1038 genericize_if_stmt (stmt_p);
1039 /* *stmt_p has changed, tail recurse to handle it again. */
1040 return cp_genericize_r (stmt_p, walk_subtrees, data);
1043 /* COND_EXPR might have incompatible types in branches if one or both
1044 arms are bitfields. Fix it up now. */
1045 else if (TREE_CODE (stmt) == COND_EXPR)
1047 tree type_left
1048 = (TREE_OPERAND (stmt, 1)
1049 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1050 : NULL_TREE);
1051 tree type_right
1052 = (TREE_OPERAND (stmt, 2)
1053 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1054 : NULL_TREE);
1055 if (type_left
1056 && !useless_type_conversion_p (TREE_TYPE (stmt),
1057 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1059 TREE_OPERAND (stmt, 1)
1060 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1061 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1062 type_left));
1064 if (type_right
1065 && !useless_type_conversion_p (TREE_TYPE (stmt),
1066 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1068 TREE_OPERAND (stmt, 2)
1069 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1070 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1071 type_right));
1075 else if (TREE_CODE (stmt) == BIND_EXPR)
1077 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1079 tree decl;
1080 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1081 if (VAR_P (decl)
1082 && !DECL_EXTERNAL (decl)
1083 && omp_var_to_track (decl))
1085 splay_tree_node n
1086 = splay_tree_lookup (wtd->omp_ctx->variables,
1087 (splay_tree_key) decl);
1088 if (n == NULL)
1089 splay_tree_insert (wtd->omp_ctx->variables,
1090 (splay_tree_key) decl,
1091 TREE_STATIC (decl)
1092 ? OMP_CLAUSE_DEFAULT_SHARED
1093 : OMP_CLAUSE_DEFAULT_PRIVATE);
1096 if (flag_sanitize
1097 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1099 /* The point here is to not sanitize static initializers. */
1100 bool no_sanitize_p = wtd->no_sanitize_p;
1101 wtd->no_sanitize_p = true;
1102 for (tree decl = BIND_EXPR_VARS (stmt);
1103 decl;
1104 decl = DECL_CHAIN (decl))
1105 if (VAR_P (decl)
1106 && TREE_STATIC (decl)
1107 && DECL_INITIAL (decl))
1108 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1109 wtd->no_sanitize_p = no_sanitize_p;
1111 wtd->bind_expr_stack.safe_push (stmt);
1112 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1113 cp_genericize_r, data, NULL);
1114 wtd->bind_expr_stack.pop ();
1117 else if (TREE_CODE (stmt) == USING_STMT)
1119 tree block = NULL_TREE;
1121 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1122 BLOCK, and append an IMPORTED_DECL to its
1123 BLOCK_VARS chained list. */
1124 if (wtd->bind_expr_stack.exists ())
1126 int i;
1127 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1128 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1129 break;
1131 if (block)
1133 tree using_directive;
1134 gcc_assert (TREE_OPERAND (stmt, 0));
1136 using_directive = make_node (IMPORTED_DECL);
1137 TREE_TYPE (using_directive) = void_type_node;
1139 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1140 = TREE_OPERAND (stmt, 0);
1141 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1142 BLOCK_VARS (block) = using_directive;
1144 /* The USING_STMT won't appear in GENERIC. */
1145 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1146 *walk_subtrees = 0;
1149 else if (TREE_CODE (stmt) == DECL_EXPR
1150 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1152 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1153 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1154 *walk_subtrees = 0;
1156 else if (TREE_CODE (stmt) == DECL_EXPR)
1158 tree d = DECL_EXPR_DECL (stmt);
1159 if (TREE_CODE (d) == VAR_DECL)
1160 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1162 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1164 struct cp_genericize_omp_taskreg omp_ctx;
1165 tree c, decl;
1166 splay_tree_node n;
1168 *walk_subtrees = 0;
1169 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1170 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1171 omp_ctx.default_shared = omp_ctx.is_parallel;
1172 omp_ctx.outer = wtd->omp_ctx;
1173 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1174 wtd->omp_ctx = &omp_ctx;
1175 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1176 switch (OMP_CLAUSE_CODE (c))
1178 case OMP_CLAUSE_SHARED:
1179 case OMP_CLAUSE_PRIVATE:
1180 case OMP_CLAUSE_FIRSTPRIVATE:
1181 case OMP_CLAUSE_LASTPRIVATE:
1182 decl = OMP_CLAUSE_DECL (c);
1183 if (decl == error_mark_node || !omp_var_to_track (decl))
1184 break;
1185 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1186 if (n != NULL)
1187 break;
1188 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1189 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1190 ? OMP_CLAUSE_DEFAULT_SHARED
1191 : OMP_CLAUSE_DEFAULT_PRIVATE);
1192 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1193 && omp_ctx.outer)
1194 omp_cxx_notice_variable (omp_ctx.outer, decl);
1195 break;
1196 case OMP_CLAUSE_DEFAULT:
1197 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1198 omp_ctx.default_shared = true;
1199 default:
1200 break;
1202 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1203 wtd->omp_ctx = omp_ctx.outer;
1204 splay_tree_delete (omp_ctx.variables);
1206 else if (TREE_CODE (stmt) == TRY_BLOCK)
1208 *walk_subtrees = 0;
1209 tree try_block = wtd->try_block;
1210 wtd->try_block = stmt;
1211 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1212 wtd->try_block = try_block;
1213 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1215 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1217 /* MUST_NOT_THROW_COND might be something else with TM. */
1218 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1220 *walk_subtrees = 0;
1221 tree try_block = wtd->try_block;
1222 wtd->try_block = stmt;
1223 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1224 wtd->try_block = try_block;
1227 else if (TREE_CODE (stmt) == THROW_EXPR)
1229 location_t loc = location_of (stmt);
1230 if (TREE_NO_WARNING (stmt))
1231 /* Never mind. */;
1232 else if (wtd->try_block)
1234 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1235 && warning_at (loc, OPT_Wterminate,
1236 "throw will always call terminate()")
1237 && cxx_dialect >= cxx11
1238 && DECL_DESTRUCTOR_P (current_function_decl))
1239 inform (loc, "in C++11 destructors default to noexcept");
1241 else
1243 if (warn_cxx11_compat && cxx_dialect < cxx11
1244 && DECL_DESTRUCTOR_P (current_function_decl)
1245 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1246 == NULL_TREE)
1247 && (get_defaulted_eh_spec (current_function_decl)
1248 == empty_except_spec))
1249 warning_at (loc, OPT_Wc__11_compat,
1250 "in C++11 this throw will terminate because "
1251 "destructors default to noexcept");
1254 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1255 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1256 else if (TREE_CODE (stmt) == FOR_STMT)
1257 genericize_for_stmt (stmt_p, walk_subtrees, data);
1258 else if (TREE_CODE (stmt) == WHILE_STMT)
1259 genericize_while_stmt (stmt_p, walk_subtrees, data);
1260 else if (TREE_CODE (stmt) == DO_STMT)
1261 genericize_do_stmt (stmt_p, walk_subtrees, data);
1262 else if (TREE_CODE (stmt) == SWITCH_STMT)
1263 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1264 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1265 genericize_continue_stmt (stmt_p);
1266 else if (TREE_CODE (stmt) == BREAK_STMT)
1267 genericize_break_stmt (stmt_p);
1268 else if (TREE_CODE (stmt) == OMP_FOR
1269 || TREE_CODE (stmt) == OMP_SIMD
1270 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1271 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1272 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1274 if (SIZEOF_EXPR_TYPE_P (stmt))
1275 *stmt_p
1276 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1277 SIZEOF_EXPR, false);
1278 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1279 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1280 SIZEOF_EXPR, false);
1281 else
1282 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1283 SIZEOF_EXPR, false);
1284 if (*stmt_p == error_mark_node)
1285 *stmt_p = size_one_node;
1286 return NULL;
1288 else if ((flag_sanitize
1289 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1290 && !wtd->no_sanitize_p)
1292 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1293 && TREE_CODE (stmt) == NOP_EXPR
1294 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1295 ubsan_maybe_instrument_reference (stmt);
1296 else if (TREE_CODE (stmt) == CALL_EXPR)
1298 tree fn = CALL_EXPR_FN (stmt);
1299 if (fn != NULL_TREE
1300 && !error_operand_p (fn)
1301 && POINTER_TYPE_P (TREE_TYPE (fn))
1302 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1304 bool is_ctor
1305 = TREE_CODE (fn) == ADDR_EXPR
1306 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1307 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1308 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1309 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1310 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1311 cp_ubsan_maybe_instrument_member_call (stmt);
1316 p_set->add (*stmt_p);
1318 return NULL;
1321 /* Lower C++ front end trees to GENERIC in T_P. */
1323 static void
1324 cp_genericize_tree (tree* t_p)
1326 struct cp_genericize_data wtd;
1328 wtd.p_set = new hash_set<tree>;
1329 wtd.bind_expr_stack.create (0);
1330 wtd.omp_ctx = NULL;
1331 wtd.try_block = NULL_TREE;
1332 wtd.no_sanitize_p = false;
1333 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1334 delete wtd.p_set;
1335 wtd.bind_expr_stack.release ();
1336 if (flag_sanitize & SANITIZE_VPTR)
1337 cp_ubsan_instrument_member_accesses (t_p);
1340 /* If a function that should end with a return in non-void
1341 function doesn't obviously end with return, add ubsan
1342 instrumentation code to verify it at runtime. */
1344 static void
1345 cp_ubsan_maybe_instrument_return (tree fndecl)
1347 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1348 || DECL_CONSTRUCTOR_P (fndecl)
1349 || DECL_DESTRUCTOR_P (fndecl)
1350 || !targetm.warn_func_return (fndecl))
1351 return;
1353 tree t = DECL_SAVED_TREE (fndecl);
1354 while (t)
1356 switch (TREE_CODE (t))
1358 case BIND_EXPR:
1359 t = BIND_EXPR_BODY (t);
1360 continue;
1361 case TRY_FINALLY_EXPR:
1362 t = TREE_OPERAND (t, 0);
1363 continue;
1364 case STATEMENT_LIST:
1366 tree_stmt_iterator i = tsi_last (t);
1367 if (!tsi_end_p (i))
1369 t = tsi_stmt (i);
1370 continue;
1373 break;
1374 case RETURN_EXPR:
1375 return;
1376 default:
1377 break;
1379 break;
1381 if (t == NULL_TREE)
1382 return;
1383 t = DECL_SAVED_TREE (fndecl);
1384 if (TREE_CODE (t) == BIND_EXPR
1385 && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
1387 tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
1388 t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1389 tsi_link_after (&i, t, TSI_NEW_STMT);
1393 void
1394 cp_genericize (tree fndecl)
1396 tree t;
1398 /* Fix up the types of parms passed by invisible reference. */
1399 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1400 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1402 /* If a function's arguments are copied to create a thunk,
1403 then DECL_BY_REFERENCE will be set -- but the type of the
1404 argument will be a pointer type, so we will never get
1405 here. */
1406 gcc_assert (!DECL_BY_REFERENCE (t));
1407 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1408 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1409 DECL_BY_REFERENCE (t) = 1;
1410 TREE_ADDRESSABLE (t) = 0;
1411 relayout_decl (t);
1414 /* Do the same for the return value. */
1415 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1417 t = DECL_RESULT (fndecl);
1418 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1419 DECL_BY_REFERENCE (t) = 1;
1420 TREE_ADDRESSABLE (t) = 0;
1421 relayout_decl (t);
1422 if (DECL_NAME (t))
1424 /* Adjust DECL_VALUE_EXPR of the original var. */
1425 tree outer = outer_curly_brace_block (current_function_decl);
1426 tree var;
1428 if (outer)
1429 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1430 if (DECL_NAME (t) == DECL_NAME (var)
1431 && DECL_HAS_VALUE_EXPR_P (var)
1432 && DECL_VALUE_EXPR (var) == t)
1434 tree val = convert_from_reference (t);
1435 SET_DECL_VALUE_EXPR (var, val);
1436 break;
1441 /* If we're a clone, the body is already GIMPLE. */
1442 if (DECL_CLONED_FUNCTION_P (fndecl))
1443 return;
1445 /* Expand all the array notations here. */
1446 if (flag_cilkplus
1447 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1448 DECL_SAVED_TREE (fndecl) =
1449 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1451 /* We do want to see every occurrence of the parms, so we can't just use
1452 walk_tree's hash functionality. */
1453 cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
1455 if (flag_sanitize & SANITIZE_RETURN
1456 && do_ubsan_in_current_function ())
1457 cp_ubsan_maybe_instrument_return (fndecl);
1459 /* Do everything else. */
1460 c_genericize (fndecl);
1462 gcc_assert (bc_label[bc_break] == NULL);
1463 gcc_assert (bc_label[bc_continue] == NULL);
1466 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1467 NULL if there is in fact nothing to do. ARG2 may be null if FN
1468 actually only takes one argument. */
1470 static tree
1471 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1473 tree defparm, parm, t;
1474 int i = 0;
1475 int nargs;
1476 tree *argarray;
1478 if (fn == NULL)
1479 return NULL;
1481 nargs = list_length (DECL_ARGUMENTS (fn));
1482 argarray = XALLOCAVEC (tree, nargs);
1484 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1485 if (arg2)
1486 defparm = TREE_CHAIN (defparm);
1488 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1490 tree inner_type = TREE_TYPE (arg1);
1491 tree start1, end1, p1;
1492 tree start2 = NULL, p2 = NULL;
1493 tree ret = NULL, lab;
1495 start1 = arg1;
1496 start2 = arg2;
1499 inner_type = TREE_TYPE (inner_type);
1500 start1 = build4 (ARRAY_REF, inner_type, start1,
1501 size_zero_node, NULL, NULL);
1502 if (arg2)
1503 start2 = build4 (ARRAY_REF, inner_type, start2,
1504 size_zero_node, NULL, NULL);
1506 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1507 start1 = build_fold_addr_expr_loc (input_location, start1);
1508 if (arg2)
1509 start2 = build_fold_addr_expr_loc (input_location, start2);
1511 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1512 end1 = fold_build_pointer_plus (start1, end1);
1514 p1 = create_tmp_var (TREE_TYPE (start1));
1515 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1516 append_to_statement_list (t, &ret);
1518 if (arg2)
1520 p2 = create_tmp_var (TREE_TYPE (start2));
1521 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1522 append_to_statement_list (t, &ret);
1525 lab = create_artificial_label (input_location);
1526 t = build1 (LABEL_EXPR, void_type_node, lab);
1527 append_to_statement_list (t, &ret);
1529 argarray[i++] = p1;
1530 if (arg2)
1531 argarray[i++] = p2;
1532 /* Handle default arguments. */
1533 for (parm = defparm; parm && parm != void_list_node;
1534 parm = TREE_CHAIN (parm), i++)
1535 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1536 TREE_PURPOSE (parm), fn, i,
1537 tf_warning_or_error);
1538 t = build_call_a (fn, i, argarray);
1539 t = fold_convert (void_type_node, t);
1540 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1541 append_to_statement_list (t, &ret);
1543 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1544 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1545 append_to_statement_list (t, &ret);
1547 if (arg2)
1549 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1550 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1551 append_to_statement_list (t, &ret);
1554 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1555 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1556 append_to_statement_list (t, &ret);
1558 return ret;
1560 else
1562 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1563 if (arg2)
1564 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1565 /* Handle default arguments. */
1566 for (parm = defparm; parm && parm != void_list_node;
1567 parm = TREE_CHAIN (parm), i++)
1568 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1569 TREE_PURPOSE (parm),
1570 fn, i, tf_warning_or_error);
1571 t = build_call_a (fn, i, argarray);
1572 t = fold_convert (void_type_node, t);
1573 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1577 /* Return code to initialize DECL with its default constructor, or
1578 NULL if there's nothing to do. */
1580 tree
1581 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1583 tree info = CP_OMP_CLAUSE_INFO (clause);
1584 tree ret = NULL;
1586 if (info)
1587 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1589 return ret;
1592 /* Return code to initialize DST with a copy constructor from SRC. */
1594 tree
1595 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1597 tree info = CP_OMP_CLAUSE_INFO (clause);
1598 tree ret = NULL;
1600 if (info)
1601 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1602 if (ret == NULL)
1603 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1605 return ret;
1608 /* Similarly, except use an assignment operator instead. */
1610 tree
1611 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1613 tree info = CP_OMP_CLAUSE_INFO (clause);
1614 tree ret = NULL;
1616 if (info)
1617 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1618 if (ret == NULL)
1619 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1621 return ret;
1624 /* Return code to destroy DECL. */
1626 tree
1627 cxx_omp_clause_dtor (tree clause, tree decl)
1629 tree info = CP_OMP_CLAUSE_INFO (clause);
1630 tree ret = NULL;
1632 if (info)
1633 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1635 return ret;
1638 /* True if OpenMP should privatize what this DECL points to rather
1639 than the DECL itself. */
1641 bool
1642 cxx_omp_privatize_by_reference (const_tree decl)
1644 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1645 || is_invisiref_parm (decl));
1648 /* Return true if DECL is const qualified var having no mutable member. */
1649 bool
1650 cxx_omp_const_qual_no_mutable (tree decl)
1652 tree type = TREE_TYPE (decl);
1653 if (TREE_CODE (type) == REFERENCE_TYPE)
1655 if (!is_invisiref_parm (decl))
1656 return false;
1657 type = TREE_TYPE (type);
1659 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1661 /* NVR doesn't preserve const qualification of the
1662 variable's type. */
1663 tree outer = outer_curly_brace_block (current_function_decl);
1664 tree var;
1666 if (outer)
1667 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1668 if (DECL_NAME (decl) == DECL_NAME (var)
1669 && (TYPE_MAIN_VARIANT (type)
1670 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1672 if (TYPE_READONLY (TREE_TYPE (var)))
1673 type = TREE_TYPE (var);
1674 break;
1679 if (type == error_mark_node)
1680 return false;
1682 /* Variables with const-qualified type having no mutable member
1683 are predetermined shared. */
1684 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1685 return true;
1687 return false;
1690 /* True if OpenMP sharing attribute of DECL is predetermined. */
1692 enum omp_clause_default_kind
1693 cxx_omp_predetermined_sharing (tree decl)
1695 /* Static data members are predetermined shared. */
1696 if (TREE_STATIC (decl))
1698 tree ctx = CP_DECL_CONTEXT (decl);
1699 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1700 return OMP_CLAUSE_DEFAULT_SHARED;
1703 /* Const qualified vars having no mutable member are predetermined
1704 shared. */
1705 if (cxx_omp_const_qual_no_mutable (decl))
1706 return OMP_CLAUSE_DEFAULT_SHARED;
1708 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1711 /* Finalize an implicitly determined clause. */
1713 void
1714 cxx_omp_finish_clause (tree c, gimple_seq *)
1716 tree decl, inner_type;
1717 bool make_shared = false;
1719 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1720 return;
1722 decl = OMP_CLAUSE_DECL (c);
1723 decl = require_complete_type (decl);
1724 inner_type = TREE_TYPE (decl);
1725 if (decl == error_mark_node)
1726 make_shared = true;
1727 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1729 if (is_invisiref_parm (decl))
1730 inner_type = TREE_TYPE (inner_type);
1731 else
1733 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1734 decl);
1735 make_shared = true;
1739 /* We're interested in the base element, not arrays. */
1740 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1741 inner_type = TREE_TYPE (inner_type);
1743 /* Check for special function availability by building a call to one.
1744 Save the results, because later we won't be in the right context
1745 for making these queries. */
1746 if (!make_shared
1747 && CLASS_TYPE_P (inner_type)
1748 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1749 make_shared = true;
1751 if (make_shared)
1752 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;