/* From gcc/cp/cp-gimplify.c (blob f715e963c490d665e4c9a5829e8fa74f15653a8f),
   revision associated with PR bootstrap/55051 (profiledbootstrap failed).  */
/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "cp-tree.h"
30 #include "c-family/c-common.h"
31 #include "tree-iterator.h"
32 #include "gimple.h"
33 #include "hashtab.h"
34 #include "pointer-set.h"
35 #include "flags.h"
36 #include "splay-tree.h"
38 /* Forward declarations. */
40 static tree cp_genericize_r (tree *, int *, void *);
41 static void cp_genericize_tree (tree*);
43 /* Local declarations. */
45 enum bc_t { bc_break = 0, bc_continue = 1 };
47 /* Stack of labels which are targets for "break" or "continue",
48 linked through TREE_CHAIN. */
49 static tree bc_label[2];
51 /* Begin a scope which can be exited by a break or continue statement. BC
52 indicates which.
54 Just creates a label with location LOCATION and pushes it into the current
55 context. */
57 static tree
58 begin_bc_block (enum bc_t bc, location_t location)
60 tree label = create_artificial_label (location);
61 DECL_CHAIN (label) = bc_label[bc];
62 bc_label[bc] = label;
63 return label;
66 /* Finish a scope which can be exited by a break or continue statement.
67 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
68 an expression for the contents of the scope.
70 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
71 BLOCK. Otherwise, just forget the label. */
73 static void
74 finish_bc_block (tree *block, enum bc_t bc, tree label)
76 gcc_assert (label == bc_label[bc]);
78 if (TREE_USED (label))
79 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
80 block);
82 bc_label[bc] = DECL_CHAIN (label);
83 DECL_CHAIN (label) = NULL_TREE;
86 /* Get the LABEL_EXPR to represent a break or continue statement
87 in the current block scope. BC indicates which. */
89 static tree
90 get_bc_label (enum bc_t bc)
92 tree label = bc_label[bc];
94 /* Mark the label used for finish_bc_block. */
95 TREE_USED (label) = 1;
96 return label;
99 /* Genericize a TRY_BLOCK. */
101 static void
102 genericize_try_block (tree *stmt_p)
104 tree body = TRY_STMTS (*stmt_p);
105 tree cleanup = TRY_HANDLERS (*stmt_p);
107 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
110 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
112 static void
113 genericize_catch_block (tree *stmt_p)
115 tree type = HANDLER_TYPE (*stmt_p);
116 tree body = HANDLER_BODY (*stmt_p);
118 /* FIXME should the caught type go in TREE_TYPE? */
119 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
122 /* A terser interface for building a representation of an exception
123 specification. */
125 static tree
126 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
128 tree t;
130 /* FIXME should the allowed types go in TREE_TYPE? */
131 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
132 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
134 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
135 append_to_statement_list (body, &TREE_OPERAND (t, 0));
137 return t;
140 /* Genericize an EH_SPEC_BLOCK by converting it to a
141 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
143 static void
144 genericize_eh_spec_block (tree *stmt_p)
146 tree body = EH_SPEC_STMTS (*stmt_p);
147 tree allowed = EH_SPEC_RAISES (*stmt_p);
148 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
150 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
151 TREE_NO_WARNING (*stmt_p) = true;
152 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
155 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
157 static void
158 genericize_if_stmt (tree *stmt_p)
160 tree stmt, cond, then_, else_;
161 location_t locus = EXPR_LOCATION (*stmt_p);
163 stmt = *stmt_p;
164 cond = IF_COND (stmt);
165 then_ = THEN_CLAUSE (stmt);
166 else_ = ELSE_CLAUSE (stmt);
168 if (!then_)
169 then_ = build_empty_stmt (locus);
170 if (!else_)
171 else_ = build_empty_stmt (locus);
173 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
174 stmt = then_;
175 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
176 stmt = else_;
177 else
178 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
179 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
180 SET_EXPR_LOCATION (stmt, locus);
181 *stmt_p = stmt;
184 /* Build a generic representation of one of the C loop forms. COND is the
185 loop condition or NULL_TREE. BODY is the (possibly compound) statement
186 controlled by the loop. INCR is the increment expression of a for-loop,
187 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
188 evaluated before the loop body as in while and for loops, or after the
189 loop body as in do-while loops. */
191 static void
192 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
193 tree incr, bool cond_is_first, int *walk_subtrees,
194 void *data)
196 tree blab, clab;
197 tree entry = NULL, exit = NULL, t;
198 tree stmt_list = NULL;
200 blab = begin_bc_block (bc_break, start_locus);
201 clab = begin_bc_block (bc_continue, start_locus);
203 if (incr && EXPR_P (incr))
204 SET_EXPR_LOCATION (incr, start_locus);
206 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
207 cp_walk_tree (&body, cp_genericize_r, data, NULL);
208 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
209 *walk_subtrees = 0;
211 /* If condition is zero don't generate a loop construct. */
212 if (cond && integer_zerop (cond))
214 if (cond_is_first)
216 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
217 get_bc_label (bc_break));
218 append_to_statement_list (t, &stmt_list);
221 else
223 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
224 tree top = build1 (LABEL_EXPR, void_type_node,
225 create_artificial_label (start_locus));
227 /* If we have an exit condition, then we build an IF with gotos either
228 out of the loop, or to the top of it. If there's no exit condition,
229 then we just build a jump back to the top. */
230 exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));
232 if (cond && !integer_nonzerop (cond))
234 /* Canonicalize the loop condition to the end. This means
235 generating a branch to the loop condition. Reuse the
236 continue label, if possible. */
237 if (cond_is_first)
239 if (incr)
241 entry = build1 (LABEL_EXPR, void_type_node,
242 create_artificial_label (start_locus));
243 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
244 LABEL_EXPR_LABEL (entry));
246 else
247 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
248 get_bc_label (bc_continue));
249 append_to_statement_list (t, &stmt_list);
252 t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
253 exit = fold_build3_loc (start_locus,
254 COND_EXPR, void_type_node, cond, exit, t);
257 append_to_statement_list (top, &stmt_list);
260 append_to_statement_list (body, &stmt_list);
261 finish_bc_block (&stmt_list, bc_continue, clab);
262 append_to_statement_list (incr, &stmt_list);
263 append_to_statement_list (entry, &stmt_list);
264 append_to_statement_list (exit, &stmt_list);
265 finish_bc_block (&stmt_list, bc_break, blab);
267 if (stmt_list == NULL_TREE)
268 stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);
270 *stmt_p = stmt_list;
273 /* Genericize a FOR_STMT node *STMT_P. */
275 static void
276 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
278 tree stmt = *stmt_p;
279 tree expr = NULL;
280 tree loop;
281 tree init = FOR_INIT_STMT (stmt);
283 if (init)
285 cp_walk_tree (&init, cp_genericize_r, data, NULL);
286 append_to_statement_list (init, &expr);
289 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
290 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
291 append_to_statement_list (loop, &expr);
292 *stmt_p = expr;
295 /* Genericize a WHILE_STMT node *STMT_P. */
297 static void
298 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
300 tree stmt = *stmt_p;
301 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
302 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
305 /* Genericize a DO_STMT node *STMT_P. */
307 static void
308 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
310 tree stmt = *stmt_p;
311 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
312 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
315 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
317 static void
318 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
320 tree stmt = *stmt_p;
321 tree break_block, body, cond, type;
322 location_t stmt_locus = EXPR_LOCATION (stmt);
324 break_block = begin_bc_block (bc_break, stmt_locus);
326 body = SWITCH_STMT_BODY (stmt);
327 if (!body)
328 body = build_empty_stmt (stmt_locus);
329 cond = SWITCH_STMT_COND (stmt);
330 type = SWITCH_STMT_TYPE (stmt);
332 cp_walk_tree (&body, cp_genericize_r, data, NULL);
333 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
334 cp_walk_tree (&type, cp_genericize_r, data, NULL);
335 *walk_subtrees = 0;
337 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
338 finish_bc_block (stmt_p, bc_break, break_block);
341 /* Genericize a CONTINUE_STMT node *STMT_P. */
343 static void
344 genericize_continue_stmt (tree *stmt_p)
346 tree stmt_list = NULL;
347 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
348 tree label = get_bc_label (bc_continue);
349 location_t location = EXPR_LOCATION (*stmt_p);
350 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
351 append_to_statement_list (pred, &stmt_list);
352 append_to_statement_list (jump, &stmt_list);
353 *stmt_p = stmt_list;
356 /* Genericize a BREAK_STMT node *STMT_P. */
358 static void
359 genericize_break_stmt (tree *stmt_p)
361 tree label = get_bc_label (bc_break);
362 location_t location = EXPR_LOCATION (*stmt_p);
363 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
366 /* Genericize a OMP_FOR node *STMT_P. */
368 static void
369 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
371 tree stmt = *stmt_p;
372 location_t locus = EXPR_LOCATION (stmt);
373 tree clab = begin_bc_block (bc_continue, locus);
375 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
377 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
378 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
379 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
380 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
381 *walk_subtrees = 0;
383 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
386 /* Hook into the middle of gimplifying an OMP_FOR node. */
388 static enum gimplify_status
389 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
391 tree for_stmt = *expr_p;
392 gimple_seq seq = NULL;
394 /* Protect ourselves from recursion. */
395 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
396 return GS_UNHANDLED;
397 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
399 gimplify_and_add (for_stmt, &seq);
400 gimple_seq_add_seq (pre_p, seq);
402 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
404 return GS_ALL_DONE;
407 /* Gimplify an EXPR_STMT node. */
409 static void
410 gimplify_expr_stmt (tree *stmt_p)
412 tree stmt = EXPR_STMT_EXPR (*stmt_p);
414 if (stmt == error_mark_node)
415 stmt = NULL;
417 /* Gimplification of a statement expression will nullify the
418 statement if all its side effects are moved to *PRE_P and *POST_P.
420 In this case we will not want to emit the gimplified statement.
421 However, we may still want to emit a warning, so we do that before
422 gimplification. */
423 if (stmt && warn_unused_value)
425 if (!TREE_SIDE_EFFECTS (stmt))
427 if (!IS_EMPTY_STMT (stmt)
428 && !VOID_TYPE_P (TREE_TYPE (stmt))
429 && !TREE_NO_WARNING (stmt))
430 warning (OPT_Wunused_value, "statement with no effect");
432 else
433 warn_if_unused_value (stmt, input_location);
436 if (stmt == NULL_TREE)
437 stmt = alloc_stmt_list ();
439 *stmt_p = stmt;
442 /* Gimplify initialization from an AGGR_INIT_EXPR. */
444 static void
445 cp_gimplify_init_expr (tree *expr_p)
447 tree from = TREE_OPERAND (*expr_p, 1);
448 tree to = TREE_OPERAND (*expr_p, 0);
449 tree t;
451 /* What about code that pulls out the temp and uses it elsewhere? I
452 think that such code never uses the TARGET_EXPR as an initializer. If
453 I'm wrong, we'll abort because the temp won't have any RTL. In that
454 case, I guess we'll need to replace references somehow. */
455 if (TREE_CODE (from) == TARGET_EXPR)
456 from = TARGET_EXPR_INITIAL (from);
458 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
459 inside the TARGET_EXPR. */
460 for (t = from; t; )
462 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
464 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
465 replace the slot operand with our target.
467 Should we add a target parm to gimplify_expr instead? No, as in this
468 case we want to replace the INIT_EXPR. */
469 if (TREE_CODE (sub) == AGGR_INIT_EXPR
470 || TREE_CODE (sub) == VEC_INIT_EXPR)
472 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
473 AGGR_INIT_EXPR_SLOT (sub) = to;
474 else
475 VEC_INIT_EXPR_SLOT (sub) = to;
476 *expr_p = from;
478 /* The initialization is now a side-effect, so the container can
479 become void. */
480 if (from != sub)
481 TREE_TYPE (from) = void_type_node;
484 if (t == sub)
485 break;
486 else
487 t = TREE_OPERAND (t, 1);
492 /* Gimplify a MUST_NOT_THROW_EXPR. */
494 static enum gimplify_status
495 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
497 tree stmt = *expr_p;
498 tree temp = voidify_wrapper_expr (stmt, NULL);
499 tree body = TREE_OPERAND (stmt, 0);
500 gimple_seq try_ = NULL;
501 gimple_seq catch_ = NULL;
502 gimple mnt;
504 gimplify_and_add (body, &try_);
505 mnt = gimple_build_eh_must_not_throw (terminate_node);
506 gimple_seq_add_stmt_without_update (&catch_, mnt);
507 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
509 gimple_seq_add_stmt_without_update (pre_p, mnt);
510 if (temp)
512 *expr_p = temp;
513 return GS_OK;
516 *expr_p = NULL;
517 return GS_ALL_DONE;
520 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
523 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
525 int saved_stmts_are_full_exprs_p = 0;
526 enum tree_code code = TREE_CODE (*expr_p);
527 enum gimplify_status ret;
529 if (STATEMENT_CODE_P (code))
531 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
532 current_stmt_tree ()->stmts_are_full_exprs_p
533 = STMT_IS_FULL_EXPR_P (*expr_p);
536 switch (code)
538 case PTRMEM_CST:
539 *expr_p = cplus_expand_constant (*expr_p);
540 ret = GS_OK;
541 break;
543 case AGGR_INIT_EXPR:
544 simplify_aggr_init_expr (expr_p);
545 ret = GS_OK;
546 break;
548 case VEC_INIT_EXPR:
550 location_t loc = input_location;
551 tree init = VEC_INIT_EXPR_INIT (*expr_p);
552 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
553 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
554 input_location = EXPR_LOCATION (*expr_p);
555 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
556 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
557 from_array,
558 tf_warning_or_error);
559 cp_genericize_tree (expr_p);
560 ret = GS_OK;
561 input_location = loc;
563 break;
565 case THROW_EXPR:
566 /* FIXME communicate throw type to back end, probably by moving
567 THROW_EXPR into ../tree.def. */
568 *expr_p = TREE_OPERAND (*expr_p, 0);
569 ret = GS_OK;
570 break;
572 case MUST_NOT_THROW_EXPR:
573 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
574 break;
576 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
577 LHS of an assignment might also be involved in the RHS, as in bug
578 25979. */
579 case INIT_EXPR:
580 cp_gimplify_init_expr (expr_p);
581 if (TREE_CODE (*expr_p) != INIT_EXPR)
582 return GS_OK;
583 /* Otherwise fall through. */
584 case MODIFY_EXPR:
586 /* If the back end isn't clever enough to know that the lhs and rhs
587 types are the same, add an explicit conversion. */
588 tree op0 = TREE_OPERAND (*expr_p, 0);
589 tree op1 = TREE_OPERAND (*expr_p, 1);
591 if (!error_operand_p (op0)
592 && !error_operand_p (op1)
593 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
594 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
595 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
596 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
597 TREE_TYPE (op0), op1);
599 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
600 || (TREE_CODE (op1) == CONSTRUCTOR
601 && CONSTRUCTOR_NELTS (op1) == 0
602 && !TREE_CLOBBER_P (op1))
603 || (TREE_CODE (op1) == CALL_EXPR
604 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
605 && is_really_empty_class (TREE_TYPE (op0)))
607 /* Remove any copies of empty classes. We check that the RHS
608 has a simple form so that TARGET_EXPRs and non-empty
609 CONSTRUCTORs get reduced properly, and we leave the return
610 slot optimization alone because it isn't a copy (FIXME so it
611 shouldn't be represented as one).
613 Also drop volatile variables on the RHS to avoid infinite
614 recursion from gimplify_expr trying to load the value. */
615 if (!TREE_SIDE_EFFECTS (op1)
616 || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
617 *expr_p = op0;
618 else if (TREE_CODE (op1) == MEM_REF
619 && TREE_THIS_VOLATILE (op1))
621 /* Similarly for volatile MEM_REFs on the RHS. */
622 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
623 *expr_p = op0;
624 else
625 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
626 TREE_OPERAND (op1, 0), op0);
628 else
629 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
630 op0, op1);
633 ret = GS_OK;
634 break;
636 case EMPTY_CLASS_EXPR:
637 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
638 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
639 ret = GS_OK;
640 break;
642 case BASELINK:
643 *expr_p = BASELINK_FUNCTIONS (*expr_p);
644 ret = GS_OK;
645 break;
647 case TRY_BLOCK:
648 genericize_try_block (expr_p);
649 ret = GS_OK;
650 break;
652 case HANDLER:
653 genericize_catch_block (expr_p);
654 ret = GS_OK;
655 break;
657 case EH_SPEC_BLOCK:
658 genericize_eh_spec_block (expr_p);
659 ret = GS_OK;
660 break;
662 case USING_STMT:
663 gcc_unreachable ();
665 case FOR_STMT:
666 case WHILE_STMT:
667 case DO_STMT:
668 case SWITCH_STMT:
669 case CONTINUE_STMT:
670 case BREAK_STMT:
671 gcc_unreachable ();
673 case OMP_FOR:
674 ret = cp_gimplify_omp_for (expr_p, pre_p);
675 break;
677 case EXPR_STMT:
678 gimplify_expr_stmt (expr_p);
679 ret = GS_OK;
680 break;
682 case UNARY_PLUS_EXPR:
684 tree arg = TREE_OPERAND (*expr_p, 0);
685 tree type = TREE_TYPE (*expr_p);
686 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
687 : arg;
688 ret = GS_OK;
690 break;
692 default:
693 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
694 break;
697 /* Restore saved state. */
698 if (STATEMENT_CODE_P (code))
699 current_stmt_tree ()->stmts_are_full_exprs_p
700 = saved_stmts_are_full_exprs_p;
702 return ret;
705 static inline bool
706 is_invisiref_parm (const_tree t)
708 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
709 && DECL_BY_REFERENCE (t));
712 /* Return true if the uid in both int tree maps are equal. */
715 cxx_int_tree_map_eq (const void *va, const void *vb)
717 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
718 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
719 return (a->uid == b->uid);
722 /* Hash a UID in a cxx_int_tree_map. */
724 unsigned int
725 cxx_int_tree_map_hash (const void *item)
727 return ((const struct cxx_int_tree_map *)item)->uid;
730 /* A stable comparison routine for use with splay trees and DECLs. */
732 static int
733 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
735 tree a = (tree) xa;
736 tree b = (tree) xb;
738 return DECL_UID (a) - DECL_UID (b);
741 /* OpenMP context during genericization. */
743 struct cp_genericize_omp_taskreg
745 bool is_parallel;
746 bool default_shared;
747 struct cp_genericize_omp_taskreg *outer;
748 splay_tree variables;
751 /* Return true if genericization should try to determine if
752 DECL is firstprivate or shared within task regions. */
754 static bool
755 omp_var_to_track (tree decl)
757 tree type = TREE_TYPE (decl);
758 if (is_invisiref_parm (decl))
759 type = TREE_TYPE (type);
760 while (TREE_CODE (type) == ARRAY_TYPE)
761 type = TREE_TYPE (type);
762 if (type == error_mark_node || !CLASS_TYPE_P (type))
763 return false;
764 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
765 return false;
766 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
767 return false;
768 return true;
771 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
773 static void
774 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
776 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
777 (splay_tree_key) decl);
778 if (n == NULL)
780 int flags = OMP_CLAUSE_DEFAULT_SHARED;
781 if (omp_ctx->outer)
782 omp_cxx_notice_variable (omp_ctx->outer, decl);
783 if (!omp_ctx->default_shared)
785 struct cp_genericize_omp_taskreg *octx;
787 for (octx = omp_ctx->outer; octx; octx = octx->outer)
789 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
790 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
792 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
793 break;
795 if (octx->is_parallel)
796 break;
798 if (octx == NULL
799 && (TREE_CODE (decl) == PARM_DECL
800 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
801 && DECL_CONTEXT (decl) == current_function_decl)))
802 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
803 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
805 /* DECL is implicitly determined firstprivate in
806 the current task construct. Ensure copy ctor and
807 dtor are instantiated, because during gimplification
808 it will be already too late. */
809 tree type = TREE_TYPE (decl);
810 if (is_invisiref_parm (decl))
811 type = TREE_TYPE (type);
812 while (TREE_CODE (type) == ARRAY_TYPE)
813 type = TREE_TYPE (type);
814 get_copy_ctor (type, tf_none);
815 get_dtor (type, tf_none);
818 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
822 /* Genericization context. */
824 struct cp_genericize_data
826 struct pointer_set_t *p_set;
827 VEC (tree, heap) *bind_expr_stack;
828 struct cp_genericize_omp_taskreg *omp_ctx;
831 /* Perform any pre-gimplification lowering of C++ front end trees to
832 GENERIC. */
834 static tree
835 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
837 tree stmt = *stmt_p;
838 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
839 struct pointer_set_t *p_set = wtd->p_set;
841 /* If in an OpenMP context, note var uses. */
842 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
843 && (TREE_CODE (stmt) == VAR_DECL
844 || TREE_CODE (stmt) == PARM_DECL
845 || TREE_CODE (stmt) == RESULT_DECL)
846 && omp_var_to_track (stmt))
847 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
849 if (is_invisiref_parm (stmt)
850 /* Don't dereference parms in a thunk, pass the references through. */
851 && !(DECL_THUNK_P (current_function_decl)
852 && TREE_CODE (stmt) == PARM_DECL))
854 *stmt_p = convert_from_reference (stmt);
855 *walk_subtrees = 0;
856 return NULL;
859 /* Map block scope extern declarations to visible declarations with the
860 same name and type in outer scopes if any. */
861 if (cp_function_chain->extern_decl_map
862 && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
863 && DECL_EXTERNAL (stmt))
865 struct cxx_int_tree_map *h, in;
866 in.uid = DECL_UID (stmt);
867 h = (struct cxx_int_tree_map *)
868 htab_find_with_hash (cp_function_chain->extern_decl_map,
869 &in, in.uid);
870 if (h)
872 *stmt_p = h->to;
873 *walk_subtrees = 0;
874 return NULL;
878 /* Other than invisiref parms, don't walk the same tree twice. */
879 if (pointer_set_contains (p_set, stmt))
881 *walk_subtrees = 0;
882 return NULL_TREE;
885 if (TREE_CODE (stmt) == ADDR_EXPR
886 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
888 /* If in an OpenMP context, note var uses. */
889 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
890 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
891 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
892 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
893 *walk_subtrees = 0;
895 else if (TREE_CODE (stmt) == RETURN_EXPR
896 && TREE_OPERAND (stmt, 0)
897 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
898 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
899 *walk_subtrees = 0;
900 else if (TREE_CODE (stmt) == OMP_CLAUSE)
901 switch (OMP_CLAUSE_CODE (stmt))
903 case OMP_CLAUSE_LASTPRIVATE:
904 /* Don't dereference an invisiref in OpenMP clauses. */
905 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
907 *walk_subtrees = 0;
908 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
909 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
910 cp_genericize_r, data, NULL);
912 break;
913 case OMP_CLAUSE_PRIVATE:
914 /* Don't dereference an invisiref in OpenMP clauses. */
915 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
916 *walk_subtrees = 0;
917 else if (wtd->omp_ctx != NULL)
919 /* Private clause doesn't cause any references to the
920 var in outer contexts, avoid calling
921 omp_cxx_notice_variable for it. */
922 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
923 wtd->omp_ctx = NULL;
924 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
925 data, NULL);
926 wtd->omp_ctx = old;
927 *walk_subtrees = 0;
929 break;
930 case OMP_CLAUSE_SHARED:
931 case OMP_CLAUSE_FIRSTPRIVATE:
932 case OMP_CLAUSE_COPYIN:
933 case OMP_CLAUSE_COPYPRIVATE:
934 /* Don't dereference an invisiref in OpenMP clauses. */
935 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
936 *walk_subtrees = 0;
937 break;
938 case OMP_CLAUSE_REDUCTION:
939 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
940 break;
941 default:
942 break;
944 else if (IS_TYPE_OR_DECL_P (stmt))
945 *walk_subtrees = 0;
947 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
948 to lower this construct before scanning it, so we need to lower these
949 before doing anything else. */
950 else if (TREE_CODE (stmt) == CLEANUP_STMT)
951 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
952 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
953 : TRY_FINALLY_EXPR,
954 void_type_node,
955 CLEANUP_BODY (stmt),
956 CLEANUP_EXPR (stmt));
958 else if (TREE_CODE (stmt) == IF_STMT)
960 genericize_if_stmt (stmt_p);
961 /* *stmt_p has changed, tail recurse to handle it again. */
962 return cp_genericize_r (stmt_p, walk_subtrees, data);
965 /* COND_EXPR might have incompatible types in branches if one or both
966 arms are bitfields. Fix it up now. */
967 else if (TREE_CODE (stmt) == COND_EXPR)
969 tree type_left
970 = (TREE_OPERAND (stmt, 1)
971 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
972 : NULL_TREE);
973 tree type_right
974 = (TREE_OPERAND (stmt, 2)
975 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
976 : NULL_TREE);
977 if (type_left
978 && !useless_type_conversion_p (TREE_TYPE (stmt),
979 TREE_TYPE (TREE_OPERAND (stmt, 1))))
981 TREE_OPERAND (stmt, 1)
982 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
983 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
984 type_left));
986 if (type_right
987 && !useless_type_conversion_p (TREE_TYPE (stmt),
988 TREE_TYPE (TREE_OPERAND (stmt, 2))))
990 TREE_OPERAND (stmt, 2)
991 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
992 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
993 type_right));
997 else if (TREE_CODE (stmt) == BIND_EXPR)
999 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1001 tree decl;
1002 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1003 if (TREE_CODE (decl) == VAR_DECL
1004 && !DECL_EXTERNAL (decl)
1005 && omp_var_to_track (decl))
1007 splay_tree_node n
1008 = splay_tree_lookup (wtd->omp_ctx->variables,
1009 (splay_tree_key) decl);
1010 if (n == NULL)
1011 splay_tree_insert (wtd->omp_ctx->variables,
1012 (splay_tree_key) decl,
1013 TREE_STATIC (decl)
1014 ? OMP_CLAUSE_DEFAULT_SHARED
1015 : OMP_CLAUSE_DEFAULT_PRIVATE);
1018 VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
1019 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1020 cp_genericize_r, data, NULL);
1021 VEC_pop (tree, wtd->bind_expr_stack);
1024 else if (TREE_CODE (stmt) == USING_STMT)
1026 tree block = NULL_TREE;
1028 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1029 BLOCK, and append an IMPORTED_DECL to its
1030 BLOCK_VARS chained list. */
1031 if (wtd->bind_expr_stack)
1033 int i;
1034 for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
1035 if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
1036 wtd->bind_expr_stack, i))))
1037 break;
1039 if (block)
1041 tree using_directive;
1042 gcc_assert (TREE_OPERAND (stmt, 0));
1044 using_directive = make_node (IMPORTED_DECL);
1045 TREE_TYPE (using_directive) = void_type_node;
1047 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1048 = TREE_OPERAND (stmt, 0);
1049 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1050 BLOCK_VARS (block) = using_directive;
1052 /* The USING_STMT won't appear in GENERIC. */
1053 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1054 *walk_subtrees = 0;
1057 else if (TREE_CODE (stmt) == DECL_EXPR
1058 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1060 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1061 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1062 *walk_subtrees = 0;
1064 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1066 struct cp_genericize_omp_taskreg omp_ctx;
1067 tree c, decl;
1068 splay_tree_node n;
1070 *walk_subtrees = 0;
1071 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1072 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1073 omp_ctx.default_shared = omp_ctx.is_parallel;
1074 omp_ctx.outer = wtd->omp_ctx;
1075 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1076 wtd->omp_ctx = &omp_ctx;
1077 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1078 switch (OMP_CLAUSE_CODE (c))
1080 case OMP_CLAUSE_SHARED:
1081 case OMP_CLAUSE_PRIVATE:
1082 case OMP_CLAUSE_FIRSTPRIVATE:
1083 case OMP_CLAUSE_LASTPRIVATE:
1084 decl = OMP_CLAUSE_DECL (c);
1085 if (decl == error_mark_node || !omp_var_to_track (decl))
1086 break;
1087 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1088 if (n != NULL)
1089 break;
1090 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1091 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1092 ? OMP_CLAUSE_DEFAULT_SHARED
1093 : OMP_CLAUSE_DEFAULT_PRIVATE);
1094 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1095 && omp_ctx.outer)
1096 omp_cxx_notice_variable (omp_ctx.outer, decl);
1097 break;
1098 case OMP_CLAUSE_DEFAULT:
1099 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1100 omp_ctx.default_shared = true;
1101 default:
1102 break;
1104 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1105 wtd->omp_ctx = omp_ctx.outer;
1106 splay_tree_delete (omp_ctx.variables);
1108 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1109 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1110 else if (TREE_CODE (stmt) == FOR_STMT)
1111 genericize_for_stmt (stmt_p, walk_subtrees, data);
1112 else if (TREE_CODE (stmt) == WHILE_STMT)
1113 genericize_while_stmt (stmt_p, walk_subtrees, data);
1114 else if (TREE_CODE (stmt) == DO_STMT)
1115 genericize_do_stmt (stmt_p, walk_subtrees, data);
1116 else if (TREE_CODE (stmt) == SWITCH_STMT)
1117 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1118 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1119 genericize_continue_stmt (stmt_p);
1120 else if (TREE_CODE (stmt) == BREAK_STMT)
1121 genericize_break_stmt (stmt_p);
1122 else if (TREE_CODE (stmt) == OMP_FOR)
1123 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1124 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1126 if (SIZEOF_EXPR_TYPE_P (stmt))
1127 *stmt_p
1128 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1129 SIZEOF_EXPR, false);
1130 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1131 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1132 SIZEOF_EXPR, false);
1133 else
1134 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1135 SIZEOF_EXPR, false);
1136 if (*stmt_p == error_mark_node)
1137 *stmt_p = size_one_node;
1138 return NULL;
1141 pointer_set_insert (p_set, *stmt_p);
1143 return NULL;
1146 /* Lower C++ front end trees to GENERIC in T_P. */
1148 static void
1149 cp_genericize_tree (tree* t_p)
1151 struct cp_genericize_data wtd;
1153 wtd.p_set = pointer_set_create ();
1154 wtd.bind_expr_stack = NULL;
1155 wtd.omp_ctx = NULL;
1156 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1157 pointer_set_destroy (wtd.p_set);
1158 VEC_free (tree, heap, wtd.bind_expr_stack);
/* Genericize the body of FNDECL: rewrite parameters and the return value
   that are passed by invisible reference into explicit reference types,
   lower the remaining C++-specific trees to GENERIC, and hand off to the
   C-family genericizer.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm over to its by-reference representation:
	   the type callers actually pass, marked DECL_BY_REFERENCE and
	   no longer addressable, then recompute its layout.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  A named
	     DECL_RESULT here indicates the named-return-value
	     optimization; find the matching variable in the outermost
	     user block and make its value expression dereference the
	     now-by-reference result.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  /* Every break/continue scope opened by begin_bc_block must have been
     closed again by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* DEFPARM is the parameter-type list past the `this' argument
     (and past ARG2's slot, when present); its entries supply any
     default arguments FN takes.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit an explicit element-by-element loop.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Peel off all array dimensions to reach the element type, and
	 index down to the first element of each argument.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is the one-past-the-end pointer for ARG1's storage.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label; the backward jump is emitted at the end.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      /* Call FN on the current element, wrapped in a cleanup point.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Jump back to the loop head while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of the arguments.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1335 /* Return code to initialize DECL with its default constructor, or
1336 NULL if there's nothing to do. */
1338 tree
1339 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1341 tree info = CP_OMP_CLAUSE_INFO (clause);
1342 tree ret = NULL;
1344 if (info)
1345 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1347 return ret;
1350 /* Return code to initialize DST with a copy constructor from SRC. */
1352 tree
1353 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1355 tree info = CP_OMP_CLAUSE_INFO (clause);
1356 tree ret = NULL;
1358 if (info)
1359 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1360 if (ret == NULL)
1361 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1363 return ret;
1366 /* Similarly, except use an assignment operator instead. */
1368 tree
1369 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1371 tree info = CP_OMP_CLAUSE_INFO (clause);
1372 tree ret = NULL;
1374 if (info)
1375 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1376 if (ret == NULL)
1377 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1379 return ret;
1382 /* Return code to destroy DECL. */
1384 tree
1385 cxx_omp_clause_dtor (tree clause, tree decl)
1387 tree info = CP_OMP_CLAUSE_INFO (clause);
1388 tree ret = NULL;
1390 if (info)
1391 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1393 return ret;
1396 /* True if OpenMP should privatize what this DECL points to rather
1397 than the DECL itself. */
1399 bool
1400 cxx_omp_privatize_by_reference (const_tree decl)
1402 return is_invisiref_parm (decl);
1405 /* Return true if DECL is const qualified var having no mutable member. */
1406 bool
1407 cxx_omp_const_qual_no_mutable (tree decl)
1409 tree type = TREE_TYPE (decl);
1410 if (TREE_CODE (type) == REFERENCE_TYPE)
1412 if (!is_invisiref_parm (decl))
1413 return false;
1414 type = TREE_TYPE (type);
1416 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1418 /* NVR doesn't preserve const qualification of the
1419 variable's type. */
1420 tree outer = outer_curly_brace_block (current_function_decl);
1421 tree var;
1423 if (outer)
1424 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1425 if (DECL_NAME (decl) == DECL_NAME (var)
1426 && (TYPE_MAIN_VARIANT (type)
1427 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1429 if (TYPE_READONLY (TREE_TYPE (var)))
1430 type = TREE_TYPE (var);
1431 break;
1436 if (type == error_mark_node)
1437 return false;
1439 /* Variables with const-qualified type having no mutable member
1440 are predetermined shared. */
1441 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1442 return true;
1444 return false;
1447 /* True if OpenMP sharing attribute of DECL is predetermined. */
1449 enum omp_clause_default_kind
1450 cxx_omp_predetermined_sharing (tree decl)
1452 /* Static data members are predetermined shared. */
1453 if (TREE_STATIC (decl))
1455 tree ctx = CP_DECL_CONTEXT (decl);
1456 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1457 return OMP_CLAUSE_DEFAULT_SHARED;
1460 /* Const qualified vars having no mutable member are predetermined
1461 shared. */
1462 if (cxx_omp_const_qual_no_mutable (decl))
1463 return OMP_CLAUSE_DEFAULT_SHARED;
1465 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1468 /* Finalize an implicitly determined clause. */
1470 void
1471 cxx_omp_finish_clause (tree c)
1473 tree decl, inner_type;
1474 bool make_shared = false;
1476 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1477 return;
1479 decl = OMP_CLAUSE_DECL (c);
1480 decl = require_complete_type (decl);
1481 inner_type = TREE_TYPE (decl);
1482 if (decl == error_mark_node)
1483 make_shared = true;
1484 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1486 if (is_invisiref_parm (decl))
1487 inner_type = TREE_TYPE (inner_type);
1488 else
1490 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1491 decl);
1492 make_shared = true;
1496 /* We're interested in the base element, not arrays. */
1497 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1498 inner_type = TREE_TYPE (inner_type);
1500 /* Check for special function availability by building a call to one.
1501 Save the results, because later we won't be in the right context
1502 for making these queries. */
1503 if (!make_shared
1504 && CLASS_TYPE_P (inner_type)
1505 && cxx_omp_create_clause_info (c, inner_type, false, true, false))
1506 make_shared = true;
1508 if (make_shared)
1509 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;