/* gcc/cp/cp-gimplify.c
   (source blob 53b0ca8f928208f8414084616aea31554654355d)  */
/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"
#include "splay-tree.h"
36 /* Forward declarations. */
38 static tree cp_genericize_r (tree *, int *, void *);
39 static void cp_genericize_tree (tree*);
41 /* Local declarations. */
43 enum bc_t { bc_break = 0, bc_continue = 1 };
45 /* Stack of labels which are targets for "break" or "continue",
46 linked through TREE_CHAIN. */
47 static tree bc_label[2];
49 /* Begin a scope which can be exited by a break or continue statement. BC
50 indicates which.
52 Just creates a label with location LOCATION and pushes it into the current
53 context. */
55 static tree
56 begin_bc_block (enum bc_t bc, location_t location)
58 tree label = create_artificial_label (location);
59 DECL_CHAIN (label) = bc_label[bc];
60 bc_label[bc] = label;
61 return label;
64 /* Finish a scope which can be exited by a break or continue statement.
65 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
66 an expression for the contents of the scope.
68 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
69 BLOCK. Otherwise, just forget the label. */
71 static void
72 finish_bc_block (tree *block, enum bc_t bc, tree label)
74 gcc_assert (label == bc_label[bc]);
76 if (TREE_USED (label))
77 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
78 block);
80 bc_label[bc] = DECL_CHAIN (label);
81 DECL_CHAIN (label) = NULL_TREE;
84 /* Get the LABEL_EXPR to represent a break or continue statement
85 in the current block scope. BC indicates which. */
87 static tree
88 get_bc_label (enum bc_t bc)
90 tree label = bc_label[bc];
92 /* Mark the label used for finish_bc_block. */
93 TREE_USED (label) = 1;
94 return label;
97 /* Genericize a TRY_BLOCK. */
99 static void
100 genericize_try_block (tree *stmt_p)
102 tree body = TRY_STMTS (*stmt_p);
103 tree cleanup = TRY_HANDLERS (*stmt_p);
105 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
108 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
110 static void
111 genericize_catch_block (tree *stmt_p)
113 tree type = HANDLER_TYPE (*stmt_p);
114 tree body = HANDLER_BODY (*stmt_p);
116 /* FIXME should the caught type go in TREE_TYPE? */
117 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
120 /* A terser interface for building a representation of an exception
121 specification. */
123 static tree
124 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
126 tree t;
128 /* FIXME should the allowed types go in TREE_TYPE? */
129 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
130 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
132 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
133 append_to_statement_list (body, &TREE_OPERAND (t, 0));
135 return t;
138 /* Genericize an EH_SPEC_BLOCK by converting it to a
139 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
141 static void
142 genericize_eh_spec_block (tree *stmt_p)
144 tree body = EH_SPEC_STMTS (*stmt_p);
145 tree allowed = EH_SPEC_RAISES (*stmt_p);
146 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
148 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
149 TREE_NO_WARNING (*stmt_p) = true;
150 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
153 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
155 static void
156 genericize_if_stmt (tree *stmt_p)
158 tree stmt, cond, then_, else_;
159 location_t locus = EXPR_LOCATION (*stmt_p);
161 stmt = *stmt_p;
162 cond = IF_COND (stmt);
163 then_ = THEN_CLAUSE (stmt);
164 else_ = ELSE_CLAUSE (stmt);
166 if (!then_)
167 then_ = build_empty_stmt (locus);
168 if (!else_)
169 else_ = build_empty_stmt (locus);
171 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
172 stmt = then_;
173 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
174 stmt = else_;
175 else
176 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
177 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
178 SET_EXPR_LOCATION (stmt, locus);
179 *stmt_p = stmt;
182 /* Build a generic representation of one of the C loop forms. COND is the
183 loop condition or NULL_TREE. BODY is the (possibly compound) statement
184 controlled by the loop. INCR is the increment expression of a for-loop,
185 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
186 evaluated before the loop body as in while and for loops, or after the
187 loop body as in do-while loops. */
189 static void
190 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
191 tree incr, bool cond_is_first, int *walk_subtrees,
192 void *data)
194 tree blab, clab;
195 tree entry = NULL, exit = NULL, t;
196 tree stmt_list = NULL;
198 blab = begin_bc_block (bc_break, start_locus);
199 clab = begin_bc_block (bc_continue, start_locus);
201 if (incr && EXPR_P (incr))
202 SET_EXPR_LOCATION (incr, start_locus);
204 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
205 cp_walk_tree (&body, cp_genericize_r, data, NULL);
206 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
207 *walk_subtrees = 0;
209 /* If condition is zero don't generate a loop construct. */
210 if (cond && integer_zerop (cond))
212 if (cond_is_first)
214 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
215 get_bc_label (bc_break));
216 append_to_statement_list (t, &stmt_list);
219 else
221 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
222 tree top = build1 (LABEL_EXPR, void_type_node,
223 create_artificial_label (start_locus));
225 /* If we have an exit condition, then we build an IF with gotos either
226 out of the loop, or to the top of it. If there's no exit condition,
227 then we just build a jump back to the top. */
228 exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));
230 if (cond && !integer_nonzerop (cond))
232 /* Canonicalize the loop condition to the end. This means
233 generating a branch to the loop condition. Reuse the
234 continue label, if possible. */
235 if (cond_is_first)
237 if (incr)
239 entry = build1 (LABEL_EXPR, void_type_node,
240 create_artificial_label (start_locus));
241 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
242 LABEL_EXPR_LABEL (entry));
244 else
245 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
246 get_bc_label (bc_continue));
247 append_to_statement_list (t, &stmt_list);
250 t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
251 exit = fold_build3_loc (start_locus,
252 COND_EXPR, void_type_node, cond, exit, t);
255 append_to_statement_list (top, &stmt_list);
258 append_to_statement_list (body, &stmt_list);
259 finish_bc_block (&stmt_list, bc_continue, clab);
260 append_to_statement_list (incr, &stmt_list);
261 append_to_statement_list (entry, &stmt_list);
262 append_to_statement_list (exit, &stmt_list);
263 finish_bc_block (&stmt_list, bc_break, blab);
265 if (stmt_list == NULL_TREE)
266 stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);
268 *stmt_p = stmt_list;
271 /* Genericize a FOR_STMT node *STMT_P. */
273 static void
274 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
276 tree stmt = *stmt_p;
277 tree expr = NULL;
278 tree loop;
279 tree init = FOR_INIT_STMT (stmt);
281 if (init)
283 cp_walk_tree (&init, cp_genericize_r, data, NULL);
284 append_to_statement_list (init, &expr);
287 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
288 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
289 append_to_statement_list (loop, &expr);
290 *stmt_p = expr;
293 /* Genericize a WHILE_STMT node *STMT_P. */
295 static void
296 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
298 tree stmt = *stmt_p;
299 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
300 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
303 /* Genericize a DO_STMT node *STMT_P. */
305 static void
306 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
308 tree stmt = *stmt_p;
309 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
310 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
315 static void
316 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
318 tree stmt = *stmt_p;
319 tree break_block, body, cond, type;
320 location_t stmt_locus = EXPR_LOCATION (stmt);
322 break_block = begin_bc_block (bc_break, stmt_locus);
324 body = SWITCH_STMT_BODY (stmt);
325 if (!body)
326 body = build_empty_stmt (stmt_locus);
327 cond = SWITCH_STMT_COND (stmt);
328 type = SWITCH_STMT_TYPE (stmt);
330 cp_walk_tree (&body, cp_genericize_r, data, NULL);
331 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
332 cp_walk_tree (&type, cp_genericize_r, data, NULL);
333 *walk_subtrees = 0;
335 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
336 finish_bc_block (stmt_p, bc_break, break_block);
339 /* Genericize a CONTINUE_STMT node *STMT_P. */
341 static void
342 genericize_continue_stmt (tree *stmt_p)
344 tree stmt_list = NULL;
345 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
346 tree label = get_bc_label (bc_continue);
347 location_t location = EXPR_LOCATION (*stmt_p);
348 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
349 append_to_statement_list (pred, &stmt_list);
350 append_to_statement_list (jump, &stmt_list);
351 *stmt_p = stmt_list;
354 /* Genericize a BREAK_STMT node *STMT_P. */
356 static void
357 genericize_break_stmt (tree *stmt_p)
359 tree label = get_bc_label (bc_break);
360 location_t location = EXPR_LOCATION (*stmt_p);
361 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
364 /* Genericize a OMP_FOR node *STMT_P. */
366 static void
367 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
369 tree stmt = *stmt_p;
370 location_t locus = EXPR_LOCATION (stmt);
371 tree clab = begin_bc_block (bc_continue, locus);
373 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
374 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
375 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
377 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
378 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
379 *walk_subtrees = 0;
381 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
384 /* Hook into the middle of gimplifying an OMP_FOR node. */
386 static enum gimplify_status
387 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
389 tree for_stmt = *expr_p;
390 gimple_seq seq = NULL;
392 /* Protect ourselves from recursion. */
393 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
394 return GS_UNHANDLED;
395 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
397 gimplify_and_add (for_stmt, &seq);
398 gimple_seq_add_seq (pre_p, seq);
400 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
402 return GS_ALL_DONE;
405 /* Gimplify an EXPR_STMT node. */
407 static void
408 gimplify_expr_stmt (tree *stmt_p)
410 tree stmt = EXPR_STMT_EXPR (*stmt_p);
412 if (stmt == error_mark_node)
413 stmt = NULL;
415 /* Gimplification of a statement expression will nullify the
416 statement if all its side effects are moved to *PRE_P and *POST_P.
418 In this case we will not want to emit the gimplified statement.
419 However, we may still want to emit a warning, so we do that before
420 gimplification. */
421 if (stmt && warn_unused_value)
423 if (!TREE_SIDE_EFFECTS (stmt))
425 if (!IS_EMPTY_STMT (stmt)
426 && !VOID_TYPE_P (TREE_TYPE (stmt))
427 && !TREE_NO_WARNING (stmt))
428 warning (OPT_Wunused_value, "statement with no effect");
430 else
431 warn_if_unused_value (stmt, input_location);
434 if (stmt == NULL_TREE)
435 stmt = alloc_stmt_list ();
437 *stmt_p = stmt;
440 /* Gimplify initialization from an AGGR_INIT_EXPR. */
442 static void
443 cp_gimplify_init_expr (tree *expr_p)
445 tree from = TREE_OPERAND (*expr_p, 1);
446 tree to = TREE_OPERAND (*expr_p, 0);
447 tree t;
449 /* What about code that pulls out the temp and uses it elsewhere? I
450 think that such code never uses the TARGET_EXPR as an initializer. If
451 I'm wrong, we'll abort because the temp won't have any RTL. In that
452 case, I guess we'll need to replace references somehow. */
453 if (TREE_CODE (from) == TARGET_EXPR)
454 from = TARGET_EXPR_INITIAL (from);
456 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
457 inside the TARGET_EXPR. */
458 for (t = from; t; )
460 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
462 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
463 replace the slot operand with our target.
465 Should we add a target parm to gimplify_expr instead? No, as in this
466 case we want to replace the INIT_EXPR. */
467 if (TREE_CODE (sub) == AGGR_INIT_EXPR
468 || TREE_CODE (sub) == VEC_INIT_EXPR)
470 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
471 AGGR_INIT_EXPR_SLOT (sub) = to;
472 else
473 VEC_INIT_EXPR_SLOT (sub) = to;
474 *expr_p = from;
476 /* The initialization is now a side-effect, so the container can
477 become void. */
478 if (from != sub)
479 TREE_TYPE (from) = void_type_node;
482 if (t == sub)
483 break;
484 else
485 t = TREE_OPERAND (t, 1);
490 /* Gimplify a MUST_NOT_THROW_EXPR. */
492 static enum gimplify_status
493 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
495 tree stmt = *expr_p;
496 tree temp = voidify_wrapper_expr (stmt, NULL);
497 tree body = TREE_OPERAND (stmt, 0);
498 gimple_seq try_ = NULL;
499 gimple_seq catch_ = NULL;
500 gimple mnt;
502 gimplify_and_add (body, &try_);
503 mnt = gimple_build_eh_must_not_throw (terminate_node);
504 gimple_seq_add_stmt_without_update (&catch_, mnt);
505 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
507 gimple_seq_add_stmt_without_update (pre_p, mnt);
508 if (temp)
510 *expr_p = temp;
511 return GS_OK;
514 *expr_p = NULL;
515 return GS_ALL_DONE;
518 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
521 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
523 int saved_stmts_are_full_exprs_p = 0;
524 enum tree_code code = TREE_CODE (*expr_p);
525 enum gimplify_status ret;
527 if (STATEMENT_CODE_P (code))
529 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
530 current_stmt_tree ()->stmts_are_full_exprs_p
531 = STMT_IS_FULL_EXPR_P (*expr_p);
534 switch (code)
536 case PTRMEM_CST:
537 *expr_p = cplus_expand_constant (*expr_p);
538 ret = GS_OK;
539 break;
541 case AGGR_INIT_EXPR:
542 simplify_aggr_init_expr (expr_p);
543 ret = GS_OK;
544 break;
546 case VEC_INIT_EXPR:
548 location_t loc = input_location;
549 tree init = VEC_INIT_EXPR_INIT (*expr_p);
550 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
551 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
552 input_location = EXPR_LOCATION (*expr_p);
553 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
554 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
555 from_array,
556 tf_warning_or_error);
557 cp_genericize_tree (expr_p);
558 ret = GS_OK;
559 input_location = loc;
561 break;
563 case THROW_EXPR:
564 /* FIXME communicate throw type to back end, probably by moving
565 THROW_EXPR into ../tree.def. */
566 *expr_p = TREE_OPERAND (*expr_p, 0);
567 ret = GS_OK;
568 break;
570 case MUST_NOT_THROW_EXPR:
571 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
572 break;
574 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
575 LHS of an assignment might also be involved in the RHS, as in bug
576 25979. */
577 case INIT_EXPR:
578 cp_gimplify_init_expr (expr_p);
579 if (TREE_CODE (*expr_p) != INIT_EXPR)
580 return GS_OK;
581 /* Otherwise fall through. */
582 case MODIFY_EXPR:
584 /* If the back end isn't clever enough to know that the lhs and rhs
585 types are the same, add an explicit conversion. */
586 tree op0 = TREE_OPERAND (*expr_p, 0);
587 tree op1 = TREE_OPERAND (*expr_p, 1);
589 if (!error_operand_p (op0)
590 && !error_operand_p (op1)
591 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
592 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
593 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
594 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
595 TREE_TYPE (op0), op1);
597 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
598 || (TREE_CODE (op1) == CONSTRUCTOR
599 && CONSTRUCTOR_NELTS (op1) == 0
600 && !TREE_CLOBBER_P (op1))
601 || (TREE_CODE (op1) == CALL_EXPR
602 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
603 && is_really_empty_class (TREE_TYPE (op0)))
605 /* Remove any copies of empty classes. We check that the RHS
606 has a simple form so that TARGET_EXPRs and non-empty
607 CONSTRUCTORs get reduced properly, and we leave the return
608 slot optimization alone because it isn't a copy (FIXME so it
609 shouldn't be represented as one).
611 Also drop volatile variables on the RHS to avoid infinite
612 recursion from gimplify_expr trying to load the value. */
613 if (!TREE_SIDE_EFFECTS (op1)
614 || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
615 *expr_p = op0;
616 else if (TREE_CODE (op1) == MEM_REF
617 && TREE_THIS_VOLATILE (op1))
619 /* Similarly for volatile MEM_REFs on the RHS. */
620 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
621 *expr_p = op0;
622 else
623 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
624 TREE_OPERAND (op1, 0), op0);
626 else
627 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
628 op0, op1);
631 ret = GS_OK;
632 break;
634 case EMPTY_CLASS_EXPR:
635 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
636 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
637 ret = GS_OK;
638 break;
640 case BASELINK:
641 *expr_p = BASELINK_FUNCTIONS (*expr_p);
642 ret = GS_OK;
643 break;
645 case TRY_BLOCK:
646 genericize_try_block (expr_p);
647 ret = GS_OK;
648 break;
650 case HANDLER:
651 genericize_catch_block (expr_p);
652 ret = GS_OK;
653 break;
655 case EH_SPEC_BLOCK:
656 genericize_eh_spec_block (expr_p);
657 ret = GS_OK;
658 break;
660 case USING_STMT:
661 gcc_unreachable ();
663 case FOR_STMT:
664 case WHILE_STMT:
665 case DO_STMT:
666 case SWITCH_STMT:
667 case CONTINUE_STMT:
668 case BREAK_STMT:
669 gcc_unreachable ();
671 case OMP_FOR:
672 case OMP_SIMD:
673 case OMP_DISTRIBUTE:
674 ret = cp_gimplify_omp_for (expr_p, pre_p);
675 break;
677 case EXPR_STMT:
678 gimplify_expr_stmt (expr_p);
679 ret = GS_OK;
680 break;
682 case UNARY_PLUS_EXPR:
684 tree arg = TREE_OPERAND (*expr_p, 0);
685 tree type = TREE_TYPE (*expr_p);
686 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
687 : arg;
688 ret = GS_OK;
690 break;
692 default:
693 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
694 break;
697 /* Restore saved state. */
698 if (STATEMENT_CODE_P (code))
699 current_stmt_tree ()->stmts_are_full_exprs_p
700 = saved_stmts_are_full_exprs_p;
702 return ret;
705 static inline bool
706 is_invisiref_parm (const_tree t)
708 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
709 && DECL_BY_REFERENCE (t));
712 /* Return true if the uid in both int tree maps are equal. */
715 cxx_int_tree_map_eq (const void *va, const void *vb)
717 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
718 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
719 return (a->uid == b->uid);
722 /* Hash a UID in a cxx_int_tree_map. */
724 unsigned int
725 cxx_int_tree_map_hash (const void *item)
727 return ((const struct cxx_int_tree_map *)item)->uid;
730 /* A stable comparison routine for use with splay trees and DECLs. */
732 static int
733 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
735 tree a = (tree) xa;
736 tree b = (tree) xb;
738 return DECL_UID (a) - DECL_UID (b);
741 /* OpenMP context during genericization. */
743 struct cp_genericize_omp_taskreg
745 bool is_parallel;
746 bool default_shared;
747 struct cp_genericize_omp_taskreg *outer;
748 splay_tree variables;
751 /* Return true if genericization should try to determine if
752 DECL is firstprivate or shared within task regions. */
754 static bool
755 omp_var_to_track (tree decl)
757 tree type = TREE_TYPE (decl);
758 if (is_invisiref_parm (decl))
759 type = TREE_TYPE (type);
760 while (TREE_CODE (type) == ARRAY_TYPE)
761 type = TREE_TYPE (type);
762 if (type == error_mark_node || !CLASS_TYPE_P (type))
763 return false;
764 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
765 return false;
766 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
767 return false;
768 return true;
771 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
773 static void
774 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
776 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
777 (splay_tree_key) decl);
778 if (n == NULL)
780 int flags = OMP_CLAUSE_DEFAULT_SHARED;
781 if (omp_ctx->outer)
782 omp_cxx_notice_variable (omp_ctx->outer, decl);
783 if (!omp_ctx->default_shared)
785 struct cp_genericize_omp_taskreg *octx;
787 for (octx = omp_ctx->outer; octx; octx = octx->outer)
789 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
790 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
792 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
793 break;
795 if (octx->is_parallel)
796 break;
798 if (octx == NULL
799 && (TREE_CODE (decl) == PARM_DECL
800 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
801 && DECL_CONTEXT (decl) == current_function_decl)))
802 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
803 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
805 /* DECL is implicitly determined firstprivate in
806 the current task construct. Ensure copy ctor and
807 dtor are instantiated, because during gimplification
808 it will be already too late. */
809 tree type = TREE_TYPE (decl);
810 if (is_invisiref_parm (decl))
811 type = TREE_TYPE (type);
812 while (TREE_CODE (type) == ARRAY_TYPE)
813 type = TREE_TYPE (type);
814 get_copy_ctor (type, tf_none);
815 get_dtor (type, tf_none);
818 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
822 /* Genericization context. */
824 struct cp_genericize_data
826 struct pointer_set_t *p_set;
827 vec<tree> bind_expr_stack;
828 struct cp_genericize_omp_taskreg *omp_ctx;
831 /* Perform any pre-gimplification lowering of C++ front end trees to
832 GENERIC. */
834 static tree
835 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
837 tree stmt = *stmt_p;
838 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
839 struct pointer_set_t *p_set = wtd->p_set;
841 /* If in an OpenMP context, note var uses. */
842 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
843 && (VAR_P (stmt)
844 || TREE_CODE (stmt) == PARM_DECL
845 || TREE_CODE (stmt) == RESULT_DECL)
846 && omp_var_to_track (stmt))
847 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
849 if (is_invisiref_parm (stmt)
850 /* Don't dereference parms in a thunk, pass the references through. */
851 && !(DECL_THUNK_P (current_function_decl)
852 && TREE_CODE (stmt) == PARM_DECL))
854 *stmt_p = convert_from_reference (stmt);
855 *walk_subtrees = 0;
856 return NULL;
859 /* Map block scope extern declarations to visible declarations with the
860 same name and type in outer scopes if any. */
861 if (cp_function_chain->extern_decl_map
862 && VAR_OR_FUNCTION_DECL_P (stmt)
863 && DECL_EXTERNAL (stmt))
865 struct cxx_int_tree_map *h, in;
866 in.uid = DECL_UID (stmt);
867 h = (struct cxx_int_tree_map *)
868 htab_find_with_hash (cp_function_chain->extern_decl_map,
869 &in, in.uid);
870 if (h)
872 *stmt_p = h->to;
873 *walk_subtrees = 0;
874 return NULL;
878 /* Other than invisiref parms, don't walk the same tree twice. */
879 if (pointer_set_contains (p_set, stmt))
881 *walk_subtrees = 0;
882 return NULL_TREE;
885 if (TREE_CODE (stmt) == ADDR_EXPR
886 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
888 /* If in an OpenMP context, note var uses. */
889 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
890 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
891 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
892 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
893 *walk_subtrees = 0;
895 else if (TREE_CODE (stmt) == RETURN_EXPR
896 && TREE_OPERAND (stmt, 0)
897 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
898 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
899 *walk_subtrees = 0;
900 else if (TREE_CODE (stmt) == OMP_CLAUSE)
901 switch (OMP_CLAUSE_CODE (stmt))
903 case OMP_CLAUSE_LASTPRIVATE:
904 /* Don't dereference an invisiref in OpenMP clauses. */
905 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
907 *walk_subtrees = 0;
908 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
909 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
910 cp_genericize_r, data, NULL);
912 break;
913 case OMP_CLAUSE_PRIVATE:
914 /* Don't dereference an invisiref in OpenMP clauses. */
915 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
916 *walk_subtrees = 0;
917 else if (wtd->omp_ctx != NULL)
919 /* Private clause doesn't cause any references to the
920 var in outer contexts, avoid calling
921 omp_cxx_notice_variable for it. */
922 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
923 wtd->omp_ctx = NULL;
924 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
925 data, NULL);
926 wtd->omp_ctx = old;
927 *walk_subtrees = 0;
929 break;
930 case OMP_CLAUSE_SHARED:
931 case OMP_CLAUSE_FIRSTPRIVATE:
932 case OMP_CLAUSE_COPYIN:
933 case OMP_CLAUSE_COPYPRIVATE:
934 /* Don't dereference an invisiref in OpenMP clauses. */
935 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
936 *walk_subtrees = 0;
937 break;
938 case OMP_CLAUSE_REDUCTION:
939 /* Don't dereference an invisiref in reduction clause's
940 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
941 still needs to be genericized. */
942 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
944 *walk_subtrees = 0;
945 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
946 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
947 cp_genericize_r, data, NULL);
948 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
949 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
950 cp_genericize_r, data, NULL);
952 break;
953 default:
954 break;
956 else if (IS_TYPE_OR_DECL_P (stmt))
957 *walk_subtrees = 0;
959 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
960 to lower this construct before scanning it, so we need to lower these
961 before doing anything else. */
962 else if (TREE_CODE (stmt) == CLEANUP_STMT)
963 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
964 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
965 : TRY_FINALLY_EXPR,
966 void_type_node,
967 CLEANUP_BODY (stmt),
968 CLEANUP_EXPR (stmt));
970 else if (TREE_CODE (stmt) == IF_STMT)
972 genericize_if_stmt (stmt_p);
973 /* *stmt_p has changed, tail recurse to handle it again. */
974 return cp_genericize_r (stmt_p, walk_subtrees, data);
977 /* COND_EXPR might have incompatible types in branches if one or both
978 arms are bitfields. Fix it up now. */
979 else if (TREE_CODE (stmt) == COND_EXPR)
981 tree type_left
982 = (TREE_OPERAND (stmt, 1)
983 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
984 : NULL_TREE);
985 tree type_right
986 = (TREE_OPERAND (stmt, 2)
987 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
988 : NULL_TREE);
989 if (type_left
990 && !useless_type_conversion_p (TREE_TYPE (stmt),
991 TREE_TYPE (TREE_OPERAND (stmt, 1))))
993 TREE_OPERAND (stmt, 1)
994 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
995 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
996 type_left));
998 if (type_right
999 && !useless_type_conversion_p (TREE_TYPE (stmt),
1000 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1002 TREE_OPERAND (stmt, 2)
1003 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1004 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1005 type_right));
1009 else if (TREE_CODE (stmt) == BIND_EXPR)
1011 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1013 tree decl;
1014 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1015 if (VAR_P (decl)
1016 && !DECL_EXTERNAL (decl)
1017 && omp_var_to_track (decl))
1019 splay_tree_node n
1020 = splay_tree_lookup (wtd->omp_ctx->variables,
1021 (splay_tree_key) decl);
1022 if (n == NULL)
1023 splay_tree_insert (wtd->omp_ctx->variables,
1024 (splay_tree_key) decl,
1025 TREE_STATIC (decl)
1026 ? OMP_CLAUSE_DEFAULT_SHARED
1027 : OMP_CLAUSE_DEFAULT_PRIVATE);
1030 wtd->bind_expr_stack.safe_push (stmt);
1031 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1032 cp_genericize_r, data, NULL);
1033 wtd->bind_expr_stack.pop ();
1036 else if (TREE_CODE (stmt) == USING_STMT)
1038 tree block = NULL_TREE;
1040 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1041 BLOCK, and append an IMPORTED_DECL to its
1042 BLOCK_VARS chained list. */
1043 if (wtd->bind_expr_stack.exists ())
1045 int i;
1046 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1047 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1048 break;
1050 if (block)
1052 tree using_directive;
1053 gcc_assert (TREE_OPERAND (stmt, 0));
1055 using_directive = make_node (IMPORTED_DECL);
1056 TREE_TYPE (using_directive) = void_type_node;
1058 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1059 = TREE_OPERAND (stmt, 0);
1060 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1061 BLOCK_VARS (block) = using_directive;
1063 /* The USING_STMT won't appear in GENERIC. */
1064 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1065 *walk_subtrees = 0;
1068 else if (TREE_CODE (stmt) == DECL_EXPR
1069 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1071 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1072 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1073 *walk_subtrees = 0;
1075 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1077 struct cp_genericize_omp_taskreg omp_ctx;
1078 tree c, decl;
1079 splay_tree_node n;
1081 *walk_subtrees = 0;
1082 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1083 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1084 omp_ctx.default_shared = omp_ctx.is_parallel;
1085 omp_ctx.outer = wtd->omp_ctx;
1086 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1087 wtd->omp_ctx = &omp_ctx;
1088 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1089 switch (OMP_CLAUSE_CODE (c))
1091 case OMP_CLAUSE_SHARED:
1092 case OMP_CLAUSE_PRIVATE:
1093 case OMP_CLAUSE_FIRSTPRIVATE:
1094 case OMP_CLAUSE_LASTPRIVATE:
1095 decl = OMP_CLAUSE_DECL (c);
1096 if (decl == error_mark_node || !omp_var_to_track (decl))
1097 break;
1098 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1099 if (n != NULL)
1100 break;
1101 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1102 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1103 ? OMP_CLAUSE_DEFAULT_SHARED
1104 : OMP_CLAUSE_DEFAULT_PRIVATE);
1105 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1106 && omp_ctx.outer)
1107 omp_cxx_notice_variable (omp_ctx.outer, decl);
1108 break;
1109 case OMP_CLAUSE_DEFAULT:
1110 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1111 omp_ctx.default_shared = true;
1112 default:
1113 break;
1115 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1116 wtd->omp_ctx = omp_ctx.outer;
1117 splay_tree_delete (omp_ctx.variables);
1119 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1120 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1121 else if (TREE_CODE (stmt) == FOR_STMT)
1122 genericize_for_stmt (stmt_p, walk_subtrees, data);
1123 else if (TREE_CODE (stmt) == WHILE_STMT)
1124 genericize_while_stmt (stmt_p, walk_subtrees, data);
1125 else if (TREE_CODE (stmt) == DO_STMT)
1126 genericize_do_stmt (stmt_p, walk_subtrees, data);
1127 else if (TREE_CODE (stmt) == SWITCH_STMT)
1128 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1129 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1130 genericize_continue_stmt (stmt_p);
1131 else if (TREE_CODE (stmt) == BREAK_STMT)
1132 genericize_break_stmt (stmt_p);
1133 else if (TREE_CODE (stmt) == OMP_FOR
1134 || TREE_CODE (stmt) == OMP_SIMD
1135 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1136 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1137 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1139 if (SIZEOF_EXPR_TYPE_P (stmt))
1140 *stmt_p
1141 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1142 SIZEOF_EXPR, false);
1143 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1144 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1145 SIZEOF_EXPR, false);
1146 else
1147 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1148 SIZEOF_EXPR, false);
1149 if (*stmt_p == error_mark_node)
1150 *stmt_p = size_one_node;
1151 return NULL;
1154 pointer_set_insert (p_set, *stmt_p);
1156 return NULL;
1159 /* Lower C++ front end trees to GENERIC in T_P. */
1161 static void
1162 cp_genericize_tree (tree* t_p)
1164 struct cp_genericize_data wtd;
1166 wtd.p_set = pointer_set_create ();
1167 wtd.bind_expr_stack.create (0);
1168 wtd.omp_ctx = NULL;
1169 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1170 pointer_set_destroy (wtd.p_set);
1171 wtd.bind_expr_stack.release ();
/* Lower the body of FNDECL from C++ trees to GENERIC: rewrite parms and
   the return value that are passed by invisible reference into explicit
   reference-typed decls, expand Cilk Plus array notations, run the C++
   genericize walk, and finally the common C genericization.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* DECL_ARG_TYPE is the as-passed (reference) type; install it as
	   the parm's type and mark the parm as pass-by-reference.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  With the named
	     return value optimization the user variable aliases the
	     RESULT_DECL; since that is now a reference, the alias must
	     dereference it.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_enable_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue label stacks must have been popped by the walk.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  Returns a statement list (or a
   single cleanup-point expression), or NULL when FN is NULL.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the `this' parameter; and if FN takes two object arguments,
     skip the first object parameter too, leaving only the trailing
     default arguments in DEFPARM.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit element-by-element loop
	 p1 = &a[0]...[0]; lab: fn (p1[, p2]); p1 += elt_size;
	 [p2 += elt_size;] if (p1 != end) goto lab;  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  /* Strip one array dimension and index it at zero; repeat for
	     multidimensional arrays until the element type is reached.  */
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Back edge: loop while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of ARG1 (and ARG2).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1354 /* Return code to initialize DECL with its default constructor, or
1355 NULL if there's nothing to do. */
1357 tree
1358 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1360 tree info = CP_OMP_CLAUSE_INFO (clause);
1361 tree ret = NULL;
1363 if (info)
1364 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1366 return ret;
1369 /* Return code to initialize DST with a copy constructor from SRC. */
1371 tree
1372 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1374 tree info = CP_OMP_CLAUSE_INFO (clause);
1375 tree ret = NULL;
1377 if (info)
1378 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1379 if (ret == NULL)
1380 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1382 return ret;
1385 /* Similarly, except use an assignment operator instead. */
1387 tree
1388 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1390 tree info = CP_OMP_CLAUSE_INFO (clause);
1391 tree ret = NULL;
1393 if (info)
1394 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1395 if (ret == NULL)
1396 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1398 return ret;
1401 /* Return code to destroy DECL. */
1403 tree
1404 cxx_omp_clause_dtor (tree clause, tree decl)
1406 tree info = CP_OMP_CLAUSE_INFO (clause);
1407 tree ret = NULL;
1409 if (info)
1410 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1412 return ret;
1415 /* True if OpenMP should privatize what this DECL points to rather
1416 than the DECL itself. */
1418 bool
1419 cxx_omp_privatize_by_reference (const_tree decl)
1421 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1422 || is_invisiref_parm (decl));
1425 /* Return true if DECL is const qualified var having no mutable member. */
1426 bool
1427 cxx_omp_const_qual_no_mutable (tree decl)
1429 tree type = TREE_TYPE (decl);
1430 if (TREE_CODE (type) == REFERENCE_TYPE)
1432 if (!is_invisiref_parm (decl))
1433 return false;
1434 type = TREE_TYPE (type);
1436 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1438 /* NVR doesn't preserve const qualification of the
1439 variable's type. */
1440 tree outer = outer_curly_brace_block (current_function_decl);
1441 tree var;
1443 if (outer)
1444 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1445 if (DECL_NAME (decl) == DECL_NAME (var)
1446 && (TYPE_MAIN_VARIANT (type)
1447 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1449 if (TYPE_READONLY (TREE_TYPE (var)))
1450 type = TREE_TYPE (var);
1451 break;
1456 if (type == error_mark_node)
1457 return false;
1459 /* Variables with const-qualified type having no mutable member
1460 are predetermined shared. */
1461 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1462 return true;
1464 return false;
1467 /* True if OpenMP sharing attribute of DECL is predetermined. */
1469 enum omp_clause_default_kind
1470 cxx_omp_predetermined_sharing (tree decl)
1472 /* Static data members are predetermined shared. */
1473 if (TREE_STATIC (decl))
1475 tree ctx = CP_DECL_CONTEXT (decl);
1476 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1477 return OMP_CLAUSE_DEFAULT_SHARED;
1480 /* Const qualified vars having no mutable member are predetermined
1481 shared. */
1482 if (cxx_omp_const_qual_no_mutable (decl))
1483 return OMP_CLAUSE_DEFAULT_SHARED;
1485 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* Finalize an implicitly determined clause C.  Only FIRSTPRIVATE clauses
   need work here: reference-typed decls are diagnosed (or looked through
   for invisible-reference parms), and class types have their special
   member functions probed and cached; on failure the clause is demoted
   to SHARED.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible-reference parms are really the underlying object;
	 any other reference type is invalid for firstprivate.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}