/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-common.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  TREE_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   body.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    gimple_seq_add_stmt (&body, gimple_build_label (label));

  bc_label[bc] = TREE_CHAIN (label);
  TREE_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  if (label == NULL_TREE)
    {
      if (bc == bc_break)
        error ("break statement not within loop or switch");
      else
        error ("continue statement not within loop or switch");

      return NULL_TREE;
    }

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
}

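/* Illustrative note (not from the original sources): for a definition such
   as

       void f () throw (A, B) { body; }

   the EH_SPEC_BLOCK is lowered to, roughly,

       TRY_CATCH_EXPR <body,
                       EH_FILTER_EXPR <allowed = {A, B},
                                       failure = call unexpected handler>>

   so any exception escaping BODY whose type is not in the allowed list
   funnels into the filter's failure action.  */
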
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

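/* Illustrative note (not from the original sources): a statement such as

       if (x) f (); else g ();

   arrives here as an IF_STMT and leaves as the GENERIC expression

       COND_EXPR <x, f (), g ()>

   with a missing arm replaced by an empty statement, and a constant
   condition folded away when the dead arm has no side effects.  */
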
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
        {
          stmt = gimple_build_goto (get_bc_label (bc_break));
          gimple_set_location (stmt, stmt_locus);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
        {
          if (cond != error_mark_node)
            {
              gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
              stmt = gimple_build_cond (NE_EXPR, cond,
                                        build_int_cst (TREE_TYPE (cond), 0),
                                        gimple_label_label (top),
                                        get_bc_label (bc_break));
              gimple_seq_add_stmt (&exit_seq, stmt);
            }

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = gimple_build_label
                    (create_artificial_label (stmt_locus));
                  stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
                stmt = gimple_build_goto (get_bc_label (bc_continue));
              gimple_set_location (stmt, stmt_locus);
              gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
      else
        {
          stmt = gimple_build_goto (gimple_label_label (top));
          gimple_seq_add_stmt (&exit_seq, stmt);
        }
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}

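/* Illustrative note (not from the original sources): for

       while (cond) body;

   gimplify_cp_loop (cond, body, NULL_TREE, true) emits approximately

         goto cont_label;
       top_label:
         body
       cont_label:
         if (cond != 0) goto top_label; else goto break_label;
       break_label:

   while a do-while passes cond_is_first == false and so omits the initial
   goto, running the body once before the first test.  The cont_label and
   break_label lines only survive if the body actually used them.  */
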
/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
                                        FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
                                        NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
                                        NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
              SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

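/* Illustrative note (not from the original sources): for

       switch (c) { case 0: ...; break; }

   the body is gimplified inside a bc_break scope, so the "break" becomes a
   goto to the label that finish_bc_block appends immediately after the
   SWITCH_EXPR.  */
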
/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
                                                gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;
  tree slot = NULL_TREE;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    {
      slot = TARGET_EXPR_SLOT (from);
      from = TARGET_EXPR_INITIAL (from);
    }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR)
        {
          gimplify_expr (&to, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
          AGGR_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

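/* Illustrative note (not from the original sources): for an initialization
   like

       A a = make_a ();    (make_a returns A by value)

   the initializer is a TARGET_EXPR whose initial value is an
   AGGR_INIT_EXPR; the code above rewrites the AGGR_INIT_EXPR's slot to be
   "a" itself, so the result is built directly in "a" rather than in a
   temporary that would then have to be copied.  */
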
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);

  stmt = build_gimple_eh_filter_tree (body, NULL_TREE,
                                      build_call_n (terminate_node, 0));

  gimplify_and_add (stmt, pre_p);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

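/* Illustrative note (not from the original sources): a MUST_NOT_THROW_EXPR
   marks a region that an exception must not escape; it is lowered to

       TRY_CATCH_EXPR <body, EH_FILTER_EXPR <allowed = NULL,
                                             failure = call terminate ()>>

   so anything thrown out of BODY reaches terminate ().  */
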
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;
  tree block = NULL;
  VEC(gimple, heap) *bind_expr_stack = NULL;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p, post_p);
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */

      bind_expr_stack = gimple_bind_expr_stack ();
      if (bind_expr_stack)
        {
          int i;
          for (i = VEC_length (gimple, bind_expr_stack) - 1; i >= 0; i--)
            if ((block = gimple_bind_block (VEC_index (gimple,
                                                       bind_expr_stack,
                                                       i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (*expr_p, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (*expr_p, 0);
          TREE_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GIMPLE.  */
      *expr_p = NULL;
      ret = GS_ALL_DONE;
      break;

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a PARM_DECL or RESULT_DECL passed by invisible
   reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *) item)->uid;
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, p_set, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

/* Lower the body of FNDECL from C++ front-end trees to GENERIC.  */

void
cp_genericize (tree fndecl)
{
  tree t;
  struct pointer_set_t *p_set;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  p_set = pointer_set_create ();
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, p_set, NULL);
  pointer_set_destroy (p_set);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

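/* Illustrative note (not from the original sources): for a class type that
   must be passed by invisible reference, e.g.

       struct S { S (const S &); ~S (); };
       void f (S s) { use (s); }

   the parameter "s" is given reference type here and marked
   DECL_BY_REFERENCE, and cp_genericize_r rewrites each use of "s" through
   convert_from_reference, so later passes only ever see the implicit
   reference, never a by-value copy.  */
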
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr (start1);
      if (arg2)
        start2 = build_fold_addr_expr (start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr (arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr (arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

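/* Illustrative note (not from the original sources): when ARG1 is an array
   of class objects, the code above builds, roughly,

       p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     again:
       fn (p1, p2);          (p2 only when ARG2 is given)
       p1 = p1 + sizeof (element);
       if (p1 != end1) goto again;

   i.e. a pointer-chasing loop that applies FN to every element, with the
   optional second pointer advanced in lockstep.  */
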
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
                             tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  tree type;

  /* Static data members are predetermined as shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = TREE_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}