/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-common.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  TREE_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   body.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    {
      gimple_seq_add_stmt (&body, gimple_build_label (label));
    }

  bc_label[bc] = TREE_CHAIN (label);
  TREE_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  if (label == NULL_TREE)
    {
      if (bc == bc_break)
	error ("break statement not within loop or switch");
      else
	error ("continue statement not within loop or switch");

      return NULL_TREE;
    }

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

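/* For reference only (this comment is not in the original sources): the
   helpers above are used in pairs by the loop and switch gimplifiers below,
   roughly as

     tree blk = begin_bc_block (bc_break);
     ... each "break" becomes gimple_build_goto (get_bc_label (bc_break)) ...
     seq = finish_bc_block (bc_break, blk, seq);

   get_bc_label marks the label TREE_USED, so finish_bc_block emits the
   label only when some break or continue actually targets it.  */
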
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
	{
	  stmt = gimple_build_goto (get_bc_label (bc_break));
	  gimple_set_location (stmt, stmt_locus);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
	 back through the main gimplifier to lower it.  Given that we
	 have to gimplify the loop body NOW so that we can resolve
	 break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
	 out of the loop, or to the top of it.  If there's no exit condition,
	 then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
	{
	  if (cond != error_mark_node)
	    {
	      gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
	      stmt = gimple_build_cond (NE_EXPR, cond,
					build_int_cst (TREE_TYPE (cond), 0),
					gimple_label_label (top),
					get_bc_label (bc_break));
	      gimple_seq_add_stmt (&exit_seq, stmt);
	    }

	  if (cond_is_first)
	    {
	      if (incr)
		{
		  entry = gimple_build_label
		    (create_artificial_label (stmt_locus));
		  stmt = gimple_build_goto (gimple_label_label (entry));
		}
	      else
		stmt = gimple_build_goto (get_bc_label (bc_continue));
	      gimple_set_location (stmt, stmt_locus);
	      gimple_seq_add_stmt (&stmt_list, stmt);
	    }
	}
      else
	{
	  stmt = gimple_build_goto (gimple_label_label (top));
	  gimple_seq_add_stmt (&exit_seq, stmt);
	}
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}

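/* A sketch of the sequence built above, for reference only (this comment is
   not in the original sources).  With a non-constant condition the layout is
   roughly

       goto entry_or_continue_label;   (only when COND_IS_FIRST)
     top:
       <body>
     continue_label:                   (emitted only if actually used)
       <incr>                          (for-loops)
     entry:                            (only when INCR is present)
       if (cond != 0) goto top; else goto break_label;
     break_label:                      (emitted only if actually used)

   so break and continue statements are already resolved to plain gotos and
   no LOOP_EXPR is needed.  */
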
/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
		      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
					FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
		      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
					NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
		      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
					NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
	      SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
						gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;
  tree slot = NULL_TREE;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    {
      slot = TARGET_EXPR_SLOT (from);
      from = TARGET_EXPR_INITIAL (from);
    }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  gimplify_expr (&to, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);

  stmt = build_gimple_eh_filter_tree (body, NULL_TREE,
				      build_call_n (terminate_node, 0));

  gimplify_and_add (stmt, pre_p);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  VEC_INIT_EXPR_INIT (*expr_p), false, 1,
				  tf_warning_or_error);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p, post_p);
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a parameter or result DECL that is passed by
   invisible reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *) item)->uid;
}

struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  VEC (tree, heap) *bind_expr_stack;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non NULL
	 BLOCK, and append an IMPORTED_DECL to its
	 BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack, i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  TREE_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == MODIFY_EXPR
	   && (integer_zerop (cp_expr_size (TREE_OPERAND (stmt, 0)))
	       || integer_zerop (cp_expr_size (TREE_OPERAND (stmt, 1)))))
    {
      *stmt_p = build2 (COMPOUND_EXPR, TREE_TYPE (stmt),
			TREE_OPERAND (stmt, 0),
			TREE_OPERAND (stmt, 1));
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

/* Lower the body of FNDECL from C++ front end trees to GENERIC: fix up
   parameters and the return value passed by invisible reference, walk the
   body with cp_genericize_r, then hand the rest off to c_genericize.  */

void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = TYPE_SIZE_UNIT (inner_type);
	  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

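/* For reference only (this comment is not in the original sources): when
   ARG1 has array type, the statement list returned above amounts to

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];                        (if ARG2 is given)
   lab:
     fn (p1, p2, <converted default arguments>);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;

   i.e. FN is applied to every element of the (possibly multidimensional)
   array; for non-array types FN is simply applied to &ARG1 (and &ARG2).  */
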
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
			     tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  tree type;

  /* Static data members are predetermined as shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = TREE_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}