/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-common.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"
/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  TREE_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   body.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    {
      gimple_seq_add_stmt (&body, gimple_build_label (label));
    }

  bc_label[bc] = TREE_CHAIN (label);
  TREE_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  if (label == NULL_TREE)
    {
      if (bc == bc_break)
        error ("break statement not within loop or switch");
      else
        error ("continue statement not within loop or switch");

      return NULL_TREE;
    }

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
        {
          stmt = gimple_build_goto (get_bc_label (bc_break));
          gimple_set_location (stmt, stmt_locus);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
        {
          if (cond != error_mark_node)
            {
              gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
              stmt = gimple_build_cond (NE_EXPR, cond,
                                        build_int_cst (TREE_TYPE (cond), 0),
                                        gimple_label_label (top),
                                        get_bc_label (bc_break));
              gimple_seq_add_stmt (&exit_seq, stmt);
            }

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = gimple_build_label
                    (create_artificial_label (stmt_locus));
                  stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
                stmt = gimple_build_goto (get_bc_label (bc_continue));
              gimple_set_location (stmt, stmt_locus);
              gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
      else
        {
          stmt = gimple_build_goto (gimple_label_label (top));
          gimple_seq_add_stmt (&exit_seq, stmt);
        }
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}

/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
                                        FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
                                        NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
                                        NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
              SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
                                                gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          gimplify_expr (&to, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);

  stmt = build_gimple_eh_filter_tree (body, NULL_TREE,
                                      build_call_n (terminate_node, 0));

  gimplify_and_add (stmt, pre_p);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  VEC_INIT_EXPR_INIT (*expr_p), false, 1,
                                  tf_warning_or_error);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p, post_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

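/* Return true if T is a parameter or result that is passed by invisible
   reference, i.e. a PARM_DECL or RESULT_DECL with DECL_BY_REFERENCE set.  */
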
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *)item)->uid;
}

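/* Data shared between the walk callbacks of cp_genericize_r: the set of
   trees already visited and the stack of enclosing BIND_EXPRs.  */
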
struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  VEC (tree, heap) *bind_expr_stack;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack)
        {
          int i;
          for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
                                                     wtd->bind_expr_stack, i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          TREE_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

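/* Lower the body of FNDECL from C++ front end trees to GENERIC, after
   adjusting parameters and the return value that are passed by invisible
   reference.  */
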
void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
                             tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* True if the OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  tree type;

  /* Static data members are predetermined as shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = TREE_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}