/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   body.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    gimple_seq_add_stmt (&body, gimple_build_label (label));

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  if (label == NULL_TREE)
    {
      if (bc == bc_break)
        error ("break statement not within loop or switch");
      else
        error ("continue statement not within loop or switch");

      return NULL_TREE;
    }

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
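
/* Illustrative note, not part of the original source: cp_gimplify_expr
   (below) lowers a "break" or "continue" into a plain

       goto <get_bc_label (bc_break or bc_continue)>;

   and finish_bc_block emits the corresponding label just after the
   enclosing construct, but only if get_bc_label marked it TREE_USED.  */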

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
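
/* Illustrative sketch, not part of the original source: for a function
   declared "throw (A, B)" the transformation above yields roughly

     try
       {
         <body>
       }
     catch
       {
         eh_filter (A, B)
           fail: <call built from call_unexpected_node on the exception
                  pointer>;
       }

   i.e. a TRY_CATCH_EXPR whose handler is the EH_FILTER_EXPR listing the
   allowed types, with the "unexpected" call as its failure action.  */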

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
        {
          stmt = gimple_build_goto (get_bc_label (bc_break));
          gimple_set_location (stmt, stmt_locus);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
        {
          if (cond != error_mark_node)
            {
              gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
              stmt = gimple_build_cond (NE_EXPR, cond,
                                        build_int_cst (TREE_TYPE (cond), 0),
                                        gimple_label_label (top),
                                        get_bc_label (bc_break));
              gimple_seq_add_stmt (&exit_seq, stmt);
            }

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = gimple_build_label
                    (create_artificial_label (stmt_locus));
                  stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
                stmt = gimple_build_goto (get_bc_label (bc_continue));
              gimple_set_location (stmt, stmt_locus);
              gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
      else
        {
          stmt = gimple_build_goto (gimple_label_label (top));
          gimple_seq_add_stmt (&exit_seq, stmt);
        }
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}
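
/* Illustrative sketch, not part of the original source: for
   "for (init; cond; incr) body" (COND_IS_FIRST true, with an increment)
   the sequence built above is roughly

         goto entry;
     top:
         <body>                   -- a "continue" jumps to cont
     cont:
         <incr>
     entry:
         if (<cond>) goto top; else goto brk;
     brk:                         -- a "break" jumps here

   For a do-while loop (COND_IS_FIRST false) the initial goto is omitted,
   so the body runs once before the first test.  */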

/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
                                        FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
                                        NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
                                        NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
              SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
                                                gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimplify_seq_add_stmt (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimplify_seq_add_stmt (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
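
/* Illustrative sketch, not part of the original source: the wrapper above
   produces roughly

     try
       {
         <body>
       }
     catch
       {
         <eh_must_not_throw (terminate)>
       }

   i.e. a GIMPLE_TRY whose handler tells the EH machinery that any
   exception escaping BODY must call terminate.  */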

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
                  || (TREE_CODE (op1) == CONSTRUCTOR
                      && CONSTRUCTOR_NELTS (op1) == 0)
                  || (TREE_CODE (op1) == CALL_EXPR
                      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and non-empty
               CONSTRUCTORs get reduced properly, and we leave the return
               slot optimization alone because it isn't a copy (FIXME so it
               shouldn't be represented as one).

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1)
                || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
              *expr_p = op0;
            else if (TREE_CODE (op1) == MEM_REF
                     && TREE_THIS_VOLATILE (op1))
              {
                /* Similarly for volatile MEM_REFs on the RHS.  */
                if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
                  *expr_p = op0;
                else
                  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                    TREE_OPERAND (op1, 0), op0);
              }
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *) item)->uid;
}

struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  VEC (tree, heap) *bind_expr_stack;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack)
        {
          int i;
          for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
                                                     wtd->bind_expr_stack, i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
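
/* Illustrative note, not part of the original source: for a parameter of a
   class type with a nontrivial copy constructor or destructor (so the type
   is TREE_ADDRESSABLE and the argument is passed by invisible reference),
   the loop above switches the PARM_DECL over to its DECL_ARG_TYPE, a
   reference type, and cp_genericize_r then rewrites each use of the
   parameter through convert_from_reference, so "parm" in the body
   effectively becomes "*parm".  */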

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
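
/* Illustrative sketch, not part of the original source: for an array
   operand the code above builds, roughly,

         p1 = &arg1[0]...[0];
         p2 = &arg2[0]...[0];          -- only if ARG2 is given
     lab:
         fn (p1, p2, <default args>);
         p1 = p1 + sizeof (element);
         p2 = p2 + sizeof (element);   -- only if ARG2 is given
         if (p1 != <end of arg1>) goto lab;

   i.e. FN is applied to every element of the array(s) in turn.  */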

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
                             tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* True if the OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  tree type;

  /* Static data members are predetermined as shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
      type = TREE_TYPE (type);
      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}