1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5 Contributed by Jason Merrill <jason@redhat.com>
7 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "coretypes.h"
31 #include "tree-iterator.h"
34 #include "pointer-set.h"
37 /* Local declarations. */
39 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
41 /* Stack of labels which are targets for "break" or "continue",
42 linked through TREE_CHAIN. */
43 static tree bc_label
[2];
45 /* Begin a scope which can be exited by a break or continue statement. BC
48 Just creates a label and pushes it into the current context. */
51 begin_bc_block (enum bc_t bc
)
53 tree label
= create_artificial_label ();
54 TREE_CHAIN (label
) = bc_label
[bc
];
59 /* Finish a scope which can be exited by a break or continue statement.
60 LABEL was returned from the most recent call to begin_bc_block. BODY is
61 an expression for the contents of the scope.
63 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
64 body. Otherwise, just forget the label. */
67 finish_bc_block (enum bc_t bc
, tree label
, gimple_seq body
)
69 gcc_assert (label
== bc_label
[bc
]);
71 if (TREE_USED (label
))
73 gimple_seq_add_stmt (&body
, gimple_build_label (label
));
76 bc_label
[bc
] = TREE_CHAIN (label
);
77 TREE_CHAIN (label
) = NULL_TREE
;
81 /* Get the LABEL_EXPR to represent a break or continue statement
82 in the current block scope. BC indicates which. */
85 get_bc_label (enum bc_t bc
)
87 tree label
= bc_label
[bc
];
89 if (label
== NULL_TREE
)
92 error ("break statement not within loop or switch");
94 error ("continue statement not within loop or switch");
99 /* Mark the label used for finish_bc_block. */
100 TREE_USED (label
) = 1;
104 /* Genericize a TRY_BLOCK. */
107 genericize_try_block (tree
*stmt_p
)
109 tree body
= TRY_STMTS (*stmt_p
);
110 tree cleanup
= TRY_HANDLERS (*stmt_p
);
112 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
115 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
118 genericize_catch_block (tree
*stmt_p
)
120 tree type
= HANDLER_TYPE (*stmt_p
);
121 tree body
= HANDLER_BODY (*stmt_p
);
123 /* FIXME should the caught type go in TREE_TYPE? */
124 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
127 /* A terser interface for building a representation of an exception
131 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
135 /* FIXME should the allowed types go in TREE_TYPE? */
136 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
137 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
139 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
140 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
145 /* Genericize an EH_SPEC_BLOCK by converting it to a
146 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
149 genericize_eh_spec_block (tree
*stmt_p
)
151 tree body
= EH_SPEC_STMTS (*stmt_p
);
152 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
153 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
155 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
158 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
161 gimplify_if_stmt (tree
*stmt_p
)
163 tree stmt
, cond
, then_
, else_
;
164 location_t locus
= EXPR_LOCATION (*stmt_p
);
167 cond
= IF_COND (stmt
);
168 then_
= THEN_CLAUSE (stmt
);
169 else_
= ELSE_CLAUSE (stmt
);
172 then_
= build_empty_stmt ();
174 else_
= build_empty_stmt ();
176 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
178 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
181 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
182 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
183 SET_EXPR_LOCATION (stmt
, locus
);
187 /* Build a generic representation of one of the C loop forms. COND is the
188 loop condition or NULL_TREE. BODY is the (possibly compound) statement
189 controlled by the loop. INCR is the increment expression of a for-loop,
190 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
191 evaluated before the loop body as in while and for loops, or after the
192 loop body as in do-while loops. */
195 gimplify_cp_loop (tree cond
, tree body
, tree incr
, bool cond_is_first
)
197 gimple top
, entry
, stmt
;
198 gimple_seq stmt_list
, body_seq
, incr_seq
, exit_seq
;
199 tree cont_block
, break_block
;
200 location_t stmt_locus
;
202 stmt_locus
= input_location
;
209 break_block
= begin_bc_block (bc_break
);
210 cont_block
= begin_bc_block (bc_continue
);
212 /* If condition is zero don't generate a loop construct. */
213 if (cond
&& integer_zerop (cond
))
218 stmt
= gimple_build_goto (get_bc_label (bc_break
));
219 gimple_set_location (stmt
, stmt_locus
);
220 gimple_seq_add_stmt (&stmt_list
, stmt
);
225 /* If we use a LOOP_EXPR here, we have to feed the whole thing
226 back through the main gimplifier to lower it. Given that we
227 have to gimplify the loop body NOW so that we can resolve
228 break/continue stmts, seems easier to just expand to gotos. */
229 top
= gimple_build_label (create_artificial_label ());
231 /* If we have an exit condition, then we build an IF with gotos either
232 out of the loop, or to the top of it. If there's no exit condition,
233 then we just build a jump back to the top. */
234 if (cond
&& !integer_nonzerop (cond
))
236 if (cond
!= error_mark_node
)
238 gimplify_expr (&cond
, &exit_seq
, NULL
, is_gimple_val
, fb_rvalue
);
239 stmt
= gimple_build_cond (NE_EXPR
, cond
,
240 build_int_cst (TREE_TYPE (cond
), 0),
241 gimple_label_label (top
),
242 get_bc_label (bc_break
));
243 gimple_seq_add_stmt (&exit_seq
, stmt
);
250 entry
= gimple_build_label (create_artificial_label ());
251 stmt
= gimple_build_goto (gimple_label_label (entry
));
254 stmt
= gimple_build_goto (get_bc_label (bc_continue
));
255 gimple_set_location (stmt
, stmt_locus
);
256 gimple_seq_add_stmt (&stmt_list
, stmt
);
261 stmt
= gimple_build_goto (gimple_label_label (top
));
262 gimple_seq_add_stmt (&exit_seq
, stmt
);
266 gimplify_stmt (&body
, &body_seq
);
267 gimplify_stmt (&incr
, &incr_seq
);
269 body_seq
= finish_bc_block (bc_continue
, cont_block
, body_seq
);
271 gimple_seq_add_stmt (&stmt_list
, top
);
272 gimple_seq_add_seq (&stmt_list
, body_seq
);
273 gimple_seq_add_seq (&stmt_list
, incr_seq
);
274 gimple_seq_add_stmt (&stmt_list
, entry
);
275 gimple_seq_add_seq (&stmt_list
, exit_seq
);
277 annotate_all_with_location (stmt_list
, stmt_locus
);
279 return finish_bc_block (bc_break
, break_block
, stmt_list
);
282 /* Gimplify a FOR_STMT node. Move the stuff in the for-init-stmt into the
283 prequeue and hand off to gimplify_cp_loop. */
286 gimplify_for_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
290 if (FOR_INIT_STMT (stmt
))
291 gimplify_and_add (FOR_INIT_STMT (stmt
), pre_p
);
293 gimple_seq_add_seq (pre_p
,
294 gimplify_cp_loop (FOR_COND (stmt
), FOR_BODY (stmt
),
295 FOR_EXPR (stmt
), 1));
299 /* Gimplify a WHILE_STMT node. */
302 gimplify_while_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
305 gimple_seq_add_seq (pre_p
,
306 gimplify_cp_loop (WHILE_COND (stmt
), WHILE_BODY (stmt
),
311 /* Gimplify a DO_STMT node. */
314 gimplify_do_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
317 gimple_seq_add_seq (pre_p
,
318 gimplify_cp_loop (DO_COND (stmt
), DO_BODY (stmt
),
323 /* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR. */
326 gimplify_switch_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
329 tree break_block
, body
, t
;
330 location_t stmt_locus
= input_location
;
331 gimple_seq seq
= NULL
;
333 break_block
= begin_bc_block (bc_break
);
335 body
= SWITCH_STMT_BODY (stmt
);
337 body
= build_empty_stmt ();
339 t
= build3 (SWITCH_EXPR
, SWITCH_STMT_TYPE (stmt
),
340 SWITCH_STMT_COND (stmt
), body
, NULL_TREE
);
341 SET_EXPR_LOCATION (t
, stmt_locus
);
342 gimplify_and_add (t
, &seq
);
344 seq
= finish_bc_block (bc_break
, break_block
, seq
);
345 gimple_seq_add_seq (pre_p
, seq
);
349 /* Hook into the middle of gimplifying an OMP_FOR node. This is required
350 in order to properly gimplify CONTINUE statements. Here we merely
351 manage the continue stack; the rest of the job is performed by the
352 regular gimplifier. */
354 static enum gimplify_status
355 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
357 tree for_stmt
= *expr_p
;
360 gimple_seq seq
= NULL
;
362 /* Protect ourselves from recursion. */
363 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
365 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
367 /* Note that while technically the continue label is enabled too soon
368 here, we should have already diagnosed invalid continues nested within
369 statement expressions within the INIT, COND, or INCR expressions. */
370 cont_block
= begin_bc_block (bc_continue
);
372 gimplify_and_add (for_stmt
, &seq
);
373 stmt
= gimple_seq_last_stmt (seq
);
374 if (gimple_code (stmt
) == GIMPLE_OMP_FOR
)
375 gimple_omp_set_body (stmt
, finish_bc_block (bc_continue
, cont_block
,
376 gimple_omp_body (stmt
)));
378 seq
= finish_bc_block (bc_continue
, cont_block
, seq
);
379 gimple_seq_add_seq (pre_p
, seq
);
381 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
386 /* Gimplify an EXPR_STMT node. */
389 gimplify_expr_stmt (tree
*stmt_p
)
391 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
393 if (stmt
== error_mark_node
)
396 /* Gimplification of a statement expression will nullify the
397 statement if all its side effects are moved to *PRE_P and *POST_P.
399 In this case we will not want to emit the gimplified statement.
400 However, we may still want to emit a warning, so we do that before
402 if (stmt
&& warn_unused_value
)
404 if (!TREE_SIDE_EFFECTS (stmt
))
406 if (!IS_EMPTY_STMT (stmt
)
407 && !VOID_TYPE_P (TREE_TYPE (stmt
))
408 && !TREE_NO_WARNING (stmt
))
409 warning (OPT_Wunused_value
, "statement with no effect");
412 warn_if_unused_value (stmt
, input_location
);
415 if (stmt
== NULL_TREE
)
416 stmt
= alloc_stmt_list ();
421 /* Gimplify initialization from an AGGR_INIT_EXPR. */
424 cp_gimplify_init_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
426 tree from
= TREE_OPERAND (*expr_p
, 1);
427 tree to
= TREE_OPERAND (*expr_p
, 0);
429 tree slot
= NULL_TREE
;
431 /* What about code that pulls out the temp and uses it elsewhere? I
432 think that such code never uses the TARGET_EXPR as an initializer. If
433 I'm wrong, we'll abort because the temp won't have any RTL. In that
434 case, I guess we'll need to replace references somehow. */
435 if (TREE_CODE (from
) == TARGET_EXPR
)
437 slot
= TARGET_EXPR_SLOT (from
);
438 from
= TARGET_EXPR_INITIAL (from
);
441 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
442 inside the TARGET_EXPR. */
445 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
447 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
448 replace the slot operand with our target.
450 Should we add a target parm to gimplify_expr instead? No, as in this
451 case we want to replace the INIT_EXPR. */
452 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
454 gimplify_expr (&to
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
455 AGGR_INIT_EXPR_SLOT (sub
) = to
;
458 /* The initialization is now a side-effect, so the container can
461 TREE_TYPE (from
) = void_type_node
;
463 else if (TREE_CODE (sub
) == INIT_EXPR
464 && TREE_OPERAND (sub
, 0) == slot
)
466 /* An INIT_EXPR under TARGET_EXPR created by build_value_init,
467 will be followed by an AGGR_INIT_EXPR. */
468 gimplify_expr (&to
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
469 TREE_OPERAND (sub
, 0) = to
;
475 t
= TREE_OPERAND (t
, 1);
480 /* Gimplify a MUST_NOT_THROW_EXPR. */
482 static enum gimplify_status
483 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
486 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
487 tree body
= TREE_OPERAND (stmt
, 0);
489 stmt
= build_gimple_eh_filter_tree (body
, NULL_TREE
,
490 build_call_n (terminate_node
, 0));
492 gimplify_and_add (stmt
, pre_p
);
503 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
506 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
508 int saved_stmts_are_full_exprs_p
= 0;
509 enum tree_code code
= TREE_CODE (*expr_p
);
510 enum gimplify_status ret
;
512 VEC(gimple
, heap
) *bind_expr_stack
= NULL
;
514 if (STATEMENT_CODE_P (code
))
516 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
517 current_stmt_tree ()->stmts_are_full_exprs_p
518 = STMT_IS_FULL_EXPR_P (*expr_p
);
524 *expr_p
= cplus_expand_constant (*expr_p
);
529 simplify_aggr_init_expr (expr_p
);
534 /* FIXME communicate throw type to back end, probably by moving
535 THROW_EXPR into ../tree.def. */
536 *expr_p
= TREE_OPERAND (*expr_p
, 0);
540 case MUST_NOT_THROW_EXPR
:
541 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
544 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
545 LHS of an assignment might also be involved in the RHS, as in bug
548 cp_gimplify_init_expr (expr_p
, pre_p
, post_p
);
552 case EMPTY_CLASS_EXPR
:
553 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
554 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
559 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
564 genericize_try_block (expr_p
);
569 genericize_catch_block (expr_p
);
574 genericize_eh_spec_block (expr_p
);
579 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
580 BLOCK, and append an IMPORTED_DECL to its
581 BLOCK_VARS chained list. */
583 bind_expr_stack
= gimple_bind_expr_stack ();
587 for (i
= VEC_length (gimple
, bind_expr_stack
) - 1; i
>= 0; i
--)
588 if ((block
= gimple_bind_block (VEC_index (gimple
,
595 tree using_directive
;
596 gcc_assert (TREE_OPERAND (*expr_p
,0)
597 && NAMESPACE_DECL_CHECK (TREE_OPERAND (*expr_p
, 0)));
599 using_directive
= make_node (IMPORTED_DECL
);
600 TREE_TYPE (using_directive
) = void_type_node
;
602 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
603 = TREE_OPERAND (*expr_p
, 0);
604 DECL_NAME (using_directive
)
605 = DECL_NAME (TREE_OPERAND (*expr_p
, 0));
606 TREE_CHAIN (using_directive
) = BLOCK_VARS (block
);
607 BLOCK_VARS (block
) = using_directive
;
609 /* The USING_STMT won't appear in GIMPLE. */
615 gimplify_if_stmt (expr_p
);
620 gimplify_for_stmt (expr_p
, pre_p
);
625 gimplify_while_stmt (expr_p
, pre_p
);
630 gimplify_do_stmt (expr_p
, pre_p
);
635 gimplify_switch_stmt (expr_p
, pre_p
);
640 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
644 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_CONTINUE
, NOT_TAKEN
));
645 gimple_seq_add_stmt (pre_p
, gimple_build_goto (get_bc_label (bc_continue
)));
651 gimple_seq_add_stmt (pre_p
, gimple_build_goto (get_bc_label (bc_break
)));
657 gimplify_expr_stmt (expr_p
);
661 case UNARY_PLUS_EXPR
:
663 tree arg
= TREE_OPERAND (*expr_p
, 0);
664 tree type
= TREE_TYPE (*expr_p
);
665 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
672 ret
= c_gimplify_expr (expr_p
, pre_p
, post_p
);
676 /* Restore saved state. */
677 if (STATEMENT_CODE_P (code
))
678 current_stmt_tree ()->stmts_are_full_exprs_p
679 = saved_stmts_are_full_exprs_p
;
685 is_invisiref_parm (const_tree t
)
687 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
688 && DECL_BY_REFERENCE (t
));
691 /* Return true if the uid in both int tree maps are equal. */
694 cxx_int_tree_map_eq (const void *va
, const void *vb
)
696 const struct cxx_int_tree_map
*a
= (const struct cxx_int_tree_map
*) va
;
697 const struct cxx_int_tree_map
*b
= (const struct cxx_int_tree_map
*) vb
;
698 return (a
->uid
== b
->uid
);
701 /* Hash a UID in a cxx_int_tree_map. */
704 cxx_int_tree_map_hash (const void *item
)
706 return ((const struct cxx_int_tree_map
*)item
)->uid
;
709 /* Perform any pre-gimplification lowering of C++ front end trees to
713 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
716 struct pointer_set_t
*p_set
= (struct pointer_set_t
*) data
;
718 if (is_invisiref_parm (stmt
)
719 /* Don't dereference parms in a thunk, pass the references through. */
720 && !(DECL_THUNK_P (current_function_decl
)
721 && TREE_CODE (stmt
) == PARM_DECL
))
723 *stmt_p
= convert_from_reference (stmt
);
728 /* Map block scope extern declarations to visible declarations with the
729 same name and type in outer scopes if any. */
730 if (cp_function_chain
->extern_decl_map
731 && (TREE_CODE (stmt
) == FUNCTION_DECL
|| TREE_CODE (stmt
) == VAR_DECL
)
732 && DECL_EXTERNAL (stmt
))
734 struct cxx_int_tree_map
*h
, in
;
735 in
.uid
= DECL_UID (stmt
);
736 h
= (struct cxx_int_tree_map
*)
737 htab_find_with_hash (cp_function_chain
->extern_decl_map
,
747 /* Other than invisiref parms, don't walk the same tree twice. */
748 if (pointer_set_contains (p_set
, stmt
))
754 if (TREE_CODE (stmt
) == ADDR_EXPR
755 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
757 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
760 else if (TREE_CODE (stmt
) == RETURN_EXPR
761 && TREE_OPERAND (stmt
, 0)
762 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
763 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
765 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
766 switch (OMP_CLAUSE_CODE (stmt
))
768 case OMP_CLAUSE_LASTPRIVATE
:
769 /* Don't dereference an invisiref in OpenMP clauses. */
770 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
773 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
774 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
775 cp_genericize_r
, p_set
, NULL
);
778 case OMP_CLAUSE_PRIVATE
:
779 case OMP_CLAUSE_SHARED
:
780 case OMP_CLAUSE_FIRSTPRIVATE
:
781 case OMP_CLAUSE_COPYIN
:
782 case OMP_CLAUSE_COPYPRIVATE
:
783 /* Don't dereference an invisiref in OpenMP clauses. */
784 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
787 case OMP_CLAUSE_REDUCTION
:
788 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)));
793 else if (IS_TYPE_OR_DECL_P (stmt
))
796 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
797 to lower this construct before scanning it, so we need to lower these
798 before doing anything else. */
799 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
800 *stmt_p
= build2 (CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
804 CLEANUP_EXPR (stmt
));
806 /* COND_EXPR might have incompatible types in branches if one or both
807 arms are bitfields. Fix it up now. */
808 else if (TREE_CODE (stmt
) == COND_EXPR
)
811 = (TREE_OPERAND (stmt
, 1)
812 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
815 = (TREE_OPERAND (stmt
, 2)
816 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
819 && !useless_type_conversion_p (TREE_TYPE (stmt
),
820 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
822 TREE_OPERAND (stmt
, 1)
823 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
824 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
828 && !useless_type_conversion_p (TREE_TYPE (stmt
),
829 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
831 TREE_OPERAND (stmt
, 2)
832 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
833 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
838 pointer_set_insert (p_set
, *stmt_p
);
844 cp_genericize (tree fndecl
)
847 struct pointer_set_t
*p_set
;
849 /* Fix up the types of parms passed by invisible reference. */
850 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= TREE_CHAIN (t
))
851 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
853 /* If a function's arguments are copied to create a thunk,
854 then DECL_BY_REFERENCE will be set -- but the type of the
855 argument will be a pointer type, so we will never get
857 gcc_assert (!DECL_BY_REFERENCE (t
));
858 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
859 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
860 DECL_BY_REFERENCE (t
) = 1;
861 TREE_ADDRESSABLE (t
) = 0;
865 /* Do the same for the return value. */
866 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
868 t
= DECL_RESULT (fndecl
);
869 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
870 DECL_BY_REFERENCE (t
) = 1;
871 TREE_ADDRESSABLE (t
) = 0;
875 /* If we're a clone, the body is already GIMPLE. */
876 if (DECL_CLONED_FUNCTION_P (fndecl
))
879 /* We do want to see every occurrence of the parms, so we can't just use
880 walk_tree's hash functionality. */
881 p_set
= pointer_set_create ();
882 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_genericize_r
, p_set
, NULL
);
883 pointer_set_destroy (p_set
);
885 /* Do everything else. */
886 c_genericize (fndecl
);
888 gcc_assert (bc_label
[bc_break
] == NULL
);
889 gcc_assert (bc_label
[bc_continue
] == NULL
);
892 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
893 NULL if there is in fact nothing to do. ARG2 may be null if FN
894 actually only takes one argument. */
897 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
899 tree defparm
, parm
, t
;
907 nargs
= list_length (DECL_ARGUMENTS (fn
));
908 argarray
= (tree
*) alloca (nargs
* sizeof (tree
));
910 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
912 defparm
= TREE_CHAIN (defparm
);
914 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
916 tree inner_type
= TREE_TYPE (arg1
);
917 tree start1
, end1
, p1
;
918 tree start2
= NULL
, p2
= NULL
;
919 tree ret
= NULL
, lab
;
925 inner_type
= TREE_TYPE (inner_type
);
926 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
927 size_zero_node
, NULL
, NULL
);
929 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
930 size_zero_node
, NULL
, NULL
);
932 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
933 start1
= build_fold_addr_expr (start1
);
935 start2
= build_fold_addr_expr (start2
);
937 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
938 end1
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start1
), start1
, end1
);
940 p1
= create_tmp_var (TREE_TYPE (start1
), NULL
);
941 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
942 append_to_statement_list (t
, &ret
);
946 p2
= create_tmp_var (TREE_TYPE (start2
), NULL
);
947 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
948 append_to_statement_list (t
, &ret
);
951 lab
= create_artificial_label ();
952 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
953 append_to_statement_list (t
, &ret
);
958 /* Handle default arguments. */
959 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
960 parm
= TREE_CHAIN (parm
), i
++)
961 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
962 TREE_PURPOSE (parm
), fn
, i
);
963 t
= build_call_a (fn
, i
, argarray
);
964 t
= fold_convert (void_type_node
, t
);
965 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
966 append_to_statement_list (t
, &ret
);
968 t
= TYPE_SIZE_UNIT (inner_type
);
969 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (p1
), p1
, t
);
970 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
971 append_to_statement_list (t
, &ret
);
975 t
= TYPE_SIZE_UNIT (inner_type
);
976 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (p2
), p2
, t
);
977 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
978 append_to_statement_list (t
, &ret
);
981 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
982 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
983 append_to_statement_list (t
, &ret
);
989 argarray
[i
++] = build_fold_addr_expr (arg1
);
991 argarray
[i
++] = build_fold_addr_expr (arg2
);
992 /* Handle default arguments. */
993 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
994 parm
= TREE_CHAIN (parm
), i
++)
995 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
998 t
= build_call_a (fn
, i
, argarray
);
999 t
= fold_convert (void_type_node
, t
);
1000 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1004 /* Return code to initialize DECL with its default constructor, or
1005 NULL if there's nothing to do. */
1008 cxx_omp_clause_default_ctor (tree clause
, tree decl
,
1009 tree outer ATTRIBUTE_UNUSED
)
1011 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1015 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1020 /* Return code to initialize DST with a copy constructor from SRC. */
1023 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1025 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1029 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1031 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1036 /* Similarly, except use an assignment operator instead. */
1039 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1041 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1045 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1047 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1052 /* Return code to destroy DECL. */
1055 cxx_omp_clause_dtor (tree clause
, tree decl
)
1057 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1061 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1066 /* True if OpenMP should privatize what this DECL points to rather
1067 than the DECL itself. */
1070 cxx_omp_privatize_by_reference (const_tree decl
)
1072 return is_invisiref_parm (decl
);
1075 /* True if OpenMP sharing attribute of DECL is predetermined. */
1077 enum omp_clause_default_kind
1078 cxx_omp_predetermined_sharing (tree decl
)
1082 /* Static data members are predetermined as shared. */
1083 if (TREE_STATIC (decl
))
1085 tree ctx
= CP_DECL_CONTEXT (decl
);
1086 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1087 return OMP_CLAUSE_DEFAULT_SHARED
;
1090 type
= TREE_TYPE (decl
);
1091 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1093 if (!is_invisiref_parm (decl
))
1094 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1095 type
= TREE_TYPE (type
);
1097 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1099 /* NVR doesn't preserve const qualification of the
1101 tree outer
= outer_curly_brace_block (current_function_decl
);
1105 for (var
= BLOCK_VARS (outer
); var
; var
= TREE_CHAIN (var
))
1106 if (DECL_NAME (decl
) == DECL_NAME (var
)
1107 && (TYPE_MAIN_VARIANT (type
)
1108 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1110 if (TYPE_READONLY (TREE_TYPE (var
)))
1111 type
= TREE_TYPE (var
);
1117 if (type
== error_mark_node
)
1118 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1120 /* Variables with const-qualified type having no mutable member
1121 are predetermined shared. */
1122 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1123 return OMP_CLAUSE_DEFAULT_SHARED
;
1125 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1128 /* Finalize an implicitly determined clause. */
1131 cxx_omp_finish_clause (tree c
)
1133 tree decl
, inner_type
;
1134 bool make_shared
= false;
1136 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1139 decl
= OMP_CLAUSE_DECL (c
);
1140 decl
= require_complete_type (decl
);
1141 inner_type
= TREE_TYPE (decl
);
1142 if (decl
== error_mark_node
)
1144 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1146 if (is_invisiref_parm (decl
))
1147 inner_type
= TREE_TYPE (inner_type
);
1150 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1156 /* We're interested in the base element, not arrays. */
1157 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1158 inner_type
= TREE_TYPE (inner_type
);
1160 /* Check for special function availability by building a call to one.
1161 Save the results, because later we won't be in the right context
1162 for making these queries. */
1164 && CLASS_TYPE_P (inner_type
)
1165 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false))
1169 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;