1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "c-family/c-common.h"
29 #include "tree-iterator.h"
32 #include "pointer-set.h"
34 #include "splay-tree.h"
36 /* Forward declarations. */
38 static tree
cp_genericize_r (tree
*, int *, void *);
39 static void cp_genericize_tree (tree
*);
41 /* Local declarations. */
43 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
45 /* Stack of labels which are targets for "break" or "continue",
46 linked through TREE_CHAIN. */
47 static tree bc_label
[2];
49 /* Begin a scope which can be exited by a break or continue statement. BC
52 Just creates a label with location LOCATION and pushes it into the current
56 begin_bc_block (enum bc_t bc
, location_t location
)
58 tree label
= create_artificial_label (location
);
59 DECL_CHAIN (label
) = bc_label
[bc
];
64 /* Finish a scope which can be exited by a break or continue statement.
65 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
66 an expression for the contents of the scope.
68 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
69 BLOCK. Otherwise, just forget the label. */
72 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
74 gcc_assert (label
== bc_label
[bc
]);
76 if (TREE_USED (label
))
77 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
80 bc_label
[bc
] = DECL_CHAIN (label
);
81 DECL_CHAIN (label
) = NULL_TREE
;
84 /* Get the LABEL_EXPR to represent a break or continue statement
85 in the current block scope. BC indicates which. */
88 get_bc_label (enum bc_t bc
)
90 tree label
= bc_label
[bc
];
92 /* Mark the label used for finish_bc_block. */
93 TREE_USED (label
) = 1;
97 /* Genericize a TRY_BLOCK. */
100 genericize_try_block (tree
*stmt_p
)
102 tree body
= TRY_STMTS (*stmt_p
);
103 tree cleanup
= TRY_HANDLERS (*stmt_p
);
105 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
108 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
111 genericize_catch_block (tree
*stmt_p
)
113 tree type
= HANDLER_TYPE (*stmt_p
);
114 tree body
= HANDLER_BODY (*stmt_p
);
116 /* FIXME should the caught type go in TREE_TYPE? */
117 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
120 /* A terser interface for building a representation of an exception
124 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
128 /* FIXME should the allowed types go in TREE_TYPE? */
129 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
130 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
132 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
133 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
138 /* Genericize an EH_SPEC_BLOCK by converting it to a
139 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
142 genericize_eh_spec_block (tree
*stmt_p
)
144 tree body
= EH_SPEC_STMTS (*stmt_p
);
145 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
146 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
148 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
149 TREE_NO_WARNING (*stmt_p
) = true;
150 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
153 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
156 genericize_if_stmt (tree
*stmt_p
)
158 tree stmt
, cond
, then_
, else_
;
159 location_t locus
= EXPR_LOCATION (*stmt_p
);
162 cond
= IF_COND (stmt
);
163 then_
= THEN_CLAUSE (stmt
);
164 else_
= ELSE_CLAUSE (stmt
);
167 then_
= build_empty_stmt (locus
);
169 else_
= build_empty_stmt (locus
);
171 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
173 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
176 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
177 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
178 SET_EXPR_LOCATION (stmt
, locus
);
182 /* Build a generic representation of one of the C loop forms. COND is the
183 loop condition or NULL_TREE. BODY is the (possibly compound) statement
184 controlled by the loop. INCR is the increment expression of a for-loop,
185 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
186 evaluated before the loop body as in while and for loops, or after the
187 loop body as in do-while loops. */
190 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
191 tree incr
, bool cond_is_first
, int *walk_subtrees
,
195 tree entry
= NULL
, exit
= NULL
, t
;
196 tree stmt_list
= NULL
;
198 blab
= begin_bc_block (bc_break
, start_locus
);
199 clab
= begin_bc_block (bc_continue
, start_locus
);
201 if (incr
&& EXPR_P (incr
))
202 SET_EXPR_LOCATION (incr
, start_locus
);
204 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
205 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
206 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
209 /* If condition is zero don't generate a loop construct. */
210 if (cond
&& integer_zerop (cond
))
214 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
215 get_bc_label (bc_break
));
216 append_to_statement_list (t
, &stmt_list
);
221 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
222 tree top
= build1 (LABEL_EXPR
, void_type_node
,
223 create_artificial_label (start_locus
));
225 /* If we have an exit condition, then we build an IF with gotos either
226 out of the loop, or to the top of it. If there's no exit condition,
227 then we just build a jump back to the top. */
228 exit
= build1 (GOTO_EXPR
, void_type_node
, LABEL_EXPR_LABEL (top
));
230 if (cond
&& !integer_nonzerop (cond
))
232 /* Canonicalize the loop condition to the end. This means
233 generating a branch to the loop condition. Reuse the
234 continue label, if possible. */
239 entry
= build1 (LABEL_EXPR
, void_type_node
,
240 create_artificial_label (start_locus
));
241 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
242 LABEL_EXPR_LABEL (entry
));
245 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
246 get_bc_label (bc_continue
));
247 append_to_statement_list (t
, &stmt_list
);
250 t
= build1 (GOTO_EXPR
, void_type_node
, get_bc_label (bc_break
));
251 exit
= fold_build3_loc (start_locus
,
252 COND_EXPR
, void_type_node
, cond
, exit
, t
);
255 append_to_statement_list (top
, &stmt_list
);
258 append_to_statement_list (body
, &stmt_list
);
259 finish_bc_block (&stmt_list
, bc_continue
, clab
);
260 append_to_statement_list (incr
, &stmt_list
);
261 append_to_statement_list (entry
, &stmt_list
);
262 append_to_statement_list (exit
, &stmt_list
);
263 finish_bc_block (&stmt_list
, bc_break
, blab
);
265 if (stmt_list
== NULL_TREE
)
266 stmt_list
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
271 /* Genericize a FOR_STMT node *STMT_P. */
274 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
279 tree init
= FOR_INIT_STMT (stmt
);
283 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
284 append_to_statement_list (init
, &expr
);
287 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
288 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
289 append_to_statement_list (loop
, &expr
);
293 /* Genericize a WHILE_STMT node *STMT_P. */
296 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
299 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
300 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
303 /* Genericize a DO_STMT node *STMT_P. */
306 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
309 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
310 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
316 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
319 tree break_block
, body
, cond
, type
;
320 location_t stmt_locus
= EXPR_LOCATION (stmt
);
322 break_block
= begin_bc_block (bc_break
, stmt_locus
);
324 body
= SWITCH_STMT_BODY (stmt
);
326 body
= build_empty_stmt (stmt_locus
);
327 cond
= SWITCH_STMT_COND (stmt
);
328 type
= SWITCH_STMT_TYPE (stmt
);
330 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
331 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
332 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
335 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
336 finish_bc_block (stmt_p
, bc_break
, break_block
);
339 /* Genericize a CONTINUE_STMT node *STMT_P. */
342 genericize_continue_stmt (tree
*stmt_p
)
344 tree stmt_list
= NULL
;
345 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
346 tree label
= get_bc_label (bc_continue
);
347 location_t location
= EXPR_LOCATION (*stmt_p
);
348 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
349 append_to_statement_list (pred
, &stmt_list
);
350 append_to_statement_list (jump
, &stmt_list
);
354 /* Genericize a BREAK_STMT node *STMT_P. */
357 genericize_break_stmt (tree
*stmt_p
)
359 tree label
= get_bc_label (bc_break
);
360 location_t location
= EXPR_LOCATION (*stmt_p
);
361 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
364 /* Genericize a OMP_FOR node *STMT_P. */
367 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
370 location_t locus
= EXPR_LOCATION (stmt
);
371 tree clab
= begin_bc_block (bc_continue
, locus
);
373 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
374 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
375 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
376 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
377 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
378 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
381 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
384 /* Hook into the middle of gimplifying an OMP_FOR node. */
386 static enum gimplify_status
387 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
389 tree for_stmt
= *expr_p
;
390 gimple_seq seq
= NULL
;
392 /* Protect ourselves from recursion. */
393 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
395 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
397 gimplify_and_add (for_stmt
, &seq
);
398 gimple_seq_add_seq (pre_p
, seq
);
400 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
405 /* Gimplify an EXPR_STMT node. */
408 gimplify_expr_stmt (tree
*stmt_p
)
410 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
412 if (stmt
== error_mark_node
)
415 /* Gimplification of a statement expression will nullify the
416 statement if all its side effects are moved to *PRE_P and *POST_P.
418 In this case we will not want to emit the gimplified statement.
419 However, we may still want to emit a warning, so we do that before
421 if (stmt
&& warn_unused_value
)
423 if (!TREE_SIDE_EFFECTS (stmt
))
425 if (!IS_EMPTY_STMT (stmt
)
426 && !VOID_TYPE_P (TREE_TYPE (stmt
))
427 && !TREE_NO_WARNING (stmt
))
428 warning (OPT_Wunused_value
, "statement with no effect");
431 warn_if_unused_value (stmt
, input_location
);
434 if (stmt
== NULL_TREE
)
435 stmt
= alloc_stmt_list ();
440 /* Gimplify initialization from an AGGR_INIT_EXPR. */
443 cp_gimplify_init_expr (tree
*expr_p
)
445 tree from
= TREE_OPERAND (*expr_p
, 1);
446 tree to
= TREE_OPERAND (*expr_p
, 0);
449 /* What about code that pulls out the temp and uses it elsewhere? I
450 think that such code never uses the TARGET_EXPR as an initializer. If
451 I'm wrong, we'll abort because the temp won't have any RTL. In that
452 case, I guess we'll need to replace references somehow. */
453 if (TREE_CODE (from
) == TARGET_EXPR
)
454 from
= TARGET_EXPR_INITIAL (from
);
456 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
457 inside the TARGET_EXPR. */
460 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
462 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
463 replace the slot operand with our target.
465 Should we add a target parm to gimplify_expr instead? No, as in this
466 case we want to replace the INIT_EXPR. */
467 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
468 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
470 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
471 AGGR_INIT_EXPR_SLOT (sub
) = to
;
473 VEC_INIT_EXPR_SLOT (sub
) = to
;
476 /* The initialization is now a side-effect, so the container can
479 TREE_TYPE (from
) = void_type_node
;
485 t
= TREE_OPERAND (t
, 1);
490 /* Gimplify a MUST_NOT_THROW_EXPR. */
492 static enum gimplify_status
493 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
496 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
497 tree body
= TREE_OPERAND (stmt
, 0);
498 gimple_seq try_
= NULL
;
499 gimple_seq catch_
= NULL
;
502 gimplify_and_add (body
, &try_
);
503 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
504 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
505 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
507 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
518 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
521 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
523 int saved_stmts_are_full_exprs_p
= 0;
524 enum tree_code code
= TREE_CODE (*expr_p
);
525 enum gimplify_status ret
;
527 if (STATEMENT_CODE_P (code
))
529 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
530 current_stmt_tree ()->stmts_are_full_exprs_p
531 = STMT_IS_FULL_EXPR_P (*expr_p
);
537 *expr_p
= cplus_expand_constant (*expr_p
);
542 simplify_aggr_init_expr (expr_p
);
548 location_t loc
= input_location
;
549 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
550 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
551 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
552 input_location
= EXPR_LOCATION (*expr_p
);
553 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
554 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
556 tf_warning_or_error
);
557 cp_genericize_tree (expr_p
);
559 input_location
= loc
;
564 /* FIXME communicate throw type to back end, probably by moving
565 THROW_EXPR into ../tree.def. */
566 *expr_p
= TREE_OPERAND (*expr_p
, 0);
570 case MUST_NOT_THROW_EXPR
:
571 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
574 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
575 LHS of an assignment might also be involved in the RHS, as in bug
578 cp_gimplify_init_expr (expr_p
);
579 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
581 /* Otherwise fall through. */
584 /* If the back end isn't clever enough to know that the lhs and rhs
585 types are the same, add an explicit conversion. */
586 tree op0
= TREE_OPERAND (*expr_p
, 0);
587 tree op1
= TREE_OPERAND (*expr_p
, 1);
589 if (!error_operand_p (op0
)
590 && !error_operand_p (op1
)
591 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
592 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
593 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
594 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
595 TREE_TYPE (op0
), op1
);
597 else if ((is_gimple_lvalue (op1
) || INDIRECT_REF_P (op1
)
598 || (TREE_CODE (op1
) == CONSTRUCTOR
599 && CONSTRUCTOR_NELTS (op1
) == 0
600 && !TREE_CLOBBER_P (op1
))
601 || (TREE_CODE (op1
) == CALL_EXPR
602 && !CALL_EXPR_RETURN_SLOT_OPT (op1
)))
603 && is_really_empty_class (TREE_TYPE (op0
)))
605 /* Remove any copies of empty classes. We check that the RHS
606 has a simple form so that TARGET_EXPRs and non-empty
607 CONSTRUCTORs get reduced properly, and we leave the return
608 slot optimization alone because it isn't a copy (FIXME so it
609 shouldn't be represented as one).
611 Also drop volatile variables on the RHS to avoid infinite
612 recursion from gimplify_expr trying to load the value. */
613 if (!TREE_SIDE_EFFECTS (op1
)
614 || (DECL_P (op1
) && TREE_THIS_VOLATILE (op1
)))
616 else if (TREE_CODE (op1
) == MEM_REF
617 && TREE_THIS_VOLATILE (op1
))
619 /* Similarly for volatile MEM_REFs on the RHS. */
620 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1
, 0)))
623 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
624 TREE_OPERAND (op1
, 0), op0
);
627 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
634 case EMPTY_CLASS_EXPR
:
635 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
636 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
641 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
646 genericize_try_block (expr_p
);
651 genericize_catch_block (expr_p
);
656 genericize_eh_spec_block (expr_p
);
674 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
678 gimplify_expr_stmt (expr_p
);
682 case UNARY_PLUS_EXPR
:
684 tree arg
= TREE_OPERAND (*expr_p
, 0);
685 tree type
= TREE_TYPE (*expr_p
);
686 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
693 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
697 /* Restore saved state. */
698 if (STATEMENT_CODE_P (code
))
699 current_stmt_tree ()->stmts_are_full_exprs_p
700 = saved_stmts_are_full_exprs_p
;
706 is_invisiref_parm (const_tree t
)
708 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
709 && DECL_BY_REFERENCE (t
));
712 /* Return true if the uid in both int tree maps are equal. */
715 cxx_int_tree_map_eq (const void *va
, const void *vb
)
717 const struct cxx_int_tree_map
*a
= (const struct cxx_int_tree_map
*) va
;
718 const struct cxx_int_tree_map
*b
= (const struct cxx_int_tree_map
*) vb
;
719 return (a
->uid
== b
->uid
);
722 /* Hash a UID in a cxx_int_tree_map. */
725 cxx_int_tree_map_hash (const void *item
)
727 return ((const struct cxx_int_tree_map
*)item
)->uid
;
730 /* A stable comparison routine for use with splay trees and DECLs. */
733 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
738 return DECL_UID (a
) - DECL_UID (b
);
741 /* OpenMP context during genericization. */
743 struct cp_genericize_omp_taskreg
747 struct cp_genericize_omp_taskreg
*outer
;
748 splay_tree variables
;
751 /* Return true if genericization should try to determine if
752 DECL is firstprivate or shared within task regions. */
755 omp_var_to_track (tree decl
)
757 tree type
= TREE_TYPE (decl
);
758 if (is_invisiref_parm (decl
))
759 type
= TREE_TYPE (type
);
760 while (TREE_CODE (type
) == ARRAY_TYPE
)
761 type
= TREE_TYPE (type
);
762 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
764 if (VAR_P (decl
) && DECL_THREAD_LOCAL_P (decl
))
766 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
771 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
774 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
776 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
777 (splay_tree_key
) decl
);
780 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
782 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
783 if (!omp_ctx
->default_shared
)
785 struct cp_genericize_omp_taskreg
*octx
;
787 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
789 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
790 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
792 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
795 if (octx
->is_parallel
)
799 && (TREE_CODE (decl
) == PARM_DECL
800 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
801 && DECL_CONTEXT (decl
) == current_function_decl
)))
802 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
803 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
805 /* DECL is implicitly determined firstprivate in
806 the current task construct. Ensure copy ctor and
807 dtor are instantiated, because during gimplification
808 it will be already too late. */
809 tree type
= TREE_TYPE (decl
);
810 if (is_invisiref_parm (decl
))
811 type
= TREE_TYPE (type
);
812 while (TREE_CODE (type
) == ARRAY_TYPE
)
813 type
= TREE_TYPE (type
);
814 get_copy_ctor (type
, tf_none
);
815 get_dtor (type
, tf_none
);
818 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
822 /* Genericization context. */
824 struct cp_genericize_data
826 struct pointer_set_t
*p_set
;
827 vec
<tree
> bind_expr_stack
;
828 struct cp_genericize_omp_taskreg
*omp_ctx
;
831 /* Perform any pre-gimplification lowering of C++ front end trees to
835 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
838 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
839 struct pointer_set_t
*p_set
= wtd
->p_set
;
841 /* If in an OpenMP context, note var uses. */
842 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
844 || TREE_CODE (stmt
) == PARM_DECL
845 || TREE_CODE (stmt
) == RESULT_DECL
)
846 && omp_var_to_track (stmt
))
847 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
849 if (is_invisiref_parm (stmt
)
850 /* Don't dereference parms in a thunk, pass the references through. */
851 && !(DECL_THUNK_P (current_function_decl
)
852 && TREE_CODE (stmt
) == PARM_DECL
))
854 *stmt_p
= convert_from_reference (stmt
);
859 /* Map block scope extern declarations to visible declarations with the
860 same name and type in outer scopes if any. */
861 if (cp_function_chain
->extern_decl_map
862 && VAR_OR_FUNCTION_DECL_P (stmt
)
863 && DECL_EXTERNAL (stmt
))
865 struct cxx_int_tree_map
*h
, in
;
866 in
.uid
= DECL_UID (stmt
);
867 h
= (struct cxx_int_tree_map
*)
868 htab_find_with_hash (cp_function_chain
->extern_decl_map
,
878 /* Other than invisiref parms, don't walk the same tree twice. */
879 if (pointer_set_contains (p_set
, stmt
))
885 if (TREE_CODE (stmt
) == ADDR_EXPR
886 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
888 /* If in an OpenMP context, note var uses. */
889 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
890 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
891 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
892 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
895 else if (TREE_CODE (stmt
) == RETURN_EXPR
896 && TREE_OPERAND (stmt
, 0)
897 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
898 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
900 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
901 switch (OMP_CLAUSE_CODE (stmt
))
903 case OMP_CLAUSE_LASTPRIVATE
:
904 /* Don't dereference an invisiref in OpenMP clauses. */
905 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
908 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
909 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
910 cp_genericize_r
, data
, NULL
);
913 case OMP_CLAUSE_PRIVATE
:
914 /* Don't dereference an invisiref in OpenMP clauses. */
915 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
917 else if (wtd
->omp_ctx
!= NULL
)
919 /* Private clause doesn't cause any references to the
920 var in outer contexts, avoid calling
921 omp_cxx_notice_variable for it. */
922 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
924 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
930 case OMP_CLAUSE_SHARED
:
931 case OMP_CLAUSE_FIRSTPRIVATE
:
932 case OMP_CLAUSE_COPYIN
:
933 case OMP_CLAUSE_COPYPRIVATE
:
934 /* Don't dereference an invisiref in OpenMP clauses. */
935 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
938 case OMP_CLAUSE_REDUCTION
:
939 /* Don't dereference an invisiref in reduction clause's
940 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
941 still needs to be genericized. */
942 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
945 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
946 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
947 cp_genericize_r
, data
, NULL
);
948 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
949 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
950 cp_genericize_r
, data
, NULL
);
956 else if (IS_TYPE_OR_DECL_P (stmt
))
959 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
960 to lower this construct before scanning it, so we need to lower these
961 before doing anything else. */
962 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
963 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
964 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
968 CLEANUP_EXPR (stmt
));
970 else if (TREE_CODE (stmt
) == IF_STMT
)
972 genericize_if_stmt (stmt_p
);
973 /* *stmt_p has changed, tail recurse to handle it again. */
974 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
977 /* COND_EXPR might have incompatible types in branches if one or both
978 arms are bitfields. Fix it up now. */
979 else if (TREE_CODE (stmt
) == COND_EXPR
)
982 = (TREE_OPERAND (stmt
, 1)
983 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
986 = (TREE_OPERAND (stmt
, 2)
987 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
990 && !useless_type_conversion_p (TREE_TYPE (stmt
),
991 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
993 TREE_OPERAND (stmt
, 1)
994 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
995 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
999 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1000 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1002 TREE_OPERAND (stmt
, 2)
1003 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1004 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1009 else if (TREE_CODE (stmt
) == BIND_EXPR
)
1011 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1014 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1016 && !DECL_EXTERNAL (decl
)
1017 && omp_var_to_track (decl
))
1020 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1021 (splay_tree_key
) decl
);
1023 splay_tree_insert (wtd
->omp_ctx
->variables
,
1024 (splay_tree_key
) decl
,
1026 ? OMP_CLAUSE_DEFAULT_SHARED
1027 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1030 wtd
->bind_expr_stack
.safe_push (stmt
);
1031 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1032 cp_genericize_r
, data
, NULL
);
1033 wtd
->bind_expr_stack
.pop ();
1036 else if (TREE_CODE (stmt
) == USING_STMT
)
1038 tree block
= NULL_TREE
;
1040 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1041 BLOCK, and append an IMPORTED_DECL to its
1042 BLOCK_VARS chained list. */
1043 if (wtd
->bind_expr_stack
.exists ())
1046 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1047 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1052 tree using_directive
;
1053 gcc_assert (TREE_OPERAND (stmt
, 0));
1055 using_directive
= make_node (IMPORTED_DECL
);
1056 TREE_TYPE (using_directive
) = void_type_node
;
1058 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1059 = TREE_OPERAND (stmt
, 0);
1060 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1061 BLOCK_VARS (block
) = using_directive
;
1063 /* The USING_STMT won't appear in GENERIC. */
1064 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1068 else if (TREE_CODE (stmt
) == DECL_EXPR
1069 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1071 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1072 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1075 else if (TREE_CODE (stmt
) == OMP_PARALLEL
|| TREE_CODE (stmt
) == OMP_TASK
)
1077 struct cp_genericize_omp_taskreg omp_ctx
;
1082 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1083 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1084 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1085 omp_ctx
.outer
= wtd
->omp_ctx
;
1086 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1087 wtd
->omp_ctx
= &omp_ctx
;
1088 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1089 switch (OMP_CLAUSE_CODE (c
))
1091 case OMP_CLAUSE_SHARED
:
1092 case OMP_CLAUSE_PRIVATE
:
1093 case OMP_CLAUSE_FIRSTPRIVATE
:
1094 case OMP_CLAUSE_LASTPRIVATE
:
1095 decl
= OMP_CLAUSE_DECL (c
);
1096 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1098 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1101 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1102 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1103 ? OMP_CLAUSE_DEFAULT_SHARED
1104 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1105 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1107 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1109 case OMP_CLAUSE_DEFAULT
:
1110 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1111 omp_ctx
.default_shared
= true;
1115 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1116 wtd
->omp_ctx
= omp_ctx
.outer
;
1117 splay_tree_delete (omp_ctx
.variables
);
1119 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1120 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1121 else if (TREE_CODE (stmt
) == FOR_STMT
)
1122 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1123 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1124 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1125 else if (TREE_CODE (stmt
) == DO_STMT
)
1126 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1127 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1128 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1129 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1130 genericize_continue_stmt (stmt_p
);
1131 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1132 genericize_break_stmt (stmt_p
);
1133 else if (TREE_CODE (stmt
) == OMP_FOR
1134 || TREE_CODE (stmt
) == OMP_SIMD
1135 || TREE_CODE (stmt
) == OMP_DISTRIBUTE
)
1136 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1137 else if (TREE_CODE (stmt
) == SIZEOF_EXPR
)
1139 if (SIZEOF_EXPR_TYPE_P (stmt
))
1141 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt
, 0)),
1142 SIZEOF_EXPR
, false);
1143 else if (TYPE_P (TREE_OPERAND (stmt
, 0)))
1144 *stmt_p
= cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt
, 0),
1145 SIZEOF_EXPR
, false);
1147 *stmt_p
= cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt
, 0),
1148 SIZEOF_EXPR
, false);
1149 if (*stmt_p
== error_mark_node
)
1150 *stmt_p
= size_one_node
;
1154 pointer_set_insert (p_set
, *stmt_p
);
1159 /* Lower C++ front end trees to GENERIC in T_P. */
1162 cp_genericize_tree (tree
* t_p
)
1164 struct cp_genericize_data wtd
;
1166 wtd
.p_set
= pointer_set_create ();
1167 wtd
.bind_expr_stack
.create (0);
1169 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1170 pointer_set_destroy (wtd
.p_set
);
1171 wtd
.bind_expr_stack
.release ();
/* Genericize the body of FNDECL: rewrite by-invisible-reference parms
   and result to explicit reference types, lower C++-specific trees to
   GENERIC via cp_genericize_tree, then hand off to c_genericize.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm to its by-reference ABI type and recompute
	   its layout accordingly.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  With the
	     named return value optimization a local variable aliases
	     the RESULT_DECL; its DECL_VALUE_EXPR must now dereference
	     the reference we just introduced.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_enable_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  /* Every begin_bc_block must have been matched by a finish_bc_block;
     a leftover label would mean a break/continue target leaked.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.

   For array-typed ARG1 this emits an explicit pointer-walk loop
   (init, label, call, increment, conditional back-jump) over the base
   elements; otherwise a single call on &ARG1 (and &ARG2).  Returns the
   statement tree, or NULL when FN is NULL.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the object argument(s) in FN's parameter-type list; what
     remains are parameters needing default arguments.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Peel off all array dimensions to reach the element type, and
	 form a reference to the first element of each array.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* end1 = start1 + total size of the array, i.e. one past the
	 last element.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* p1 (and p2) are the running element pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head: the back-edge jumps here.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance to the next element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* if (p1 != end1) goto lab;  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: one call on the address(es).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 0 of the clause info holds the constructor (see the other
     cxx_omp_clause_* helpers: 1 is the destructor, 2 the assignment
     operator).  No info means trivial default-initialization.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 0 of the clause info holds the constructor to apply.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  /* No constructor recorded (trivially copyable): plain assignment.  */
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 2 of the clause info holds the assignment operator.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  /* No operator recorded: fall back to plain assignment.  */
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL, or NULL when no destructor call is
   needed.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 1 of the clause info holds the destructor, if any.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
1415 /* True if OpenMP should privatize what this DECL points to rather
1416 than the DECL itself. */
1419 cxx_omp_privatize_by_reference (const_tree decl
)
1421 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1422 || is_invisiref_parm (decl
));
/* Return true if DECL is const qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only invisible-reference parms may be looked through; any
	 other reference is not a const var for our purposes.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Recover it from the original local
	     variable that the named return value aliases.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1467 /* True if OpenMP sharing attribute of DECL is predetermined. */
1469 enum omp_clause_default_kind
1470 cxx_omp_predetermined_sharing (tree decl
)
1472 /* Static data members are predetermined shared. */
1473 if (TREE_STATIC (decl
))
1475 tree ctx
= CP_DECL_CONTEXT (decl
);
1476 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1477 return OMP_CLAUSE_DEFAULT_SHARED
;
1480 /* Const qualified vars having no mutable member are predetermined
1482 if (cxx_omp_const_qual_no_mutable (decl
))
1483 return OMP_CLAUSE_DEFAULT_SHARED
;
1485 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
/* Finalize an implicitly determined clause.

   Only OMP_CLAUSE_FIRSTPRIVATE is of interest; the clause is demoted
   to OMP_CLAUSE_SHARED when firstprivatizing it is invalid (reference
   type) or when the element type needs special member functions that
   must be looked up now, while we are still in the right context.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible-reference parms are looked through; any other
	 reference type cannot be firstprivate.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}