1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
5 Free Software Foundation, Inc.
6 Contributed by Jason Merrill <jason@redhat.com>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
30 #include "c-family/c-common.h"
31 #include "tree-iterator.h"
34 #include "pointer-set.h"
36 #include "splay-tree.h"
38 /* Forward declarations. */
40 static tree
cp_genericize_r (tree
*, int *, void *);
41 static void cp_genericize_tree (tree
*);
43 /* Local declarations. */
45 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
47 /* Stack of labels which are targets for "break" or "continue",
48 linked through TREE_CHAIN. */
49 static tree bc_label
[2];
51 /* Begin a scope which can be exited by a break or continue statement. BC
54 Just creates a label with location LOCATION and pushes it into the current
58 begin_bc_block (enum bc_t bc
, location_t location
)
60 tree label
= create_artificial_label (location
);
61 DECL_CHAIN (label
) = bc_label
[bc
];
66 /* Finish a scope which can be exited by a break or continue statement.
67 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
68 an expression for the contents of the scope.
70 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
71 BLOCK. Otherwise, just forget the label. */
74 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
76 gcc_assert (label
== bc_label
[bc
]);
78 if (TREE_USED (label
))
79 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
82 bc_label
[bc
] = DECL_CHAIN (label
);
83 DECL_CHAIN (label
) = NULL_TREE
;
86 /* Get the LABEL_EXPR to represent a break or continue statement
87 in the current block scope. BC indicates which. */
90 get_bc_label (enum bc_t bc
)
92 tree label
= bc_label
[bc
];
94 /* Mark the label used for finish_bc_block. */
95 TREE_USED (label
) = 1;
99 /* Genericize a TRY_BLOCK. */
102 genericize_try_block (tree
*stmt_p
)
104 tree body
= TRY_STMTS (*stmt_p
);
105 tree cleanup
= TRY_HANDLERS (*stmt_p
);
107 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
110 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
113 genericize_catch_block (tree
*stmt_p
)
115 tree type
= HANDLER_TYPE (*stmt_p
);
116 tree body
= HANDLER_BODY (*stmt_p
);
118 /* FIXME should the caught type go in TREE_TYPE? */
119 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
122 /* A terser interface for building a representation of an exception
126 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
130 /* FIXME should the allowed types go in TREE_TYPE? */
131 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
132 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
134 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
135 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
140 /* Genericize an EH_SPEC_BLOCK by converting it to a
141 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
144 genericize_eh_spec_block (tree
*stmt_p
)
146 tree body
= EH_SPEC_STMTS (*stmt_p
);
147 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
148 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
150 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
151 TREE_NO_WARNING (*stmt_p
) = true;
152 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
155 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
158 genericize_if_stmt (tree
*stmt_p
)
160 tree stmt
, cond
, then_
, else_
;
161 location_t locus
= EXPR_LOCATION (*stmt_p
);
164 cond
= IF_COND (stmt
);
165 then_
= THEN_CLAUSE (stmt
);
166 else_
= ELSE_CLAUSE (stmt
);
169 then_
= build_empty_stmt (locus
);
171 else_
= build_empty_stmt (locus
);
173 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
175 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
178 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
179 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
180 SET_EXPR_LOCATION (stmt
, locus
);
184 /* Build a generic representation of one of the C loop forms. COND is the
185 loop condition or NULL_TREE. BODY is the (possibly compound) statement
186 controlled by the loop. INCR is the increment expression of a for-loop,
187 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
188 evaluated before the loop body as in while and for loops, or after the
189 loop body as in do-while loops. */
192 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
193 tree incr
, bool cond_is_first
, int *walk_subtrees
,
197 tree entry
= NULL
, exit
= NULL
, t
;
198 tree stmt_list
= NULL
;
200 blab
= begin_bc_block (bc_break
, start_locus
);
201 clab
= begin_bc_block (bc_continue
, start_locus
);
203 if (incr
&& EXPR_P (incr
))
204 SET_EXPR_LOCATION (incr
, start_locus
);
206 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
207 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
208 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
211 /* If condition is zero don't generate a loop construct. */
212 if (cond
&& integer_zerop (cond
))
216 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
217 get_bc_label (bc_break
));
218 append_to_statement_list (t
, &stmt_list
);
223 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
224 tree top
= build1 (LABEL_EXPR
, void_type_node
,
225 create_artificial_label (start_locus
));
227 /* If we have an exit condition, then we build an IF with gotos either
228 out of the loop, or to the top of it. If there's no exit condition,
229 then we just build a jump back to the top. */
230 exit
= build1 (GOTO_EXPR
, void_type_node
, LABEL_EXPR_LABEL (top
));
232 if (cond
&& !integer_nonzerop (cond
))
234 /* Canonicalize the loop condition to the end. This means
235 generating a branch to the loop condition. Reuse the
236 continue label, if possible. */
241 entry
= build1 (LABEL_EXPR
, void_type_node
,
242 create_artificial_label (start_locus
));
243 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
244 LABEL_EXPR_LABEL (entry
));
247 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
248 get_bc_label (bc_continue
));
249 append_to_statement_list (t
, &stmt_list
);
252 t
= build1 (GOTO_EXPR
, void_type_node
, get_bc_label (bc_break
));
253 exit
= fold_build3_loc (start_locus
,
254 COND_EXPR
, void_type_node
, cond
, exit
, t
);
257 append_to_statement_list (top
, &stmt_list
);
260 append_to_statement_list (body
, &stmt_list
);
261 finish_bc_block (&stmt_list
, bc_continue
, clab
);
262 append_to_statement_list (incr
, &stmt_list
);
263 append_to_statement_list (entry
, &stmt_list
);
264 append_to_statement_list (exit
, &stmt_list
);
265 finish_bc_block (&stmt_list
, bc_break
, blab
);
267 if (stmt_list
== NULL_TREE
)
268 stmt_list
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
273 /* Genericize a FOR_STMT node *STMT_P. */
276 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
281 tree init
= FOR_INIT_STMT (stmt
);
285 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
286 append_to_statement_list (init
, &expr
);
289 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
290 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
291 append_to_statement_list (loop
, &expr
);
295 /* Genericize a WHILE_STMT node *STMT_P. */
298 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
301 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
302 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
305 /* Genericize a DO_STMT node *STMT_P. */
308 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
311 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
312 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
315 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
318 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
321 tree break_block
, body
, cond
, type
;
322 location_t stmt_locus
= EXPR_LOCATION (stmt
);
324 break_block
= begin_bc_block (bc_break
, stmt_locus
);
326 body
= SWITCH_STMT_BODY (stmt
);
328 body
= build_empty_stmt (stmt_locus
);
329 cond
= SWITCH_STMT_COND (stmt
);
330 type
= SWITCH_STMT_TYPE (stmt
);
332 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
333 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
334 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
337 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
338 finish_bc_block (stmt_p
, bc_break
, break_block
);
341 /* Genericize a CONTINUE_STMT node *STMT_P. */
344 genericize_continue_stmt (tree
*stmt_p
)
346 tree stmt_list
= NULL
;
347 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
348 tree label
= get_bc_label (bc_continue
);
349 location_t location
= EXPR_LOCATION (*stmt_p
);
350 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
351 append_to_statement_list (pred
, &stmt_list
);
352 append_to_statement_list (jump
, &stmt_list
);
356 /* Genericize a BREAK_STMT node *STMT_P. */
359 genericize_break_stmt (tree
*stmt_p
)
361 tree label
= get_bc_label (bc_break
);
362 location_t location
= EXPR_LOCATION (*stmt_p
);
363 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
366 /* Genericize a OMP_FOR node *STMT_P. */
369 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
372 location_t locus
= EXPR_LOCATION (stmt
);
373 tree clab
= begin_bc_block (bc_continue
, locus
);
375 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
376 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
377 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
378 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
379 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
380 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
383 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
386 /* Hook into the middle of gimplifying an OMP_FOR node. */
388 static enum gimplify_status
389 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
391 tree for_stmt
= *expr_p
;
392 gimple_seq seq
= NULL
;
394 /* Protect ourselves from recursion. */
395 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
397 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
399 gimplify_and_add (for_stmt
, &seq
);
400 gimple_seq_add_seq (pre_p
, seq
);
402 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
407 /* Gimplify an EXPR_STMT node. */
410 gimplify_expr_stmt (tree
*stmt_p
)
412 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
414 if (stmt
== error_mark_node
)
417 /* Gimplification of a statement expression will nullify the
418 statement if all its side effects are moved to *PRE_P and *POST_P.
420 In this case we will not want to emit the gimplified statement.
421 However, we may still want to emit a warning, so we do that before
423 if (stmt
&& warn_unused_value
)
425 if (!TREE_SIDE_EFFECTS (stmt
))
427 if (!IS_EMPTY_STMT (stmt
)
428 && !VOID_TYPE_P (TREE_TYPE (stmt
))
429 && !TREE_NO_WARNING (stmt
))
430 warning (OPT_Wunused_value
, "statement with no effect");
433 warn_if_unused_value (stmt
, input_location
);
436 if (stmt
== NULL_TREE
)
437 stmt
= alloc_stmt_list ();
442 /* Gimplify initialization from an AGGR_INIT_EXPR. */
445 cp_gimplify_init_expr (tree
*expr_p
)
447 tree from
= TREE_OPERAND (*expr_p
, 1);
448 tree to
= TREE_OPERAND (*expr_p
, 0);
451 /* What about code that pulls out the temp and uses it elsewhere? I
452 think that such code never uses the TARGET_EXPR as an initializer. If
453 I'm wrong, we'll abort because the temp won't have any RTL. In that
454 case, I guess we'll need to replace references somehow. */
455 if (TREE_CODE (from
) == TARGET_EXPR
)
456 from
= TARGET_EXPR_INITIAL (from
);
458 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
459 inside the TARGET_EXPR. */
462 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
464 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
465 replace the slot operand with our target.
467 Should we add a target parm to gimplify_expr instead? No, as in this
468 case we want to replace the INIT_EXPR. */
469 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
470 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
472 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
473 AGGR_INIT_EXPR_SLOT (sub
) = to
;
475 VEC_INIT_EXPR_SLOT (sub
) = to
;
478 /* The initialization is now a side-effect, so the container can
481 TREE_TYPE (from
) = void_type_node
;
487 t
= TREE_OPERAND (t
, 1);
492 /* Gimplify a MUST_NOT_THROW_EXPR. */
494 static enum gimplify_status
495 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
498 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
499 tree body
= TREE_OPERAND (stmt
, 0);
500 gimple_seq try_
= NULL
;
501 gimple_seq catch_
= NULL
;
504 gimplify_and_add (body
, &try_
);
505 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
506 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
507 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
509 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
520 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
523 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
525 int saved_stmts_are_full_exprs_p
= 0;
526 enum tree_code code
= TREE_CODE (*expr_p
);
527 enum gimplify_status ret
;
529 if (STATEMENT_CODE_P (code
))
531 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
532 current_stmt_tree ()->stmts_are_full_exprs_p
533 = STMT_IS_FULL_EXPR_P (*expr_p
);
539 *expr_p
= cplus_expand_constant (*expr_p
);
544 simplify_aggr_init_expr (expr_p
);
550 location_t loc
= input_location
;
551 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
552 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
553 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
554 input_location
= EXPR_LOCATION (*expr_p
);
555 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
556 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
558 tf_warning_or_error
);
559 cp_genericize_tree (expr_p
);
561 input_location
= loc
;
566 /* FIXME communicate throw type to back end, probably by moving
567 THROW_EXPR into ../tree.def. */
568 *expr_p
= TREE_OPERAND (*expr_p
, 0);
572 case MUST_NOT_THROW_EXPR
:
573 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
576 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
577 LHS of an assignment might also be involved in the RHS, as in bug
580 cp_gimplify_init_expr (expr_p
);
581 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
583 /* Otherwise fall through. */
586 /* If the back end isn't clever enough to know that the lhs and rhs
587 types are the same, add an explicit conversion. */
588 tree op0
= TREE_OPERAND (*expr_p
, 0);
589 tree op1
= TREE_OPERAND (*expr_p
, 1);
591 if (!error_operand_p (op0
)
592 && !error_operand_p (op1
)
593 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
594 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
595 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
596 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
597 TREE_TYPE (op0
), op1
);
599 else if ((is_gimple_lvalue (op1
) || INDIRECT_REF_P (op1
)
600 || (TREE_CODE (op1
) == CONSTRUCTOR
601 && CONSTRUCTOR_NELTS (op1
) == 0
602 && !TREE_CLOBBER_P (op1
))
603 || (TREE_CODE (op1
) == CALL_EXPR
604 && !CALL_EXPR_RETURN_SLOT_OPT (op1
)))
605 && is_really_empty_class (TREE_TYPE (op0
)))
607 /* Remove any copies of empty classes. We check that the RHS
608 has a simple form so that TARGET_EXPRs and non-empty
609 CONSTRUCTORs get reduced properly, and we leave the return
610 slot optimization alone because it isn't a copy (FIXME so it
611 shouldn't be represented as one).
613 Also drop volatile variables on the RHS to avoid infinite
614 recursion from gimplify_expr trying to load the value. */
615 if (!TREE_SIDE_EFFECTS (op1
)
616 || (DECL_P (op1
) && TREE_THIS_VOLATILE (op1
)))
618 else if (TREE_CODE (op1
) == MEM_REF
619 && TREE_THIS_VOLATILE (op1
))
621 /* Similarly for volatile MEM_REFs on the RHS. */
622 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1
, 0)))
625 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
626 TREE_OPERAND (op1
, 0), op0
);
629 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
636 case EMPTY_CLASS_EXPR
:
637 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
638 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
643 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
648 genericize_try_block (expr_p
);
653 genericize_catch_block (expr_p
);
658 genericize_eh_spec_block (expr_p
);
674 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
678 gimplify_expr_stmt (expr_p
);
682 case UNARY_PLUS_EXPR
:
684 tree arg
= TREE_OPERAND (*expr_p
, 0);
685 tree type
= TREE_TYPE (*expr_p
);
686 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
693 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
697 /* Restore saved state. */
698 if (STATEMENT_CODE_P (code
))
699 current_stmt_tree ()->stmts_are_full_exprs_p
700 = saved_stmts_are_full_exprs_p
;
706 is_invisiref_parm (const_tree t
)
708 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
709 && DECL_BY_REFERENCE (t
));
712 /* Return true if the uid in both int tree maps are equal. */
715 cxx_int_tree_map_eq (const void *va
, const void *vb
)
717 const struct cxx_int_tree_map
*a
= (const struct cxx_int_tree_map
*) va
;
718 const struct cxx_int_tree_map
*b
= (const struct cxx_int_tree_map
*) vb
;
719 return (a
->uid
== b
->uid
);
722 /* Hash a UID in a cxx_int_tree_map. */
725 cxx_int_tree_map_hash (const void *item
)
727 return ((const struct cxx_int_tree_map
*)item
)->uid
;
730 /* A stable comparison routine for use with splay trees and DECLs. */
733 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
738 return DECL_UID (a
) - DECL_UID (b
);
741 /* OpenMP context during genericization. */
743 struct cp_genericize_omp_taskreg
747 struct cp_genericize_omp_taskreg
*outer
;
748 splay_tree variables
;
751 /* Return true if genericization should try to determine if
752 DECL is firstprivate or shared within task regions. */
755 omp_var_to_track (tree decl
)
757 tree type
= TREE_TYPE (decl
);
758 if (is_invisiref_parm (decl
))
759 type
= TREE_TYPE (type
);
760 while (TREE_CODE (type
) == ARRAY_TYPE
)
761 type
= TREE_TYPE (type
);
762 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
764 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_THREAD_LOCAL_P (decl
))
766 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
771 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
774 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
776 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
777 (splay_tree_key
) decl
);
780 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
782 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
783 if (!omp_ctx
->default_shared
)
785 struct cp_genericize_omp_taskreg
*octx
;
787 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
789 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
790 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
792 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
795 if (octx
->is_parallel
)
799 && (TREE_CODE (decl
) == PARM_DECL
800 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
801 && DECL_CONTEXT (decl
) == current_function_decl
)))
802 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
803 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
805 /* DECL is implicitly determined firstprivate in
806 the current task construct. Ensure copy ctor and
807 dtor are instantiated, because during gimplification
808 it will be already too late. */
809 tree type
= TREE_TYPE (decl
);
810 if (is_invisiref_parm (decl
))
811 type
= TREE_TYPE (type
);
812 while (TREE_CODE (type
) == ARRAY_TYPE
)
813 type
= TREE_TYPE (type
);
814 get_copy_ctor (type
, tf_none
);
815 get_dtor (type
, tf_none
);
818 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
822 /* Genericization context. */
824 struct cp_genericize_data
826 struct pointer_set_t
*p_set
;
827 VEC (tree
, heap
) *bind_expr_stack
;
828 struct cp_genericize_omp_taskreg
*omp_ctx
;
831 /* Perform any pre-gimplification lowering of C++ front end trees to
835 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
838 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
839 struct pointer_set_t
*p_set
= wtd
->p_set
;
841 /* If in an OpenMP context, note var uses. */
842 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
843 && (TREE_CODE (stmt
) == VAR_DECL
844 || TREE_CODE (stmt
) == PARM_DECL
845 || TREE_CODE (stmt
) == RESULT_DECL
)
846 && omp_var_to_track (stmt
))
847 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
849 if (is_invisiref_parm (stmt
)
850 /* Don't dereference parms in a thunk, pass the references through. */
851 && !(DECL_THUNK_P (current_function_decl
)
852 && TREE_CODE (stmt
) == PARM_DECL
))
854 *stmt_p
= convert_from_reference (stmt
);
859 /* Map block scope extern declarations to visible declarations with the
860 same name and type in outer scopes if any. */
861 if (cp_function_chain
->extern_decl_map
862 && (TREE_CODE (stmt
) == FUNCTION_DECL
|| TREE_CODE (stmt
) == VAR_DECL
)
863 && DECL_EXTERNAL (stmt
))
865 struct cxx_int_tree_map
*h
, in
;
866 in
.uid
= DECL_UID (stmt
);
867 h
= (struct cxx_int_tree_map
*)
868 htab_find_with_hash (cp_function_chain
->extern_decl_map
,
878 /* Other than invisiref parms, don't walk the same tree twice. */
879 if (pointer_set_contains (p_set
, stmt
))
885 if (TREE_CODE (stmt
) == ADDR_EXPR
886 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
888 /* If in an OpenMP context, note var uses. */
889 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
890 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
891 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
892 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
895 else if (TREE_CODE (stmt
) == RETURN_EXPR
896 && TREE_OPERAND (stmt
, 0)
897 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
898 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
900 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
901 switch (OMP_CLAUSE_CODE (stmt
))
903 case OMP_CLAUSE_LASTPRIVATE
:
904 /* Don't dereference an invisiref in OpenMP clauses. */
905 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
908 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
909 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
910 cp_genericize_r
, data
, NULL
);
913 case OMP_CLAUSE_PRIVATE
:
914 /* Don't dereference an invisiref in OpenMP clauses. */
915 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
917 else if (wtd
->omp_ctx
!= NULL
)
919 /* Private clause doesn't cause any references to the
920 var in outer contexts, avoid calling
921 omp_cxx_notice_variable for it. */
922 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
924 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
930 case OMP_CLAUSE_SHARED
:
931 case OMP_CLAUSE_FIRSTPRIVATE
:
932 case OMP_CLAUSE_COPYIN
:
933 case OMP_CLAUSE_COPYPRIVATE
:
934 /* Don't dereference an invisiref in OpenMP clauses. */
935 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
938 case OMP_CLAUSE_REDUCTION
:
939 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)));
944 else if (IS_TYPE_OR_DECL_P (stmt
))
947 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
948 to lower this construct before scanning it, so we need to lower these
949 before doing anything else. */
950 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
951 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
952 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
956 CLEANUP_EXPR (stmt
));
958 else if (TREE_CODE (stmt
) == IF_STMT
)
960 genericize_if_stmt (stmt_p
);
961 /* *stmt_p has changed, tail recurse to handle it again. */
962 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
965 /* COND_EXPR might have incompatible types in branches if one or both
966 arms are bitfields. Fix it up now. */
967 else if (TREE_CODE (stmt
) == COND_EXPR
)
970 = (TREE_OPERAND (stmt
, 1)
971 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
974 = (TREE_OPERAND (stmt
, 2)
975 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
978 && !useless_type_conversion_p (TREE_TYPE (stmt
),
979 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
981 TREE_OPERAND (stmt
, 1)
982 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
983 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
987 && !useless_type_conversion_p (TREE_TYPE (stmt
),
988 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
990 TREE_OPERAND (stmt
, 2)
991 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
992 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
997 else if (TREE_CODE (stmt
) == BIND_EXPR
)
999 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1002 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1003 if (TREE_CODE (decl
) == VAR_DECL
1004 && !DECL_EXTERNAL (decl
)
1005 && omp_var_to_track (decl
))
1008 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1009 (splay_tree_key
) decl
);
1011 splay_tree_insert (wtd
->omp_ctx
->variables
,
1012 (splay_tree_key
) decl
,
1014 ? OMP_CLAUSE_DEFAULT_SHARED
1015 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1018 VEC_safe_push (tree
, heap
, wtd
->bind_expr_stack
, stmt
);
1019 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1020 cp_genericize_r
, data
, NULL
);
1021 VEC_pop (tree
, wtd
->bind_expr_stack
);
1024 else if (TREE_CODE (stmt
) == USING_STMT
)
1026 tree block
= NULL_TREE
;
1028 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1029 BLOCK, and append an IMPORTED_DECL to its
1030 BLOCK_VARS chained list. */
1031 if (wtd
->bind_expr_stack
)
1034 for (i
= VEC_length (tree
, wtd
->bind_expr_stack
) - 1; i
>= 0; i
--)
1035 if ((block
= BIND_EXPR_BLOCK (VEC_index (tree
,
1036 wtd
->bind_expr_stack
, i
))))
1041 tree using_directive
;
1042 gcc_assert (TREE_OPERAND (stmt
, 0));
1044 using_directive
= make_node (IMPORTED_DECL
);
1045 TREE_TYPE (using_directive
) = void_type_node
;
1047 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1048 = TREE_OPERAND (stmt
, 0);
1049 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1050 BLOCK_VARS (block
) = using_directive
;
1052 /* The USING_STMT won't appear in GENERIC. */
1053 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1057 else if (TREE_CODE (stmt
) == DECL_EXPR
1058 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1060 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1061 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1064 else if (TREE_CODE (stmt
) == OMP_PARALLEL
|| TREE_CODE (stmt
) == OMP_TASK
)
1066 struct cp_genericize_omp_taskreg omp_ctx
;
1071 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1072 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1073 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1074 omp_ctx
.outer
= wtd
->omp_ctx
;
1075 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1076 wtd
->omp_ctx
= &omp_ctx
;
1077 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1078 switch (OMP_CLAUSE_CODE (c
))
1080 case OMP_CLAUSE_SHARED
:
1081 case OMP_CLAUSE_PRIVATE
:
1082 case OMP_CLAUSE_FIRSTPRIVATE
:
1083 case OMP_CLAUSE_LASTPRIVATE
:
1084 decl
= OMP_CLAUSE_DECL (c
);
1085 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1087 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1090 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1091 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1092 ? OMP_CLAUSE_DEFAULT_SHARED
1093 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1094 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1096 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1098 case OMP_CLAUSE_DEFAULT
:
1099 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1100 omp_ctx
.default_shared
= true;
1104 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1105 wtd
->omp_ctx
= omp_ctx
.outer
;
1106 splay_tree_delete (omp_ctx
.variables
);
1108 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1109 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1110 else if (TREE_CODE (stmt
) == FOR_STMT
)
1111 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1112 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1113 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1114 else if (TREE_CODE (stmt
) == DO_STMT
)
1115 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1116 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1117 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1118 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1119 genericize_continue_stmt (stmt_p
);
1120 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1121 genericize_break_stmt (stmt_p
);
1122 else if (TREE_CODE (stmt
) == OMP_FOR
)
1123 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1124 else if (TREE_CODE (stmt
) == SIZEOF_EXPR
)
1126 if (SIZEOF_EXPR_TYPE_P (stmt
))
1128 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt
, 0)),
1129 SIZEOF_EXPR
, false);
1130 else if (TYPE_P (TREE_OPERAND (stmt
, 0)))
1131 *stmt_p
= cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt
, 0),
1132 SIZEOF_EXPR
, false);
1134 *stmt_p
= cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt
, 0),
1135 SIZEOF_EXPR
, false);
1136 if (*stmt_p
== error_mark_node
)
1137 *stmt_p
= size_one_node
;
1141 pointer_set_insert (p_set
, *stmt_p
);
1146 /* Lower C++ front end trees to GENERIC in T_P. */
1149 cp_genericize_tree (tree
* t_p
)
1151 struct cp_genericize_data wtd
;
1153 wtd
.p_set
= pointer_set_create ();
1154 wtd
.bind_expr_stack
= NULL
;
1156 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1157 pointer_set_destroy (wtd
.p_set
);
1158 VEC_free (tree
, heap
, wtd
.bind_expr_stack
);
/* Genericize the body of FNDECL: rewrite parameters and the return
   value that are passed by invisible reference to use reference types
   explicitly, then lower the saved tree to GENERIC.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* DECL_ARG_TYPE is the as-passed (reference) type; from here
	   on the parm is formally a reference.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  With the
	     named return value optimization a user variable may have
	     the RESULT_DECL as its DECL_VALUE_EXPR; rewrite it to
	     dereference the now-reference-typed result.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  /* Every break/continue scope opened by begin_bc_block must have been
     closed again by finish_bc_block.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip past the leading object parameter (and, when ARG2 is given,
     the source parameter) so DEFPARM points at FN's default-argument
     parameters.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit an explicit element-by-element loop of the
	 shape  p1 = &arg1[0]...; lab: fn (p1[, p2]); p1 += size;
	 [p2 += size;] if (p1 != end1) goto lab;  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;

      /* Strip all array dimensions, building ARRAY_REFs down to the
	 very first element; INNER_TYPE ends up as the element type.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while elements remain.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: one call on the addresses of ARG1 (and ARG2).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 0 of the clause info is the constructor; apply_fn also
     returns NULL when the function slot itself is NULL.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 0 of the clause info is the (copy) constructor.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  /* No special copy constructor needed: a plain bitwise assignment
     will do.  */
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 2 of the clause info is the assignment operator.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  /* No user-defined assignment: fall back to plain assignment.  */
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL, or NULL if no destruction is needed.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  /* Slot 1 of the clause info is the destructor.  */
  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
1396 /* True if OpenMP should privatize what this DECL points to rather
1397 than the DECL itself. */
1400 cxx_omp_privatize_by_reference (const_tree decl
)
1402 return is_invisiref_parm (decl
);
/* Return true if DECL is const qualified var having no mutable member.  */

static bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only look through the reference for invisible-reference
	 parms; any other reference-typed DECL does not qualify.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Find the user variable of the same name
	     in the outermost scope and prefer its (possibly const)
	     type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1447 /* True if OpenMP sharing attribute of DECL is predetermined. */
1449 enum omp_clause_default_kind
1450 cxx_omp_predetermined_sharing (tree decl
)
1452 /* Static data members are predetermined shared. */
1453 if (TREE_STATIC (decl
))
1455 tree ctx
= CP_DECL_CONTEXT (decl
);
1456 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1457 return OMP_CLAUSE_DEFAULT_SHARED
;
1460 /* Const qualified vars having no mutable member are predetermined
1462 if (cxx_omp_const_qual_no_mutable (decl
))
1463 return OMP_CLAUSE_DEFAULT_SHARED
;
1465 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible-reference parms stand for the referenced object;
	 any other reference-typed DECL cannot be firstprivate.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}