/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "double-int.h"
#include "stor-layout.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "splay-tree.h"
#include "c-family/c-ubsan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static void cp_genericize_tree (tree *);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   list.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
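
/* E.g. with two nested loops, bc_label[bc_break] points at the inner
   loop's label, whose DECL_CHAIN links to the outer loop's label, so a
   break statement always targets the innermost open scope.  */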

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
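
/* For reference, the tree built above has roughly this shape:

     TRY_CATCH_EXPR
       op0: BODY
       op1: EH_FILTER_EXPR
              EH_FILTER_TYPES: ALLOWED
              EH_FILTER_FAILURE: FAILURE

   i.e. if BODY throws an exception whose type is not in ALLOWED,
   FAILURE runs.  */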

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}
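
/* For illustration, the transformation above turns (roughly)

     if (x) f (); else g ();   into   COND_EXPR <x, f (), g ()>

   and when the condition folds to a constant and the dead arm has no
   side effects, the COND_EXPR is elided and only the live arm is
   kept.  */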

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (incr && EXPR_P (incr))
    SET_EXPR_LOCATION (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
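
/* For illustration, a while loop

     while (cond) body;

   is lowered approximately to

     LOOP_EXPR
       <if (cond) {} else goto break_label;
        body;
        continue_label:;>
     break_label:;

   with the labels emitted only when actually used.  For a do-while
   loop (COND_IS_FIRST false) the exit test is placed after the body
   instead.  */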

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
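
/* Both lowerings above reduce to a GOTO_EXPR targeting the label most
   recently pushed by begin_bc_block, e.g. (sketch)

     while (c) { if (p) break; }   =>   ... if (p) goto break_label; ...

   The PREDICT_EXPR emitted for continue merely hints that taking the
   continue branch is not the common case.  */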

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
        /* Handle aggregate NSDMI.  */
        replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
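
/* A sketch of the slot replacement performed above:

     x = AGGR_INIT_EXPR <ctor, ..., slot>
       =>
     AGGR_INIT_EXPR <ctor, ..., x>

   i.e. the temporary slot is replaced by the real target X, the
   object is constructed directly in place, and the enclosing
   INIT_EXPR disappears.  */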

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
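
/* The GIMPLE emitted above has approximately this shape:

     try
       {
         BODY
       }
     catch
       {
         <<<eh_must_not_throw (terminate)>>>
       }

   so an exception escaping BODY ends up calling std::terminate.  */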

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
                  || (TREE_CODE (op1) == CONSTRUCTOR
                      && CONSTRUCTOR_NELTS (op1) == 0
                      && !TREE_CLOBBER_P (op1))
                  || (TREE_CODE (op1) == CALL_EXPR
                      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and non-empty
               CONSTRUCTORs get reduced properly, and we leave the return
               slot optimization alone because it isn't a copy (FIXME so it
               shouldn't be represented as one).

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1))
              *expr_p = op0;
            else if (TREE_THIS_VOLATILE (op1)
                     && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                build_fold_addr_expr (op1), op0);
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op1, op0);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert
        (fn_contains_cilk_spawn_p (cfun)
         && cilk_detect_spawn_and_unwrap (expr_p));

      /* If errors are seen, then just process it as a CALL_EXPR.  */
      if (!seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

      /* DR 1030 says that we need to evaluate the elements of an
         initializer-list in forward order even when it's used as arguments to
         a constructor.  So if the target wants to evaluate them in reverse
         order and there's more than one argument other than 'this', gimplify
         them in order.  */
      ret = GS_OK;
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
          && call_expr_nargs (*expr_p) > 2)
        {
          int nargs = call_expr_nargs (*expr_p);
          location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
          for (int i = 1; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is an invisible reference parameter, that is, a
   PARM_DECL or RESULT_DECL passed or returned by invisible reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
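
/* For instance (a sketch): given

     #pragma omp task
     use (a);

   where A is a non-static class object declared in the enclosing
   function and mentioned in no clause, A is implicitly determined
   firstprivate in the task, so its copy constructor and destructor
   get instantiated here, while genericization still can do so.  */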

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still need to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
        *stmt_p
          = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
                                        SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
        *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      else
        *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
        *stmt_p = size_one_node;
      return NULL;
    }
  else if (flag_sanitize
           & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree *t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add
   ubsan instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
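
/* To illustrate the invisible-reference fixup above (a sketch):

     struct A { A (const A &); };	// not trivially copyable
     void f (A a);

   A is passed by invisible reference, so inside F the parm A is given
   reference type with DECL_BY_REFERENCE set, and cp_genericize_r
   rewrites each use of A as a dereference via convert_from_reference.  */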

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
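
/* For an array operand the statement list built above is roughly

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     lab:
     fn (p1, p2);		// p2 only when ARG2 is given
     p1 += sizeof (element);	// likewise p2
     if (p1 != end1) goto lab;

   applying FN to each element of the array(s) in turn.  */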

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable with no mutable
   member.  */

static bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}