PR target/84064
gcc/cp/cp-gimplify.c
/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
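
/* For illustration (a rough sketch, not an exact tree dump): C++ source
   along the lines of

       if (cond)
	 f ();
       else
	 g ();

   is lowered here to a void COND_EXPR <cond, f (), g ()>.  When COND is
   a constant and the dead arm has no side-effects, only the live arm
   survives.  */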
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
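
/* For illustration, a while loop such as

       while (cond)
	 body;

   is lowered to approximately

       LOOP_EXPR
	 if (cond) ; else goto break_label;
	 body;
	 continue_label:
       break_label:

   while a do-while loop places the conditional exit after the body
   instead.  This is a sketch of the shape only; the labels come from
   begin_bc_block and are emitted by finish_bc_block only if actually
   used.  */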
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
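
/* For example, a "continue;" in a loop body becomes a PREDICT_EXPR
   (marking the branch as not taken) followed by a
   GOTO_EXPR <continue_label>, and a "break;" becomes a plain
   GOTO_EXPR <break_label>, where both labels come from the bc_label
   stack maintained by begin_bc_block/finish_bc_block above.  */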
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
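
/* For instance, given "struct E {}; E a, b;", the assignment "a = b"
   copies no data, so the MODIFY_EXPR handling in cp_gimplify_expr below
   uses this predicate to drop the copy and keep only the operands'
   side-effects.  */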
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
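
/* For example, naming "a[f ()]" as an lvalue calls f, so it has
   side-effects, while merely naming a volatile variable "v" does not
   (despite v having TREE_SIDE_EFFECTS set) until v is actually read
   or written.  */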
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
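
/* A note on the P0145 handling above: for C++17 (-fstrong-eval-order),
   in an assignment such as

       a[i ()] = f ();

   f () must be evaluated before i (), so the MODIFY_EXPR case
   preevaluates the RHS into a temporary whenever the LHS has
   side-effects, roughly as if the user had written
   "tmp = f (); a[i ()] = tmp;".  */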
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
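
/* An "invisible reference" parm arises for arguments whose type must be
   passed by reference under the ABI, e.g. a class with a nontrivial copy
   constructor or destructor; the PARM_DECL or RESULT_DECL then has
   reference type with DECL_BY_REFERENCE set, and cp_genericize_r below
   rewrites uses of it back into dereferences.  */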
/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle end.  For now we have most foldings only on GENERIC
   in fold-const, so we need to perform this before transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the
	 first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
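
/* As a usage sketch: cp_fold_function runs over a whole function body,
   so e.g. "int f () { return 3 + 4; }" reaches gimplification with the
   constant 7 already in place; the pset here and the fold_cache inside
   cp_fold together keep revisiting shared subtrees cheap.  */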
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree using_directive;
	    gcc_assert (TREE_OPERAND (stmt, 0));

	    using_directive = make_node (IMPORTED_DECL);
	    TREE_TYPE (using_directive) = void_type_node;

	    IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	      = TREE_OPERAND (stmt, 0);
	    DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	    BLOCK_VARS (block) = using_directive;
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (stmt, 0)))
		       == REFERENCE_TYPE))
	    *walk_subtrees = 0;
	}
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return
   statement, add ubsan instrumentation code to verify it at runtime.
   If -fsanitize=return is not enabled, instrument __builtin_unreachable
   instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
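
/* For example, given

       int f (int i) { if (i) return 1; }

   falling off the end without returning a value is undefined behavior;
   with -fsanitize=return the missing-return handler is appended at the
   end of the body, otherwise (when optimizing) a call to
   __builtin_unreachable () is appended instead.  */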
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
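
/* The array case above builds, approximately,

       p1 = &arg1[0]...[0];
       p2 = &arg2[0]...[0];
     lab:
       fn (p1, p2, <default args>);
       p1 += sizeof (element);
       p2 += sizeof (element);
       if (p1 != end1) goto lab;

   i.e. an element-wise application of FN over the flattened array.  */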
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1923 /* True if OpenMP sharing attribute of DECL is predetermined. */
1925 enum omp_clause_default_kind
1926 cxx_omp_predetermined_sharing (tree decl)
1928 /* Static data members are predetermined shared. */
1929 if (TREE_STATIC (decl))
1931 tree ctx = CP_DECL_CONTEXT (decl);
1932 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1933 return OMP_CLAUSE_DEFAULT_SHARED;
1936 /* Const qualified vars having no mutable member are predetermined
1937 shared. */
1938 if (cxx_omp_const_qual_no_mutable (decl))
1939 return OMP_CLAUSE_DEFAULT_SHARED;
1941 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1944 /* Finalize an implicitly determined clause.  */
1946 void
1947 cxx_omp_finish_clause (tree c, gimple_seq *)
1948 {
1949   tree decl, inner_type;
1950   bool make_shared = false;
1952   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1953     return;
1955   decl = OMP_CLAUSE_DECL (c);
1956   decl = require_complete_type (decl);
1957   inner_type = TREE_TYPE (decl);
1958   if (decl == error_mark_node)
1959     make_shared = true;
1960   else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1961     inner_type = TREE_TYPE (inner_type);
1963   /* We're interested in the base element, not arrays.  */
1964   while (TREE_CODE (inner_type) == ARRAY_TYPE)
1965     inner_type = TREE_TYPE (inner_type);
1967   /* Check for special function availability by building a call to one.
1968      Save the results, because later we won't be in the right context
1969      for making these queries.  */
1970   if (!make_shared
1971       && CLASS_TYPE_P (inner_type)
1972       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1973     make_shared = true;
1975   if (make_shared)
1976     {
1977       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1978       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
1979       OMP_CLAUSE_SHARED_READONLY (c) = 0;
1980     }
1981 }
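/* Editor's note, not part of the original file: the practical effect, as
   far as this editor can tell, is that an implicitly determined
   firstprivate of a class type is demoted to shared when the type is
   incomplete or when building the copy-in/destruction calls fails
   (cxx_omp_create_clause_info returning true is taken here to signal
   that failure).  */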
1983 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1984    disregarded in an OpenMP construct, because it is going to be
1985    remapped during OpenMP lowering.  SHARED is true if DECL
1986    is going to be shared, false if it is going to be privatized.  */
1988 bool
1989 cxx_omp_disregard_value_expr (tree decl, bool shared)
1990 {
1991   return !shared
1992          && VAR_P (decl)
1993          && DECL_HAS_VALUE_EXPR_P (decl)
1994          && DECL_ARTIFICIAL (decl)
1995          && DECL_LANG_SPECIFIC (decl)
1996          && DECL_OMP_PRIVATIZED_MEMBER (decl);
1997 }
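/* Editor's note, not part of the original file: the DECLs this accepts
   are the artificial proxies created for privatizing non-static data
   members (DECL_OMP_PRIVATIZED_MEMBER), whose DECL_VALUE_EXPR is
   something like `this->field'; when such a proxy is privatized, OpenMP
   lowering remaps the proxy itself, so its value expression must not be
   expanded.  */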
1999 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2001 static tree
2002 cp_fold_maybe_rvalue (tree x, bool rval)
2003 {
2004   while (true)
2005     {
2006       x = cp_fold (x);
2007       if (rval && DECL_P (x)
2008           && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
2009         {
2010           tree v = decl_constant_value (x);
2011           if (v != x && v != error_mark_node)
2012             {
2013               x = v;
2014               continue;
2015             }
2016         }
2017       break;
2018     }
2019   return x;
2020 }
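/* Editor's sketch, not part of the original file:

     const int n = 4;
     int a = n + 1;       // rvalue use: decl_constant_value gives 4, so 5
     const int *p = &n;   // lvalue use (RVAL false): `n' keeps its identity

   The loop re-folds after each substitution in case the constant value
   itself can be folded further.  */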
2022 /* Fold expression X which is used as an rvalue.  */
2024 static tree
2025 cp_fold_rvalue (tree x)
2026 {
2027   return cp_fold_maybe_rvalue (x, true);
2028 }
2030 /* Perform folding on expression X.  */
2032 tree
2033 cp_fully_fold (tree x)
2034 {
2035   if (processing_template_decl)
2036     return x;
2037   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2038      have to call both.  */
2039   if (cxx_dialect >= cxx11)
2040     x = maybe_constant_value (x);
2041   return cp_fold_rvalue (x);
2042 }
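/* Editor's note, not part of the original file: a hedged example under
   -std=c++11 or later:

     constexpr int sq (int i) { return i * i; }
     int a = sq (3) + 1;

   maybe_constant_value reduces `sq (3)' to 9 and cp_fold_rvalue then
   folds the remaining `9 + 1'; inside a template the expression is
   returned untouched.  */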
2044 /* c-common interface to cp_fold.  If IN_INIT, this is in a static
2045    initializer and certain changes should be made to the folding done,
2046    though currently they are not (FIXME).  We never touch MAYBE_CONST, as
2047    it is only used for the C front end's C_MAYBE_CONST_EXPR.  */
2049 tree
2050 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2051 {
2052   return cp_fold_maybe_rvalue (x, !lval);
2053 }
2055 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2057 /* Dispose of the whole FOLD_CACHE.  */
2059 void
2060 clear_fold_cache (void)
2061 {
2062   if (fold_cache != NULL)
2063     fold_cache->empty ();
2064 }
2066 /* This function tries to fold an expression X.
2067    To avoid combinatorial explosion, folding results are kept in fold_cache.
2068    If X is invalid, we don't fold at all.
2069    For performance reasons we don't cache expressions representing a
2070    declaration or constant.
2071    The function returns X or its folded variant.  */
2073 static tree
2074 cp_fold (tree x)
2075 {
2076   tree op0, op1, op2, op3;
2077   tree org_x = x, r = NULL_TREE;
2078   enum tree_code code;
2079   location_t loc;
2080   bool rval_ops = true;
2082   if (!x || x == error_mark_node)
2083     return x;
2085   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2086     return x;
2088   /* Don't bother to cache DECLs or constants.  */
2089   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2090     return x;
2092   if (fold_cache == NULL)
2093     fold_cache = hash_map<tree, tree>::create_ggc (101);
2095   if (tree *cached = fold_cache->get (x))
2096     return *cached;
2098   code = TREE_CODE (x);
2099   switch (code)
2100     {
2101     case CLEANUP_POINT_EXPR:
2102       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2103          effects.  */
2104       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2105       if (!TREE_SIDE_EFFECTS (r))
2106         x = r;
2107       break;
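/* Editor's sketch, not part of the original file: an expression statement
   such as `i + 1;' genericizes to something like

     CLEANUP_POINT_EXPR <i + 1>

   and since the folded operand has no side effects, there can be no
   temporaries to clean up, so the wrapper is dropped.  */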
2109     case SIZEOF_EXPR:
2110       x = fold_sizeof_expr (x);
2111       break;
2113     case VIEW_CONVERT_EXPR:
2114       rval_ops = false;
2115       /* FALLTHRU */
2116     case CONVERT_EXPR:
2117     case NOP_EXPR:
2118     case NON_LVALUE_EXPR:
2120       if (VOID_TYPE_P (TREE_TYPE (x)))
2121         {
2122           /* This is just to make sure we don't end up with casts to
2123              void from error_mark_node.  If we just return x, then
2124              cp_fold_r might fold the operand into error_mark_node and
2125              leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2126              during gimplification doesn't like such casts.
2127              Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2128              folded operand should already be in the caches, and cp_fold_r
2129              will update the tree in place if it gets there.  */
2130           op0 = cp_fold (TREE_OPERAND (x, 0));
2131           if (op0 == error_mark_node)
2132             x = error_mark_node;
2133           break;
2134         }
2136       loc = EXPR_LOCATION (x);
2137       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2139       if (code == CONVERT_EXPR
2140           && SCALAR_TYPE_P (TREE_TYPE (x))
2141           && op0 != void_node)
2142         /* During parsing we used convert_to_*_nofold; re-convert now using the
2143            folding variants, since fold () doesn't do those transformations.  */
2144         x = fold (convert (TREE_TYPE (x), op0));
2145       else if (op0 != TREE_OPERAND (x, 0))
2146         {
2147           if (op0 == error_mark_node)
2148             x = error_mark_node;
2149           else
2150             x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2151         }
2152       else
2153         x = fold (x);
2155       /* Conversion of an out-of-range value has implementation-defined
2156          behavior; the language considers it different from arithmetic
2157          overflow, which is undefined.  */
2158       if (TREE_CODE (op0) == INTEGER_CST
2159           && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2160         TREE_OVERFLOW (x) = false;
2162       break;
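/* Editor's sketch, not part of the original file:

     signed char c = (signed char) 300;   // out of range, but well-defined

   The narrowing may mark the folded INTEGER_CST with TREE_OVERFLOW; since
   the operand 300 itself did not overflow, the flag is cleared here, as
   the conversion is implementation-defined rather than undefined.  */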
2164     case INDIRECT_REF:
2165       /* We don't need the decltype(auto) obfuscation anymore.  */
2166       if (REF_PARENTHESIZED_P (x))
2167         {
2168           tree p = maybe_undo_parenthesized_ref (x);
2169           return cp_fold (p);
2170         }
2171       goto unary;
2173     case ADDR_EXPR:
2174     case REALPART_EXPR:
2175     case IMAGPART_EXPR:
2176       rval_ops = false;
2177       /* FALLTHRU */
2178     case CONJ_EXPR:
2179     case FIX_TRUNC_EXPR:
2180     case FLOAT_EXPR:
2181     case NEGATE_EXPR:
2182     case ABS_EXPR:
2183     case BIT_NOT_EXPR:
2184     case TRUTH_NOT_EXPR:
2185     case FIXED_CONVERT_EXPR:
2186     unary:
2188       loc = EXPR_LOCATION (x);
2189       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2191       if (op0 != TREE_OPERAND (x, 0))
2192         {
2193           if (op0 == error_mark_node)
2194             x = error_mark_node;
2195           else
2196             {
2197               x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2198               if (code == INDIRECT_REF
2199                   && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2200                 {
2201                   TREE_READONLY (x) = TREE_READONLY (org_x);
2202                   TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2203                   TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2204                 }
2205             }
2206         }
2207       else
2208         x = fold (x);
2210       gcc_assert (TREE_CODE (x) != COND_EXPR
2211                   || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2212       break;
2214     case UNARY_PLUS_EXPR:
2215       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2216       if (op0 == error_mark_node)
2217         x = error_mark_node;
2218       else
2219         x = fold_convert (TREE_TYPE (x), op0);
2220       break;
2222     case POSTDECREMENT_EXPR:
2223     case POSTINCREMENT_EXPR:
2224     case INIT_EXPR:
2225     case PREDECREMENT_EXPR:
2226     case PREINCREMENT_EXPR:
2227     case COMPOUND_EXPR:
2228     case MODIFY_EXPR:
2229       rval_ops = false;
2230       /* FALLTHRU */
2231     case POINTER_PLUS_EXPR:
2232     case PLUS_EXPR:
2233     case POINTER_DIFF_EXPR:
2234     case MINUS_EXPR:
2235     case MULT_EXPR:
2236     case TRUNC_DIV_EXPR:
2237     case CEIL_DIV_EXPR:
2238     case FLOOR_DIV_EXPR:
2239     case ROUND_DIV_EXPR:
2240     case TRUNC_MOD_EXPR:
2241     case CEIL_MOD_EXPR:
2242     case ROUND_MOD_EXPR:
2243     case RDIV_EXPR:
2244     case EXACT_DIV_EXPR:
2245     case MIN_EXPR:
2246     case MAX_EXPR:
2247     case LSHIFT_EXPR:
2248     case RSHIFT_EXPR:
2249     case LROTATE_EXPR:
2250     case RROTATE_EXPR:
2251     case BIT_AND_EXPR:
2252     case BIT_IOR_EXPR:
2253     case BIT_XOR_EXPR:
2254     case TRUTH_AND_EXPR:
2255     case TRUTH_ANDIF_EXPR:
2256     case TRUTH_OR_EXPR:
2257     case TRUTH_ORIF_EXPR:
2258     case TRUTH_XOR_EXPR:
2259     case LT_EXPR: case LE_EXPR:
2260     case GT_EXPR: case GE_EXPR:
2261     case EQ_EXPR: case NE_EXPR:
2262     case UNORDERED_EXPR: case ORDERED_EXPR:
2263     case UNLT_EXPR: case UNLE_EXPR:
2264     case UNGT_EXPR: case UNGE_EXPR:
2265     case UNEQ_EXPR: case LTGT_EXPR:
2266     case RANGE_EXPR: case COMPLEX_EXPR:
2268       loc = EXPR_LOCATION (x);
2269       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2270       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2272       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2273         {
2274           if (op0 == error_mark_node || op1 == error_mark_node)
2275             x = error_mark_node;
2276           else
2277             x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2278         }
2279       else
2280         x = fold (x);
2282       if (TREE_NO_WARNING (org_x)
2283           && warn_nonnull_compare
2284           && COMPARISON_CLASS_P (org_x))
2285         {
2286           if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2287             ;
2288           else if (COMPARISON_CLASS_P (x))
2289             TREE_NO_WARNING (x) = 1;
2290           /* Otherwise give up on optimizing these; let the GIMPLE folders
2291              optimize them later on.  */
2292           else if (op0 != TREE_OPERAND (org_x, 0)
2293                    || op1 != TREE_OPERAND (org_x, 1))
2294             {
2295               x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2296               TREE_NO_WARNING (x) = 1;
2297             }
2298           else
2299             x = org_x;
2300         }
2301       break;
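/* Editor's note, not part of the original file: the TREE_NO_WARNING logic
   above exists so that a comparison the front end has already marked as
   "do not warn" for -Wnonnull-compare keeps that mark even when folding
   rebuilds the comparison as a new tree; otherwise the suppression would
   be lost and a later diagnostic could fire on the rebuilt tree.  */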
2303     case VEC_COND_EXPR:
2304     case COND_EXPR:
2305       loc = EXPR_LOCATION (x);
2306       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2307       op1 = cp_fold (TREE_OPERAND (x, 1));
2308       op2 = cp_fold (TREE_OPERAND (x, 2));
2310       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2311         {
2312           warning_sentinel s (warn_int_in_bool_context);
2313           if (!VOID_TYPE_P (TREE_TYPE (op1)))
2314             op1 = cp_truthvalue_conversion (op1);
2315           if (!VOID_TYPE_P (TREE_TYPE (op2)))
2316             op2 = cp_truthvalue_conversion (op2);
2317         }
2318       else if (VOID_TYPE_P (TREE_TYPE (x)))
2319         {
2320           if (TREE_CODE (op0) == INTEGER_CST)
2321             {
2322               /* If the condition is constant, fold can fold away
2323                  the COND_EXPR.  Some statement-level uses of COND_EXPR
2324                  leave one of the branches NULL, so avoid a folding crash.  */
2325               if (!op1)
2326                 op1 = build_empty_stmt (loc);
2327               if (!op2)
2328                 op2 = build_empty_stmt (loc);
2329             }
2330           else
2331             {
2332               /* Otherwise, don't bother folding a void condition, since
2333                  it can't produce a constant value.  */
2334               if (op0 != TREE_OPERAND (x, 0)
2335                   || op1 != TREE_OPERAND (x, 1)
2336                   || op2 != TREE_OPERAND (x, 2))
2337                 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2338               break;
2339             }
2340         }
2342       if (op0 != TREE_OPERAND (x, 0)
2343           || op1 != TREE_OPERAND (x, 1)
2344           || op2 != TREE_OPERAND (x, 2))
2345         {
2346           if (op0 == error_mark_node
2347               || op1 == error_mark_node
2348               || op2 == error_mark_node)
2349             x = error_mark_node;
2350           else
2351             x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2352         }
2353       else
2354         x = fold (x);
2356       /* A COND_EXPR might have incompatible types in branches if one or both
2357          arms are bitfields.  If folding exposed such a branch, fix it up.  */
2358       if (TREE_CODE (x) != code
2359           && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2360         x = fold_convert (TREE_TYPE (org_x), x);
2362       break;
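/* Editor's sketch, not part of the original file: a statement-level
   COND_EXPR with a missing arm, e.g. an `if' without `else',

     if (sizeof (long) == 8)
       do_this ();

   may reach here as a void COND_EXPR whose else-branch is NULL; with a
   constant condition the NULL arm is first replaced by an empty statement
   so fold cannot crash selecting it.  */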
2364     case CALL_EXPR:
2365       {
2366         int i, m, sv = optimize, nw = sv, changed = 0;
2367         tree callee = get_callee_fndecl (x);
2369         /* Some built-in function calls will be evaluated at compile-time in
2370            fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2371            a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2372         if (callee && DECL_BUILT_IN (callee) && !optimize
2373             && DECL_IS_BUILTIN_CONSTANT_P (callee)
2374             && current_function_decl
2375             && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2376           nw = 1;
2378         x = copy_node (x);
2380         m = call_expr_nargs (x);
2381         for (i = 0; i < m; i++)
2382           {
2383             r = cp_fold (CALL_EXPR_ARG (x, i));
2384             if (r != CALL_EXPR_ARG (x, i))
2385               {
2386                 if (r == error_mark_node)
2387                   {
2388                     x = error_mark_node;
2389                     break;
2390                   }
2391                 changed = 1;
2392               }
2393             CALL_EXPR_ARG (x, i) = r;
2394           }
2395         if (x == error_mark_node)
2396           break;
2398         optimize = nw;
2399         r = fold (x);
2400         optimize = sv;
2402         if (TREE_CODE (r) != CALL_EXPR)
2403           {
2404             x = cp_fold (r);
2405             break;
2406           }
2408         optimize = nw;
2410         /* Invoke maybe_constant_value for functions declared
2411            constexpr and not called with AGGR_INIT_EXPRs.
2412            TODO:
2413            Do constexpr expansion of expressions where the call itself is not
2414            constant, but the call followed by an INDIRECT_REF is.  */
2415         if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2416             && !flag_no_inline)
2417           r = maybe_constant_value (x);
2418         optimize = sv;
2420         if (TREE_CODE (r) != CALL_EXPR)
2421           {
2422             if (DECL_CONSTRUCTOR_P (callee))
2423               {
2424                 loc = EXPR_LOCATION (x);
2425                 tree s = build_fold_indirect_ref_loc (loc,
2426                                                       CALL_EXPR_ARG (x, 0));
2427                 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2428               }
2429             x = r;
2430             break;
2431           }
2433         if (!changed)
2434           x = org_x;
2435         break;
2436       }
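/* Editor's sketch, not part of the original file:

     constexpr int sq (int i) { return i * i; }
     int n = sq (4);    // CALL_EXPR folds to 16 via maybe_constant_value

   For a constexpr constructor the result is a value of object type, so
   the code above rebuilds it as an INIT_EXPR storing that value into the
   object the hidden first argument points at.  */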
2438     case CONSTRUCTOR:
2439       {
2440         unsigned i;
2441         constructor_elt *p;
2442         vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2443         vec<constructor_elt, va_gc> *nelts = NULL;
2444         FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2445           {
2446             tree op = cp_fold (p->value);
2447             if (op != p->value)
2448               {
2449                 if (op == error_mark_node)
2450                   {
2451                     x = error_mark_node;
2452                     vec_free (nelts);
2453                     break;
2454                   }
2455                 if (nelts == NULL)
2456                   nelts = elts->copy ();
2457                 (*nelts)[i].value = op;
2458               }
2459           }
2460         if (nelts)
2461           x = build_constructor (TREE_TYPE (x), nelts);
2462         break;
2463       }
2464     case TREE_VEC:
2465       {
2466         bool changed = false;
2467         vec<tree, va_gc> *vec = make_tree_vector ();
2468         int i, n = TREE_VEC_LENGTH (x);
2469         vec_safe_reserve (vec, n);
2471         for (i = 0; i < n; i++)
2472           {
2473             tree op = cp_fold (TREE_VEC_ELT (x, i));
2474             vec->quick_push (op);
2475             if (op != TREE_VEC_ELT (x, i))
2476               changed = true;
2477           }
2479         if (changed)
2480           {
2481             r = copy_node (x);
2482             for (i = 0; i < n; i++)
2483               TREE_VEC_ELT (r, i) = (*vec)[i];
2484             x = r;
2485           }
2487         release_tree_vector (vec);
2488       }
2490       break;
2492     case ARRAY_REF:
2493     case ARRAY_RANGE_REF:
2495       loc = EXPR_LOCATION (x);
2496       op0 = cp_fold (TREE_OPERAND (x, 0));
2497       op1 = cp_fold (TREE_OPERAND (x, 1));
2498       op2 = cp_fold (TREE_OPERAND (x, 2));
2499       op3 = cp_fold (TREE_OPERAND (x, 3));
2501       if (op0 != TREE_OPERAND (x, 0)
2502           || op1 != TREE_OPERAND (x, 1)
2503           || op2 != TREE_OPERAND (x, 2)
2504           || op3 != TREE_OPERAND (x, 3))
2505         {
2506           if (op0 == error_mark_node
2507               || op1 == error_mark_node
2508               || op2 == error_mark_node
2509               || op3 == error_mark_node)
2510             x = error_mark_node;
2511           else
2512             {
2513               x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2514               TREE_READONLY (x) = TREE_READONLY (org_x);
2515               TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2516               TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2517             }
2518         }
2519       else
2520         x = fold (x);
2521       break;
2523     case SAVE_EXPR:
2524       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2525          folding, evaluates to an invariant.  In that case there is no need
2526          to wrap the folded tree in a SAVE_EXPR.  */
2527       r = cp_fold (TREE_OPERAND (x, 0));
2528       if (tree_invariant_p (r))
2529         x = r;
2530       break;
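/* Editor's sketch, not part of the original file:
   SAVE_EXPR <(0 * i) + (0 * j)> folds to the invariant 0 and is returned
   bare, while SAVE_EXPR <i + 1> stays wrapped because `i + 1' must be
   evaluated once and its value reused.  */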
2532     default:
2533       return org_x;
2534     }
2536   fold_cache->put (org_x, x);
2537   /* Make sure we don't try to fold an already folded result again.  */
2538   if (x != org_x)
2539     fold_cache->put (x, x);
2541   return x;
2542 }
2544 #include "gt-cp-cp-gimplify.h"