gcc/cp/cp-gimplify.c
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "cilk.h"
35 #include "cp-cilkplus.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "asan.h"
40 /* Forward declarations. */
42 static tree cp_genericize_r (tree *, int *, void *);
43 static tree cp_fold_r (tree *, int *, void *);
44 static void cp_genericize_tree (tree*, bool);
45 static tree cp_fold (tree);
47 /* Local declarations. */
49 enum bc_t { bc_break = 0, bc_continue = 1 };
51 /* Stack of labels which are targets for "break" or "continue",
52 linked through TREE_CHAIN. */
53 static tree bc_label[2];
55 /* Begin a scope which can be exited by a break or continue statement. BC
56 indicates which.
58 Just creates a label with location LOCATION and pushes it into the current
59 context. */
61 static tree
62 begin_bc_block (enum bc_t bc, location_t location)
64 tree label = create_artificial_label (location);
65 DECL_CHAIN (label) = bc_label[bc];
66 bc_label[bc] = label;
67 if (bc == bc_break)
68 LABEL_DECL_BREAK (label) = true;
69 else
70 LABEL_DECL_CONTINUE (label) = true;
71 return label;
74 /* Finish a scope which can be exited by a break or continue statement.
75 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
76 an expression for the contents of the scope.
78 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
79 BLOCK. Otherwise, just forget the label. */
81 static void
82 finish_bc_block (tree *block, enum bc_t bc, tree label)
84 gcc_assert (label == bc_label[bc]);
86 if (TREE_USED (label))
87 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
88 block);
90 bc_label[bc] = DECL_CHAIN (label);
91 DECL_CHAIN (label) = NULL_TREE;
94 /* Get the LABEL_EXPR to represent a break or continue statement
95 in the current block scope. BC indicates which. */
97 static tree
98 get_bc_label (enum bc_t bc)
100 tree label = bc_label[bc];
102 /* Mark the label used for finish_bc_block. */
103 TREE_USED (label) = 1;
104 return label;
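/* A sketch of how the three helpers above fit together, modeled on the
   genericize_*_stmt functions below (illustrative only, not GCC source):

     tree blab = begin_bc_block (bc_break, locus);
     tree clab = begin_bc_block (bc_continue, locus);
     ... walk the body; each break/continue lowers to a GOTO_EXPR
         targeting get_bc_label (bc_break/bc_continue) ...
     finish_bc_block (&stmt_list, bc_continue, clab);
     finish_bc_block (&stmt_list, bc_break, blab);

   Scopes must be finished in the reverse order they were begun, since
   each finish_bc_block pops the label pushed by the matching begin;
   the gcc_assert above enforces this.  */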
107 /* Genericize a TRY_BLOCK. */
109 static void
110 genericize_try_block (tree *stmt_p)
112 tree body = TRY_STMTS (*stmt_p);
113 tree cleanup = TRY_HANDLERS (*stmt_p);
115 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
118 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
120 static void
121 genericize_catch_block (tree *stmt_p)
123 tree type = HANDLER_TYPE (*stmt_p);
124 tree body = HANDLER_BODY (*stmt_p);
126 /* FIXME should the caught type go in TREE_TYPE? */
127 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
130 /* A terser interface for building a representation of an exception
131 specification. */
133 static tree
134 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
136 tree t;
138 /* FIXME should the allowed types go in TREE_TYPE? */
139 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
140 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
142 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
143 append_to_statement_list (body, &TREE_OPERAND (t, 0));
145 return t;
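/* For reference, the tree built above has this shape (a sketch of the
   GENERIC form, not a literal dump):

     TRY_CATCH_EXPR
       op0: BODY
       op1: EH_FILTER_EXPR
              EH_FILTER_TYPES: ALLOWED
              EH_FILTER_FAILURE: FAILURE

   i.e. if BODY throws a type not listed in ALLOWED, FAILURE runs.  */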
148 /* Genericize an EH_SPEC_BLOCK by converting it to a
149 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
151 static void
152 genericize_eh_spec_block (tree *stmt_p)
154 tree body = EH_SPEC_STMTS (*stmt_p);
155 tree allowed = EH_SPEC_RAISES (*stmt_p);
156 tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
158 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
159 TREE_NO_WARNING (*stmt_p) = true;
160 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
163 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
165 static void
166 genericize_if_stmt (tree *stmt_p)
168 tree stmt, cond, then_, else_;
169 location_t locus = EXPR_LOCATION (*stmt_p);
171 stmt = *stmt_p;
172 cond = IF_COND (stmt);
173 then_ = THEN_CLAUSE (stmt);
174 else_ = ELSE_CLAUSE (stmt);
176 if (!then_)
177 then_ = build_empty_stmt (locus);
178 if (!else_)
179 else_ = build_empty_stmt (locus);
181 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
182 stmt = then_;
183 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
184 stmt = else_;
185 else
186 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
187 if (!EXPR_HAS_LOCATION (stmt))
188 protected_set_expr_location (stmt, locus);
189 *stmt_p = stmt;
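/* For example (hypothetical input, assuming the dead arm has no side
   effects):

     if (1) f (); else g ();   -->  f ();
     if (0) f (); else g ();   -->  g ();

   A non-constant condition instead yields a plain COND_EXPR.  */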
192 /* Build a generic representation of one of the C loop forms. COND is the
193 loop condition or NULL_TREE. BODY is the (possibly compound) statement
194 controlled by the loop. INCR is the increment expression of a for-loop,
195 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
196 evaluated before the loop body as in while and for loops, or after the
197 loop body as in do-while loops. */
199 static void
200 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
201 tree incr, bool cond_is_first, int *walk_subtrees,
202 void *data)
204 tree blab, clab;
205 tree exit = NULL;
206 tree stmt_list = NULL;
208 blab = begin_bc_block (bc_break, start_locus);
209 clab = begin_bc_block (bc_continue, start_locus);
211 protected_set_expr_location (incr, start_locus);
213 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
214 cp_walk_tree (&body, cp_genericize_r, data, NULL);
215 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
216 *walk_subtrees = 0;
218 if (cond && TREE_CODE (cond) != INTEGER_CST)
220 /* If COND is constant, don't bother building an exit. If it's false,
221 we won't build a loop. If it's true, any exits are in the body. */
222 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
223 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
224 get_bc_label (bc_break));
225 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
226 build_empty_stmt (cloc), exit);
229 if (exit && cond_is_first)
230 append_to_statement_list (exit, &stmt_list);
231 append_to_statement_list (body, &stmt_list);
232 finish_bc_block (&stmt_list, bc_continue, clab);
233 append_to_statement_list (incr, &stmt_list);
234 if (exit && !cond_is_first)
235 append_to_statement_list (exit, &stmt_list);
237 if (!stmt_list)
238 stmt_list = build_empty_stmt (start_locus);
240 tree loop;
241 if (cond && integer_zerop (cond))
243 if (cond_is_first)
244 loop = fold_build3_loc (start_locus, COND_EXPR,
245 void_type_node, cond, stmt_list,
246 build_empty_stmt (start_locus));
247 else
248 loop = stmt_list;
250 else
252 location_t loc = start_locus;
253 if (!cond || integer_nonzerop (cond))
254 loc = EXPR_LOCATION (expr_first (body));
255 if (loc == UNKNOWN_LOCATION)
256 loc = start_locus;
257 loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
260 stmt_list = NULL;
261 append_to_statement_list (loop, &stmt_list);
262 finish_bc_block (&stmt_list, bc_break, blab);
263 if (!stmt_list)
264 stmt_list = build_empty_stmt (start_locus);
266 *stmt_p = stmt_list;
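/* As a sketch, "while (cond) body" comes out of the above roughly as:

     LOOP_EXPR
       if (!cond) goto break_lab;   // the EXIT built above
       body
       continue_lab:
       incr                         // for-loops only
     break_lab:

   For do-while (cond_is_first false), the exit test is placed after
   the body instead of before it.  */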
269 /* Genericize a FOR_STMT node *STMT_P. */
271 static void
272 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
274 tree stmt = *stmt_p;
275 tree expr = NULL;
276 tree loop;
277 tree init = FOR_INIT_STMT (stmt);
279 if (init)
281 cp_walk_tree (&init, cp_genericize_r, data, NULL);
282 append_to_statement_list (init, &expr);
285 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
286 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
287 append_to_statement_list (loop, &expr);
288 if (expr == NULL_TREE)
289 expr = loop;
290 *stmt_p = expr;
293 /* Genericize a WHILE_STMT node *STMT_P. */
295 static void
296 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
298 tree stmt = *stmt_p;
299 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
300 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
303 /* Genericize a DO_STMT node *STMT_P. */
305 static void
306 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
308 tree stmt = *stmt_p;
309 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
310 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
315 static void
316 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
318 tree stmt = *stmt_p;
319 tree break_block, body, cond, type;
320 location_t stmt_locus = EXPR_LOCATION (stmt);
322 break_block = begin_bc_block (bc_break, stmt_locus);
324 body = SWITCH_STMT_BODY (stmt);
325 if (!body)
326 body = build_empty_stmt (stmt_locus);
327 cond = SWITCH_STMT_COND (stmt);
328 type = SWITCH_STMT_TYPE (stmt);
330 cp_walk_tree (&body, cp_genericize_r, data, NULL);
331 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
332 cp_walk_tree (&type, cp_genericize_r, data, NULL);
333 *walk_subtrees = 0;
335 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
336 finish_bc_block (stmt_p, bc_break, break_block);
339 /* Genericize a CONTINUE_STMT node *STMT_P. */
341 static void
342 genericize_continue_stmt (tree *stmt_p)
344 tree stmt_list = NULL;
345 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
346 tree label = get_bc_label (bc_continue);
347 location_t location = EXPR_LOCATION (*stmt_p);
348 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
349 append_to_statement_list_force (pred, &stmt_list);
350 append_to_statement_list (jump, &stmt_list);
351 *stmt_p = stmt_list;
354 /* Genericize a BREAK_STMT node *STMT_P. */
356 static void
357 genericize_break_stmt (tree *stmt_p)
359 tree label = get_bc_label (bc_break);
360 location_t location = EXPR_LOCATION (*stmt_p);
361 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
364 /* Genericize an OMP_FOR node *STMT_P.  */
366 static void
367 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
369 tree stmt = *stmt_p;
370 location_t locus = EXPR_LOCATION (stmt);
371 tree clab = begin_bc_block (bc_continue, locus);
373 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
374 if (TREE_CODE (stmt) != OMP_TASKLOOP)
375 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
377 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
378 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
379 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
380 *walk_subtrees = 0;
382 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
385 /* Hook into the middle of gimplifying an OMP_FOR node. */
387 static enum gimplify_status
388 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
390 tree for_stmt = *expr_p;
391 gimple_seq seq = NULL;
393 /* Protect ourselves from recursion. */
394 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
395 return GS_UNHANDLED;
396 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
398 gimplify_and_add (for_stmt, &seq);
399 gimple_seq_add_seq (pre_p, seq);
401 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
403 return GS_ALL_DONE;
406 /* Gimplify an EXPR_STMT node. */
408 static void
409 gimplify_expr_stmt (tree *stmt_p)
411 tree stmt = EXPR_STMT_EXPR (*stmt_p);
413 if (stmt == error_mark_node)
414 stmt = NULL;
416 /* Gimplification of a statement expression will nullify the
417 statement if all its side effects are moved to *PRE_P and *POST_P.
419 In this case we will not want to emit the gimplified statement.
420 However, we may still want to emit a warning, so we do that before
421 gimplification. */
422 if (stmt && warn_unused_value)
424 if (!TREE_SIDE_EFFECTS (stmt))
426 if (!IS_EMPTY_STMT (stmt)
427 && !VOID_TYPE_P (TREE_TYPE (stmt))
428 && !TREE_NO_WARNING (stmt))
429 warning (OPT_Wunused_value, "statement with no effect");
431 else
432 warn_if_unused_value (stmt, input_location);
435 if (stmt == NULL_TREE)
436 stmt = alloc_stmt_list ();
438 *stmt_p = stmt;
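/* For instance (hypothetical user code), this statement triggers the
   warning above:

     i + 1;    // "statement with no effect"

   while an empty statement, a void-typed expression, or one with side
   effects does not.  */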
441 /* Gimplify initialization from an AGGR_INIT_EXPR. */
443 static void
444 cp_gimplify_init_expr (tree *expr_p)
446 tree from = TREE_OPERAND (*expr_p, 1);
447 tree to = TREE_OPERAND (*expr_p, 0);
448 tree t;
450 /* What about code that pulls out the temp and uses it elsewhere? I
451 think that such code never uses the TARGET_EXPR as an initializer. If
452 I'm wrong, we'll abort because the temp won't have any RTL. In that
453 case, I guess we'll need to replace references somehow. */
454 if (TREE_CODE (from) == TARGET_EXPR)
455 from = TARGET_EXPR_INITIAL (from);
457 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
458 inside the TARGET_EXPR. */
459 for (t = from; t; )
461 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
463 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
464 replace the slot operand with our target.
466 Should we add a target parm to gimplify_expr instead? No, as in this
467 case we want to replace the INIT_EXPR. */
468 if (TREE_CODE (sub) == AGGR_INIT_EXPR
469 || TREE_CODE (sub) == VEC_INIT_EXPR)
471 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
472 AGGR_INIT_EXPR_SLOT (sub) = to;
473 else
474 VEC_INIT_EXPR_SLOT (sub) = to;
475 *expr_p = from;
477 /* The initialization is now a side-effect, so the container can
478 become void. */
479 if (from != sub)
480 TREE_TYPE (from) = void_type_node;
483 /* Handle aggregate NSDMI. */
484 replace_placeholders (sub, to);
486 if (t == sub)
487 break;
488 else
489 t = TREE_OPERAND (t, 1);
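/* A sketch of the transformation above (illustrative, not literal dumps):

     to = AGGR_INIT_EXPR <slot, ctor, args...>
   becomes
     AGGR_INIT_EXPR <to, ctor, args...>

   i.e. the INIT_EXPR is dropped and the constructor builds directly into
   TO rather than into a temporary slot.  */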
494 /* Gimplify a MUST_NOT_THROW_EXPR. */
496 static enum gimplify_status
497 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
499 tree stmt = *expr_p;
500 tree temp = voidify_wrapper_expr (stmt, NULL);
501 tree body = TREE_OPERAND (stmt, 0);
502 gimple_seq try_ = NULL;
503 gimple_seq catch_ = NULL;
504 gimple *mnt;
506 gimplify_and_add (body, &try_);
507 mnt = gimple_build_eh_must_not_throw (terminate_fn);
508 gimple_seq_add_stmt_without_update (&catch_, mnt);
509 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
511 gimple_seq_add_stmt_without_update (pre_p, mnt);
512 if (temp)
514 *expr_p = temp;
515 return GS_OK;
518 *expr_p = NULL;
519 return GS_ALL_DONE;
522 /* Return TRUE if an operand (OP) of a given TYPE being copied is
523 really just an empty class copy.
525 Check that the operand has a simple form so that TARGET_EXPRs and
526 non-empty CONSTRUCTORs get reduced properly, and we leave the
527 return slot optimization alone because it isn't a copy. */
529 static bool
530 simple_empty_class_p (tree type, tree op)
532 return
533 ((TREE_CODE (op) == COMPOUND_EXPR
534 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
535 || TREE_CODE (op) == EMPTY_CLASS_EXPR
536 || is_gimple_lvalue (op)
537 || INDIRECT_REF_P (op)
538 || (TREE_CODE (op) == CONSTRUCTOR
539 && CONSTRUCTOR_NELTS (op) == 0
540 && !TREE_CLOBBER_P (op))
541 || (TREE_CODE (op) == CALL_EXPR
542 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
543 && is_really_empty_class (type);
546 /* Returns true if evaluating E as an lvalue has side-effects;
547 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
548 have side-effects until there is a read or write through it. */
550 static bool
551 lvalue_has_side_effects (tree e)
553 if (!TREE_SIDE_EFFECTS (e))
554 return false;
555 while (handled_component_p (e))
557 if (TREE_CODE (e) == ARRAY_REF
558 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
559 return true;
560 e = TREE_OPERAND (e, 0);
562 if (DECL_P (e))
563 /* Just naming a variable has no side-effects. */
564 return false;
565 else if (INDIRECT_REF_P (e))
566 /* Similarly, indirection has no side-effects. */
567 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
568 else
569 /* For anything else, trust TREE_SIDE_EFFECTS. */
570 return TREE_SIDE_EFFECTS (e);
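/* Illustrative cases (hypothetical expressions):

     a[i++]   -> true: the index operand has side effects.
     *p       -> false: indirection alone performs no access.
     vol_var  -> false: merely naming a volatile variable reads nothing.
     f ().m   -> true: the call makes TREE_SIDE_EFFECTS hold.  */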
573 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
576 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
578 int saved_stmts_are_full_exprs_p = 0;
579 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
580 enum tree_code code = TREE_CODE (*expr_p);
581 enum gimplify_status ret;
583 if (STATEMENT_CODE_P (code))
585 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
586 current_stmt_tree ()->stmts_are_full_exprs_p
587 = STMT_IS_FULL_EXPR_P (*expr_p);
590 switch (code)
592 case AGGR_INIT_EXPR:
593 simplify_aggr_init_expr (expr_p);
594 ret = GS_OK;
595 break;
597 case VEC_INIT_EXPR:
599 location_t loc = input_location;
600 tree init = VEC_INIT_EXPR_INIT (*expr_p);
601 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
602 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
603 input_location = EXPR_LOCATION (*expr_p);
604 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
605 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
606 from_array,
607 tf_warning_or_error);
608 hash_set<tree> pset;
609 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
610 cp_genericize_tree (expr_p, false);
611 ret = GS_OK;
612 input_location = loc;
614 break;
616 case THROW_EXPR:
617 /* FIXME communicate throw type to back end, probably by moving
618 THROW_EXPR into ../tree.def. */
619 *expr_p = TREE_OPERAND (*expr_p, 0);
620 ret = GS_OK;
621 break;
623 case MUST_NOT_THROW_EXPR:
624 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
625 break;
627 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
628 LHS of an assignment might also be involved in the RHS, as in bug
629 25979. */
630 case INIT_EXPR:
631 if (fn_contains_cilk_spawn_p (cfun))
633 if (cilk_cp_detect_spawn_and_unwrap (expr_p))
634 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
635 if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
636 return GS_ERROR;
639 cp_gimplify_init_expr (expr_p);
640 if (TREE_CODE (*expr_p) != INIT_EXPR)
641 return GS_OK;
642 /* Fall through. */
643 case MODIFY_EXPR:
644 modify_expr_case:
646 if (fn_contains_cilk_spawn_p (cfun)
647 && cilk_cp_detect_spawn_and_unwrap (expr_p)
648 && !seen_error ())
649 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
650 /* If the back end isn't clever enough to know that the lhs and rhs
651 types are the same, add an explicit conversion. */
652 tree op0 = TREE_OPERAND (*expr_p, 0);
653 tree op1 = TREE_OPERAND (*expr_p, 1);
655 if (!error_operand_p (op0)
656 && !error_operand_p (op1)
657 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
658 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
659 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
660 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
661 TREE_TYPE (op0), op1);
663 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
665 /* Remove any copies of empty classes. Also drop volatile
666 variables on the RHS to avoid infinite recursion from
667 gimplify_expr trying to load the value. */
668 if (TREE_SIDE_EFFECTS (op1))
670 if (TREE_THIS_VOLATILE (op1)
671 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
672 op1 = build_fold_addr_expr (op1);
674 gimplify_and_add (op1, pre_p);
676 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
677 is_gimple_lvalue, fb_lvalue);
678 *expr_p = TREE_OPERAND (*expr_p, 0);
680 /* P0145 says that the RHS is sequenced before the LHS.
681 gimplify_modify_expr gimplifies the RHS before the LHS, but that
682 isn't quite strong enough in two cases:
684 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
685 mean it's evaluated after the LHS.
687 2) the value calculation of the RHS is also sequenced before the
688 LHS, so for scalar assignment we need to preevaluate if the
689 RHS could be affected by LHS side-effects even if it has no
690 side-effects of its own. We don't need this for classes because
691 class assignment takes its RHS by reference. */
692 else if (flag_strong_eval_order > 1
693 && TREE_CODE (*expr_p) == MODIFY_EXPR
694 && lvalue_has_side_effects (op0)
695 && (TREE_CODE (op1) == CALL_EXPR
696 || (SCALAR_TYPE_P (TREE_TYPE (op1))
697 && !TREE_CONSTANT (op1))))
698 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
700 ret = GS_OK;
701 break;
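/* A concrete instance of case 2 above (hypothetical user code):

     a[i++] = f ();

   The LHS has side effects and the RHS is a call, so when
   -fstrong-eval-order is in full effect (the C++17 default) the RHS is
   forced into a temporary by get_formal_tmp_var above, keeping its
   evaluation sequenced before the LHS side effects.  */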
703 case EMPTY_CLASS_EXPR:
704 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
705 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
706 ret = GS_OK;
707 break;
709 case BASELINK:
710 *expr_p = BASELINK_FUNCTIONS (*expr_p);
711 ret = GS_OK;
712 break;
714 case TRY_BLOCK:
715 genericize_try_block (expr_p);
716 ret = GS_OK;
717 break;
719 case HANDLER:
720 genericize_catch_block (expr_p);
721 ret = GS_OK;
722 break;
724 case EH_SPEC_BLOCK:
725 genericize_eh_spec_block (expr_p);
726 ret = GS_OK;
727 break;
729 case USING_STMT:
730 gcc_unreachable ();
732 case FOR_STMT:
733 case WHILE_STMT:
734 case DO_STMT:
735 case SWITCH_STMT:
736 case CONTINUE_STMT:
737 case BREAK_STMT:
738 gcc_unreachable ();
740 case OMP_FOR:
741 case OMP_SIMD:
742 case OMP_DISTRIBUTE:
743 case OMP_TASKLOOP:
744 ret = cp_gimplify_omp_for (expr_p, pre_p);
745 break;
747 case EXPR_STMT:
748 gimplify_expr_stmt (expr_p);
749 ret = GS_OK;
750 break;
752 case UNARY_PLUS_EXPR:
754 tree arg = TREE_OPERAND (*expr_p, 0);
755 tree type = TREE_TYPE (*expr_p);
756 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
757 : arg;
758 ret = GS_OK;
760 break;
762 case CILK_SPAWN_STMT:
763 gcc_assert (fn_contains_cilk_spawn_p (cfun)
764 && cilk_cp_detect_spawn_and_unwrap (expr_p));
766 if (!seen_error ())
767 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
768 return GS_ERROR;
770 case CALL_EXPR:
771 if (fn_contains_cilk_spawn_p (cfun)
772 && cilk_cp_detect_spawn_and_unwrap (expr_p)
773 && !seen_error ())
774 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
775 ret = GS_OK;
776 if (!CALL_EXPR_FN (*expr_p))
777 /* Internal function call. */;
778 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
780 /* This is a call to a (compound) assignment operator that used
781 the operator syntax; gimplify the RHS first. */
782 gcc_assert (call_expr_nargs (*expr_p) == 2);
783 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
784 enum gimplify_status t
785 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
786 if (t == GS_ERROR)
787 ret = GS_ERROR;
789 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
791 /* Leave the last argument for gimplify_call_expr, to avoid problems
792 with __builtin_va_arg_pack(). */
793 int nargs = call_expr_nargs (*expr_p) - 1;
794 for (int i = 0; i < nargs; ++i)
796 enum gimplify_status t
797 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
798 if (t == GS_ERROR)
799 ret = GS_ERROR;
802 else if (flag_strong_eval_order
803 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
805 /* If flag_strong_eval_order, evaluate the object argument first. */
806 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
807 if (POINTER_TYPE_P (fntype))
808 fntype = TREE_TYPE (fntype);
809 if (TREE_CODE (fntype) == METHOD_TYPE)
811 enum gimplify_status t
812 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
813 if (t == GS_ERROR)
814 ret = GS_ERROR;
817 break;
819 case RETURN_EXPR:
820 if (TREE_OPERAND (*expr_p, 0)
821 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
822 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
824 expr_p = &TREE_OPERAND (*expr_p, 0);
825 code = TREE_CODE (*expr_p);
826 /* Avoid going through the INIT_EXPR case, which can
827 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
828 goto modify_expr_case;
830 /* Fall through. */
832 default:
833 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
834 break;
837 /* Restore saved state. */
838 if (STATEMENT_CODE_P (code))
839 current_stmt_tree ()->stmts_are_full_exprs_p
840 = saved_stmts_are_full_exprs_p;
842 return ret;
845 static inline bool
846 is_invisiref_parm (const_tree t)
848 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
849 && DECL_BY_REFERENCE (t));
852 /* Return true if the UIDs in both int tree maps are equal.  */
854 bool
855 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
857 return (a->uid == b->uid);
860 /* Hash a UID in a cxx_int_tree_map. */
862 unsigned int
863 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
865 return item->uid;
868 /* A stable comparison routine for use with splay trees and DECLs. */
870 static int
871 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
873 tree a = (tree) xa;
874 tree b = (tree) xb;
876 return DECL_UID (a) - DECL_UID (b);
879 /* OpenMP context during genericization. */
881 struct cp_genericize_omp_taskreg
883 bool is_parallel;
884 bool default_shared;
885 struct cp_genericize_omp_taskreg *outer;
886 splay_tree variables;
889 /* Return true if genericization should try to determine if
890 DECL is firstprivate or shared within task regions. */
892 static bool
893 omp_var_to_track (tree decl)
895 tree type = TREE_TYPE (decl);
896 if (is_invisiref_parm (decl))
897 type = TREE_TYPE (type);
898 while (TREE_CODE (type) == ARRAY_TYPE)
899 type = TREE_TYPE (type);
900 if (type == error_mark_node || !CLASS_TYPE_P (type))
901 return false;
902 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
903 return false;
904 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
905 return false;
906 return true;
909 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
911 static void
912 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
914 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
915 (splay_tree_key) decl);
916 if (n == NULL)
918 int flags = OMP_CLAUSE_DEFAULT_SHARED;
919 if (omp_ctx->outer)
920 omp_cxx_notice_variable (omp_ctx->outer, decl);
921 if (!omp_ctx->default_shared)
923 struct cp_genericize_omp_taskreg *octx;
925 for (octx = omp_ctx->outer; octx; octx = octx->outer)
927 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
928 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
930 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
931 break;
933 if (octx->is_parallel)
934 break;
936 if (octx == NULL
937 && (TREE_CODE (decl) == PARM_DECL
938 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
939 && DECL_CONTEXT (decl) == current_function_decl)))
940 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
941 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
943 /* DECL is implicitly determined firstprivate in
944 the current task construct. Ensure copy ctor and
945 dtor are instantiated, because during gimplification
946 it will be already too late. */
947 tree type = TREE_TYPE (decl);
948 if (is_invisiref_parm (decl))
949 type = TREE_TYPE (type);
950 while (TREE_CODE (type) == ARRAY_TYPE)
951 type = TREE_TYPE (type);
952 get_copy_ctor (type, tf_none);
953 get_dtor (type, tf_none);
956 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
960 /* Genericization context. */
962 struct cp_genericize_data
964 hash_set<tree> *p_set;
965 vec<tree> bind_expr_stack;
966 struct cp_genericize_omp_taskreg *omp_ctx;
967 tree try_block;
968 bool no_sanitize_p;
969 bool handle_invisiref_parm_p;
972 /* Perform any pre-gimplification folding of C++ front end trees to
973 GENERIC.
974 Note: The folding of non-OMP cases should eventually move into the
975 middle end.  For now most folding happens only on GENERIC in
976 fold-const, so we need to perform this before the transformation to
977 GIMPLE form.  */
979 static tree
980 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
982 tree stmt;
983 enum tree_code code;
985 *stmt_p = stmt = cp_fold (*stmt_p);
987 if (((hash_set<tree> *) data)->add (stmt))
989 /* Don't walk subtrees of stmts we've already walked once, otherwise
990 we can have exponential complexity with e.g. lots of nested
991 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
992 always the same tree, which the first time cp_fold_r has been
993 called on it had the subtrees walked. */
994 *walk_subtrees = 0;
995 return NULL;
998 code = TREE_CODE (stmt);
999 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
1000 || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
1001 || code == OACC_LOOP)
1003 tree x;
1004 int i, n;
1006 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1007 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1008 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1009 x = OMP_FOR_COND (stmt);
1010 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1012 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1013 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1015 else if (x && TREE_CODE (x) == TREE_VEC)
1017 n = TREE_VEC_LENGTH (x);
1018 for (i = 0; i < n; i++)
1020 tree o = TREE_VEC_ELT (x, i);
1021 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1022 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1025 x = OMP_FOR_INCR (stmt);
1026 if (x && TREE_CODE (x) == TREE_VEC)
1028 n = TREE_VEC_LENGTH (x);
1029 for (i = 0; i < n; i++)
1031 tree o = TREE_VEC_ELT (x, i);
1032 if (o && TREE_CODE (o) == MODIFY_EXPR)
1033 o = TREE_OPERAND (o, 1);
1034 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1035 || TREE_CODE (o) == POINTER_PLUS_EXPR))
1037 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1038 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1042 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1043 *walk_subtrees = 0;
1046 return NULL;
1049 /* Fold ALL the trees! FIXME we should be able to remove this, but
1050 apparently that still causes optimization regressions. */
1052 void
1053 cp_fold_function (tree fndecl)
1055 hash_set<tree> pset;
1056 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
1059 /* Perform any pre-gimplification lowering of C++ front end trees to
1060 GENERIC. */
1062 static tree
1063 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1065 tree stmt = *stmt_p;
1066 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1067 hash_set<tree> *p_set = wtd->p_set;
1069 /* If in an OpenMP context, note var uses. */
1070 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1071 && (VAR_P (stmt)
1072 || TREE_CODE (stmt) == PARM_DECL
1073 || TREE_CODE (stmt) == RESULT_DECL)
1074 && omp_var_to_track (stmt))
1075 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1077 /* Dereference invisible reference parms. */
1078 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1080 *stmt_p = convert_from_reference (stmt);
1081 p_set->add (*stmt_p);
1082 *walk_subtrees = 0;
1083 return NULL;
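/* A sketch of the effect: for a parm S s that must be passed by
   invisible reference, the parm's type has been rewritten to a
   reference by cp_genericize below, and each use of it here becomes an
   INDIRECT_REF via convert_from_reference, so later passes see an
   ordinary dereference.  */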
1086 /* Map block scope extern declarations to visible declarations with the
1087 same name and type in outer scopes if any. */
1088 if (cp_function_chain->extern_decl_map
1089 && VAR_OR_FUNCTION_DECL_P (stmt)
1090 && DECL_EXTERNAL (stmt))
1092 struct cxx_int_tree_map *h, in;
1093 in.uid = DECL_UID (stmt);
1094 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1095 if (h)
1097 *stmt_p = h->to;
1098 *walk_subtrees = 0;
1099 return NULL;
1103 if (TREE_CODE (stmt) == INTEGER_CST
1104 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
1105 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1106 && !wtd->no_sanitize_p)
1108 ubsan_maybe_instrument_reference (stmt_p);
1109 if (*stmt_p != stmt)
1111 *walk_subtrees = 0;
1112 return NULL_TREE;
1116 /* Other than invisiref parms, don't walk the same tree twice. */
1117 if (p_set->contains (stmt))
1119 *walk_subtrees = 0;
1120 return NULL_TREE;
1123 switch (TREE_CODE (stmt))
1125 case ADDR_EXPR:
1126 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1128 /* If in an OpenMP context, note var uses. */
1129 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1130 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1131 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1132 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1133 *walk_subtrees = 0;
1135 break;
1137 case RETURN_EXPR:
1138 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1139 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1140 *walk_subtrees = 0;
1141 break;
1143 case OMP_CLAUSE:
1144 switch (OMP_CLAUSE_CODE (stmt))
1146 case OMP_CLAUSE_LASTPRIVATE:
1147 /* Don't dereference an invisiref in OpenMP clauses. */
1148 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1150 *walk_subtrees = 0;
1151 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1152 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1153 cp_genericize_r, data, NULL);
1155 break;
1156 case OMP_CLAUSE_PRIVATE:
1157 /* Don't dereference an invisiref in OpenMP clauses. */
1158 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1159 *walk_subtrees = 0;
1160 else if (wtd->omp_ctx != NULL)
1162 /* Private clause doesn't cause any references to the
1163 var in outer contexts, avoid calling
1164 omp_cxx_notice_variable for it. */
1165 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1166 wtd->omp_ctx = NULL;
1167 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1168 data, NULL);
1169 wtd->omp_ctx = old;
1170 *walk_subtrees = 0;
1172 break;
1173 case OMP_CLAUSE_SHARED:
1174 case OMP_CLAUSE_FIRSTPRIVATE:
1175 case OMP_CLAUSE_COPYIN:
1176 case OMP_CLAUSE_COPYPRIVATE:
1177 /* Don't dereference an invisiref in OpenMP clauses. */
1178 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1179 *walk_subtrees = 0;
1180 break;
1181 case OMP_CLAUSE_REDUCTION:
1182 /* Don't dereference an invisiref in reduction clause's
1183 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1184 still needs to be genericized. */
1185 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1187 *walk_subtrees = 0;
1188 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1189 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1190 cp_genericize_r, data, NULL);
1191 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1192 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1193 cp_genericize_r, data, NULL);
1195 break;
1196 default:
1197 break;
1199 break;
1201 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1202 to lower this construct before scanning it, so we need to lower these
1203 before doing anything else. */
1204 case CLEANUP_STMT:
1205 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1206 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1207 : TRY_FINALLY_EXPR,
1208 void_type_node,
1209 CLEANUP_BODY (stmt),
1210 CLEANUP_EXPR (stmt));
1211 break;
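/* As an illustration, for a local whose type has a destructor

     { S s; ...body... }

   the CLEANUP_STMT lowers to roughly

     TRY_FINALLY_EXPR <body-using-s, s.~S ()>

   or to TRY_CATCH_EXPR when CLEANUP_EH_ONLY is set, i.e. when the
   cleanup should run only on the exception path.  */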
1213 case IF_STMT:
1214 genericize_if_stmt (stmt_p);
1215 /* *stmt_p has changed, tail recurse to handle it again. */
1216 return cp_genericize_r (stmt_p, walk_subtrees, data);
1218 /* COND_EXPR might have incompatible types in branches if one or both
1219 arms are bitfields. Fix it up now. */
1220 case COND_EXPR:
1222 tree type_left
1223 = (TREE_OPERAND (stmt, 1)
1224 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1225 : NULL_TREE);
1226 tree type_right
1227 = (TREE_OPERAND (stmt, 2)
1228 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1229 : NULL_TREE);
1230 if (type_left
1231 && !useless_type_conversion_p (TREE_TYPE (stmt),
1232 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1234 TREE_OPERAND (stmt, 1)
1235 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1236 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1237 type_left));
1239 if (type_right
1240 && !useless_type_conversion_p (TREE_TYPE (stmt),
1241 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1243 TREE_OPERAND (stmt, 2)
1244 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1245 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1246 type_right));
1249 break;
1251 case BIND_EXPR:
1252 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1254 tree decl;
1255 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1256 if (VAR_P (decl)
1257 && !DECL_EXTERNAL (decl)
1258 && omp_var_to_track (decl))
1260 splay_tree_node n
1261 = splay_tree_lookup (wtd->omp_ctx->variables,
1262 (splay_tree_key) decl);
1263 if (n == NULL)
1264 splay_tree_insert (wtd->omp_ctx->variables,
1265 (splay_tree_key) decl,
1266 TREE_STATIC (decl)
1267 ? OMP_CLAUSE_DEFAULT_SHARED
1268 : OMP_CLAUSE_DEFAULT_PRIVATE);
1271 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1273 /* The point here is to not sanitize static initializers. */
1274 bool no_sanitize_p = wtd->no_sanitize_p;
1275 wtd->no_sanitize_p = true;
1276 for (tree decl = BIND_EXPR_VARS (stmt);
1277 decl;
1278 decl = DECL_CHAIN (decl))
1279 if (VAR_P (decl)
1280 && TREE_STATIC (decl)
1281 && DECL_INITIAL (decl))
1282 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1283 wtd->no_sanitize_p = no_sanitize_p;
1285 wtd->bind_expr_stack.safe_push (stmt);
1286 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1287 cp_genericize_r, data, NULL);
1288 wtd->bind_expr_stack.pop ();
1289 break;
1291 case USING_STMT:
1293 tree block = NULL_TREE;
1295 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1296 BLOCK, and append an IMPORTED_DECL to its
1297 BLOCK_VARS chained list.  */
1298 if (wtd->bind_expr_stack.exists ())
1300 int i;
1301 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1302 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1303 break;
1305 if (block)
1307 tree using_directive;
1308 gcc_assert (TREE_OPERAND (stmt, 0));
1310 using_directive = make_node (IMPORTED_DECL);
1311 TREE_TYPE (using_directive) = void_type_node;
1313 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1314 = TREE_OPERAND (stmt, 0);
1315 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1316 BLOCK_VARS (block) = using_directive;
1318 /* The USING_STMT won't appear in GENERIC. */
1319 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1320 *walk_subtrees = 0;
1322 break;
1324 case DECL_EXPR:
1325 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1327 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1328 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1329 *walk_subtrees = 0;
1331 else
1333 tree d = DECL_EXPR_DECL (stmt);
1334 if (VAR_P (d))
1335 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1337 break;
1339 case OMP_PARALLEL:
1340 case OMP_TASK:
1341 case OMP_TASKLOOP:
1343 struct cp_genericize_omp_taskreg omp_ctx;
1344 tree c, decl;
1345 splay_tree_node n;
1347 *walk_subtrees = 0;
1348 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1349 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1350 omp_ctx.default_shared = omp_ctx.is_parallel;
1351 omp_ctx.outer = wtd->omp_ctx;
1352 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1353 wtd->omp_ctx = &omp_ctx;
1354 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1355 switch (OMP_CLAUSE_CODE (c))
1357 case OMP_CLAUSE_SHARED:
1358 case OMP_CLAUSE_PRIVATE:
1359 case OMP_CLAUSE_FIRSTPRIVATE:
1360 case OMP_CLAUSE_LASTPRIVATE:
1361 decl = OMP_CLAUSE_DECL (c);
1362 if (decl == error_mark_node || !omp_var_to_track (decl))
1363 break;
1364 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1365 if (n != NULL)
1366 break;
1367 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1368 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1369 ? OMP_CLAUSE_DEFAULT_SHARED
1370 : OMP_CLAUSE_DEFAULT_PRIVATE);
1371 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1372 omp_cxx_notice_variable (omp_ctx.outer, decl);
1373 break;
1374 case OMP_CLAUSE_DEFAULT:
1375 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1376 omp_ctx.default_shared = true;
1377 default:
1378 break;
1380 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1381 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1382 else
1383 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1384 wtd->omp_ctx = omp_ctx.outer;
1385 splay_tree_delete (omp_ctx.variables);
1387 break;
1389 case TRY_BLOCK:
1391 *walk_subtrees = 0;
1392 tree try_block = wtd->try_block;
1393 wtd->try_block = stmt;
1394 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1395 wtd->try_block = try_block;
1396 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1398 break;
1400 case MUST_NOT_THROW_EXPR:
1401 /* MUST_NOT_THROW_COND might be something else with TM. */
1402 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1404 *walk_subtrees = 0;
1405 tree try_block = wtd->try_block;
1406 wtd->try_block = stmt;
1407 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1408 wtd->try_block = try_block;
1410 break;
1412 case THROW_EXPR:
1414 location_t loc = location_of (stmt);
1415 if (TREE_NO_WARNING (stmt))
1416 /* Never mind. */;
1417 else if (wtd->try_block)
1419 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1420 && warning_at (loc, OPT_Wterminate,
1421 "throw will always call terminate()")
1422 && cxx_dialect >= cxx11
1423 && DECL_DESTRUCTOR_P (current_function_decl))
1424 inform (loc, "in C++11 destructors default to noexcept");
1426 else
1428 if (warn_cxx11_compat && cxx_dialect < cxx11
1429 && DECL_DESTRUCTOR_P (current_function_decl)
1430 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1431 == NULL_TREE)
1432 && (get_defaulted_eh_spec (current_function_decl)
1433 == empty_except_spec))
1434 warning_at (loc, OPT_Wc__11_compat,
1435 "in C++11 this throw will terminate because "
1436 "destructors default to noexcept");
1439 break;
1441 case CONVERT_EXPR:
1442 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1443 break;
1445 case FOR_STMT:
1446 genericize_for_stmt (stmt_p, walk_subtrees, data);
1447 break;
1449 case WHILE_STMT:
1450 genericize_while_stmt (stmt_p, walk_subtrees, data);
1451 break;
1453 case DO_STMT:
1454 genericize_do_stmt (stmt_p, walk_subtrees, data);
1455 break;
1457 case SWITCH_STMT:
1458 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1459 break;
1461 case CONTINUE_STMT:
1462 genericize_continue_stmt (stmt_p);
1463 break;
1465 case BREAK_STMT:
1466 genericize_break_stmt (stmt_p);
1467 break;
1469 case OMP_FOR:
1470 case OMP_SIMD:
1471 case OMP_DISTRIBUTE:
1472 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1473 break;
1475 case PTRMEM_CST:
1476 /* By the time we get here we're handing off to the back end, so we don't
1477 need or want to preserve PTRMEM_CST anymore. */
1478 *stmt_p = cplus_expand_constant (stmt);
1479 *walk_subtrees = 0;
1480 break;
1482 case MEM_REF:
1483 /* For MEM_REF, make sure not to sanitize the second operand even
1484 if it has reference type. It is just an offset with a type
1485 holding other information. There is no other processing we
1486 need to do for INTEGER_CSTs, so just ignore the second argument
1487 unconditionally. */
1488 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1489 *walk_subtrees = 0;
1490 break;
1492 case NOP_EXPR:
1493 if (!wtd->no_sanitize_p
1494 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1495 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1496 ubsan_maybe_instrument_reference (stmt_p);
1497 break;
1499 case CALL_EXPR:
1500 if (!wtd->no_sanitize_p
1501 && sanitize_flags_p ((SANITIZE_NULL
1502 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1504 tree fn = CALL_EXPR_FN (stmt);
1505 if (fn != NULL_TREE
1506 && !error_operand_p (fn)
1507 && POINTER_TYPE_P (TREE_TYPE (fn))
1508 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1510 bool is_ctor
1511 = TREE_CODE (fn) == ADDR_EXPR
1512 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1513 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1514 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1515 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1516 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1517 cp_ubsan_maybe_instrument_member_call (stmt);
1520 break;
1522 default:
1523 if (IS_TYPE_OR_DECL_P (stmt))
1524 *walk_subtrees = 0;
1525 break;
1528 p_set->add (*stmt_p);
1530 return NULL;
1533 /* Lower C++ front end trees to GENERIC in T_P. */
1535 static void
1536 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1538 struct cp_genericize_data wtd;
1540 wtd.p_set = new hash_set<tree>;
1541 wtd.bind_expr_stack.create (0);
1542 wtd.omp_ctx = NULL;
1543 wtd.try_block = NULL_TREE;
1544 wtd.no_sanitize_p = false;
1545 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1546 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1547 delete wtd.p_set;
1548 wtd.bind_expr_stack.release ();
1549 if (sanitize_flags_p (SANITIZE_VPTR))
1550 cp_ubsan_instrument_member_accesses (t_p);
1553 /* If a non-void function doesn't obviously end with a return,
1554 add ubsan instrumentation code to verify it at runtime.  */
1557 static void
1558 cp_ubsan_maybe_instrument_return (tree fndecl)
1560 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1561 || DECL_CONSTRUCTOR_P (fndecl)
1562 || DECL_DESTRUCTOR_P (fndecl)
1563 || !targetm.warn_func_return (fndecl))
1564 return;
1566 tree t = DECL_SAVED_TREE (fndecl);
1567 while (t)
1569 switch (TREE_CODE (t))
1571 case BIND_EXPR:
1572 t = BIND_EXPR_BODY (t);
1573 continue;
1574 case TRY_FINALLY_EXPR:
1575 t = TREE_OPERAND (t, 0);
1576 continue;
1577 case STATEMENT_LIST:
1579 tree_stmt_iterator i = tsi_last (t);
1580 if (!tsi_end_p (i))
1582 t = tsi_stmt (i);
1583 continue;
1586 break;
1587 case RETURN_EXPR:
1588 return;
1589 default:
1590 break;
1592 break;
1594 if (t == NULL_TREE)
1595 return;
1596 tree *p = &DECL_SAVED_TREE (fndecl);
1597 if (TREE_CODE (*p) == BIND_EXPR)
1598 p = &BIND_EXPR_BODY (*p);
1599 t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1600 append_to_statement_list (t, p);
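/* In effect (a sketch): for

     int f (int x) { if (x) return 1; }

   the walk above finds no trailing RETURN_EXPR, so the call built by
   ubsan_instrument_return -- which reports the missing return at run
   time -- is appended to the end of the function body.  */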
1603 void
1604 cp_genericize (tree fndecl)
1606 tree t;
1608 /* Fix up the types of parms passed by invisible reference. */
1609 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1610 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1612 /* If a function's arguments are copied to create a thunk,
1613 then DECL_BY_REFERENCE will be set -- but the type of the
1614 argument will be a pointer type, so we will never get
1615 here. */
1616 gcc_assert (!DECL_BY_REFERENCE (t));
1617 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1618 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1619 DECL_BY_REFERENCE (t) = 1;
1620 TREE_ADDRESSABLE (t) = 0;
1621 relayout_decl (t);
1624 /* Do the same for the return value. */
1625 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1627 t = DECL_RESULT (fndecl);
1628 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1629 DECL_BY_REFERENCE (t) = 1;
1630 TREE_ADDRESSABLE (t) = 0;
1631 relayout_decl (t);
1632 if (DECL_NAME (t))
1634 /* Adjust DECL_VALUE_EXPR of the original var. */
1635 tree outer = outer_curly_brace_block (current_function_decl);
1636 tree var;
1638 if (outer)
1639 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1640 if (VAR_P (var)
1641 && DECL_NAME (t) == DECL_NAME (var)
1642 && DECL_HAS_VALUE_EXPR_P (var)
1643 && DECL_VALUE_EXPR (var) == t)
1645 tree val = convert_from_reference (t);
1646 SET_DECL_VALUE_EXPR (var, val);
1647 break;
1652 /* If we're a clone, the body is already GIMPLE. */
1653 if (DECL_CLONED_FUNCTION_P (fndecl))
1654 return;
1656 /* Allow cp_genericize calls to be nested. */
1657 tree save_bc_label[2];
1658 save_bc_label[bc_break] = bc_label[bc_break];
1659 save_bc_label[bc_continue] = bc_label[bc_continue];
1660 bc_label[bc_break] = NULL_TREE;
1661 bc_label[bc_continue] = NULL_TREE;
1663 /* Expand all the array notations here. */
1664 if (flag_cilkplus
1665 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1666 DECL_SAVED_TREE (fndecl)
1667 = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1669 /* We do want to see every occurrence of the parms, so we can't just use
1670 walk_tree's hash functionality. */
1671 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1673 if (sanitize_flags_p (SANITIZE_RETURN)
1674 && current_function_decl != NULL_TREE)
1675 cp_ubsan_maybe_instrument_return (fndecl);
1677 /* Do everything else. */
1678 c_genericize (fndecl);
1680 gcc_assert (bc_label[bc_break] == NULL);
1681 gcc_assert (bc_label[bc_continue] == NULL);
1682 bc_label[bc_break] = save_bc_label[bc_break];
1683 bc_label[bc_continue] = save_bc_label[bc_continue];
1686 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1687 NULL if there is in fact nothing to do. ARG2 may be null if FN
1688 actually only takes one argument. */
1690 static tree
1691 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1693 tree defparm, parm, t;
1694 int i = 0;
1695 int nargs;
1696 tree *argarray;
1698 if (fn == NULL)
1699 return NULL;
1701 nargs = list_length (DECL_ARGUMENTS (fn));
1702 argarray = XALLOCAVEC (tree, nargs);
1704 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1705 if (arg2)
1706 defparm = TREE_CHAIN (defparm);
1708 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1710 tree inner_type = TREE_TYPE (arg1);
1711 tree start1, end1, p1;
1712 tree start2 = NULL, p2 = NULL;
1713 tree ret = NULL, lab;
1715 start1 = arg1;
1716 start2 = arg2;
1717 do
1719 inner_type = TREE_TYPE (inner_type);
1720 start1 = build4 (ARRAY_REF, inner_type, start1,
1721 size_zero_node, NULL, NULL);
1722 if (arg2)
1723 start2 = build4 (ARRAY_REF, inner_type, start2,
1724 size_zero_node, NULL, NULL);
1726 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1727 start1 = build_fold_addr_expr_loc (input_location, start1);
1728 if (arg2)
1729 start2 = build_fold_addr_expr_loc (input_location, start2);
1731 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1732 end1 = fold_build_pointer_plus (start1, end1);
1734 p1 = create_tmp_var (TREE_TYPE (start1));
1735 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1736 append_to_statement_list (t, &ret);
1738 if (arg2)
1740 p2 = create_tmp_var (TREE_TYPE (start2));
1741 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1742 append_to_statement_list (t, &ret);
1745 lab = create_artificial_label (input_location);
1746 t = build1 (LABEL_EXPR, void_type_node, lab);
1747 append_to_statement_list (t, &ret);
1749 argarray[i++] = p1;
1750 if (arg2)
1751 argarray[i++] = p2;
1752 /* Handle default arguments. */
1753 for (parm = defparm; parm && parm != void_list_node;
1754 parm = TREE_CHAIN (parm), i++)
1755 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1756 TREE_PURPOSE (parm), fn, i,
1757 tf_warning_or_error);
1758 t = build_call_a (fn, i, argarray);
1759 t = fold_convert (void_type_node, t);
1760 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1761 append_to_statement_list (t, &ret);
1763 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1764 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1765 append_to_statement_list (t, &ret);
1767 if (arg2)
1769 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1770 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1771 append_to_statement_list (t, &ret);
1774 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1775 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1776 append_to_statement_list (t, &ret);
1778 return ret;
1780 else
1782 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1783 if (arg2)
1784 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1785 /* Handle default arguments. */
1786 for (parm = defparm; parm && parm != void_list_node;
1787 parm = TREE_CHAIN (parm), i++)
1788 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1789 TREE_PURPOSE (parm),
1790 fn, i, tf_warning_or_error);
1791 t = build_call_a (fn, i, argarray);
1792 t = fold_convert (void_type_node, t);
1793 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
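/* In outline, the array case above emits (a pseudo-GENERIC sketch):

     p1 = &arg1[0]...[0];            // innermost element
     p2 = &arg2[0]...[0];            // only if ARG2 is given
   lab:
     fn (p1, p2, default-args...);
     p1 += sizeof (element);
     p2 += sizeof (element);         // only if ARG2 is given
     if (p1 != start1 + sizeof (arg1)) goto lab;

   i.e. FN is applied to each element (pair) in turn.  */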
1797 /* Return code to initialize DECL with its default constructor, or
1798 NULL if there's nothing to do. */
1800 tree
1801 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1803 tree info = CP_OMP_CLAUSE_INFO (clause);
1804 tree ret = NULL;
1806 if (info)
1807 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1809 return ret;
1812 /* Return code to initialize DST with a copy constructor from SRC. */
1814 tree
1815 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1817 tree info = CP_OMP_CLAUSE_INFO (clause);
1818 tree ret = NULL;
1820 if (info)
1821 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1822 if (ret == NULL)
1823 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1825 return ret;
1828 /* Similarly, except use an assignment operator instead. */
1830 tree
1831 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1833 tree info = CP_OMP_CLAUSE_INFO (clause);
1834 tree ret = NULL;
1836 if (info)
1837 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1838 if (ret == NULL)
1839 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1841 return ret;
1844 /* Return code to destroy DECL. */
1846 tree
1847 cxx_omp_clause_dtor (tree clause, tree decl)
1849 tree info = CP_OMP_CLAUSE_INFO (clause);
1850 tree ret = NULL;
1852 if (info)
1853 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1855 return ret;
1858 /* True if OpenMP should privatize what this DECL points to rather
1859 than the DECL itself. */
1861 bool
1862 cxx_omp_privatize_by_reference (const_tree decl)
1864 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1865 || is_invisiref_parm (decl));
1868 /* Return true if DECL is a const-qualified var having no mutable member.  */
1869 bool
1870 cxx_omp_const_qual_no_mutable (tree decl)
1872 tree type = TREE_TYPE (decl);
1873 if (TREE_CODE (type) == REFERENCE_TYPE)
1875 if (!is_invisiref_parm (decl))
1876 return false;
1877 type = TREE_TYPE (type);
1879 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1881 /* NVR doesn't preserve const qualification of the
1882 variable's type. */
1883 tree outer = outer_curly_brace_block (current_function_decl);
1884 tree var;
1886 if (outer)
1887 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1888 if (VAR_P (var)
1889 && DECL_NAME (decl) == DECL_NAME (var)
1890 && (TYPE_MAIN_VARIANT (type)
1891 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1893 if (TYPE_READONLY (TREE_TYPE (var)))
1894 type = TREE_TYPE (var);
1895 break;
1900 if (type == error_mark_node)
1901 return false;
1903 /* Variables with const-qualified type having no mutable member
1904 are predetermined shared. */
1905 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1906 return true;
1908 return false;
1911 /* True if OpenMP sharing attribute of DECL is predetermined. */
1913 enum omp_clause_default_kind
1914 cxx_omp_predetermined_sharing (tree decl)
1916 /* Static data members are predetermined shared. */
1917 if (TREE_STATIC (decl))
1919 tree ctx = CP_DECL_CONTEXT (decl);
1920 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1921 return OMP_CLAUSE_DEFAULT_SHARED;
1924 /* Const qualified vars having no mutable member are predetermined
1925 shared. */
1926 if (cxx_omp_const_qual_no_mutable (decl))
1927 return OMP_CLAUSE_DEFAULT_SHARED;
1929 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1932 /* Finalize an implicitly determined clause. */
1934 void
1935 cxx_omp_finish_clause (tree c, gimple_seq *)
1937 tree decl, inner_type;
1938 bool make_shared = false;
1940 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1941 return;
1943 decl = OMP_CLAUSE_DECL (c);
1944 decl = require_complete_type (decl);
1945 inner_type = TREE_TYPE (decl);
1946 if (decl == error_mark_node)
1947 make_shared = true;
1948 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1949 inner_type = TREE_TYPE (inner_type);
1951 /* We're interested in the base element, not arrays. */
1952 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1953 inner_type = TREE_TYPE (inner_type);
1955 /* Check for special function availability by building a call to one.
1956 Save the results, because later we won't be in the right context
1957 for making these queries. */
1958 if (!make_shared
1959 && CLASS_TYPE_P (inner_type)
1960 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1961 make_shared = true;
1963 if (make_shared)
1965 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1966 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
1967 OMP_CLAUSE_SHARED_READONLY (c) = 0;

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
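
/* The typical case is a privatized non-static data member, e.g. "x" in
   "#pragma omp parallel private (x)" inside a member function: it is
   presumably represented by an artificial VAR_DECL whose
   DECL_VALUE_EXPR is this->x, and that value expression must be
   ignored here because OpenMP lowering remaps the private copy
   itself.  */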

/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
          && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}
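
/* E.g. given

     const int n = 4;

   folding a use of N as an rvalue lets decl_constant_value substitute
   4, and the loop keeps folding in case that substitution enables
   further simplification.  */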

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold_rvalue (x);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
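
/* Because of the GTY((deletable)) marking, the garbage collector may
   drop FOLD_CACHE's contents at any collection point; the cache
   therefore never keeps trees live, and cp_fold below recreates it on
   demand.  */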

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;
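
    /* E.g. folding (signed char) 1000 sets TREE_OVERFLOW on the
       resulting constant even though the operand had no overflow; as
       such a narrowing conversion is implementation-defined rather
       than undefined, the flag is cleared again above.  */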

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;
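
    /* The "obfuscation" is the wrapper that keeps a parenthesized
       id-expression distinguishable, so that e.g.

         decltype(auto) f () { ... return (var); }

       deduces a reference type; after deduction the marker seems safe
       to strip, and the underlying operand is folded directly.  */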

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these, let GIMPLE folders
             optimize those later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;
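
    /* The TREE_NO_WARNING handling matters for -Wnonnull-compare:
       when e.g. "p == nullptr" comparing a nonnull-attributed
       parameter has already been flagged as not deserving a warning,
       any rebuilt comparison must carry the same flag so the warning
       is not emitted a second time.  */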

    case VEC_COND_EXPR:
    case COND_EXPR:
      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
          && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
        x = fold_convert (TREE_TYPE (org_x), x);

      break;

    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }
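
    /* For instance, given a constexpr function, roughly

         constexpr int sq (int i) { return i * i; }

       a call sq (4) that fold () leaves as a CALL_EXPR can still be
       reduced to 16 by maybe_constant_value above; and if the callee
       is a constructor, the folded value is rewrapped as an INIT_EXPR
       storing into the object the first argument points to.  */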

    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }
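
    /* The element vector is copied lazily: folding e.g. the
       initializer {1 + 1, 2 * 3} allocates NELTS only when the first
       element actually changes, yielding {2, 6}.  */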
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
         folding, evaluates to an invariant.  In that case no need to wrap
         this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
        x = r;
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Make sure we don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"