/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
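/* An illustrative note (not in the original sources): when genericizing
       while (c) { if (d) break; }
   the "break" is lowered to a GOTO_EXPR targeting the label pushed by
   begin_bc_block (bc_break, ...); marking that label TREE_USED here is
   what later tells finish_bc_block to emit its LABEL_EXPR after the
   loop rather than discarding it.  */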
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
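/* An illustrative note (not in the original sources): given
       if (0) ; else g ();
   no COND_EXPR is built; the statement reduces directly to "g ();",
   since the condition is a constant and the dead arm has no side
   effects.  A dead arm with side effects keeps the COND_EXPR.  */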
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */
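/* An illustrative sketch (not in the original sources): for
       while (cond) body;
   the code below produces roughly
       LOOP_EXPR
	 if (cond) ; else goto break_label;
	 body;
	 continue_label:
       break_label:
   with the continue label emitted inside the loop by finish_bc_block
   and the break label appended after the LOOP_EXPR.  */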
static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}
/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}
/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
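/* An illustrative note (not in the original sources): for
       struct A { };
       A a, b;
       a = b;
   the copy moves no bits, so once this predicate accepts the RHS form,
   the assignment is dropped entirely during gimplification below.  */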
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
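/* An illustrative note (not in the original sources): for the lvalue
   a[i++] the ARRAY_REF index has side effects, so this returns true;
   for a volatile object v, merely naming v (a DECL) reports no side
   effects here, because nothing has been read or written yet.  */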
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }
  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;
    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;
      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
	{
	  if (cilk_cp_detect_spawn_and_unwrap (expr_p))
	    return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	  if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
	    return GS_ERROR;
	}

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_cp_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
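      /* An illustrative note (not in the original sources): in
	     a[f ()] = g ();
	 C++17 requires g () to be evaluated before f (), so the RHS
	 call is preevaluated into a temporary above rather than being
	 left for gimplify_modify_expr to schedule after the LHS.  */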
      ret = GS_OK;
      break;
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;
    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;
    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
		  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      return GS_ERROR;
    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_cp_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;
    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
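/* An illustrative note (not in the original sources): given
       S s;
       #pragma omp parallel private (s)
       #pragma omp task
	 use (s);
   s is private in the enclosing parallel, so inside the task it is
   implicitly firstprivate; the outer-context lookup above detects
   this and instantiates S's copy constructor and destructor while
   that is still possible.  */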
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle end.  For now most folding is done only on GENERIC
   in fold-const, so we need to perform this before the transformation
   to GIMPLE form.  */
static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }
  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the
	       var in outer contexts, avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	/* Don't dereference an invisiref in reduction clause's
	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	   still needs to be genericized.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			    cp_genericize_r, data, NULL);
	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						 : TRY_FINALLY_EXPR,
			  void_type_node,
			  CLEANUP_BODY (stmt),
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }
  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (flag_sanitize
	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }
  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its
	 BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
	{
	  int i;
	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (VAR_P (d))
	gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL
	   || TREE_CODE (stmt) == OMP_TASK
	   || TREE_CODE (stmt) == OMP_TASKLOOP)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	  default:
	    break;
	  }
      if (TREE_CODE (stmt) == OMP_TASKLOOP)
	genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      else
	cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
	/* Never mind.  */;
      else if (wtd->try_block)
	{
	  if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
	      && warning_at (loc, OPT_Wterminate,
			     "throw will always call terminate()")
	      && cxx_dialect >= cxx11
	      && DECL_DESTRUCTOR_P (current_function_decl))
	    inform (loc, "in C++11 destructors default to noexcept");
	}
      else
	{
	  if (warn_cxx11_compat && cxx_dialect < cxx11
	      && DECL_DESTRUCTOR_P (current_function_decl)
	      && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		  == NULL_TREE)
	      && (get_defaulted_eh_spec (current_function_decl)
		  == empty_except_spec))
	    warning_at (loc, OPT_Wc__11_compat,
			"in C++11 this throw will terminate because "
			"destructors default to noexcept");
	}
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
	   || TREE_CODE (stmt) == OMP_SIMD
	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == PTRMEM_CST)
    {
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
    }
  else if ((flag_sanitize
	    & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	   && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
	  && TREE_CODE (stmt) == NOP_EXPR
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      else if (TREE_CODE (stmt) == CALL_EXPR)
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	}
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return
   statement, add ubsan instrumentation code to verify at runtime
   that the function really does return.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);
  t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
  append_to_statement_list (t, p);
}
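/* An illustrative note (not in the original sources): for
       int f (int x) { if (x) return 1; }
   control can fall off the end, and the walk above finds no trailing
   RETURN_EXPR, so the ubsan missing-return instrumentation is
   appended at the end of the function body.  */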
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
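/* An illustrative sketch (not in the original sources): for the array
   case above, the emitted GENERIC is roughly
       p1 = &arg1[0]...[0];
       p2 = &arg2[0]...[0];
     lab:
       fn (p1, p2, <converted default args>);
       p1 += sizeof (element);  p2 += sizeof (element);
       if (p1 != end1) goto lab;
   i.e. FN is applied element-wise over the flattened arrays.  */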
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}
/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
1934 /* Perform folding on expression X. */
1936 tree
1937 cp_fully_fold (tree x)
1939 if (processing_template_decl)
1940 return x;
1941 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
1942 have to call both. */
1943 if (cxx_dialect >= cxx11)
1944 x = maybe_constant_value (x);
1945 return cp_fold (x);
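
/* For example (illustrative only), given

     constexpr int f () { return 42; }

   maybe_constant_value reduces the expression f () to the
   INTEGER_CST 42 in C++11 and later, while cp_fold handles
   expressions that are not constant expressions at all.  */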
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
          && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}
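
/* For example (illustrative only):

     const int i = 3;
     int j = i + 1;

   When I + 1 is folded, the rvalue use of I is replaced by 3 via
   decl_constant_value, and the loop then lets 3 + 1 fold to 4.  */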
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static
   initializer and certain changes should be made to the folding done
   (but currently are not, FIXME).  We never touch maybe_const, as it
   is only used for the C front end's C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}
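
/* An illustrative example of the CONST_DECL case:

     enum color { red = 4 };

   A use of RED is a CONST_DECL; folding it as an rvalue yields the
   INTEGER_CST 4.  */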
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;
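
    /* Illustrative example: the initializer in

         int j = i + 1;

       is a full-expression and is wrapped in a CLEANUP_POINT_EXPR
       during parsing; once the operand folds to something without
       side effects there are no cleanups to run, so the wrapper can
       be dropped.  */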
    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold () doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;
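
    /* Illustrative example of the overflow tweak above: with 8-bit
       signed char,

         (signed char) 300

       folds to an INTEGER_CST with TREE_OVERFLOW set, yet the
       conversion is merely implementation-defined rather than
       undefined, so the flag is cleared to avoid bogus overflow
       diagnostics later.  */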
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these here; let the GIMPLE
             folders optimize them later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;
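
    /* The TREE_NO_WARNING bookkeeping above supports
       -Wnonnull-compare: comparisons the front end generated itself
       (for instance, roughly, the implicit NULL guard built for a
       delete-expression) carry TREE_NO_WARNING so the middle end does
       not warn about comparing a known-nonnull pointer against NULL.
       The bit must survive on whatever comparison tree the folding
       returns, hence the explicit rebuild via build2_loc.  */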
    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code)
        if (tree type = is_bitfield_expr_with_lowered_type (x))
          x = fold_convert (type, x);

      break;
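
    /* Illustrative example of the bitfield fixup:

         struct S { int b : 3; } s;
         int i = cond ? s.b : s.b;

       If folding collapses the COND_EXPR to one arm, the result has
       the lowered bitfield type; is_bitfield_expr_with_lowered_type
       recovers the declared type (int here) and the result is
       converted back to it.  */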
    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }
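
    /* Two illustrative outcomes of the call handling above:

         constexpr int sq (int i) { return i * i; }
         int a = sq (4);	// maybe_constant_value folds to 16

       and, for a folded constructor call, the result R is wrapped as
       an INIT_EXPR storing R into the object the first argument
       points to.  Anything still a CALL_EXPR afterwards is left for
       later passes.  */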
    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }
      else
        x = fold (x);

      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"