/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
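
/* Illustration (added; not in the original source): bc_label acts as a
   stack, linked through DECL_CHAIN, so that "break" and "continue" always
   bind to the innermost enclosing statement:

	while (a ())		// begin_bc_block pushes break/continue labels
	  while (b ())		// pushes a second, inner pair
	    break;		// get_bc_label (bc_break) returns the inner one

   Each begin_bc_block pushes a fresh artificial label; the matching
   finish_bc_block pops it and emits a LABEL_EXPR only if some break or
   continue actually used it.  */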

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
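
/* Illustration (added): for a C++98-style dynamic exception specification

	void f () throw (int) { g (); }

   the lowering above conceptually produces

	try { g (); }
	catch-filter (int) { unexpected (<exception pointer>); }

   i.e. a TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR listing the
   allowed types, with a call to the unexpected handler as the failure
   action.  The "catch-filter" syntax is pseudo-code, not C++.  */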

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
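
/* Example (added): when the condition is a constant and the dead arm has
   no side effects, the IF_STMT collapses to the live arm, so

	if (1) x = 1; else x = 2;

   genericizes to just "x = 1;"; otherwise a full COND_EXPR is built.  */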

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
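
/* Illustration (added): COND_IS_FIRST decides where the exit test goes.
   "while (cond) body;" is lowered to roughly (hypothetical label names)

	loop:
	  if (!cond) goto break_lab;	// exit test before the body
	  body;
	continue_lab:;
	  goto loop;
	break_lab:;

   while "do body; while (cond);" puts the continue label and exit test
   after the body, so the body always runs at least once:

	loop:
	  body;
	continue_lab:;
	  if (!cond) goto break_lab;
	  goto loop;
	break_lab:;  */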

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
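
/* Example (added): this is where -Wunused-value fires for statements whose
   value is computed and discarded without side effects, e.g.

	int x = 1;
	x == 2;		// warning: statement with no effect

   The diagnostic must be issued before gimplification, which may erase
   such a statement entirely.  */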

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
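
/* Example (added): for an empty class

	struct E { };
	E a, b;
	a = b;		// no bytes to copy

   the assignment satisfies simple_empty_class_p, so cp_gimplify_expr drops
   the copy and keeps only any side effects of the operands.  */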

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't
   really have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
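
/* Example (added): this predicate feeds the P0145 evaluation-order handling
   in cp_gimplify_expr below.  Under C++17 strong evaluation order, in

	int i = 0, a[2] = { 0, 0 };
	a[i++] = bar ();	// bar () must be evaluated before i++

   the lvalue a[i++] has side effects in its index, so the RHS is forced
   into a temporary first.  (bar is a hypothetical function here.)  */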

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a parameter or result passed by invisible
   reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
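
/* Example (added): a class-type parameter with a non-trivial copy
   constructor or destructor is passed by "invisible reference": the caller
   passes the address of a temporary while the source still names the
   parameter by value.  Given

	struct S { S (const S &); ~S (); int i; };
	int f (S s) { return s.i; }

   "s" is a PARM_DECL with DECL_BY_REFERENCE set, and cp_genericize_r
   rewrites each use of "s" as a dereference via convert_from_reference.  */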

/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
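
/* Example (added): a class object that becomes implicitly firstprivate in
   a task needs its copy constructor and destructor, which is why they are
   instantiated above before gimplification:

	struct S { S (); S (const S &); ~S (); };
	void f ()
	{
	  S s;
	#pragma omp task	// s is implicitly firstprivate here
	  use (s);		// copy ctor and dtor must already exist
	}

   (use is a hypothetical function taking an S.)  */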

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: the folding of non-OMP cases should eventually move to the
   middle end; for now most folding is done only on GENERIC in
   fold-const, so we must perform it before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the
	 first time cp_fold_r saw it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree using_directive;
	    gcc_assert (TREE_OPERAND (stmt, 0));

	    using_directive = make_node (IMPORTED_DECL);
	    TREE_TYPE (using_directive) = void_type_node;

	    IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	      = TREE_OPERAND (stmt, 0);
	    DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	    BLOCK_VARS (block) = using_directive;
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	}
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
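
/* Example (added): a non-void function whose control flow can fall off the
   end, such as

	int f (int x)
	{
	  if (x > 0)
	    return 1;
	}			// falls through when x <= 0

   gets either a call into the ubsan runtime that reports the missing
   return (-fsanitize=return) or a __builtin_unreachable () appended at
   the point where control would fall off.  */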

/* Lower the body of FNDECL to GENERIC.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
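
/* Example (added): the mutable check matters because a const object can
   still be written through a mutable member:

	struct A { mutable int cnt; int v; };
	const A a = { 0, 42 };	// const, yet a.cnt remains writable

   Only const-qualified types with no mutable member are safe to treat as
   predetermined shared (see cxx_omp_predetermined_sharing below).  */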
1902 /* True if OpenMP sharing attribute of DECL is predetermined. */
1904 enum omp_clause_default_kind
1905 cxx_omp_predetermined_sharing (tree decl)
1907 /* Static data members are predetermined shared. */
1908 if (TREE_STATIC (decl))
1910 tree ctx = CP_DECL_CONTEXT (decl);
1911 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1912 return OMP_CLAUSE_DEFAULT_SHARED;
1915 /* Const qualified vars having no mutable member are predetermined
1916 shared. */
1917 if (cxx_omp_const_qual_no_mutable (decl))
1918 return OMP_CLAUSE_DEFAULT_SHARED;
1920 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1923 /* Finalize an implicitly determined clause. */
1925 void
1926 cxx_omp_finish_clause (tree c, gimple_seq *)
1928 tree decl, inner_type;
1929 bool make_shared = false;
1931 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1932 return;
1934 decl = OMP_CLAUSE_DECL (c);
1935 decl = require_complete_type (decl);
1936 inner_type = TREE_TYPE (decl);
1937 if (decl == error_mark_node)
1938 make_shared = true;
1939 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1940 inner_type = TREE_TYPE (inner_type);
1942 /* We're interested in the base element, not arrays. */
1943 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1944 inner_type = TREE_TYPE (inner_type);
1946 /* Check for special function availability by building a call to one.
1947 Save the results, because later we won't be in the right context
1948 for making these queries. */
1949 if (!make_shared
1950 && CLASS_TYPE_P (inner_type)
1951 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1952 make_shared = true;
1954 if (make_shared)
1956 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1957 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
1958 OMP_CLAUSE_SHARED_READONLY (c) = 0;

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
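
/* Context sketch (hypothetical user code): inside a member function, a
   non-static data member named in an OpenMP clause is represented by an
   artificial VAR_DECL whose DECL_VALUE_EXPR is this->member:

     struct T {
       int m;
       void f () {
     #pragma omp parallel private (m)
         m = 0;   // privatized: the this->m value expr is disregarded
       }
     };

   When such a decl ends up shared instead, the value expression must be
   honored so accesses still go through this->m.  */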

/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
          && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}
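
/* Worked example (hypothetical user code) of the rvalue handling above:

     const int n = 42;
     int a = n;           // rvalue use: decl_constant_value yields 42
     const int *p = &n;   // lvalue use: N itself must be preserved

   The loop restarts because the constant value returned may itself be
   foldable further.  */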

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold_rvalue (x);
}
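
/* Sketch of the division of labor assumed above: maybe_constant_value
   performs C++11 constant-expression evaluation, cp_fold_rvalue the
   tree-level folding.  Hypothetical user code:

     constexpr int sq (int i) { return i * i; }
     int buf[sq (3) + 1];   // maybe_constant_value evaluates sq (3) to 9

   In C++98 mode only the cp_fold path runs.  */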

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;
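
      /* Concrete instance of the overflow note above (hypothetical):
         folding (signed char) 300 yields an INTEGER_CST that did not fit,
         which fold () marks TREE_OVERFLOW; because such a narrowing is
         implementation-defined rather than undefined, the flag is cleared
         when the operand itself carried no overflow:

           signed char c = (signed char) 300;   // implementation-defined
           int i = __INT_MAX__ + 1;             // genuine overflow, UB  */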

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these; let the GIMPLE folders
             optimize them later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;
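
      /* The TREE_NO_WARNING copying above preserves warning suppression
         across refolding.  Sketch (hypothetical): the front end marks some
         internally generated pointer comparisons as no-warning; if folding
         builds a fresh comparison node, the bit is copied so the later
         -Wnonnull-compare analysis stays quiet about them, while constant
         results need no marking at all.  */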

    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }
      else if (VOID_TYPE_P (TREE_TYPE (x)))
        {
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              /* If the condition is constant, fold can fold away
                 the COND_EXPR.  If some statement-level uses of COND_EXPR
                 have one of the branches NULL, avoid folding crash.  */
              if (!op1)
                op1 = build_empty_stmt (loc);
              if (!op2)
                op2 = build_empty_stmt (loc);
            }
          else
            {
              /* Otherwise, don't bother folding a void condition, since
                 it can't produce a constant value.  */
              if (op0 != TREE_OPERAND (x, 0)
                  || op1 != TREE_OPERAND (x, 1)
                  || op2 != TREE_OPERAND (x, 2))
                x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
              break;
            }
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
          && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
        x = fold_convert (TREE_TYPE (org_x), x);

      break;
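
      /* Hypothetical instance of the bitfield fixup above:

           extern bool cond;
           struct B { int f : 3; } b;
           int i = cond ? b.f : 0;

         If folding reduces the COND_EXPR to its bitfield arm, the result
         type may be the 3-bit bitfield type, so it is converted back to
         the original type of the expression.  */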

    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }
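
      /* Example of the optimize save/restore above (hypothetical):

           constexpr int pick (int i)
           { return __builtin_constant_p (i) ? 1 : 2; }

         At -O0, fold () would reduce __builtin_constant_p to 0 outright;
         bumping optimize to 1 around the fold keeps the builtin live so
         constexpr evaluation via maybe_constant_value can still decide it.  */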

    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
         folding, evaluates to an invariant.  In that case there is no need
         to wrap the folded tree in a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
        x = r;
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Prevent us from trying to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"