/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
                                            gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
                     is_gimple_reg, fb_rvalue);
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

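/* Illustrative example (not from this file): after genericize_if_stmt,

     if (x > 0) f (); else g ();

   becomes roughly COND_EXPR <x > 0, f (), g ()>, and a constant
   condition is short-circuited, e.g. "if (1) f ();" with a
   side-effect-free else clause is reduced to just "f ();".  */
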
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

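/* A hedged sketch of the result for "while (c) body;": the tree built
   above is approximately

     LOOP_EXPR
       COND_EXPR <c, {}, GOTO_EXPR <break label>>   exit test first
       body                                         continue label here
     LABEL_EXPR <break label>

   whereas a do-while places the exit test after the body.  The exact
   shape depends on the constant folding of COND described above.  */
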
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

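/* Illustrative only: a SWITCH_STMT such as

     switch (i) { case 0: ...; break; }

   becomes SWITCH_EXPR <type, i, body>, where the BREAK_STMTs in the
   body have already been lowered by the same walk into GOTO_EXPRs to
   break_block's label, which finish_bc_block appends after the
   SWITCH_EXPR.  */
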
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

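/* So, roughly (a sketch, not literal output): CONTINUE_STMT lowers to

     PREDICT_EXPR <PRED_CONTINUE, NOT_TAKEN>; GOTO_EXPR <continue label>;

   and BREAK_STMT to a plain GOTO_EXPR <break label>; both labels come
   from the bc_label stacks maintained above.  */
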
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }

}

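/* A hypothetical example of the slot replacement above: for

     S s = make_s ();   becomes INIT_EXPR <s, AGGR_INIT_EXPR <...>>

   the AGGR_INIT_EXPR's slot operand is rewritten to be "s" itself and
   the INIT_EXPR is dropped, so make_s constructs directly into s with
   no temporary (a sketch; the actual operand layout is whatever the
   AGGR_INIT_EXPR_SLOT accessor reaches).  */
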
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

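/* Hedged examples of the predicate above: "a[f ()]" has lvalue
   side-effects (the index must be evaluated), while "*p" or a volatile
   variable "v" do not until actually read or written; for the latter,
   TREE_SIDE_EFFECTS alone would over-report.  */
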
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_cp_detect_spawn_and_unwrap (expr_p))
            {
              cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
                                                          pre_p, post_p);
              return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
            }
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_cp_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          {
            cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          }
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert(fn_contains_cilk_spawn_p (cfun)
                 && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_cp_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (POINTER_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

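/* An illustrative note on the MODIFY_EXPR preevaluation above (a sketch,
   assuming -fstrong-eval-order, the C++17 default): in

     a[i++] = foo ();

   the call foo () is sequenced before the side-effecting lvalue a[i++],
   so the RHS is forced into a temporary via get_formal_tmp_var first,
   giving roughly "tmp = foo (); a[i++] = tmp;".  Exact temporaries and
   ordering are whatever the gimplifier produces.  */
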
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

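/* For reference (a sketch, not from this file): a parameter of
   non-trivially-copyable class type, e.g.

     void f (std::string s);

   is passed by "invisible reference"; its PARM_DECL has
   DECL_BY_REFERENCE set, and cp_genericize_r below rewrites uses of s
   into dereferences via convert_from_reference.  */
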
/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure the copy ctor and
                 dtor are instantiated, because during gimplification
                 it would already be too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

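/* A hypothetical case for the logic above: given

     S s;                       class type with copy ctor and dtor
     #pragma omp task           no default clause
       use (s);

   s is implicitly determined firstprivate in the task, so
   get_copy_ctor/get_dtor are called here, during genericization,
   because gimplification would be too late to instantiate them.  */
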
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  For now, as most foldings are only done on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were already walked the
         first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));
  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */

      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (VAR_P (d))
        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL
           || TREE_CODE (stmt) == OMP_TASK
           || TREE_CODE (stmt) == OMP_TASKLOOP)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      if (TREE_CODE (stmt) == OMP_TASKLOOP)
        genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      else
        cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
        /* Never mind.  */;
      else if (wtd->try_block)
        {
          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
              && warning_at (loc, OPT_Wterminate,
                             "throw will always call terminate()")
              && cxx_dialect >= cxx11
              && DECL_DESTRUCTOR_P (current_function_decl))
            inform (loc, "in C++11 destructors default to noexcept");
        }
      else
        {
          if (warn_cxx11_compat && cxx_dialect < cxx11
              && DECL_DESTRUCTOR_P (current_function_decl)
              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                  == NULL_TREE)
              && (get_defaulted_eh_spec (current_function_decl)
                  == empty_except_spec))
            warning_at (loc, OPT_Wc__11_compat,
                        "in C++11 this throw will terminate because "
                        "destructors default to noexcept");
        }
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == PTRMEM_CST)
    {
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
    }
  else if ((flag_sanitize
            & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
           && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt_p);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it does
   return.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);
  t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
  append_to_statement_list (t, p);
}

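/* Illustrative effect (assumes -fsanitize=return): for

     int f (bool b) { if (b) return 1; }

   the walk above finds no trailing RETURN_EXPR, so the call built by
   ubsan_instrument_return is appended to the function body and reports
   at runtime if control actually falls off the end.  */
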
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

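/* Sketch of the array case above: for "S arr[4]" and a copy constructor
   FN, the generated tree is approximately

     p1 = &arr[0]; p2 = &src[0];
     lab: FN (p1, p2, <default args>);
     p1 += sizeof (S); p2 += sizeof (S);
     if (p1 != end1) goto lab;

   i.e. a hand-built pointer loop over the elements (illustrative
   pseudo-C, not the literal trees).  */
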
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var with no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}


/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized. */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}

/* Perform folding on expression X. */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both. */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold (x);
}
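
/* Illustrative sketch: for C++11 and later, given

     constexpr int sq (int i) { return i * i; }
     int x = sq (6);

   maybe_constant_value reduces the initializer to 36 before cp_fold runs;
   simpler arithmetic such as `1 + 2' is folded by cp_fold itself. */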

/* Fold expression X which is used as an rvalue if RVAL is true. */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
          && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}
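
/* Illustrative sketch: given

     const int n = 10;
     int a[n];

   the rvalue use of `n' is replaced by 10 via decl_constant_value, and the
   loop re-folds in case the substituted value enables further folding. */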

/* Fold expression X which is used as an rvalue. */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static
   initializer and certain changes are made to the folding done.  Or should
   be (FIXME).  We never touch maybe_const, as it is only used for the C
   front end's C_MAYBE_CONST_EXPR. */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST. */
  return cp_fold_rvalue (x);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE. */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant. */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants. */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects. */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations. */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined. */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;
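
      /* Illustrative sketch: folding `(signed char) 300' yields an
         implementation-defined value (44 on the usual 8-bit two's
         complement targets) and must not be flagged as overflow, unlike
         true arithmetic overflow such as `INT_MAX + 1'. */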

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore. */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          return cp_fold (p);
        }
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these; let the GIMPLE folders
             optimize them later on. */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;
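
      /* Illustrative sketch (hypothetical user code): for a parameter
         declared nonnull,

           void f (int *p) __attribute__ ((nonnull));

         a check like `p == 0' inside f has already been diagnosed by
         -Wnonnull-compare, so TREE_NO_WARNING is copied to the rebuilt
         comparison to avoid warning twice. */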

    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash. */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2);
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up. */
      if (TREE_CODE (x) != code)
        if (tree type = is_bitfield_expr_with_lowered_type (x))
          x = fold_convert (type, x);

      break;

    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is. */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
              }
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }
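
    /* Illustrative sketch: given

         constexpr int cube (int i) { return i * i * i; }
         ... cube (4) ...

       maybe_constant_value above evaluates the call to 64, while a call
       with a non-constant argument stays a CALL_EXPR and reaches the
       !changed handling at the end of this case. */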

    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          x = build_constructor (TREE_TYPE (x), nelts);
        break;
      }

    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }
      else
        x = fold (x);

      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Avoid trying to fold an already-folded result again. */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"