/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
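
/* For instance, while genericizing "while (c) { ...; break; ...; }",
   begin_bc_block pushes a fresh break label, the BREAK_STMT in the body
   becomes a GOTO_EXPR to that label (marking it TREE_USED via
   get_bc_label), and finish_bc_block emits the LABEL_EXPR just after the
   loop before popping the label off the stack.  */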

/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
                                            gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
                     is_gimple_reg, fb_rvalue);
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
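
/* Roughly, the body of "void f () throw (A)" ends up as a TRY_CATCH_EXPR
   whose handler is an EH_FILTER_EXPR allowing only A, with the filter's
   failure action being a call to the unexpected handler.  */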

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
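
/* For example, "if (1) f (); else g ();" collapses to just "f ();" because
   the dead arm has no side effects, while the general case becomes a
   void-typed COND_EXPR <cond, then, else>.  */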

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
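
/* Roughly, "while (c) body" becomes

     loop
       {
         if (!c) goto break_lab;
         body;
         continue_lab:;
       }
     break_lab:;

   with the exit test placed after the body instead for do-while loops, and
   omitted entirely when the condition is a known constant.  */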

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
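
/* Only a continue label is pushed above: "break" may not be used to leave
   the body of an OpenMP loop, so no break label is needed.  */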

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
        /* Handle aggregate NSDMI.  */
        replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
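
/* So for "T t = T (x);", where the initializer is a TARGET_EXPR wrapping an
   AGGR_INIT_EXPR, the constructor call is redirected to build T directly
   into "t" instead of into a temporary that would then be copied.  */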

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
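
/* E.g. for "struct E {}; ... e1 = e2;" this returns true, letting the
   caller drop the copy entirely, since an empty class has no data to
   move.  */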

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
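
/* For instance, evaluating "a[i++]" as an lvalue increments I and so has
   side effects, whereas merely naming a volatile variable does not load it
   even though TREE_SIDE_EFFECTS is set on it.  */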

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_cp_detect_spawn_and_unwrap (expr_p))
            {
              cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
                                                          pre_p, post_p);
              return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
            }
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_cp_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          {
            cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          }
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
                  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_cp_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (POINTER_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
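
/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */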
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, do dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (VAR_P (d))
        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL
           || TREE_CODE (stmt) == OMP_TASK
           || TREE_CODE (stmt) == OMP_TASKLOOP)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      if (TREE_CODE (stmt) == OMP_TASKLOOP)
        genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      else
        cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
        /* Never mind.  */;
      else if (wtd->try_block)
        {
          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
              && warning_at (loc, OPT_Wterminate,
                             "throw will always call terminate()")
              && cxx_dialect >= cxx11
              && DECL_DESTRUCTOR_P (current_function_decl))
            inform (loc, "in C++11 destructors default to noexcept");
        }
      else
        {
          if (warn_cxx11_compat && cxx_dialect < cxx11
              && DECL_DESTRUCTOR_P (current_function_decl)
              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                  == NULL_TREE)
              && (get_defaulted_eh_spec (current_function_decl)
                  == empty_except_spec))
            warning_at (loc, OPT_Wc__11_compat,
                        "in C++11 this throw will terminate because "
                        "destructors default to noexcept");
        }
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == PTRMEM_CST)
    {
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
    }
  else if ((flag_sanitize
            & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
           && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);
  t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
  append_to_statement_list (t, p);
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
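
/* For an array operand such as "T a[2][3]", the ARRAY_TYPE branch above
   flattens the array to its element type T and emits a pointer-walk loop,
   roughly

     p1 = &a[0][0];  end1 = p1 + sizeof (a);
     lab:  fn (p1, ...);  p1 += sizeof (T);
     if (p1 != end1) goto lab;

   so FN is applied to each of the six T elements in turn.  */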

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
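
/* In other words: an implicit firstprivate of a class-type variable
   needs usable copy-constructor and destructor calls.  If building
   those fails (or the type is incomplete), the clause is downgraded
   to shared so that no copy is attempted.  */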

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold (x);
}

/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
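
/* Hypothetical example: given "const int n = 4;", folding "n" as an
   rvalue lets decl_constant_value replace the VAR_DECL with the
   INTEGER_CST 4, and the loop then re-folds that result; as an lvalue
   (RVAL false) the decl is left alone.  */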

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
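
/* Note that the cache is GTY((deletable)): the garbage collector may
   clear it wholesale at any collection point.  That is safe because
   the cache is purely an optimization; dropped entries are simply
   recomputed on the next cp_fold of the same tree.  */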

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);
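
      /* If ORG_X carried a -Wnonnull-compare suppression via
	 TREE_NO_WARNING, any comparison the folders rebuilt must
	 inherit that flag so that later -Wnonnull-compare checks
	 still honor the suppression.  */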
      if (TREE_NO_WARNING (org_x)
	  && warn_nonnull_compare
	  && COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    TREE_NO_WARNING (x) = 1;
	  /* Otherwise give up on optimizing these; let GIMPLE folders
	     optimize them later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}

      break;

    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
	 constant value.  Also, some statement-level uses of COND_EXPR leave
	 one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
	return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code)
	if (tree type = is_bitfield_expr_with_lowered_type (x))
	  x = fold_convert (type, x);

      break;

    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && DECL_BUILT_IN (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;
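
	/* fold () and the fold_builtin_* routines consult the global
	   "optimize" flag, so the boosted value NW is swapped in around
	   the folds below and the saved value SV restored afterwards.  */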

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }
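
      /* Hypothetical illustration: with "constexpr int sq (int i)
	 { return i * i; }" in scope, a call "sq (4)" is replaced above
	 by maybe_constant_value with the INTEGER_CST 16; for a constexpr
	 constructor the folded value is instead stored through an
	 INIT_EXPR into the object under construction.  */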

    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	bool changed = false;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	vec_safe_reserve (nelts, vec_safe_length (elts));
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    constructor_elt e = { p->index, op };
	    nelts->quick_push (e);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    changed = false;
		    break;
		  }
		changed = true;
	      }
	  }
	if (changed)
	  x = build_constructor (TREE_TYPE (x), nelts);
	else
	  vec_free (nelts);
	break;
      }

    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}
      else
	x = fold (x);
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Don't try to fold an already-folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"