/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

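/* For example, while genericizing "while (c) { if (d) break; }", the
   break is rewritten into a GOTO_EXPR targeting the label currently on
   top of the bc_break stack; see get_bc_label and genericize_break_stmt
   below.  */
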
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

74 /* Finish a scope which can be exited by a break or continue statement.
75 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
76 an expression for the contents of the scope.
78 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
79 BLOCK. Otherwise, just forget the label. */
81 static void
82 finish_bc_block (tree *block, enum bc_t bc, tree label)
84 gcc_assert (label == bc_label[bc]);
86 if (TREE_USED (label))
87 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
88 block);
90 bc_label[bc] = DECL_CHAIN (label);
91 DECL_CHAIN (label) = NULL_TREE;
/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

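/* E.g. the body of "void f () throw (int)" is wrapped roughly as

     TRY_CATCH_EXPR <body,
                     EH_FILTER_EXPR <allowed = int, failure>>

   so that a thrown type not in the allowed list reaches FAILURE,
   which calls the unexpected handler.  */
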
static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

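/* E.g. "if (x) f (); else g ();" becomes roughly
   COND_EXPR <x, f (), g ()>; a constant condition is reduced to the
   live arm as long as the dead arm has no side effects.  */
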
static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

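/* E.g. "while (c) body" is lowered roughly to

     LOOP_EXPR <
       if (c) {} else goto break_label;
       body
       continue_label:>
     break_label:

   For do-while loops the exit test is emitted after the body instead,
   and for for-loops the increment is emitted after continue_label.  */
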
static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

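/* E.g. "switch (c) { ... break; ... }" becomes roughly

     SWITCH_EXPR <c, genericized body>
     break_label:

   where break statements in the body have been turned into gotos to
   break_label.  */
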
static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

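/* A continue statement becomes roughly

     PREDICT_EXPR <continue not taken>;
     goto continue_label;

   where continue_label is the innermost label pushed by
   begin_bc_block (bc_continue, ...).  */
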
static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

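/* E.g. for "A a = A (1);" the initializer is an AGGR_INIT_EXPR whose
   slot is the compiler temporary; pointing the slot at 'a' constructs
   the object directly in place and elides the copy.  */
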
static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

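/* E.g. given "struct A {}; A a, b;", the assignment "a = b" copies no
   data, so the copy itself can be dropped as long as any side effects
   of evaluating the operands are kept.  */
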
static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

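/* E.g. "a[i++]" used as an lvalue has side-effects because of the
   index expression, while merely naming a volatile variable or
   dereferencing a plain pointer does not by itself.  */
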
static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_cp_detect_spawn_and_unwrap (expr_p))
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_cp_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
                  && cilk_cp_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_cp_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (POINTER_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a parameter or result decl that is passed by
   invisible reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TREE_CODE (type) == REFERENCE_TYPE)
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  For now most folding is done only on GENERIC in
   fold-const, so we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were already walked the first
         time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree using_directive;
            gcc_assert (TREE_OPERAND (stmt, 0));

            using_directive = make_node (IMPORTED_DECL);
            TREE_TYPE (using_directive) = void_type_node;

            IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
              = TREE_OPERAND (stmt, 0);
            DECL_CHAIN (using_directive) = BLOCK_VARS (block);
            BLOCK_VARS (block) = using_directive;
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
                && warning_at (loc, OPT_Wterminate,
                               "throw will always call terminate()")
                && cxx_dialect >= cxx11
                && DECL_DESTRUCTOR_P (current_function_decl))
              inform (loc, "in C++11 destructors default to noexcept");
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this throw will terminate because "
                          "destructors default to noexcept");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

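/* E.g. in "int f (int x) { if (x) return 1; }" control can fall off
   the end; depending on -fsanitize=return this appends either a ubsan
   runtime check or a __builtin_unreachable () call at that point.  */
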
static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

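/* For an array ARG1, the generated code is roughly

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     lab:
       fn (p1, ...);  p1 += sizeof (element);
       if (p1 != end1) goto lab;

   i.e. FN is applied to every element of the array in turn.  */
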
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
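/* An implicitly determined firstprivate clause arises, e.g., for a
   variable referenced inside a task construct without an explicit
   data-sharing clause (illustrative, not code from this file):

     S s;
     #pragma omp task	// s is implicitly firstprivate here
     use (s);

   If building the special-member-function calls for S fails, the
   clause is turned into a shared clause above rather than erroring
   out at an unrelated point later.  */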
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL is going
   to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
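/* The loop re-folds after each substitution: e.g. (illustrative, not
   code from this file), if cp_fold reduces X to a const variable whose
   recorded initializer is 2 + 3, decl_constant_value substitutes that
   initializer and the next iteration folds it down to the INTEGER_CST
   5.  Iteration stops once a fixed point is reached.  */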
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    x = maybe_constant_value (x);
  return cp_fold_rvalue (x);
}
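/* As a usage sketch (illustrative, not code from this file), a caller
   such as a warning routine can do

     tree folded = cp_fully_fold (expr);
     if (TREE_CODE (folded) == INTEGER_CST)
       ...decide based on the constant value...

   getting both C++ constant-expression evaluation (for C++11 and
   later) and the generic folding done by cp_fold below.  */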
/* c-common interface to cp_fold.  If IN_INIT, this is in a static
   initializer and certain changes should be made to the folding done
   (but currently are not -- FIXME).  We never touch maybe_const, as it
   is only used for the C front-end C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;
  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold () doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;
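      /* E.g. (an illustrative sketch, not code from this file) folding

	   (signed char) 300

	 can produce an INTEGER_CST with TREE_OVERFLOW set by the
	 generic folder; since the operand 300 did not itself overflow,
	 the flag is cleared again above so later diagnostics don't
	 treat the conversion as undefined arithmetic overflow.  */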
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      if (TREE_NO_WARNING (org_x)
	  && warn_nonnull_compare
	  && COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    TREE_NO_WARNING (x) = 1;
	  /* Otherwise give up on optimizing these; let the GIMPLE
	     folders optimize them later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}
      break;
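      /* The TREE_NO_WARNING handling above preserves suppression of
	 -Wnonnull-compare: if the front end marked the original
	 comparison as not to be warned about, the flag has to be
	 copied onto whatever tree replaces it here, or the warning
	 machinery that runs later on GIMPLE could warn after all.  */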
    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
	 constant value.  Also, some statement-level uses of COND_EXPR leave
	 one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
	return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;
    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && DECL_BUILT_IN (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }
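      /* The optimize save/restore above matters for code like (an
	 illustrative sketch, not code from this file)

	   constexpr int f (int n)
	   { return __builtin_constant_p (n) ? 1 : 0; }

	 at -O0 the builtin folder would reduce __builtin_constant_p to
	 0 immediately; temporarily pretending optimize is 1 keeps it
	 live so constexpr evaluation can still decide it properly.  */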
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  x = build_constructor (TREE_TYPE (x), nelts);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case there is no
	 need to wrap the folded tree in a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;
    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Prevent us from trying to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"