Merged from trunk from revision r222717 up to r222905.
[official-gcc.git] / gcc / cp / cp-gimplify.c
blob76758121f027e6015675c37fe43d7d4fb8cffaee
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "hash-set.h"
27 #include "hash-map.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "stor-layout.h"
38 #include "cp-tree.h"
39 #include "c-family/c-common.h"
40 #include "tree-iterator.h"
41 #include "predict.h"
42 #include "hard-reg-set.h"
43 #include "input.h"
44 #include "function.h"
45 #include "basic-block.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-expr.h"
49 #include "is-a.h"
50 #include "gimple.h"
51 #include "gimplify.h"
52 #include "flags.h"
53 #include "splay-tree.h"
54 #include "target.h"
55 #include "c-family/c-ubsan.h"
56 #include "cilk.h"
57 #include "gimplify.h"
58 #include "gimple-expr.h"
60 /* Forward declarations. */
/* cp_genericize_r is the walk_tree callback used throughout this file to
   lower C++ trees to GENERIC; cp_fold folds a tree, taking a hash_map that
   is presumably a memoization cache (see cp_genericize_data::fold_hash —
   TODO confirm against cp_fold's definition, not visible in this chunk).  */
62 static tree cp_genericize_r (tree *, int *, void *);
63 static void cp_genericize_tree (tree*);
64 static tree cp_fold (tree, hash_map<tree, tree> *);
66 /* Local declarations. */
/* Index values for bc_label[]: one stack per statement kind.  */
68 enum bc_t { bc_break = 0, bc_continue = 1 };
70 /* Stack of labels which are targets for "break" or "continue",
71 linked through TREE_CHAIN. */
72 static tree bc_label[2];
74 /* Begin a scope which can be exited by a break or continue statement. BC
75 indicates which.
77 Just creates a label with location LOCATION and pushes it into the current
78 context. */
80 static tree
81 begin_bc_block (enum bc_t bc, location_t location)
83 tree label = create_artificial_label (location);
84 DECL_CHAIN (label) = bc_label[bc];
85 bc_label[bc] = label;
86 if (bc == bc_break)
87 LABEL_DECL_BREAK (label) = true;
88 else
89 LABEL_DECL_CONTINUE (label) = true;
90 return label;
93 /* Finish a scope which can be exited by a break or continue statement.
94 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
95 an expression for the contents of the scope.
97 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
98 BLOCK. Otherwise, just forget the label. */
100 static void
101 finish_bc_block (tree *block, enum bc_t bc, tree label)
103 gcc_assert (label == bc_label[bc]);
105 if (TREE_USED (label))
106 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
107 block);
109 bc_label[bc] = DECL_CHAIN (label);
110 DECL_CHAIN (label) = NULL_TREE;
113 /* Get the LABEL_EXPR to represent a break or continue statement
114 in the current block scope. BC indicates which. */
116 static tree
117 get_bc_label (enum bc_t bc)
119 tree label = bc_label[bc];
121 /* Mark the label used for finish_bc_block. */
122 TREE_USED (label) = 1;
123 return label;
126 /* Genericize a TRY_BLOCK. */
128 static void
129 genericize_try_block (tree *stmt_p)
131 tree body = TRY_STMTS (*stmt_p);
132 tree cleanup = TRY_HANDLERS (*stmt_p);
134 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
137 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
139 static void
140 genericize_catch_block (tree *stmt_p)
142 tree type = HANDLER_TYPE (*stmt_p);
143 tree body = HANDLER_BODY (*stmt_p);
145 /* FIXME should the caught type go in TREE_TYPE? */
146 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
149 /* A terser interface for building a representation of an exception
150 specification. */
152 static tree
153 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
155 tree t;
157 /* FIXME should the allowed types go in TREE_TYPE? */
158 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
159 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
161 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
162 append_to_statement_list (body, &TREE_OPERAND (t, 0));
164 return t;
167 /* Genericize an EH_SPEC_BLOCK by converting it to a
168 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
170 static void
171 genericize_eh_spec_block (tree *stmt_p)
173 tree body = EH_SPEC_STMTS (*stmt_p);
174 tree allowed = EH_SPEC_RAISES (*stmt_p);
175 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
177 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
178 TREE_NO_WARNING (*stmt_p) = true;
179 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
182 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
184 static void
185 genericize_if_stmt (tree *stmt_p, hash_map<tree, tree> *fold_hash)
187 tree stmt, cond, then_, else_;
188 location_t locus = EXPR_LOCATION (*stmt_p);
190 stmt = *stmt_p;
191 cond = cp_fold (IF_COND (stmt), fold_hash);
192 then_ = THEN_CLAUSE (stmt);
193 else_ = ELSE_CLAUSE (stmt);
195 if (!then_)
196 then_ = build_empty_stmt (locus);
197 if (!else_)
198 else_ = build_empty_stmt (locus);
200 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
201 stmt = then_;
202 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
203 stmt = else_;
204 else
205 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
206 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
207 SET_EXPR_LOCATION (stmt, locus);
209 *stmt_p = cp_fold (stmt, fold_hash);
212 /* Build a generic representation of one of the C loop forms. COND is the
213 loop condition or NULL_TREE. BODY is the (possibly compound) statement
214 controlled by the loop. INCR is the increment expression of a for-loop,
215 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
216 evaluated before the loop body as in while and for loops, or after the
217 loop body as in do-while loops. */
219 static void
220 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
221 tree incr, bool cond_is_first, int *walk_subtrees,
222 void *data)
224 tree blab, clab;
225 tree exit = NULL;
226 tree stmt_list = NULL;
228 blab = begin_bc_block (bc_break, start_locus);
229 clab = begin_bc_block (bc_continue, start_locus);
231 if (incr && EXPR_P (incr))
232 SET_EXPR_LOCATION (incr, start_locus);
234 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
235 cp_walk_tree (&body, cp_genericize_r, data, NULL);
236 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
237 *walk_subtrees = 0;
239 if (cond && TREE_CODE (cond) != INTEGER_CST)
241 /* If COND is constant, don't bother building an exit. If it's false,
242 we won't build a loop. If it's true, any exits are in the body. */
243 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
244 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
245 get_bc_label (bc_break));
246 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
247 build_empty_stmt (cloc), exit);
250 if (exit && cond_is_first)
251 append_to_statement_list (exit, &stmt_list);
252 append_to_statement_list (body, &stmt_list);
253 finish_bc_block (&stmt_list, bc_continue, clab);
254 append_to_statement_list (incr, &stmt_list);
255 if (exit && !cond_is_first)
256 append_to_statement_list (exit, &stmt_list);
258 if (!stmt_list)
259 stmt_list = build_empty_stmt (start_locus);
261 tree loop;
262 if (cond && integer_zerop (cond))
264 if (cond_is_first)
265 loop = fold_build3_loc (start_locus, COND_EXPR,
266 void_type_node, cond, stmt_list,
267 build_empty_stmt (start_locus));
268 else
269 loop = stmt_list;
271 else
272 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
274 stmt_list = NULL;
275 append_to_statement_list (loop, &stmt_list);
276 finish_bc_block (&stmt_list, bc_break, blab);
277 if (!stmt_list)
278 stmt_list = build_empty_stmt (start_locus);
280 *stmt_p = stmt_list;
283 /* Genericize a FOR_STMT node *STMT_P. */
285 static void
286 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
288 tree stmt = *stmt_p;
289 tree expr = NULL;
290 tree loop;
291 tree init = FOR_INIT_STMT (stmt);
293 if (init)
295 cp_walk_tree (&init, cp_genericize_r, data, NULL);
296 append_to_statement_list (init, &expr);
299 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
300 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
301 append_to_statement_list (loop, &expr);
302 if (expr == NULL_TREE)
303 expr = loop;
304 *stmt_p = expr;
307 /* Genericize a WHILE_STMT node *STMT_P. */
309 static void
310 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
312 tree stmt = *stmt_p;
313 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
314 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
317 /* Genericize a DO_STMT node *STMT_P. */
319 static void
320 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
322 tree stmt = *stmt_p;
323 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
324 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
327 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
329 static void
330 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
332 tree stmt = *stmt_p;
333 tree break_block, body, cond, type;
334 location_t stmt_locus = EXPR_LOCATION (stmt);
336 break_block = begin_bc_block (bc_break, stmt_locus);
338 body = SWITCH_STMT_BODY (stmt);
339 if (!body)
340 body = build_empty_stmt (stmt_locus);
341 cond = SWITCH_STMT_COND (stmt);
342 type = SWITCH_STMT_TYPE (stmt);
344 cp_walk_tree (&body, cp_genericize_r, data, NULL);
345 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
346 cp_walk_tree (&type, cp_genericize_r, data, NULL);
347 *walk_subtrees = 0;
349 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
350 finish_bc_block (stmt_p, bc_break, break_block);
353 /* Genericize a CONTINUE_STMT node *STMT_P. */
355 static void
356 genericize_continue_stmt (tree *stmt_p)
358 tree stmt_list = NULL;
359 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
360 tree label = get_bc_label (bc_continue);
361 location_t location = EXPR_LOCATION (*stmt_p);
362 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
363 append_to_statement_list (pred, &stmt_list);
364 append_to_statement_list (jump, &stmt_list);
365 *stmt_p = stmt_list;
368 /* Genericize a BREAK_STMT node *STMT_P. */
370 static void
371 genericize_break_stmt (tree *stmt_p)
373 tree label = get_bc_label (bc_break);
374 location_t location = EXPR_LOCATION (*stmt_p);
375 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
378 /* Genericize a OMP_FOR node *STMT_P. */
380 static void
381 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
383 tree stmt = *stmt_p;
384 location_t locus = EXPR_LOCATION (stmt);
385 tree clab = begin_bc_block (bc_continue, locus);
387 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
388 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
389 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
390 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
391 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
392 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
393 *walk_subtrees = 0;
395 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
398 /* Hook into the middle of gimplifying an OMP_FOR node. */
400 static enum gimplify_status
401 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
403 tree for_stmt = *expr_p;
404 gimple_seq seq = NULL;
406 /* Protect ourselves from recursion. */
407 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
408 return GS_UNHANDLED;
409 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
411 gimplify_and_add (for_stmt, &seq);
412 gimple_seq_add_seq (pre_p, seq);
414 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
416 return GS_ALL_DONE;
419 /* Gimplify an EXPR_STMT node. */
421 static void
422 gimplify_expr_stmt (tree *stmt_p)
424 tree stmt = EXPR_STMT_EXPR (*stmt_p);
426 if (stmt == error_mark_node)
427 stmt = NULL;
429 /* Gimplification of a statement expression will nullify the
430 statement if all its side effects are moved to *PRE_P and *POST_P.
432 In this case we will not want to emit the gimplified statement.
433 However, we may still want to emit a warning, so we do that before
434 gimplification. */
435 if (stmt && warn_unused_value)
437 if (!TREE_SIDE_EFFECTS (stmt))
439 if (!IS_EMPTY_STMT (stmt)
440 && !VOID_TYPE_P (TREE_TYPE (stmt))
441 && !TREE_NO_WARNING (stmt))
442 warning (OPT_Wunused_value, "statement with no effect");
444 else
445 warn_if_unused_value (stmt, input_location);
448 if (stmt == NULL_TREE)
449 stmt = alloc_stmt_list ();
451 *stmt_p = stmt;
454 /* Gimplify initialization from an AGGR_INIT_EXPR. */
/* NOTE(review): *expr_p is an INIT_EXPR; operand 0 is the target, operand 1
   the initializer.  Brace-only lines were lost in this extraction — the code
   tokens below are kept untouched.  */
456 static void
457 cp_gimplify_init_expr (tree *expr_p)
459 tree from = TREE_OPERAND (*expr_p, 1);
460 tree to = TREE_OPERAND (*expr_p, 0);
461 tree t;
463 /* What about code that pulls out the temp and uses it elsewhere? I
464 think that such code never uses the TARGET_EXPR as an initializer. If
465 I'm wrong, we'll abort because the temp won't have any RTL. In that
466 case, I guess we'll need to replace references somehow. */
467 if (TREE_CODE (from) == TARGET_EXPR)
468 from = TARGET_EXPR_INITIAL (from);
470 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
471 inside the TARGET_EXPR. */
/* Walk down the chain of COMPOUND_EXPRs; SUB is the interesting
   sub-expression at each step, T the remaining chain.  */
472 for (t = from; t; )
474 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
476 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
477 replace the slot operand with our target.
479 Should we add a target parm to gimplify_expr instead? No, as in this
480 case we want to replace the INIT_EXPR. */
481 if (TREE_CODE (sub) == AGGR_INIT_EXPR
482 || TREE_CODE (sub) == VEC_INIT_EXPR)
484 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
485 AGGR_INIT_EXPR_SLOT (sub) = to;
486 else
487 VEC_INIT_EXPR_SLOT (sub) = to;
488 *expr_p = from;
490 /* The initialization is now a side-effect, so the container can
491 become void. */
492 if (from != sub)
493 TREE_TYPE (from) = void_type_node;
496 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
497 /* Handle aggregate NSDMI. */
498 replace_placeholders (sub, to);
/* Terminate once SUB is the final (non-COMPOUND_EXPR) expression;
   otherwise continue with the second operand of the chain.  */
500 if (t == sub)
501 break;
502 else
503 t = TREE_OPERAND (t, 1);
508 /* Gimplify a MUST_NOT_THROW_EXPR. */
510 static enum gimplify_status
511 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
513 tree stmt = *expr_p;
514 tree temp = voidify_wrapper_expr (stmt, NULL);
515 tree body = TREE_OPERAND (stmt, 0);
516 gimple_seq try_ = NULL;
517 gimple_seq catch_ = NULL;
518 gimple mnt;
520 gimplify_and_add (body, &try_);
521 mnt = gimple_build_eh_must_not_throw (terminate_node);
522 gimple_seq_add_stmt_without_update (&catch_, mnt);
523 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
525 gimple_seq_add_stmt_without_update (pre_p, mnt);
526 if (temp)
528 *expr_p = temp;
529 return GS_OK;
532 *expr_p = NULL;
533 return GS_ALL_DONE;
536 /* Return TRUE if an operand (OP) of a given TYPE being copied is
537 really just an empty class copy.
539 Check that the operand has a simple form so that TARGET_EXPRs and
540 non-empty CONSTRUCTORs get reduced properly, and we leave the
541 return slot optimization alone because it isn't a copy. */
543 static bool
544 simple_empty_class_p (tree type, tree op)
546 return
547 ((TREE_CODE (op) == COMPOUND_EXPR
548 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
549 || is_gimple_lvalue (op)
550 || INDIRECT_REF_P (op)
551 || (TREE_CODE (op) == CONSTRUCTOR
552 && CONSTRUCTOR_NELTS (op) == 0
553 && !TREE_CLOBBER_P (op))
554 || (TREE_CODE (op) == CALL_EXPR
555 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
556 && is_really_empty_class (type);
559 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
/* NOTE(review): the extraction lost the return-type line ("int" in the
   original, per gimplify_expr's lang hook convention — TODO confirm) and all
   brace-only lines; code tokens below are kept untouched.  Lowers one C++
   tree node in *EXPR_P toward GIMPLE, returning a gimplify_status.  */
562 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
564 int saved_stmts_are_full_exprs_p = 0;
565 enum tree_code code;
566 enum gimplify_status ret;
/* For statement codes, temporarily install the statement's own
   full-expression flag, restored at the end of the function.  */
568 code = TREE_CODE (*expr_p);
569 if (STATEMENT_CODE_P (code))
571 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
572 current_stmt_tree ()->stmts_are_full_exprs_p
573 = STMT_IS_FULL_EXPR_P (*expr_p);
576 switch (code)
/* Fold sizeof/alignof now; a failed fold degrades to size_one_node.  */
578 case SIZEOF_EXPR:
579 if (SIZEOF_EXPR_TYPE_P (*expr_p))
580 *expr_p = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (*expr_p,
581 0)),
582 SIZEOF_EXPR, false);
583 else if (TYPE_P (TREE_OPERAND (*expr_p, 0)))
584 *expr_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (*expr_p, 0),
585 SIZEOF_EXPR, false);
586 else
587 *expr_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (*expr_p, 0),
588 SIZEOF_EXPR, false);
589 if (*expr_p == error_mark_node)
590 *expr_p = size_one_node;
592 *expr_p = maybe_constant_value (*expr_p);
593 ret = GS_OK;
594 break;
595 case PTRMEM_CST:
596 *expr_p = cplus_expand_constant (*expr_p);
597 ret = GS_OK;
598 break;
600 case AGGR_INIT_EXPR:
601 simplify_aggr_init_expr (expr_p);
602 ret = GS_OK;
603 break;
/* Expand a VEC_INIT_EXPR into a call to build_vec_init, then genericize
   the result under the expression's own location.  */
605 case VEC_INIT_EXPR:
607 location_t loc = input_location;
608 tree init = VEC_INIT_EXPR_INIT (*expr_p);
609 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
610 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
611 input_location = EXPR_LOCATION (*expr_p);
612 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
613 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
614 from_array,
615 tf_warning_or_error);
616 cp_genericize_tree (expr_p);
617 ret = GS_OK;
618 input_location = loc;
620 break;
622 case THROW_EXPR:
623 /* FIXME communicate throw type to back end, probably by moving
624 THROW_EXPR into ../tree.def. */
625 *expr_p = TREE_OPERAND (*expr_p, 0);
626 ret = GS_OK;
627 break;
629 case MUST_NOT_THROW_EXPR:
630 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
631 break;
633 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
634 LHS of an assignment might also be involved in the RHS, as in bug
635 25979. */
636 case INIT_EXPR:
637 if (fn_contains_cilk_spawn_p (cfun)
638 && cilk_detect_spawn_and_unwrap (expr_p)
639 && !seen_error ())
640 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
641 cp_gimplify_init_expr (expr_p);
642 if (TREE_CODE (*expr_p) != INIT_EXPR)
643 return GS_OK;
644 /* Otherwise fall through. */
645 case MODIFY_EXPR:
646 modify_expr_case:
648 if (fn_contains_cilk_spawn_p (cfun)
649 && cilk_detect_spawn_and_unwrap (expr_p)
650 && !seen_error ())
651 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
653 /* If the back end isn't clever enough to know that the lhs and rhs
654 types are the same, add an explicit conversion. */
655 tree op0 = TREE_OPERAND (*expr_p, 0);
656 tree op1 = TREE_OPERAND (*expr_p, 1);
658 if (!error_operand_p (op0)
659 && !error_operand_p (op1)
660 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
661 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
662 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
663 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
664 TREE_TYPE (op0), op1);
666 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
668 /* Remove any copies of empty classes. Also drop volatile
669 variables on the RHS to avoid infinite recursion from
670 gimplify_expr trying to load the value. */
671 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
672 is_gimple_lvalue, fb_lvalue);
673 if (TREE_SIDE_EFFECTS (op1))
675 if (TREE_THIS_VOLATILE (op1)
676 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
677 op1 = build_fold_addr_expr (op1);
679 gimplify_and_add (op1, pre_p);
681 *expr_p = TREE_OPERAND (*expr_p, 0);
684 ret = GS_OK;
685 break;
687 case EMPTY_CLASS_EXPR:
688 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
689 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
690 ret = GS_OK;
691 break;
693 case BASELINK:
694 *expr_p = BASELINK_FUNCTIONS (*expr_p);
695 ret = GS_OK;
696 break;
698 case TRY_BLOCK:
699 genericize_try_block (expr_p);
700 ret = GS_OK;
701 break;
703 case HANDLER:
704 genericize_catch_block (expr_p);
705 ret = GS_OK;
706 break;
708 case EH_SPEC_BLOCK:
709 genericize_eh_spec_block (expr_p);
710 ret = GS_OK;
711 break;
/* These statement forms are lowered earlier, during genericization, so
   reaching them here indicates a front-end bug.  */
713 case USING_STMT:
714 gcc_unreachable ();
716 case FOR_STMT:
717 case WHILE_STMT:
718 case DO_STMT:
719 case SWITCH_STMT:
720 case CONTINUE_STMT:
721 case BREAK_STMT:
722 gcc_unreachable ();
724 case OMP_FOR:
725 case OMP_SIMD:
726 case OMP_DISTRIBUTE:
727 ret = cp_gimplify_omp_for (expr_p, pre_p);
728 break;
730 case EXPR_STMT:
731 gimplify_expr_stmt (expr_p);
732 ret = GS_OK;
733 break;
735 case UNARY_PLUS_EXPR:
737 tree arg = TREE_OPERAND (*expr_p, 0);
738 tree type = TREE_TYPE (*expr_p);
739 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
740 : arg;
741 ret = GS_OK;
743 break;
745 case CILK_SPAWN_STMT:
746 gcc_assert
747 (fn_contains_cilk_spawn_p (cfun)
748 && cilk_detect_spawn_and_unwrap (expr_p));
750 /* If errors are seen, then just process it as a CALL_EXPR. */
751 if (!seen_error ())
752 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
754 case CALL_EXPR:
755 if (fn_contains_cilk_spawn_p (cfun)
756 && cilk_detect_spawn_and_unwrap (expr_p)
757 && !seen_error ())
758 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
760 /* DR 1030 says that we need to evaluate the elements of an
761 initializer-list in forward order even when it's used as arguments to
762 a constructor. So if the target wants to evaluate them in reverse
763 order and there's more than one argument other than 'this', gimplify
764 them in order. */
765 ret = GS_OK;
766 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
767 && call_expr_nargs (*expr_p) > 2)
769 int nargs = call_expr_nargs (*expr_p);
770 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
/* Argument 0 is 'this'; gimplify the rest left-to-right.  */
771 for (int i = 1; i < nargs; ++i)
773 enum gimplify_status t
774 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
775 if (t == GS_ERROR)
776 ret = GS_ERROR;
779 break;
781 case RETURN_EXPR:
782 if (TREE_OPERAND (*expr_p, 0)
783 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
784 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
786 expr_p = &TREE_OPERAND (*expr_p, 0);
787 code = TREE_CODE (*expr_p);
788 /* Avoid going through the INIT_EXPR case, which can
789 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
790 goto modify_expr_case;
792 /* Fall through. */
794 default:
795 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
796 break;
799 /* Restore saved state. */
800 if (STATEMENT_CODE_P (code))
801 current_stmt_tree ()->stmts_are_full_exprs_p
802 = saved_stmts_are_full_exprs_p;
804 return ret;
807 static inline bool
808 is_invisiref_parm (const_tree t)
810 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
811 && DECL_BY_REFERENCE (t));
814 /* Return true if the uid in both int tree maps are equal. */
816 bool
817 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
819 return (a->uid == b->uid);
822 /* Hash a UID in a cxx_int_tree_map. */
824 unsigned int
825 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
827 return item->uid;
830 /* A stable comparison routine for use with splay trees and DECLs. */
832 static int
833 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
835 tree a = (tree) xa;
836 tree b = (tree) xb;
838 return DECL_UID (a) - DECL_UID (b);
841 /* OpenMP context during genericization. */
843 struct cp_genericize_omp_taskreg
845 bool is_parallel;
846 bool default_shared;
847 struct cp_genericize_omp_taskreg *outer;
848 splay_tree variables;
851 /* Return true if genericization should try to determine if
852 DECL is firstprivate or shared within task regions. */
854 static bool
855 omp_var_to_track (tree decl)
857 tree type = TREE_TYPE (decl);
858 if (is_invisiref_parm (decl))
859 type = TREE_TYPE (type);
860 while (TREE_CODE (type) == ARRAY_TYPE)
861 type = TREE_TYPE (type);
862 if (type == error_mark_node || !CLASS_TYPE_P (type))
863 return false;
864 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
865 return false;
866 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
867 return false;
868 return true;
871 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
/* NOTE(review): on first sight of DECL in OMP_CTX, decide whether it is
   implicitly shared or firstprivate and cache that in OMP_CTX->variables.
   Brace-only lines were lost in this extraction; code tokens below are kept
   untouched.  */
873 static void
874 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
876 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
877 (splay_tree_key) decl);
878 if (n == NULL)
880 int flags = OMP_CLAUSE_DEFAULT_SHARED;
/* Propagate the use outward first, so outer regions record DECL too.  */
881 if (omp_ctx->outer)
882 omp_cxx_notice_variable (omp_ctx->outer, decl);
883 if (!omp_ctx->default_shared)
885 struct cp_genericize_omp_taskreg *octx;
/* Scan outward for an explicit non-shared determination; stop at the
   innermost enclosing parallel region.  */
887 for (octx = omp_ctx->outer; octx; octx = octx->outer)
889 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
890 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
892 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
893 break;
895 if (octx->is_parallel)
896 break;
/* Function-local automatics and parameters default to firstprivate
   when no enclosing region determined otherwise.  */
898 if (octx == NULL
899 && (TREE_CODE (decl) == PARM_DECL
900 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
901 && DECL_CONTEXT (decl) == current_function_decl)))
902 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
903 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
905 /* DECL is implicitly determined firstprivate in
906 the current task construct. Ensure copy ctor and
907 dtor are instantiated, because during gimplification
908 it will be already too late. */
909 tree type = TREE_TYPE (decl);
910 if (is_invisiref_parm (decl))
911 type = TREE_TYPE (type);
912 while (TREE_CODE (type) == ARRAY_TYPE)
913 type = TREE_TYPE (type);
914 get_copy_ctor (type, tf_none);
915 get_dtor (type, tf_none);
918 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
922 /* Genericization context. */
924 struct cp_genericize_data
926 hash_set<tree> *p_set;
927 vec<tree> bind_expr_stack;
928 struct cp_genericize_omp_taskreg *omp_ctx;
929 tree try_block;
930 hash_map<tree, tree> *fold_hash;
933 /* Perform any pre-gimplification lowering of C++ front end trees to
934 GENERIC. */
936 static tree
937 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
939 tree stmt;
940 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
941 hash_set<tree> *p_set = wtd->p_set;
943 *stmt_p = stmt = cp_fold (*stmt_p, wtd->fold_hash);
945 /* If in an OpenMP context, note var uses. */
946 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
947 && (VAR_P (stmt)
948 || TREE_CODE (stmt) == PARM_DECL
949 || TREE_CODE (stmt) == RESULT_DECL)
950 && omp_var_to_track (stmt))
951 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
953 if (is_invisiref_parm (stmt)
954 /* Don't dereference parms in a thunk, pass the references through. */
955 && !(DECL_THUNK_P (current_function_decl)
956 && TREE_CODE (stmt) == PARM_DECL))
958 *stmt_p = convert_from_reference (stmt);
959 *walk_subtrees = 0;
960 return NULL;
963 /* Map block scope extern declarations to visible declarations with the
964 same name and type in outer scopes if any. */
965 if (cp_function_chain->extern_decl_map
966 && VAR_OR_FUNCTION_DECL_P (stmt)
967 && DECL_EXTERNAL (stmt))
969 struct cxx_int_tree_map *h, in;
970 in.uid = DECL_UID (stmt);
971 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
972 if (h)
974 *stmt_p = h->to;
975 *walk_subtrees = 0;
976 return NULL;
980 /* Other than invisiref parms, don't walk the same tree twice. */
981 if (p_set->contains (stmt))
983 *walk_subtrees = 0;
984 return NULL_TREE;
987 if (TREE_CODE (stmt) == ADDR_EXPR
988 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
990 /* If in an OpenMP context, note var uses. */
991 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
992 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
993 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
994 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
995 *walk_subtrees = 0;
997 else if (TREE_CODE (stmt) == RETURN_EXPR
998 && TREE_OPERAND (stmt, 0)
999 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1000 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1001 *walk_subtrees = 0;
1002 else if (TREE_CODE (stmt) == OMP_CLAUSE)
1003 switch (OMP_CLAUSE_CODE (stmt))
1005 case OMP_CLAUSE_LASTPRIVATE:
1006 /* Don't dereference an invisiref in OpenMP clauses. */
1007 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1009 *walk_subtrees = 0;
1010 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1011 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1012 cp_genericize_r, data, NULL);
1014 break;
1015 case OMP_CLAUSE_PRIVATE:
1016 /* Don't dereference an invisiref in OpenMP clauses. */
1017 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1018 *walk_subtrees = 0;
1019 else if (wtd->omp_ctx != NULL)
1021 /* Private clause doesn't cause any references to the
1022 var in outer contexts, avoid calling
1023 omp_cxx_notice_variable for it. */
1024 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1025 wtd->omp_ctx = NULL;
1026 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1027 data, NULL);
1028 wtd->omp_ctx = old;
1029 *walk_subtrees = 0;
1031 break;
1032 case OMP_CLAUSE_SHARED:
1033 case OMP_CLAUSE_FIRSTPRIVATE:
1034 case OMP_CLAUSE_COPYIN:
1035 case OMP_CLAUSE_COPYPRIVATE:
1036 /* Don't dereference an invisiref in OpenMP clauses. */
1037 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1038 *walk_subtrees = 0;
1039 break;
1040 case OMP_CLAUSE_REDUCTION:
1041 /* Don't dereference an invisiref in reduction clause's
1042 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1043 still needs to be genericized. */
1044 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1046 *walk_subtrees = 0;
1047 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1048 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1049 cp_genericize_r, data, NULL);
1050 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1051 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1052 cp_genericize_r, data, NULL);
1054 break;
1055 default:
1056 break;
1058 else if (IS_TYPE_OR_DECL_P (stmt))
1059 *walk_subtrees = 0;
1061 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1062 to lower this construct before scanning it, so we need to lower these
1063 before doing anything else. */
1064 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1065 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1066 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1067 : TRY_FINALLY_EXPR,
1068 void_type_node,
1069 CLEANUP_BODY (stmt),
1070 CLEANUP_EXPR (stmt));
1072 else if (TREE_CODE (stmt) == IF_STMT)
1074 genericize_if_stmt (stmt_p, wtd->fold_hash);
1075 /* *stmt_p has changed, tail recurse to handle it again. */
1076 return cp_genericize_r (stmt_p, walk_subtrees, data);
1079 /* COND_EXPR might have incompatible types in branches if one or both
1080 arms are bitfields. Fix it up now. */
1081 else if (TREE_CODE (stmt) == COND_EXPR)
1083 tree type_left
1084 = (TREE_OPERAND (stmt, 1)
1085 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1086 : NULL_TREE);
1087 tree type_right
1088 = (TREE_OPERAND (stmt, 2)
1089 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1090 : NULL_TREE);
1091 if (type_left
1092 && !useless_type_conversion_p (TREE_TYPE (stmt),
1093 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1095 TREE_OPERAND (stmt, 1)
1096 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1097 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1098 type_left));
1100 if (type_right
1101 && !useless_type_conversion_p (TREE_TYPE (stmt),
1102 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1104 TREE_OPERAND (stmt, 2)
1105 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1106 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1107 type_right));
1111 else if (TREE_CODE (stmt) == BIND_EXPR)
1113 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1115 tree decl;
1116 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1117 if (VAR_P (decl)
1118 && !DECL_EXTERNAL (decl)
1119 && omp_var_to_track (decl))
1121 splay_tree_node n
1122 = splay_tree_lookup (wtd->omp_ctx->variables,
1123 (splay_tree_key) decl);
1124 if (n == NULL)
1125 splay_tree_insert (wtd->omp_ctx->variables,
1126 (splay_tree_key) decl,
1127 TREE_STATIC (decl)
1128 ? OMP_CLAUSE_DEFAULT_SHARED
1129 : OMP_CLAUSE_DEFAULT_PRIVATE);
1132 wtd->bind_expr_stack.safe_push (stmt);
1133 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1134 cp_genericize_r, data, NULL);
1135 wtd->bind_expr_stack.pop ();
1138 else if (TREE_CODE (stmt) == USING_STMT)
1140 tree block = NULL_TREE;
1142 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1143 BLOCK, and append an IMPORTED_DECL to its
1144 BLOCK_VARS chained list. */
1145 if (wtd->bind_expr_stack.exists ())
1147 int i;
1148 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1149 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1150 break;
1152 if (block)
1154 tree using_directive;
1155 gcc_assert (TREE_OPERAND (stmt, 0));
1157 using_directive = make_node (IMPORTED_DECL);
1158 TREE_TYPE (using_directive) = void_type_node;
1160 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1161 = TREE_OPERAND (stmt, 0);
1162 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1163 BLOCK_VARS (block) = using_directive;
1165 /* The USING_STMT won't appear in GENERIC. */
1166 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1167 *walk_subtrees = 0;
1170 else if (TREE_CODE (stmt) == DECL_EXPR
1171 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1173 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1174 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1175 *walk_subtrees = 0;
1177 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1179 struct cp_genericize_omp_taskreg omp_ctx;
1180 tree c, decl;
1181 splay_tree_node n;
1183 *walk_subtrees = 0;
1184 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1185 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1186 omp_ctx.default_shared = omp_ctx.is_parallel;
1187 omp_ctx.outer = wtd->omp_ctx;
1188 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1189 wtd->omp_ctx = &omp_ctx;
1190 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1191 switch (OMP_CLAUSE_CODE (c))
1193 case OMP_CLAUSE_SHARED:
1194 case OMP_CLAUSE_PRIVATE:
1195 case OMP_CLAUSE_FIRSTPRIVATE:
1196 case OMP_CLAUSE_LASTPRIVATE:
1197 decl = OMP_CLAUSE_DECL (c);
1198 if (decl == error_mark_node || !omp_var_to_track (decl))
1199 break;
1200 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1201 if (n != NULL)
1202 break;
1203 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1204 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1205 ? OMP_CLAUSE_DEFAULT_SHARED
1206 : OMP_CLAUSE_DEFAULT_PRIVATE);
1207 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1208 && omp_ctx.outer)
1209 omp_cxx_notice_variable (omp_ctx.outer, decl);
1210 break;
1211 case OMP_CLAUSE_DEFAULT:
1212 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1213 omp_ctx.default_shared = true;
1214 default:
1215 break;
1217 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1218 wtd->omp_ctx = omp_ctx.outer;
1219 splay_tree_delete (omp_ctx.variables);
1221 else if (TREE_CODE (stmt) == TRY_BLOCK)
1223 *walk_subtrees = 0;
1224 tree try_block = wtd->try_block;
1225 wtd->try_block = stmt;
1226 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1227 wtd->try_block = try_block;
1228 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1230 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1232 /* MUST_NOT_THROW_COND might be something else with TM. */
1233 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1235 *walk_subtrees = 0;
1236 tree try_block = wtd->try_block;
1237 wtd->try_block = stmt;
1238 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1239 wtd->try_block = try_block;
1242 else if (TREE_CODE (stmt) == THROW_EXPR)
1244 location_t loc = location_of (stmt);
1245 if (TREE_NO_WARNING (stmt))
1246 /* Never mind. */;
1247 else if (wtd->try_block)
1249 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1250 && warning_at (loc, OPT_Wterminate,
1251 "throw will always call terminate()")
1252 && cxx_dialect >= cxx11
1253 && DECL_DESTRUCTOR_P (current_function_decl))
1254 inform (loc, "in C++11 destructors default to noexcept");
1256 else
1258 if (warn_cxx0x_compat && cxx_dialect < cxx11
1259 && DECL_DESTRUCTOR_P (current_function_decl)
1260 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1261 == NULL_TREE)
1262 && (get_defaulted_eh_spec (current_function_decl)
1263 == empty_except_spec))
1264 warning_at (loc, OPT_Wc__0x_compat,
1265 "in C++11 this throw will terminate because "
1266 "destructors default to noexcept");
1269 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1270 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1271 else if (TREE_CODE (stmt) == FOR_STMT)
1272 genericize_for_stmt (stmt_p, walk_subtrees, data);
1273 else if (TREE_CODE (stmt) == WHILE_STMT)
1274 genericize_while_stmt (stmt_p, walk_subtrees, data);
1275 else if (TREE_CODE (stmt) == DO_STMT)
1276 genericize_do_stmt (stmt_p, walk_subtrees, data);
1277 else if (TREE_CODE (stmt) == SWITCH_STMT)
1278 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1279 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1280 genericize_continue_stmt (stmt_p);
1281 else if (TREE_CODE (stmt) == BREAK_STMT)
1282 genericize_break_stmt (stmt_p);
1283 else if (TREE_CODE (stmt) == OMP_FOR
1284 || TREE_CODE (stmt) == OMP_SIMD
1285 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1286 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1287 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1289 if (SIZEOF_EXPR_TYPE_P (stmt))
1290 *stmt_p
1291 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1292 SIZEOF_EXPR, false);
1293 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1294 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1295 SIZEOF_EXPR, false);
1296 else
1297 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1298 SIZEOF_EXPR, false);
1299 if (*stmt_p == error_mark_node)
1300 *stmt_p = size_one_node;
1301 *stmt_p = cp_fold (*stmt_p, wtd->fold_hash);
1302 return NULL;
1304 else if (flag_sanitize
1305 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1307 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1308 && TREE_CODE (stmt) == NOP_EXPR
1309 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1310 ubsan_maybe_instrument_reference (stmt);
1311 else if (TREE_CODE (stmt) == CALL_EXPR)
1313 tree fn = CALL_EXPR_FN (stmt);
1314 if (fn != NULL_TREE
1315 && !error_operand_p (fn)
1316 && POINTER_TYPE_P (TREE_TYPE (fn))
1317 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1319 bool is_ctor
1320 = TREE_CODE (fn) == ADDR_EXPR
1321 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1322 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1323 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1324 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1325 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1326 cp_ubsan_maybe_instrument_member_call (stmt);
1331 p_set->add (*stmt_p);
1333 return NULL;
1336 /* Lower C++ front end trees to GENERIC in T_P. */
1338 static void
1339 cp_genericize_tree (tree* t_p)
1341 struct cp_genericize_data wtd;
1343 wtd.fold_hash = new hash_map<tree, tree>;
1344 wtd.p_set = new hash_set<tree>;
1345 wtd.bind_expr_stack.create (0);
1346 wtd.omp_ctx = NULL;
1347 wtd.try_block = NULL_TREE;
1348 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1349 delete wtd.fold_hash;
1350 delete wtd.p_set;
1351 wtd.bind_expr_stack.release ();
1352 if (flag_sanitize & SANITIZE_VPTR)
1353 cp_ubsan_instrument_member_accesses (t_p);
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void functions, constructors/destructors, or
     when the target hook says a missing return is not warnable.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk down to the last statement of the function body, looking
     through BIND_EXPR bodies, TRY_FINALLY_EXPR protected operands and
     the last element of STATEMENT_LISTs.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          /* Function obviously ends with a return; nothing to add.  */
          return;
        default:
          break;
        }
      break;
    }
  /* An empty body cannot be instrumented.  */
  if (t == NULL_TREE)
    return;
  /* Append the ubsan "missing return" check after the last statement
     of the outermost BIND_EXPR's STATEMENT_LIST body.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
/* Genericize FNDECL: fix up parameters and the return value passed by
   invisible reference, expand Cilk Plus array notations, lower the
   C++-specific trees to GENERIC and finally run the shared C
   genericization.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* With -fsanitize=return, verify at runtime that the function does
     not fall off the end without returning a value.  */
  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been consumed by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip past the `this' parameter (and, if ARG2 is given, the second
     object parameter) to reach FN's default arguments.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit loop that applies FN to each
         element (pair of elements when ARG2 is given), using pointer
         induction variables and a label/conditional-jump backedge.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Peel off all array dimensions to reach the element type and
         the address of the first element.  */
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      /* End pointer: one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointer(s) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      /* Loop backedge: jump to LAB while P1 != END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call of FN on the address(es).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1593 /* Return code to initialize DECL with its default constructor, or
1594 NULL if there's nothing to do. */
1596 tree
1597 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1599 tree info = CP_OMP_CLAUSE_INFO (clause);
1600 tree ret = NULL;
1602 if (info)
1603 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1605 return ret;
1608 /* Return code to initialize DST with a copy constructor from SRC. */
1610 tree
1611 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1613 tree info = CP_OMP_CLAUSE_INFO (clause);
1614 tree ret = NULL;
1616 if (info)
1617 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1618 if (ret == NULL)
1619 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1621 return ret;
1624 /* Similarly, except use an assignment operator instead. */
1626 tree
1627 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1629 tree info = CP_OMP_CLAUSE_INFO (clause);
1630 tree ret = NULL;
1632 if (info)
1633 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1634 if (ret == NULL)
1635 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1637 return ret;
1640 /* Return code to destroy DECL. */
1642 tree
1643 cxx_omp_clause_dtor (tree clause, tree decl)
1645 tree info = CP_OMP_CLAUSE_INFO (clause);
1646 tree ret = NULL;
1648 if (info)
1649 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1651 return ret;
1654 /* True if OpenMP should privatize what this DECL points to rather
1655 than the DECL itself. */
1657 bool
1658 cxx_omp_privatize_by_reference (const_tree decl)
1660 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1661 || is_invisiref_parm (decl));
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only look through the reference for invisible reference parms;
         other reference-typed decls are never predetermined here.  */
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          /* Find the named variable the result decl was substituted
             for and recover its (possibly const) type.  */
          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1706 /* True if OpenMP sharing attribute of DECL is predetermined. */
1708 enum omp_clause_default_kind
1709 cxx_omp_predetermined_sharing (tree decl)
1711 /* Static data members are predetermined shared. */
1712 if (TREE_STATIC (decl))
1714 tree ctx = CP_DECL_CONTEXT (decl);
1715 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1716 return OMP_CLAUSE_DEFAULT_SHARED;
1719 /* Const qualified vars having no mutable member are predetermined
1720 shared. */
1721 if (cxx_omp_const_qual_no_mutable (decl))
1722 return OMP_CLAUSE_DEFAULT_SHARED;
1724 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicitly determined firstprivate clauses need fixing up.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible reference parms are privatized through the
         reference; anything else with reference type is an error.  */
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  /* Demote the clause to shared when privatizing is not possible.  */
  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
1771 tree
1772 cp_fully_fold (tree x)
1774 hash_map<tree, tree> fold_hash;
1775 return cp_fold (x, &fold_hash);
/* Fold expression X, memoizing results in FOLD_HASH keyed on the
   original tree so each distinct tree is folded at most once.
   Returns X itself, or a folded replacement.  Null trees, the
   error_mark_node and constants are returned unchanged.

   NOTE(review): when FOLD_HASH is passed as NULL a fresh hash_map is
   allocated here and never freed -- a leak on that path.  All callers
   visible in this file (cp_fully_fold, cp_genericize_r via
   wtd->fold_hash) pass a non-null map; confirm no other caller relies
   on the NULL path before removing it.  */

static tree
cp_fold (tree x, hash_map<tree, tree> *fold_hash)
{
  tree org_x = x, r = NULL_TREE;
  tree *slot;

  if (x == error_mark_node || !x || CONSTANT_CLASS_P (x))
    return x;

  if (!fold_hash)
    fold_hash = new hash_map<tree, tree>;
  /* Return the cached result if X was folded before.  */
  slot = fold_hash->get (org_x);
  if (slot)
    return *slot;
  switch (TREE_CODE (x))
    {
    case CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      /* Conversions to void are not folded (and not cached).  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;
      /* Fall through.  */
    case SIZEOF_EXPR:
    case ALIGNOF_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    case UNARY_PLUS_EXPR:
    case CLEANUP_POINT_EXPR:
    case INDIRECT_REF:
    case NON_LVALUE_EXPR:
    case RETURN_EXPR:
    case EXPR_STMT:
    case STMT_EXPR:
    case GOTO_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
      /* Unary expressions: fold the operand, rebuild if it changed,
         then try the generic unary folder.  */
      {
        location_t loc = EXPR_LOCATION (x);
        tree op0 = cp_fold (TREE_OPERAND (x, 0), fold_hash);
        if (!op0) op0 = TREE_OPERAND (x, 0);
        if (op0 != TREE_OPERAND (x, 0))
          r = fold_build1_loc (loc, TREE_CODE (x), TREE_TYPE (x), op0);
        if (!r)
          r = fold_unary_loc (loc, TREE_CODE (x), TREE_TYPE (x), op0);
        if (r)
          x = r;
        gcc_assert (TREE_CODE (x) != COND_EXPR
                    || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
        break;
      }

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
      /* Binary expressions: fold both operands, rebuild if either
         changed, then try the generic binary folder.  */
      {
        location_t loc = EXPR_LOCATION (x);
        tree op0 = cp_fold (TREE_OPERAND (x, 0), fold_hash);
        tree op1 = cp_fold (TREE_OPERAND (x, 1), fold_hash);

        if (!op0) op0 = TREE_OPERAND (x, 0);
        if (!op1) op1 = TREE_OPERAND (x, 1);

        /* A COMPOUND_EXPR with a null first operand gets an empty
           statement so the rebuild below stays well-formed.  */
        if (TREE_CODE (x) == COMPOUND_EXPR && op0 == NULL_TREE)
          op0 = build_empty_stmt (loc);

        if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
          r = fold_build2_loc (loc, TREE_CODE (x), TREE_TYPE (x), op0, op1);
        if (!r)
          r = fold_binary_loc (loc, TREE_CODE (x), TREE_TYPE (x), op0, op1);
        if (r)
          x = r;
        /* A COMPOUND_EXPR whose first operand folded away reduces to
           its second operand (not cached).  */
        if (TREE_CODE (x) == COMPOUND_EXPR && TREE_OPERAND (x, 0) == NULL_TREE)
          return TREE_OPERAND (x, 1);
        break;
      }

    case VEC_COND_EXPR:
    case COND_EXPR:
      {
        location_t loc = EXPR_LOCATION (x);
        tree op0 = cp_fold (TREE_OPERAND (x, 0), fold_hash);
        /* A constant condition selects one arm outright, provided the
           discarded arm has no side effects.  */
        if (CONSTANT_CLASS_P (op0))
          {
            if (integer_zerop (op0) && !TREE_SIDE_EFFECTS (TREE_OPERAND (x, 1)))
              x = cp_fold (TREE_OPERAND (x, 2), fold_hash);
            else if (integer_nonzerop (op0) && !TREE_SIDE_EFFECTS (TREE_OPERAND (x, 2)))
              x = cp_fold (TREE_OPERAND (x, 1), fold_hash);
            break;
          }
        /* Don't fold further into void conditionals.  */
        if (VOID_TYPE_P (TREE_TYPE (x)))
          break;

        tree op1 = cp_fold (TREE_OPERAND (x, 1), fold_hash);
        tree op2 = cp_fold (TREE_OPERAND (x, 2), fold_hash);

        /* For COND_EXPR only rebuild with the folded operands; the
           ternary folder is used for VEC_COND_EXPR below.  */
        if (TREE_CODE (x) == COND_EXPR)
          {
            if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1) || op2 != TREE_OPERAND (x, 2))
              x = build3_loc (loc, COND_EXPR, TREE_TYPE (x), op0, op1, op2);
            break;
          }
        if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1) || op2 != TREE_OPERAND (x, 2))
          r = fold_build3_loc (loc, TREE_CODE (x), TREE_TYPE (x), op0, op1, op2);
        if (!r)
          r = fold_ternary_loc (loc, TREE_CODE (x), TREE_TYPE (x), op0, op1, op2);
        if (r)
          x = r;
        break;
      }

    case CALL_EXPR:
      /* Try folding the call as a whole; if it stops being a call,
         fold the replacement instead.  */
      r = fold (x);
      if (TREE_CODE (r) != CALL_EXPR)
        {
          x = cp_fold (r, fold_hash);
          break;
        }
      {
        /* Otherwise fold each argument in place and try again.  */
        int i, m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            CALL_EXPR_ARG (x, i) = cp_fold (CALL_EXPR_ARG (x, i), fold_hash);
          }
        r = fold (x);
        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r, fold_hash);
            break;
          }
        /* Still a call: nothing folded, and nothing is cached.  */
        return org_x;
      }

    case BIND_EXPR:
      /* Fold the vars, body and block operands in place.  */
      if (TREE_OPERAND (x, 0))
        TREE_OPERAND (x, 0) = cp_fold (TREE_OPERAND (x, 0), fold_hash);
      if (TREE_OPERAND (x, 1))
        TREE_OPERAND (x, 1) = cp_fold (TREE_OPERAND (x, 1), fold_hash);
      if (TREE_OPERAND (x, 2))
        TREE_OPERAND (x, 2) = cp_fold (TREE_OPERAND (x, 2), fold_hash);
      break;
    case TREE_VEC:
      /* Fold every element; only copy the vector if something
         actually changed.  */
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);
        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i), fold_hash);
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }
        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }
        release_tree_vector (vec);
      }
      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Fold all four operands, rebuild on change, then run the
         generic folder on the result.  */
      {
        location_t loc = EXPR_LOCATION (x);
        tree nop1 = NULL_TREE, op1 = TREE_OPERAND (x, 0);
        tree nop2 = NULL_TREE, op2 = TREE_OPERAND (x, 1);
        tree nop3 = NULL_TREE, op3 = TREE_OPERAND (x, 2);
        tree nop4 = NULL_TREE, op4 = TREE_OPERAND (x, 3);
        if (op1) nop1 = cp_fold (op1, fold_hash);
        if (op2) nop2 = cp_fold (op2, fold_hash);
        if (op3) nop3 = cp_fold (op3, fold_hash);
        if (op4) nop4 = cp_fold (op4, fold_hash);
        if (op1 != nop1 || op2 != nop2 || op3 != nop3 || op4 != nop4)
          x = build4_loc (loc, TREE_CODE (x), TREE_TYPE (x), nop1, nop2, nop3, nop4);
        r = fold (x);
        if (r != x)
          x = r;
      }
      break;
    case DECL_EXPR:
      if (TREE_OPERAND (x, 0))
        TREE_OPERAND (x, 0) = cp_fold (TREE_OPERAND (x, 0), fold_hash);
      break;
    default:
      /* Unhandled codes are returned as-is and not cached.  */
      return org_x;
    }

  /* Cache the folded result under the original tree.  */
  slot = &fold_hash->get_or_insert (org_x);
  *slot = x;
  /* Prevent that we try to fold an already folded result again.  */
  if (x != org_x)
    {
      slot = &fold_hash->get_or_insert (x);
      *slot = x;
    }
  return x;
}