1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "ggc.h"
30 #include "langhooks.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "function.h"
34 #include "tree-pretty-print.h"
35 #include "gimple-pretty-print.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "tree-flow.h"
39 #include "gimple.h"
40 #include "tree-inline.h"
41 #include "timevar.h"
42 #include "hashtab.h"
43 #include "tree-dump.h"
44 #include "tree-pass.h"
45 #include "diagnostic-core.h"
46 #include "cfgloop.h"
48 /* Pointer map of variable mappings, keyed by edge. */
49 static struct pointer_map_t *edge_var_maps;
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
57 void **slot;
58 edge_var_map_vector old_head, head;
59 edge_var_map new_node;
61 if (edge_var_maps == NULL)
62 edge_var_maps = pointer_map_create ();
64 slot = pointer_map_insert (edge_var_maps, e);
65 old_head = head = (edge_var_map_vector) *slot;
66 if (!head)
68 head = VEC_alloc (edge_var_map, heap, 5);
69 *slot = head;
71 new_node.def = def;
72 new_node.result = result;
73 new_node.locus = locus;
75 VEC_safe_push (edge_var_map, heap, head, &new_node);
76 if (old_head != head)
78 /* The push did some reallocation. Update the pointer map. */
79 *slot = head;
84 /* Clear the var mappings in edge E. */
86 void
87 redirect_edge_var_map_clear (edge e)
89 void **slot;
90 edge_var_map_vector head;
92 if (!edge_var_maps)
93 return;
95 slot = pointer_map_contains (edge_var_maps, e);
97 if (slot)
99 head = (edge_var_map_vector) *slot;
100 VEC_free (edge_var_map, heap, head);
101 *slot = NULL;
106 /* Duplicate the redirected var mappings of edge OLDE into edge NEWE.
108 Since we can't remove a mapping, let's just duplicate it. This assumes a
109 pointer_map can have multiple edges mapping to the same var_map (many to
110 one mapping), since we don't remove the previous mappings. */
112 void
113 redirect_edge_var_map_dup (edge newe, edge olde)
115 void **new_slot, **old_slot;
116 edge_var_map_vector head;
118 if (!edge_var_maps)
119 return;
121 new_slot = pointer_map_insert (edge_var_maps, newe);
122 old_slot = pointer_map_contains (edge_var_maps, olde);
123 if (!old_slot)
124 return;
125 head = (edge_var_map_vector) *old_slot;
127 if (head)
128 *new_slot = VEC_copy (edge_var_map, heap, head);
129 else
130 *new_slot = VEC_alloc (edge_var_map, heap, 5);
134 /* Return the variable mappings for a given edge. If there is none, return
135 NULL. */
137 edge_var_map_vector
138 redirect_edge_var_map_vector (edge e)
140 void **slot;
142 /* Callers sometimes ask for the mappings before any have been recorded. */
143 if (!edge_var_maps)
144 return NULL;
146 slot = pointer_map_contains (edge_var_maps, e);
147 if (!slot)
148 return NULL;
150 return (edge_var_map_vector) *slot;
153 /* Used by redirect_edge_var_map_destroy to free all memory. */
155 static bool
156 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
157 void **value,
158 void *data ATTRIBUTE_UNUSED)
160 edge_var_map_vector head = (edge_var_map_vector) *value;
161 VEC_free (edge_var_map, heap, head);
162 return true;
165 /* Clear the edge variable mappings. */
167 void
168 redirect_edge_var_map_destroy (void)
170 if (edge_var_maps)
172 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
173 pointer_map_destroy (edge_var_maps);
174 edge_var_maps = NULL;
179 /* Remove the corresponding arguments from the PHI nodes in E's
180 destination block and redirect it to DEST. Return redirected edge.
181 The list of removed arguments is stored in a vector accessed
182 through edge_var_maps. */
184 edge
185 ssa_redirect_edge (edge e, basic_block dest)
187 gimple_stmt_iterator gsi;
188 gimple phi;
190 redirect_edge_var_map_clear (e);
192 /* Remove the appropriate PHI arguments in E's destination block. */
193 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
195 tree def;
196 source_location locus;
198 phi = gsi_stmt (gsi);
199 def = gimple_phi_arg_def (phi, e->dest_idx);
200 locus = gimple_phi_arg_location (phi, e->dest_idx);
202 if (def == NULL_TREE)
203 continue;
205 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
208 e = redirect_edge_succ_nodup (e, dest);
210 return e;
214 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
215 E->dest. */
217 void
218 flush_pending_stmts (edge e)
220 gimple phi;
221 edge_var_map_vector v;
222 edge_var_map *vm;
223 int i;
224 gimple_stmt_iterator gsi;
226 v = redirect_edge_var_map_vector (e);
227 if (!v)
228 return;
230 for (gsi = gsi_start_phis (e->dest), i = 0;
231 !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
232 gsi_next (&gsi), i++)
234 tree def;
236 phi = gsi_stmt (gsi);
237 def = redirect_edge_var_map_def (vm);
238 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
241 redirect_edge_var_map_clear (e);
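/* Illustrative usage sketch (hypothetical caller; the function name is
   made up): the typical pairing of the helpers above when a pass
   retargets an edge.  ssa_redirect_edge parks the PHI arguments removed
   from the old destination in the edge var map; once the new destination
   has matching PHI nodes, flush_pending_stmts re-attaches them on the
   redirected edge.  */

static edge
sketch_retarget_edge (edge e, basic_block new_dest)
{
  /* Saves E's PHI arguments in the edge var map and redirects E.  */
  e = ssa_redirect_edge (e, new_dest);

  /* Re-adds the saved arguments to the PHI nodes of the new destination
     (assumed to correspond, in order, to the recorded mappings) and
     clears the map.  */
  flush_pending_stmts (e);

  return e;
}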
244 /* Given a tree for an expression for which we might want to emit
245 locations or values in debug information (generally a variable, but
246 we might deal with other kinds of trees in the future), return the
247 tree that should be used as the variable of a DEBUG_BIND STMT or
248 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
250 tree
251 target_for_debug_bind (tree var)
253 if (!MAY_HAVE_DEBUG_STMTS)
254 return NULL_TREE;
256 if (TREE_CODE (var) != VAR_DECL
257 && TREE_CODE (var) != PARM_DECL)
258 return NULL_TREE;
260 if (DECL_HAS_VALUE_EXPR_P (var))
261 return target_for_debug_bind (DECL_VALUE_EXPR (var));
263 if (DECL_IGNORED_P (var))
264 return NULL_TREE;
266 if (!is_gimple_reg (var))
267 return NULL_TREE;
269 return var;
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273 released. */
275 static tree
276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
278 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
280 if (wi && wi->is_lhs)
281 return NULL_TREE;
283 if (TREE_CODE (*tp) == SSA_NAME)
285 if (SSA_NAME_IN_FREE_LIST (*tp))
286 return *tp;
288 *walk_subtrees = 0;
290 else if (IS_TYPE_OR_DECL_P (*tp))
291 *walk_subtrees = 0;
293 return NULL_TREE;
296 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
297 by other DEBUG stmts, and replace uses of the DEF with the
298 newly-created debug temp. */
300 void
301 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
303 imm_use_iterator imm_iter;
304 use_operand_p use_p;
305 gimple stmt;
306 gimple def_stmt = NULL;
307 int usecount = 0;
308 tree value = NULL;
310 if (!MAY_HAVE_DEBUG_STMTS)
311 return;
313 /* If this name has already been registered for replacement, do nothing
314 as anything that uses this name isn't in SSA form. */
315 if (name_registered_for_update_p (var))
316 return;
318 /* Check whether there are debug stmts that reference this variable and,
319 if there are, decide whether we should use a debug temp. */
320 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
322 stmt = USE_STMT (use_p);
324 if (!gimple_debug_bind_p (stmt))
325 continue;
327 if (usecount++)
328 break;
330 if (gimple_debug_bind_get_value (stmt) != var)
332 /* Count this as an additional use, so as to make sure we
333 use a temp unless VAR's definition has a SINGLE_RHS that
334 can be shared. */
335 usecount++;
336 break;
340 if (!usecount)
341 return;
343 if (gsi)
344 def_stmt = gsi_stmt (*gsi);
345 else
346 def_stmt = SSA_NAME_DEF_STMT (var);
348 /* If we didn't get an insertion point, and the stmt has already
349 been removed, we won't be able to insert the debug bind stmt, so
350 we'll have to drop debug information. */
351 if (gimple_code (def_stmt) == GIMPLE_PHI)
353 value = degenerate_phi_result (def_stmt);
354 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
355 value = NULL;
356 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
357 to. */
358 else if (value == error_mark_node)
359 value = NULL;
361 else if (is_gimple_assign (def_stmt))
363 bool no_value = false;
365 if (!dom_info_available_p (CDI_DOMINATORS))
367 struct walk_stmt_info wi;
369 memset (&wi, 0, sizeof (wi));
371 /* When removing blocks without following reverse dominance
372 order, we may sometimes encounter SSA_NAMEs that have
373 already been released, referenced in other SSA_DEFs that
374 we're about to release. Consider:
376 <bb X>:
377 v_1 = foo;
379 <bb Y>:
380 w_2 = v_1 + bar;
381 # DEBUG w => w_2
383 If we deleted BB X first, propagating the value of w_2
384 won't do us any good. It's too late to recover the
385 original definition of v_1: when it was deleted, it was
386 only referenced in other DEFs, it couldn't possibly know
387 it should have been retained, and propagating every
388 single DEF just in case it might have to be propagated
389 into a DEBUG STMT would probably be too wasteful.
391 When dominator information is not readily available, we
392 check for and accept some loss of debug information. But
393 if it is available, there's no excuse for us to remove
394 blocks in the wrong order, so we don't even check for
395 dead SSA NAMEs. SSA verification shall catch any
396 errors. */
397 if ((!gsi && !gimple_bb (def_stmt))
398 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
399 no_value = true;
402 if (!no_value)
403 value = gimple_assign_rhs_to_tree (def_stmt);
406 if (value)
408 /* If there's a single use of VAR, and VAR is the entire debug
409 expression (usecount would have been incremented again
410 otherwise), and the definition involves only constants and
411 SSA names, then we can propagate VALUE into this single use,
412 avoiding the temp.
414 We can also avoid using a temp if VALUE can be shared and
415 propagated into all uses, without generating expressions that
416 wouldn't be valid gimple RHSs.
418 Other cases that would require unsharing or non-gimple RHSs
419 are deferred to a debug temp, although we could avoid temps
420 at the expense of duplication of expressions. */
422 if (CONSTANT_CLASS_P (value)
423 || gimple_code (def_stmt) == GIMPLE_PHI
424 || (usecount == 1
425 && (!gimple_assign_single_p (def_stmt)
426 || is_gimple_min_invariant (value)))
427 || is_gimple_reg (value))
428 value = unshare_expr (value);
429 else
431 gimple def_temp;
432 tree vexpr = make_node (DEBUG_EXPR_DECL);
434 def_temp = gimple_build_debug_bind (vexpr,
435 unshare_expr (value),
436 def_stmt);
438 DECL_ARTIFICIAL (vexpr) = 1;
439 TREE_TYPE (vexpr) = TREE_TYPE (value);
440 if (DECL_P (value))
441 DECL_MODE (vexpr) = DECL_MODE (value);
442 else
443 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
445 if (gsi)
446 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
447 else
449 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
450 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
453 value = vexpr;
457 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
459 if (!gimple_debug_bind_p (stmt))
460 continue;
462 if (value)
464 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
465 /* unshare_expr is not needed here. vexpr is either a
466 SINGLE_RHS, that can be safely shared, some other RHS
467 that was unshared when we found it had a single debug
468 use, or a DEBUG_EXPR_DECL, that can be safely
469 shared. */
470 SET_USE (use_p, value);
471 /* If we didn't replace uses with a debug decl fold the
472 resulting expression. Otherwise we end up with invalid IL. */
473 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
475 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
476 fold_stmt_inplace (&gsi);
479 else
480 gimple_debug_bind_reset_value (stmt);
482 update_stmt (stmt);
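/* Illustrative sketch (hypothetical GIMPLE): the effect of
   insert_debug_temp_for_var_def when the definition of x_3 is about to
   be removed and x_3 appears in more than one debug bind.

     before:                         after:
       x_3 = a_1 + b_2;                # DEBUG D#1 => a_1 + b_2
       # DEBUG x => x_3                x_3 = a_1 + b_2;
       # DEBUG y => x_3 + 1            # DEBUG x => D#1
                                       # DEBUG y => D#1 + 1

   The RHS is bound to a new DEBUG_EXPR_DECL (printed as D#1) inserted
   before the definition, and the debug uses of x_3 are redirected to it;
   the caller is then free to delete the definition.  With a single debug
   use, or a constant/SSA-name value, the value is propagated directly
   and no temporary is created.  */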
487 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
488 other DEBUG stmts, and replace uses of the DEF with the
489 newly-created debug temp. */
491 void
492 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
494 gimple stmt;
495 ssa_op_iter op_iter;
496 def_operand_p def_p;
498 if (!MAY_HAVE_DEBUG_STMTS)
499 return;
501 stmt = gsi_stmt (*gsi);
503 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
505 tree var = DEF_FROM_PTR (def_p);
507 if (TREE_CODE (var) != SSA_NAME)
508 continue;
510 insert_debug_temp_for_var_def (gsi, var);
514 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
516 void
517 reset_debug_uses (gimple stmt)
519 ssa_op_iter op_iter;
520 def_operand_p def_p;
521 imm_use_iterator imm_iter;
522 gimple use_stmt;
524 if (!MAY_HAVE_DEBUG_STMTS)
525 return;
527 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
529 tree var = DEF_FROM_PTR (def_p);
531 if (TREE_CODE (var) != SSA_NAME)
532 continue;
534 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
536 if (!gimple_debug_bind_p (use_stmt))
537 continue;
539 gimple_debug_bind_reset_value (use_stmt);
540 update_stmt (use_stmt);
545 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
546 dominated stmts before their dominators, so that release_ssa_defs
547 stands a chance of propagating DEFs into debug bind stmts. */
549 void
550 release_defs_bitset (bitmap toremove)
552 unsigned j;
553 bitmap_iterator bi;
555 /* Performing a topological sort is probably overkill; this will
556 most likely run in slightly superlinear time, rather than the
557 pathological quadratic worst case. */
558 while (!bitmap_empty_p (toremove))
559 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
561 bool remove_now = true;
562 tree var = ssa_name (j);
563 gimple stmt;
564 imm_use_iterator uit;
566 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
568 ssa_op_iter dit;
569 def_operand_p def_p;
571 /* We can't propagate PHI nodes into debug stmts. */
572 if (gimple_code (stmt) == GIMPLE_PHI
573 || is_gimple_debug (stmt))
574 continue;
576 /* If we find another definition to remove that uses
577 the one we're looking at, defer the removal of this
578 one, so that it can be propagated into debug stmts
579 after the other is. */
580 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
582 tree odef = DEF_FROM_PTR (def_p);
584 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
586 remove_now = false;
587 break;
591 if (!remove_now)
592 BREAK_FROM_IMM_USE_STMT (uit);
595 if (remove_now)
597 gimple def = SSA_NAME_DEF_STMT (var);
598 gimple_stmt_iterator gsi = gsi_for_stmt (def);
600 if (gimple_code (def) == GIMPLE_PHI)
601 remove_phi_node (&gsi, true);
602 else
604 gsi_remove (&gsi, true);
605 release_defs (def);
608 bitmap_clear_bit (toremove, j);
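/* Illustrative usage sketch (hypothetical helper): a caller collects the
   SSA versions of the names it wants to discard in a bitmap and hands
   them to release_defs_bitset, which removes the defining statements in
   an order that lets their values be propagated into debug binds first.  */

static void
sketch_remove_names (VEC (tree, heap) *dead_names)
{
  bitmap toremove = BITMAP_ALLOC (NULL);
  unsigned i;
  tree name;

  FOR_EACH_VEC_ELT (tree, dead_names, i, name)
    bitmap_set_bit (toremove, SSA_NAME_VERSION (name));

  release_defs_bitset (toremove);
  BITMAP_FREE (toremove);
}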
613 /* Return true if SSA_NAME is malformed and mark it visited.
615 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
616 operand. */
618 static bool
619 verify_ssa_name (tree ssa_name, bool is_virtual)
621 if (TREE_CODE (ssa_name) != SSA_NAME)
623 error ("expected an SSA_NAME object");
624 return true;
627 if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
629 error ("type mismatch between an SSA_NAME and its symbol");
630 return true;
633 if (SSA_NAME_IN_FREE_LIST (ssa_name))
635 error ("found an SSA_NAME that had been released into the free pool");
636 return true;
639 if (is_virtual && is_gimple_reg (ssa_name))
641 error ("found a virtual definition for a GIMPLE register");
642 return true;
645 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
647 error ("virtual SSA name for non-VOP decl");
648 return true;
651 if (!is_virtual && !is_gimple_reg (ssa_name))
653 error ("found a real definition for a non-register");
654 return true;
657 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
658 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
660 error ("found a default name with a non-empty defining statement");
661 return true;
664 return false;
668 /* Return true if the definition of SSA_NAME at block BB is malformed.
670 STMT is the statement where SSA_NAME is created.
672 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
673 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
674 it means that the block in that array slot contains the
675 definition of SSA_NAME.
677 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
679 static bool
680 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
681 gimple stmt, bool is_virtual)
683 if (verify_ssa_name (ssa_name, is_virtual))
684 goto err;
686 if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
687 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
689 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
690 goto err;
693 if (definition_block[SSA_NAME_VERSION (ssa_name)])
695 error ("SSA_NAME created in two different blocks %i and %i",
696 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
697 goto err;
700 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
702 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
704 error ("SSA_NAME_DEF_STMT is wrong");
705 fprintf (stderr, "Expected definition statement:\n");
706 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
707 fprintf (stderr, "\nActual definition statement:\n");
708 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
709 goto err;
712 return false;
714 err:
715 fprintf (stderr, "while verifying SSA_NAME ");
716 print_generic_expr (stderr, ssa_name, 0);
717 fprintf (stderr, " in statement\n");
718 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
720 return true;
724 /* Return true if the use of SSA_NAME at statement STMT in block BB is
725 malformed.
727 DEF_BB is the block where SSA_NAME was found to be created.
729 IDOM contains immediate dominator information for the flowgraph.
731 CHECK_ABNORMAL is true if the caller wants to check whether this use
732 is flowing through an abnormal edge (only used when checking PHI
733 arguments).
735 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
736 that are defined before STMT in basic block BB. */
738 static bool
739 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
740 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
742 bool err = false;
743 tree ssa_name = USE_FROM_PTR (use_p);
745 if (!TREE_VISITED (ssa_name))
746 if (verify_imm_links (stderr, ssa_name))
747 err = true;
749 TREE_VISITED (ssa_name) = 1;
751 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
752 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
753 ; /* Default definitions have empty statements. Nothing to do. */
754 else if (!def_bb)
756 error ("missing definition");
757 err = true;
759 else if (bb != def_bb
760 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
762 error ("definition in block %i does not dominate use in block %i",
763 def_bb->index, bb->index);
764 err = true;
766 else if (bb == def_bb
767 && names_defined_in_bb != NULL
768 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
770 error ("definition in block %i follows the use", def_bb->index);
771 err = true;
774 if (check_abnormal
775 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
777 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
778 err = true;
781 /* Make sure the use is in an appropriate list by checking the previous
782 element to make sure it's the same. */
783 if (use_p->prev == NULL)
785 error ("no immediate_use list");
786 err = true;
788 else
790 tree listvar;
791 if (use_p->prev->use == NULL)
792 listvar = use_p->prev->loc.ssa_name;
793 else
794 listvar = USE_FROM_PTR (use_p->prev);
795 if (listvar != ssa_name)
797 error ("wrong immediate use list");
798 err = true;
802 if (err)
804 fprintf (stderr, "for SSA_NAME: ");
805 print_generic_expr (stderr, ssa_name, TDF_VOPS);
806 fprintf (stderr, " in statement:\n");
807 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
810 return err;
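/* Illustrative sketch (hypothetical CFG) of the property verified above:
   every use of an SSA name must be dominated by its definition.  If
   block 3 is reachable without passing through block 2,

     <bb 2>:                       <bb 3>:
       x_1 = a_2 + 1;                y_3 = x_1 * 2;

   verify_use reports "definition in block 2 does not dominate use in
   block 3" for the use of x_1.  */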
814 /* Return true if any of the arguments for PHI node PHI at block BB is
815 malformed.
817 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
818 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
819 it means that the block in that array slot contains the
820 definition of SSA_NAME. */
822 static bool
823 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
825 edge e;
826 bool err = false;
827 size_t i, phi_num_args = gimple_phi_num_args (phi);
829 if (EDGE_COUNT (bb->preds) != phi_num_args)
831 error ("incoming edge count does not match number of PHI arguments");
832 err = true;
833 goto error;
836 for (i = 0; i < phi_num_args; i++)
838 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
839 tree op = USE_FROM_PTR (op_p);
841 e = EDGE_PRED (bb, i);
843 if (op == NULL_TREE)
845 error ("PHI argument is missing for edge %d->%d",
846 e->src->index,
847 e->dest->index);
848 err = true;
849 goto error;
852 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
854 error ("PHI argument is not SSA_NAME, or invariant");
855 err = true;
858 if (TREE_CODE (op) == SSA_NAME)
860 err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
861 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
862 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
865 if (TREE_CODE (op) == ADDR_EXPR)
867 tree base = TREE_OPERAND (op, 0);
868 while (handled_component_p (base))
869 base = TREE_OPERAND (base, 0);
870 if ((TREE_CODE (base) == VAR_DECL
871 || TREE_CODE (base) == PARM_DECL
872 || TREE_CODE (base) == RESULT_DECL)
873 && !TREE_ADDRESSABLE (base))
875 error ("address taken, but ADDRESSABLE bit not set");
876 err = true;
880 if (e->dest != bb)
882 error ("wrong edge %d->%d for PHI argument",
883 e->src->index, e->dest->index);
884 err = true;
887 if (err)
889 fprintf (stderr, "PHI argument\n");
890 print_generic_stmt (stderr, op, TDF_VOPS);
891 goto error;
895 error:
896 if (err)
898 fprintf (stderr, "for PHI node\n");
899 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
903 return err;
907 /* Verify common invariants in the SSA web.
908 TODO: verify the variable annotations. */
910 DEBUG_FUNCTION void
911 verify_ssa (bool check_modified_stmt)
913 size_t i;
914 basic_block bb;
915 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
916 ssa_op_iter iter;
917 tree op;
918 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
919 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
921 gcc_assert (!need_ssa_update_p (cfun));
923 verify_gimple_in_cfg (cfun);
925 timevar_push (TV_TREE_SSA_VERIFY);
927 /* Keep track of SSA names present in the IL. */
928 for (i = 1; i < num_ssa_names; i++)
930 tree name = ssa_name (i);
931 if (name)
933 gimple stmt;
934 TREE_VISITED (name) = 0;
936 stmt = SSA_NAME_DEF_STMT (name);
937 if (!gimple_nop_p (stmt))
939 basic_block bb = gimple_bb (stmt);
940 verify_def (bb, definition_block,
941 name, stmt, !is_gimple_reg (name));
947 calculate_dominance_info (CDI_DOMINATORS);
949 /* Now verify all the uses and make sure they agree with the definitions
950 found in the previous pass. */
951 FOR_EACH_BB (bb)
953 edge e;
954 gimple phi;
955 edge_iterator ei;
956 gimple_stmt_iterator gsi;
958 /* Make sure that all edges have a clear 'aux' field. */
959 FOR_EACH_EDGE (e, ei, bb->preds)
961 if (e->aux)
963 error ("AUX pointer initialized for edge %d->%d", e->src->index,
964 e->dest->index);
965 goto err;
969 /* Verify the arguments for every PHI node in the block. */
970 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
972 phi = gsi_stmt (gsi);
973 if (verify_phi_args (phi, bb, definition_block))
974 goto err;
976 bitmap_set_bit (names_defined_in_bb,
977 SSA_NAME_VERSION (gimple_phi_result (phi)));
980 /* Now verify all the uses and vuses in every statement of the block. */
981 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
983 gimple stmt = gsi_stmt (gsi);
984 use_operand_p use_p;
985 bool has_err;
986 int count;
987 unsigned i;
989 if (check_modified_stmt && gimple_modified_p (stmt))
991 error ("stmt (%p) marked modified after optimization pass: ",
992 (void *)stmt);
993 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
994 goto err;
997 if (is_gimple_assign (stmt)
998 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
1000 tree lhs, base_address;
1002 lhs = gimple_assign_lhs (stmt);
1003 base_address = get_base_address (lhs);
1005 if (base_address
1006 && SSA_VAR_P (base_address)
1007 && !gimple_vdef (stmt)
1008 && optimize > 0)
1010 error ("statement makes a memory store, but has no VDEFS");
1011 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1012 goto err;
1015 else if (gimple_debug_bind_p (stmt)
1016 && !gimple_debug_bind_has_value_p (stmt))
1017 continue;
1019 /* Verify the single virtual operand and its constraints. */
1020 has_err = false;
1021 if (gimple_vdef (stmt))
1023 if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
1025 error ("statement has VDEF operand not in defs list");
1026 has_err = true;
1028 if (!gimple_vuse (stmt))
1030 error ("statement has VDEF but no VUSE operand");
1031 has_err = true;
1033 else if (SSA_NAME_VAR (gimple_vdef (stmt))
1034 != SSA_NAME_VAR (gimple_vuse (stmt)))
1036 error ("VDEF and VUSE do not use the same symbol");
1037 has_err = true;
1039 has_err |= verify_ssa_name (gimple_vdef (stmt), true);
1041 if (gimple_vuse (stmt))
1043 if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
1045 error ("statement has VUSE operand not in uses list");
1046 has_err = true;
1048 has_err |= verify_ssa_name (gimple_vuse (stmt), true);
1050 if (has_err)
1052 error ("in statement");
1053 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
1054 goto err;
1057 count = 0;
1058 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
1060 if (verify_ssa_name (op, false))
1062 error ("in statement");
1063 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
1064 goto err;
1066 count++;
1069 for (i = 0; i < gimple_num_ops (stmt); i++)
1071 op = gimple_op (stmt, i);
1072 if (op && TREE_CODE (op) == SSA_NAME && --count < 0)
1074 error ("number of operands and imm-links don%'t agree"
1075 " in statement");
1076 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
1077 goto err;
1081 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1083 op = USE_FROM_PTR (use_p);
1084 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1085 use_p, stmt, false, names_defined_in_bb))
1086 goto err;
1089 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1091 if (SSA_NAME_DEF_STMT (op) != stmt)
1093 error ("SSA_NAME_DEF_STMT is wrong");
1094 fprintf (stderr, "Expected definition statement:\n");
1095 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1096 fprintf (stderr, "\nActual definition statement:\n");
1097 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1098 4, TDF_VOPS);
1099 goto err;
1101 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1105 bitmap_clear (names_defined_in_bb);
1108 free (definition_block);
1110 /* Restore the dominance information to its prior known state, so
1111 that we do not perturb the compiler's subsequent behavior. */
1112 if (orig_dom_state == DOM_NONE)
1113 free_dominance_info (CDI_DOMINATORS);
1114 else
1115 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1117 BITMAP_FREE (names_defined_in_bb);
1118 timevar_pop (TV_TREE_SSA_VERIFY);
1119 return;
1121 err:
1122 internal_error ("verify_ssa failed");
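/* Illustrative sketch: verify_ssa is normally invoked by the pass manager
   after passes that claim to preserve SSA form, typically only in
   checking-enabled builds, e.g.

     #ifdef ENABLE_CHECKING
       verify_ssa (true);
     #endif

   Passing true additionally reports statements still marked modified
   after the pass finished.  */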
1125 /* Return true if the UIDs in both int_tree_maps are equal. */
1127 int
1128 int_tree_map_eq (const void *va, const void *vb)
1130 const struct int_tree_map *a = (const struct int_tree_map *) va;
1131 const struct int_tree_map *b = (const struct int_tree_map *) vb;
1132 return (a->uid == b->uid);
1135 /* Hash a UID in an int_tree_map. */
1137 unsigned int
1138 int_tree_map_hash (const void *item)
1140 return ((const struct int_tree_map *)item)->uid;
1143 /* Return true if the DECL_UIDs in both trees are equal. */
1145 int
1146 uid_decl_map_eq (const void *va, const void *vb)
1148 const_tree a = (const_tree) va;
1149 const_tree b = (const_tree) vb;
1150 return (a->decl_minimal.uid == b->decl_minimal.uid);
1153 /* Hash a tree in a uid_decl_map. */
1155 unsigned int
1156 uid_decl_map_hash (const void *item)
1158 return ((const_tree)item)->decl_minimal.uid;
1161 /* Return true if the DECL_UIDs of both SSA names' underlying variables are equal. */
1163 static int
1164 uid_ssaname_map_eq (const void *va, const void *vb)
1166 const_tree a = (const_tree) va;
1167 const_tree b = (const_tree) vb;
1168 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1171 /* Hash an SSA name in a uid_ssaname_map by its variable's DECL_UID. */
1173 static unsigned int
1174 uid_ssaname_map_hash (const void *item)
1176 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1180 /* Initialize global DFA and SSA structures. */
1182 void
1183 init_tree_ssa (struct function *fn)
1185 fn->gimple_df = ggc_alloc_cleared_gimple_df ();
1186 fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
1187 uid_decl_map_eq, NULL);
1188 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
1189 uid_ssaname_map_eq, NULL);
1190 pt_solution_reset (&fn->gimple_df->escaped);
1191 init_ssanames (fn, 0);
1192 init_phinodes ();
1196 /* Deallocate memory associated with SSA data structures for the current function. */
1198 void
1199 delete_tree_ssa (void)
1201 referenced_var_iterator rvi;
1202 tree var;
1204 /* Remove annotations from every referenced local variable. */
1205 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1207 if (is_global_var (var))
1208 continue;
1209 if (var_ann (var))
1211 ggc_free (var_ann (var));
1212 *DECL_VAR_ANN_PTR (var) = NULL;
1215 htab_delete (gimple_referenced_vars (cfun));
1216 cfun->gimple_df->referenced_vars = NULL;
1218 fini_ssanames ();
1219 fini_phinodes ();
1221 /* We no longer maintain the SSA operand cache at this point. */
1222 if (ssa_operands_active ())
1223 fini_ssa_operands ();
1225 htab_delete (cfun->gimple_df->default_defs);
1226 cfun->gimple_df->default_defs = NULL;
1227 pt_solution_reset (&cfun->gimple_df->escaped);
1228 if (cfun->gimple_df->decls_to_pointers != NULL)
1229 pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1230 cfun->gimple_df->decls_to_pointers = NULL;
1231 cfun->gimple_df->modified_noreturn_calls = NULL;
1232 cfun->gimple_df = NULL;
1234 /* We no longer need the edge variable maps. */
1235 redirect_edge_var_map_destroy ();
1238 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1239 useless type conversion, otherwise return false.
1241 This function implicitly defines the middle-end type system. With
1242 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1243 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1244 the following invariants shall be fulfilled:
1246 1) useless_type_conversion_p is transitive.
1247 If a < b and b < c then a < c.
1249 2) useless_type_conversion_p is not symmetric.
1250 From a < b does not follow a > b.
1252 3) Types define the available set of operations applicable to values.
1253 A type conversion is useless if the operations for the target type
1254 are a subset of the operations for the source type. For example
1255 casts to void* are useless, casts from void* are not (void* can't
1256 be dereferenced or offsetted, but copied, hence its set of operations
1257 is a strict subset of that of all other data pointer types). Casts
1258 to const T* are useless (can't be written to), casts from const T*
1259 to T* are not. */
1261 bool
1262 useless_type_conversion_p (tree outer_type, tree inner_type)
1264 /* Do the following before stripping toplevel qualifiers. */
1265 if (POINTER_TYPE_P (inner_type)
1266 && POINTER_TYPE_P (outer_type))
1268 /* Do not lose casts between pointers to different address spaces. */
1269 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
1270 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
1271 return false;
1273 /* If the outer type is (void *), the conversion is not necessary. */
1274 if (VOID_TYPE_P (TREE_TYPE (outer_type)))
1275 return true;
1278 /* From now on qualifiers on value types do not matter. */
1279 inner_type = TYPE_MAIN_VARIANT (inner_type);
1280 outer_type = TYPE_MAIN_VARIANT (outer_type);
1282 if (inner_type == outer_type)
1283 return true;
1285 /* If we know the canonical types, compare them. */
1286 if (TYPE_CANONICAL (inner_type)
1287 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
1288 return true;
1290 /* Changes in machine mode are never useless conversions unless we
1291 deal with aggregate types in which case we defer to later checks. */
1292 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
1293 && !AGGREGATE_TYPE_P (inner_type))
1294 return false;
1296 /* If both the inner and outer types are integral types, then the
1297 conversion is not necessary if they have the same mode and
1298 signedness and precision, and both or neither are boolean. */
1299 if (INTEGRAL_TYPE_P (inner_type)
1300 && INTEGRAL_TYPE_P (outer_type))
1302 /* Preserve changes in signedness or precision. */
1303 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
1304 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
1305 return false;
1307 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
1308 of precision one. */
1309 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
1310 != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
1311 && TYPE_PRECISION (outer_type) != 1)
1312 return false;
1314 /* We don't need to preserve changes in the type's minimum or
1315 maximum value in general as these do not generate code
1316 unless the types' precisions are different. */
1317 return true;
1320 /* Scalar floating point types with the same mode are compatible. */
1321 else if (SCALAR_FLOAT_TYPE_P (inner_type)
1322 && SCALAR_FLOAT_TYPE_P (outer_type))
1323 return true;
1325 /* Fixed point types with the same mode are compatible. */
1326 else if (FIXED_POINT_TYPE_P (inner_type)
1327 && FIXED_POINT_TYPE_P (outer_type))
1328 return true;
1330 /* We need to take special care recursing to pointed-to types. */
1331 else if (POINTER_TYPE_P (inner_type)
1332 && POINTER_TYPE_P (outer_type))
1334 /* Do not lose casts to function pointer types. */
1335 if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
1336 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
1337 && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
1338 || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
1339 return false;
1341 /* We do not care for const qualification of the pointed-to types
1342 as const qualification has no semantic value to the middle-end. */
1344 /* Otherwise pointers/references are equivalent. */
1345 return true;
1348 /* Recurse for complex types. */
1349 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
1350 && TREE_CODE (outer_type) == COMPLEX_TYPE)
1351 return useless_type_conversion_p (TREE_TYPE (outer_type),
1352 TREE_TYPE (inner_type));
1354 /* Recurse for vector types with the same number of subparts. */
1355 else if (TREE_CODE (inner_type) == VECTOR_TYPE
1356 && TREE_CODE (outer_type) == VECTOR_TYPE
1357 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
1358 return useless_type_conversion_p (TREE_TYPE (outer_type),
1359 TREE_TYPE (inner_type));
1361 else if (TREE_CODE (inner_type) == ARRAY_TYPE
1362 && TREE_CODE (outer_type) == ARRAY_TYPE)
1364 /* Preserve string attributes. */
1365 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
1366 return false;
1368 /* Conversions from array types with unknown extent to
1369 array types with known extent are not useless. */
1370 if (!TYPE_DOMAIN (inner_type)
1371 && TYPE_DOMAIN (outer_type))
1372 return false;
1374 /* Nor are conversions from array types with non-constant size to
1375 array types with constant size or to arrays of a different size. */
1376 if (TYPE_SIZE (outer_type)
1377 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
1378 && (!TYPE_SIZE (inner_type)
1379 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
1380 || !tree_int_cst_equal (TYPE_SIZE (outer_type),
1381 TYPE_SIZE (inner_type))))
1382 return false;
1384 /* Check conversions between arrays with partially known extents.
1385 If the array min/max values are constant they have to match.
1386 Otherwise allow conversions to unknown and variable extents.
1387 In particular this declares conversions that may change the
1388 mode to BLKmode as useless. */
1389 if (TYPE_DOMAIN (inner_type)
1390 && TYPE_DOMAIN (outer_type)
1391 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
1393 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
1394 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
1395 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
1396 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
1398 /* After gimplification a variable min/max value carries no
1399 additional information compared to a NULL value. All that
1400 matters has been lowered to be part of the IL. */
1401 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
1402 inner_min = NULL_TREE;
1403 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
1404 outer_min = NULL_TREE;
1405 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
1406 inner_max = NULL_TREE;
1407 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
1408 outer_max = NULL_TREE;
1410 /* Conversions NULL / variable <- cst are useless, but not
1411 the other way around. */
1412 if (outer_min
1413 && (!inner_min
1414 || !tree_int_cst_equal (inner_min, outer_min)))
1415 return false;
1416 if (outer_max
1417 && (!inner_max
1418 || !tree_int_cst_equal (inner_max, outer_max)))
1419 return false;
1422 /* Recurse on the element check. */
1423 return useless_type_conversion_p (TREE_TYPE (outer_type),
1424 TREE_TYPE (inner_type));
1427 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
1428 || TREE_CODE (inner_type) == METHOD_TYPE)
1429 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1431 tree outer_parm, inner_parm;
1433 /* If the return types are not compatible bail out. */
1434 if (!useless_type_conversion_p (TREE_TYPE (outer_type),
1435 TREE_TYPE (inner_type)))
1436 return false;
1438 /* Method types should belong to a compatible base class. */
1439 if (TREE_CODE (inner_type) == METHOD_TYPE
1440 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
1441 TYPE_METHOD_BASETYPE (inner_type)))
1442 return false;
1444 /* A conversion to an unprototyped argument list is ok. */
1445 if (!prototype_p (outer_type))
1446 return true;
1448 /* If the unqualified argument types are compatible the conversion
1449 is useless. */
1450 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
1451 return true;
1453 for (outer_parm = TYPE_ARG_TYPES (outer_type),
1454 inner_parm = TYPE_ARG_TYPES (inner_type);
1455 outer_parm && inner_parm;
1456 outer_parm = TREE_CHAIN (outer_parm),
1457 inner_parm = TREE_CHAIN (inner_parm))
1458 if (!useless_type_conversion_p
1459 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
1460 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
1461 return false;
1463 /* If there is a mismatch in the number of arguments the functions
1464 are not compatible. */
1465 if (outer_parm || inner_parm)
1466 return false;
1468 /* Defer to the target if necessary. */
1469 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
1470 return comp_type_attributes (outer_type, inner_type) != 0;
1472 return true;
1475 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
1476 explicit conversions for types that involve structurally
1477 compared types. */
1478 else if (AGGREGATE_TYPE_P (inner_type)
1479 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1480 return false;
1482 return false;
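/* Illustrative sketch (hypothetical checks using the standard global type
   nodes) of how the predicate above behaves for a few common cases.  */

static void
sketch_useless_conversions (void)
{
  tree int_ptr = build_pointer_type (integer_type_node);
  tree float_ptr = build_pointer_type (float_type_node);
  tree fn_ptr = build_pointer_type (build_function_type_list (void_type_node,
                                                              NULL_TREE));

  /* Conversions between object pointer types do not add operations,
     so they are useless.  */
  gcc_assert (useless_type_conversion_p (ptr_type_node, int_ptr));
  gcc_assert (useless_type_conversion_p (int_ptr, float_ptr));

  /* A conversion that would gain the ability to call through the
     pointer must be preserved.  */
  gcc_assert (!useless_type_conversion_p (fn_ptr, ptr_type_node));

  /* Changes in the signedness of integral types are never useless.  */
  gcc_assert (!useless_type_conversion_p (unsigned_type_node,
                                          integer_type_node));
}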
1485 /* Return true if a conversion from either type of TYPE1 and TYPE2
1486 to the other is not required. Otherwise return false. */
1488 bool
1489 types_compatible_p (tree type1, tree type2)
1491 return (type1 == type2
1492 || (useless_type_conversion_p (type1, type2)
1493 && useless_type_conversion_p (type2, type1)));
1496 /* Return true if EXPR is a useless type conversion, otherwise return
1497 false. */
1499 bool
1500 tree_ssa_useless_type_conversion (tree expr)
1502 /* If we have an assignment that merely uses a NOP_EXPR to change
1503 the top of the RHS to the type of the LHS and the type conversion
1504 is "safe", then strip away the type conversion so that we can
1505 enter LHS = RHS into the const_and_copies table. */
1506 if (CONVERT_EXPR_P (expr)
1507 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1508 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1509 return useless_type_conversion_p
1510 (TREE_TYPE (expr),
1511 TREE_TYPE (TREE_OPERAND (expr, 0)));
1513 return false;
1516 /* Strip conversions from EXP according to
1517 tree_ssa_useless_type_conversion and return the resulting
1518 expression. */
1520 tree
1521 tree_ssa_strip_useless_type_conversions (tree exp)
1523 while (tree_ssa_useless_type_conversion (exp))
1524 exp = TREE_OPERAND (exp, 0);
1525 return exp;
1529 /* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
1530 described in walk_use_def_chains.
1532 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1533 infinite loops. We used to have a bitmap for this to just mark
1534 SSA versions we had visited. But non-sparse bitmaps are way too
1535 expensive, while sparse bitmaps may cause quadratic behavior.
1537 IS_DFS is true if the caller wants to perform a depth-first search
1538 when visiting PHI nodes. A DFS will visit each PHI argument and
1539 call FN after each one. Otherwise, all the arguments are
1540 visited first and then FN is called with each of the visited
1541 arguments in a separate pass. */
1543 static bool
1544 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
1545 struct pointer_set_t *visited, bool is_dfs)
1547 gimple def_stmt;
1549 if (pointer_set_insert (visited, var))
1550 return false;
1552 def_stmt = SSA_NAME_DEF_STMT (var);
1554 if (gimple_code (def_stmt) != GIMPLE_PHI)
1556 /* If we reached the end of the use-def chain, call FN. */
1557 return fn (var, def_stmt, data);
1559 else
1561 size_t i;
1563 /* When doing a breadth-first search, call FN before following the
1564 use-def links for each argument. */
1565 if (!is_dfs)
1566 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1567 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1568 return true;
1570 /* Follow use-def links out of each PHI argument. */
1571 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1573 tree arg = gimple_phi_arg_def (def_stmt, i);
1575 /* ARG may be NULL for newly introduced PHI nodes. */
1576 if (arg
1577 && TREE_CODE (arg) == SSA_NAME
1578 && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
1579 return true;
1582 /* When doing a depth-first search, call FN after following the
1583 use-def links for each argument. */
1584 if (is_dfs)
1585 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1586 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1587 return true;
1590 return false;
1595 /* Walk use-def chains starting at the SSA variable VAR. Call
1596 function FN at each reaching definition found. FN takes three
1597 arguments: VAR, its defining statement (DEF_STMT) and a generic
1598 pointer to whatever state information that FN may want to maintain
1599 (DATA). FN can stop the walk by returning true; to continue
1600 the walk, FN should return false.
1602 Note, that if DEF_STMT is a PHI node, the semantics are slightly
1603 different. The first argument to FN is no longer the original
1604 variable VAR, but the PHI argument currently being examined. If FN
1605 wants to get at VAR, it should call PHI_RESULT (PHI).
1607 If IS_DFS is true, this function will:
1609 1- walk the use-def chains for all the PHI arguments, and,
1610 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1612 If IS_DFS is false, the two steps above are done in reverse order
1613 (i.e., a breadth-first search). */
1615 void
1616 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1617 bool is_dfs)
1619 gimple def_stmt;
1621 gcc_assert (TREE_CODE (var) == SSA_NAME);
1623 def_stmt = SSA_NAME_DEF_STMT (var);
1625 /* We only need to recurse if the reaching definition comes from a PHI
1626 node. */
1627 if (gimple_code (def_stmt) != GIMPLE_PHI)
1628 (*fn) (var, def_stmt, data);
1629 else
1631 struct pointer_set_t *visited = pointer_set_create ();
1632 walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
1633 pointer_set_destroy (visited);
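/* Illustrative usage sketch (hypothetical callback and wrapper): collect
   every non-PHI defining statement that reaches uses of NAME.  The
   callback has the walk_use_def_chains_fn signature expected above.  */

static bool
sketch_collect_def (tree var ATTRIBUTE_UNUSED, gimple def_stmt, void *data)
{
  VEC (gimple, heap) **defs = (VEC (gimple, heap) **) data;

  if (gimple_code (def_stmt) != GIMPLE_PHI)
    VEC_safe_push (gimple, heap, *defs, def_stmt);

  /* Returning false continues the walk.  */
  return false;
}

static void
sketch_collect_reaching_defs (tree name, VEC (gimple, heap) **defs)
{
  walk_use_def_chains (name, sketch_collect_def, defs, false);
}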
1638 /* Emit warnings for uninitialized variables. This is done in two passes.
1640 The first pass notices real uses of SSA names with undefined values.
1641 Such uses are unconditionally uninitialized, and we can be certain that
1642 such a use is a mistake. This pass is run before most optimizations,
1643 so that we catch as many as we can.
1645 The second pass follows PHI nodes to find uses that are potentially
1646 uninitialized. In this case we can't necessarily prove that the use
1647 is really uninitialized. This pass is run after most optimizations,
1648 so that we thread as many jumps as possible, and delete as much dead
1649 code as possible, in order to reduce false positives. We also look
1650 again for plain uninitialized variables, since optimization may have
1651 changed conditionally uninitialized to unconditionally uninitialized. */
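/* Illustrative sketch (user-level C) of the two kinds of diagnostics
   described above:

     int f (int flag)
     {
       int x;
       if (flag)
         x = 1;
       return x;      <- second pass: "'x' may be used uninitialized
                         in this function"
     }

     int g (void)
     {
       int y;
       return y;      <- first pass: "'y' is used uninitialized
                         in this function"
     }  */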
1653 /* Emit a warning for EXPR based on variable VAR at the point in the
1654 program where the SSA_NAME T is used uninitialized. The exact
1655 warning text is in GMSGID; DATA, if non-null, is the GIMPLE statement
1656 providing the location and context. WC is the warning code. */
1658 void
1659 warn_uninit (enum opt_code wc, tree t,
1660 tree expr, tree var, const char *gmsgid, void *data)
1662 gimple context = (gimple) data;
1663 location_t location;
1664 expanded_location xloc, floc;
1666 if (!ssa_undefined_value_p (t))
1667 return;
1669 /* TREE_NO_WARNING either means we already warned, or the front end
1670 wishes to suppress the warning. */
1671 if ((context
1672 && (gimple_no_warning_p (context)
1673 || (gimple_assign_single_p (context)
1674 && TREE_NO_WARNING (gimple_assign_rhs1 (context)))))
1675 || TREE_NO_WARNING (expr))
1676 return;
1678 location = (context != NULL && gimple_has_location (context))
1679 ? gimple_location (context)
1680 : DECL_SOURCE_LOCATION (var);
1681 xloc = expand_location (location);
1682 floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
1683 if (warning_at (location, wc, gmsgid, expr))
1685 TREE_NO_WARNING (expr) = 1;
1687 if (location == DECL_SOURCE_LOCATION (var))
1688 return;
1689 if (xloc.file != floc.file
1690 || xloc.line < floc.line
1691 || xloc.line > LOCATION_LINE (cfun->function_end_locus))
1692 inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
1696 unsigned int
1697 warn_uninitialized_vars (bool warn_possibly_uninitialized)
1699 gimple_stmt_iterator gsi;
1700 basic_block bb;
1702 FOR_EACH_BB (bb)
1704 bool always_executed = dominated_by_p (CDI_POST_DOMINATORS,
1705 single_succ (ENTRY_BLOCK_PTR), bb);
1706 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1708 gimple stmt = gsi_stmt (gsi);
1709 use_operand_p use_p;
1710 ssa_op_iter op_iter;
1711 tree use;
1713 if (is_gimple_debug (stmt))
1714 continue;
1716 /* We only do data flow with SSA_NAMEs, so that's all we
1717 can warn about. */
1718 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, op_iter, SSA_OP_USE)
1720 use = USE_FROM_PTR (use_p);
1721 if (always_executed)
1722 warn_uninit (OPT_Wuninitialized, use,
1723 SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1724 "%qD is used uninitialized in this function",
1725 stmt);
1726 else if (warn_possibly_uninitialized)
1727 warn_uninit (OPT_Wuninitialized, use,
1728 SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1729 "%qD may be used uninitialized in this function",
1730 stmt);
1733 /* For memory the only cheap thing we can do is see if we
1734 have a use of the default def of the virtual operand.
1735 ??? Note that at -O0 we do not have virtual operands.
1736 ??? Not so cheap would be to use the alias oracle via
1737 walk_aliased_vdefs, if we don't find any aliasing vdef
1738 warn as is-used-uninitialized, if we don't find an aliasing
1739 vdef that kills our use (stmt_kills_ref_p), warn as
1740 may-be-used-uninitialized. But this walk is quadratic and
1741 so must be limited which means we would miss warning
1742 opportunities. */
1743 use = gimple_vuse (stmt);
1744 if (use
1745 && gimple_assign_single_p (stmt)
1746 && !gimple_vdef (stmt)
1747 && SSA_NAME_IS_DEFAULT_DEF (use))
1749 tree rhs = gimple_assign_rhs1 (stmt);
1750 tree base = get_base_address (rhs);
1752 /* Do not warn if it can be initialized outside this function. */
1753 if (TREE_CODE (base) != VAR_DECL
1754 || DECL_HARD_REGISTER (base)
1755 || is_global_var (base))
1756 continue;
1758 if (always_executed)
1759 warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1760 base,
1761 "%qE is used uninitialized in this function",
1762 stmt);
1763 else if (warn_possibly_uninitialized)
1764 warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1765 base,
1766 "%qE may be used uninitialized in this function",
1767 stmt);
1772 return 0;
1775 static unsigned int
1776 execute_early_warn_uninitialized (void)
1778 /* Currently, this pass always runs, but
1779 execute_late_warn_uninitialized only runs with optimization. With
1780 optimization we want to warn about possibly uninitialized uses as late
1781 as possible, so we don't do it here. However, without
1782 optimization we need to warn here about "may be uninitialized". */
1784 calculate_dominance_info (CDI_POST_DOMINATORS);
1786 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
1788 /* Post-dominator information cannot be reliably updated. Free it
1789 after the use. */
1791 free_dominance_info (CDI_POST_DOMINATORS);
1792 return 0;
1795 static bool
1796 gate_warn_uninitialized (void)
1798 return warn_uninitialized != 0;
1801 struct gimple_opt_pass pass_early_warn_uninitialized =
1804 GIMPLE_PASS,
1805 "*early_warn_uninitialized", /* name */
1806 gate_warn_uninitialized, /* gate */
1807 execute_early_warn_uninitialized, /* execute */
1808 NULL, /* sub */
1809 NULL, /* next */
1810 0, /* static_pass_number */
1811 TV_TREE_UNINIT, /* tv_id */
1812 PROP_ssa, /* properties_required */
1813 0, /* properties_provided */
1814 0, /* properties_destroyed */
1815 0, /* todo_flags_start */
1816 0 /* todo_flags_finish */
1821 /* If necessary, rewrite the base of the reference tree *TP from
1822 a MEM_REF to a plain or converted symbol. */
1824 static void
1825 maybe_rewrite_mem_ref_base (tree *tp)
1827 tree sym;
1829 while (handled_component_p (*tp))
1830 tp = &TREE_OPERAND (*tp, 0);
1831 if (TREE_CODE (*tp) == MEM_REF
1832 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1833 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1834 && DECL_P (sym)
1835 && !TREE_ADDRESSABLE (sym)
1836 && symbol_marked_for_renaming (sym))
1838 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1839 && useless_type_conversion_p (TREE_TYPE (*tp),
1840 TREE_TYPE (TREE_TYPE (sym)))
1841 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1842 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1844 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1845 TYPE_SIZE (TREE_TYPE (*tp)),
1846 int_const_binop (MULT_EXPR,
1847 bitsize_int (BITS_PER_UNIT),
1848 TREE_OPERAND (*tp, 1)));
1850 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1851 && useless_type_conversion_p (TREE_TYPE (*tp),
1852 TREE_TYPE (TREE_TYPE (sym))))
1854 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1855 ? REALPART_EXPR : IMAGPART_EXPR,
1856 TREE_TYPE (*tp), sym);
1858 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1860 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1861 TREE_TYPE (sym)))
1862 *tp = build1 (VIEW_CONVERT_EXPR,
1863 TREE_TYPE (*tp), sym);
1864 else
1865 *tp = sym;
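/* Illustrative sketch (hypothetical GIMPLE) of the rewrites above, once
   SYM has lost TREE_ADDRESSABLE and is marked for renaming.  For a
   _Complex float c, a 4 x int vector v, and a same-size scalar sym:

     MEM[(float *)&c]        becomes  REALPART_EXPR <c>
     MEM[(float *)&c + 4B]   becomes  IMAGPART_EXPR <c>
     MEM[(int *)&v + 8B]     becomes  BIT_FIELD_REF <v, 32, 64>
     MEM[(T *)&sym]          becomes  VIEW_CONVERT_EXPR<T>(sym),
                                      or plain sym if the types match.  */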
1870 /* For a tree REF return its base if it is the base of a MEM_REF
1871 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1873 static tree
1874 non_rewritable_mem_ref_base (tree ref)
1876 tree base = ref;
1878 /* A plain decl does not need it set. */
1879 if (DECL_P (ref))
1880 return NULL_TREE;
1882 while (handled_component_p (base))
1883 base = TREE_OPERAND (base, 0);
1885 /* But watch out for MEM_REFs we cannot lower to a
1886 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1887 if (TREE_CODE (base) == MEM_REF
1888 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1890 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1891 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1892 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1893 && useless_type_conversion_p (TREE_TYPE (base),
1894 TREE_TYPE (TREE_TYPE (decl)))
1895 && double_int_fits_in_uhwi_p (mem_ref_offset (base))
1896 && double_int_ucmp
1897 (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1898 mem_ref_offset (base)) == 1
1899 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1900 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1901 return NULL_TREE;
1902 if (DECL_P (decl)
1903 && (!integer_zerop (TREE_OPERAND (base, 1))
1904 || (DECL_SIZE (decl)
1905 != TYPE_SIZE (TREE_TYPE (base)))
1906 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1907 return decl;
1910 return NULL_TREE;
1913 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1914 Otherwise return false. */
1916 static bool
1917 non_rewritable_lvalue_p (tree lhs)
1919 /* A plain decl is always rewritable. */
1920 if (DECL_P (lhs))
1921 return false;
1923 /* A decl that is wrapped inside a MEM-REF that covers
1924 it in full is also rewritable.
1925 ??? The following could be relaxed allowing component
1926 references that do not change the access size. */
1927 if (TREE_CODE (lhs) == MEM_REF
1928 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1929 && integer_zerop (TREE_OPERAND (lhs, 1)))
1931 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1932 if (DECL_P (decl)
1933 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1934 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1935 return false;
1938 return true;
1941 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1942 mark the variable VAR for conversion into SSA. Return true when updating
1943 stmts is required. */
1945 static bool
1946 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs)
1948 bool update_vops = false;
1950 /* Global variables and result decls cannot be changed. */
1951 if (is_global_var (var)
1952 || TREE_CODE (var) == RESULT_DECL
1953 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1954 return false;
1956 /* If the variable is not in the list of referenced vars then we
1957 do not need to touch it nor can we rename it. */
1958 if (!referenced_var_lookup (cfun, DECL_UID (var)))
1959 return false;
1961 if (TREE_ADDRESSABLE (var)
1962 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1963 a non-register. Otherwise we are confused and forget to
1964 add virtual operands for it. */
1965 && (!is_gimple_reg_type (TREE_TYPE (var))
1966 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1967 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1968 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1970 TREE_ADDRESSABLE (var) = 0;
1971 if (is_gimple_reg (var))
1972 mark_sym_for_renaming (var);
1973 update_vops = true;
1974 if (dump_file)
1976 fprintf (dump_file, "No longer having address taken: ");
1977 print_generic_expr (dump_file, var, 0);
1978 fprintf (dump_file, "\n");
1982 if (!DECL_GIMPLE_REG_P (var)
1983 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1984 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1985 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1986 && !TREE_THIS_VOLATILE (var)
1987 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1989 DECL_GIMPLE_REG_P (var) = 1;
1990 mark_sym_for_renaming (var);
1991 update_vops = true;
1992 if (dump_file)
1994 fprintf (dump_file, "Now a gimple register: ");
1995 print_generic_expr (dump_file, var, 0);
1996 fprintf (dump_file, "\n");
2000 return update_vops;
2003 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
2005 void
2006 execute_update_addresses_taken (void)
2008 gimple_stmt_iterator gsi;
2009 basic_block bb;
2010 bitmap addresses_taken = BITMAP_ALLOC (NULL);
2011 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
2012 bool update_vops = false;
2013 tree var;
2014 unsigned i;
2016 timevar_push (TV_ADDRESS_TAKEN);
2018 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
2019 the function body. */
2020 FOR_EACH_BB (bb)
2022 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2024 gimple stmt = gsi_stmt (gsi);
2025 enum gimple_code code = gimple_code (stmt);
2026 tree decl;
2028 /* Note all addresses taken by the stmt. */
2029 gimple_ior_addresses_taken (addresses_taken, stmt);
2031 /* If we have a call or an assignment, see if the lhs contains
2032 a local decl that requires not to be a gimple register. */
2033 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
2035 tree lhs = gimple_get_lhs (stmt);
2036 if (lhs
2037 && TREE_CODE (lhs) != SSA_NAME
2038 && non_rewritable_lvalue_p (lhs))
2040 decl = get_base_address (lhs);
2041 if (DECL_P (decl))
2042 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2046 if (gimple_assign_single_p (stmt))
2048 tree rhs = gimple_assign_rhs1 (stmt);
2049 if ((decl = non_rewritable_mem_ref_base (rhs)))
2050 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2053 else if (code == GIMPLE_CALL)
2055 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2057 tree arg = gimple_call_arg (stmt, i);
2058 if ((decl = non_rewritable_mem_ref_base (arg)))
2059 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2063 else if (code == GIMPLE_ASM)
2065 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2067 tree link = gimple_asm_output_op (stmt, i);
2068 tree lhs = TREE_VALUE (link);
2069 if (TREE_CODE (lhs) != SSA_NAME)
2071 decl = get_base_address (lhs);
2072 if (DECL_P (decl)
2073 && (non_rewritable_lvalue_p (lhs)
2074 /* We cannot move required conversions from
2075 the lhs to the rhs in asm statements, so
2076 require we do not need any. */
2077 || !useless_type_conversion_p
2078 (TREE_TYPE (lhs), TREE_TYPE (decl))))
2079 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2082 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2084 tree link = gimple_asm_input_op (stmt, i);
2085 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
2086 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2091 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2093 size_t i;
2094 gimple phi = gsi_stmt (gsi);
2096 for (i = 0; i < gimple_phi_num_args (phi); i++)
2098 tree op = PHI_ARG_DEF (phi, i), var;
2099 if (TREE_CODE (op) == ADDR_EXPR
2100 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
2101 && DECL_P (var))
2102 bitmap_set_bit (addresses_taken, DECL_UID (var));
2107 /* We cannot iterate over all referenced vars because that can contain
2108 unused vars from BLOCK trees, which causes code generation differences
2109 for -g vs. -g0. */
2110 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
2111 update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2113 FOR_EACH_VEC_ELT (tree, cfun->local_decls, i, var)
2114 update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2116 /* Operand caches need to be recomputed for operands referencing the updated
2117 variables. */
2118 if (update_vops)
2120 FOR_EACH_BB (bb)
2121 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2123 gimple stmt = gsi_stmt (gsi);
2125 /* Re-write TARGET_MEM_REFs of symbols we want to
2126 rewrite into SSA form. */
2127 if (gimple_assign_single_p (stmt))
2129 tree lhs = gimple_assign_lhs (stmt);
2130 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
2131 tree sym;
2133 /* We shouldn't have any fancy wrapping of
2134 component-refs on the LHS, but look through
2135 VIEW_CONVERT_EXPRs as that is easy. */
2136 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
2137 lhs = TREE_OPERAND (lhs, 0);
2138 if (TREE_CODE (lhs) == MEM_REF
2139 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
2140 && integer_zerop (TREE_OPERAND (lhs, 1))
2141 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
2142 && DECL_P (sym)
2143 && !TREE_ADDRESSABLE (sym)
2144 && symbol_marked_for_renaming (sym))
2145 lhs = sym;
2146 else
2147 lhs = gimple_assign_lhs (stmt);
2149 /* Rewrite the RHS and make sure the resulting assignment
2150 is validly typed. */
2151 maybe_rewrite_mem_ref_base (rhsp);
2152 rhs = gimple_assign_rhs1 (stmt);
2153 if (gimple_assign_lhs (stmt) != lhs
2154 && !useless_type_conversion_p (TREE_TYPE (lhs),
2155 TREE_TYPE (rhs)))
2156 rhs = fold_build1 (VIEW_CONVERT_EXPR,
2157 TREE_TYPE (lhs), rhs);
2159 if (gimple_assign_lhs (stmt) != lhs)
2160 gimple_assign_set_lhs (stmt, lhs);
2162 /* For var ={v} {CLOBBER}; where var lost
2163 TREE_ADDRESSABLE just remove the stmt. */
2164 if (DECL_P (lhs)
2165 && TREE_CLOBBER_P (rhs)
2166 && symbol_marked_for_renaming (lhs))
2168 unlink_stmt_vdef (stmt);
2169 gsi_remove (&gsi, true);
2170 release_defs (stmt);
2171 continue;
2174 if (gimple_assign_rhs1 (stmt) != rhs)
2176 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2177 gimple_assign_set_rhs_from_tree (&gsi, rhs);
2181 else if (gimple_code (stmt) == GIMPLE_CALL)
2183 unsigned i;
2184 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2186 tree *argp = gimple_call_arg_ptr (stmt, i);
2187 maybe_rewrite_mem_ref_base (argp);
2191 else if (gimple_code (stmt) == GIMPLE_ASM)
2193 unsigned i;
2194 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2196 tree link = gimple_asm_output_op (stmt, i);
2197 maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2199 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2201 tree link = gimple_asm_input_op (stmt, i);
2202 maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2206 else if (gimple_debug_bind_p (stmt)
2207 && gimple_debug_bind_has_value_p (stmt))
2209 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2210 tree decl;
2211 maybe_rewrite_mem_ref_base (valuep);
2212 decl = non_rewritable_mem_ref_base (*valuep);
2213 if (decl && symbol_marked_for_renaming (decl))
2214 gimple_debug_bind_reset_value (stmt);
2217 if (gimple_references_memory_p (stmt)
2218 || is_gimple_debug (stmt))
2219 update_stmt (stmt);
2221 gsi_next (&gsi);
2224 /* Update SSA form here, we are called as non-pass as well. */
2225 if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2226 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2227 else
2228 update_ssa (TODO_update_ssa);
2231 BITMAP_FREE (not_reg_needs);
2232 BITMAP_FREE (addresses_taken);
2233 timevar_pop (TV_ADDRESS_TAKEN);
2236 struct gimple_opt_pass pass_update_address_taken =
2239 GIMPLE_PASS,
2240 "addressables", /* name */
2241 NULL, /* gate */
2242 NULL, /* execute */
2243 NULL, /* sub */
2244 NULL, /* next */
2245 0, /* static_pass_number */
2246 TV_ADDRESS_TAKEN, /* tv_id */
2247 PROP_ssa, /* properties_required */
2248 0, /* properties_provided */
2249 0, /* properties_destroyed */
2250 0, /* todo_flags_start */
2251 TODO_update_address_taken /* todo_flags_finish */