/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "target.h"
#include "ggc.h"
#include "langhooks.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-inline.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;
/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  void **slot;
  edge_var_map_vector old_head, head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  old_head = head = (edge_var_map_vector) *slot;
  if (!head)
    {
      head = VEC_alloc (edge_var_map, heap, 5);
      *slot = head;
    }
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  VEC_safe_push (edge_var_map, heap, head, &new_node);
  if (old_head != head)
    {
      /* The push did some reallocation.  Update the pointer map.  */
      *slot = head;
    }
}
/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  void **slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  slot = pointer_map_contains (edge_var_maps, e);

  if (slot)
    {
      head = (edge_var_map_vector) *slot;
      VEC_free (edge_var_map, heap, head);
      *slot = NULL;
    }
}
/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector) *old_slot;

  if (head)
    *new_slot = VEC_copy (edge_var_map, heap, head);
  else
    *new_slot = VEC_alloc (edge_var_map, heap, 5);
}
/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector) *slot;
}
/* Used by redirect_edge_var_map_destroy to free all memory.  */

static bool
free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
		    void **value,
		    void *data ATTRIBUTE_UNUSED)
{
  edge_var_map_vector head = (edge_var_map_vector) *value;
  VEC_free (edge_var_map, heap, head);
  return true;
}
/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gimple_stmt_iterator gsi;
  gimple phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi_stmt (gsi);
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
	continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gimple phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator gsi;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) != VAR_DECL
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  if (!is_gimple_reg (var))
    return NULL_TREE;

  return var;
}
/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
	return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple stmt;
  gimple def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (def_stmt);
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	value = unshare_expr (value);
      else
	{
	  gimple def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    DECL_MODE (vexpr) = DECL_MODE (value);
	  else
	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	  /* unshare_expr is not needed here.  vexpr is either a
	     SINGLE_RHS, that can be safely shared, some other RHS
	     that was unshared when we found it had a single debug
	     use, or a DEBUG_EXPR_DECL, that can be safely
	     shared.  */
	  SET_USE (use_p, value);
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
	continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
	bool remove_now = true;
	tree var = ssa_name (j);
	gimple stmt;
	imm_use_iterator uit;

	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	  {
	    ssa_op_iter dit;
	    def_operand_p def_p;

	    /* We can't propagate PHI nodes into debug stmts.  */
	    if (gimple_code (stmt) == GIMPLE_PHI
		|| is_gimple_debug (stmt))
	      continue;

	    /* If we find another definition to remove that uses
	       the one we're looking at, defer the removal of this
	       one, so that it can be propagated into debug stmts
	       after the other is.  */
	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
	      {
		tree odef = DEF_FROM_PTR (def_p);

		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		  {
		    remove_now = false;
		    break;
		  }
	      }

	    if (!remove_now)
	      BREAK_FROM_IMM_USE_STMT (uit);
	  }

	if (remove_now)
	  {
	    gimple def = SSA_NAME_DEF_STMT (var);
	    gimple_stmt_iterator gsi = gsi_for_stmt (def);

	    if (gimple_code (def) == GIMPLE_PHI)
	      remove_phi_node (&gsi, true);
	    else
	      {
		gsi_remove (&gsi, true);
		release_defs (def);
	      }

	    bitmap_clear_bit (toremove, j);
	  }
      }
}
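
/* Worked example of the deferral above (illustrative only): with both
   y_2 = z_4 * 2 and x_1 = y_2 + 1 in TOREMOVE, removing y_2's definition
   first would leave nothing to propagate into a "# DEBUG x => ..." bind
   that mentions y_2.  The loop therefore skips y_2 on the first sweep
   (the statement defining x_1 still uses it), removes x_1 while
   propagating its RHS into debug binds, and only then removes y_2 on a
   later sweep of the while loop.  */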
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;

      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
		 e->src->index, e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
		 e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  gimple stmt;
	  TREE_VISITED (name) = 0;

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      verify_def (bb, definition_block,
			  name, stmt, !is_gimple_reg (name));
	    }
	}
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      gimple phi;
      edge_iterator ei;
      gimple_stmt_iterator gsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		     e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  use_operand_p use_p;
	  bool has_err;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (is_gimple_assign (stmt)
	      && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
	    {
	      tree lhs, base_address;

	      lhs = gimple_assign_lhs (stmt);
	      base_address = get_base_address (lhs);

	      if (base_address
		  && SSA_VAR_P (base_address)
		  && !gimple_vdef (stmt)
		  && optimize > 0)
		{
		  error ("statement makes a memory store, but has no VDEFS");
		  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
		  goto err;
		}
	    }
	  else if (gimple_debug_bind_p (stmt)
		   && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  /* Verify the single virtual operand and its constraints.  */
	  has_err = false;
	  if (gimple_vdef (stmt))
	    {
	      if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
		{
		  error ("statement has VDEF operand not in defs list");
		  has_err = true;
		}
	      if (!gimple_vuse (stmt))
		{
		  error ("statement has VDEF but no VUSE operand");
		  has_err = true;
		}
	      else if (SSA_NAME_VAR (gimple_vdef (stmt))
		       != SSA_NAME_VAR (gimple_vuse (stmt)))
		{
		  error ("VDEF and VUSE do not use the same symbol");
		  has_err = true;
		}
	      has_err |= verify_ssa_name (gimple_vdef (stmt), true);
	    }
	  if (gimple_vuse (stmt))
	    {
	      if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
		{
		  error ("statement has VUSE operand not in uses list");
		  has_err = true;
		}
	      has_err |= verify_ssa_name (gimple_vuse (stmt), true);
	    }
	  if (has_err)
	    {
	      error ("in statement");
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	      goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
	    {
	      if (verify_ssa_name (op, false))
		{
		  error ("in statement");
		  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
/* Return true if the uid in both int tree maps are equal.  */

int
int_tree_map_eq (const void *va, const void *vb)
{
  const struct int_tree_map *a = (const struct int_tree_map *) va;
  const struct int_tree_map *b = (const struct int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a int_tree_map.  */

unsigned int
int_tree_map_hash (const void *item)
{
  return ((const struct int_tree_map *)item)->uid;
}
/* Return true if the DECL_UID in both trees are equal.  */

int
uid_decl_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

unsigned int
uid_decl_map_hash (const void *item)
{
  return ((const_tree)item)->decl_minimal.uid;
}
/* Return true if the DECL_UID in both trees are equal.  */

static int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid
	  == b->ssa_name.var->decl_minimal.uid);
}

/* Hash a tree in a uid_ssaname_map.  */

static unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
}
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = GGC_CNEW (struct gimple_df);
  fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
						    uid_decl_map_eq, NULL);
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
						 uid_ssaname_map_eq, NULL);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
  init_phinodes ();
}
/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* Remove annotations from every referenced local variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_global_var (var))
	continue;
      if (var_ann (var))
	{
	  ggc_free (var_ann (var));
	  *DECL_VAR_ANN_PTR (var) = NULL;
	}
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  fini_ssanames ();
  fini_phinodes ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  delete_alias_heapvars ();

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  pt_solution_reset (&cfun->gimple_df->escaped);
  if (cfun->gimple_df->decls_to_pointers != NULL)
    pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
  cfun->gimple_df->decls_to_pointers = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}
/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
	If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
	From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
	A type conversion is useless if the operations for the target type
	is a subset of the operations for the source type.  For example
	casts to void* are useless, casts from void* are not (void* can't
	be dereferenced or offsetted, but copied, hence its set of operations
	is a strict subset of that of all other data pointer types).  Casts
	to const T* are useless (can't be written to), casts from const T*
	are not.  */
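
/* Some concrete consequences of invariant 3 (an illustrative gloss, not
   part of the original comment): 'int' -> 'const int' is useless because
   qualifiers on value types are stripped below; 'T *' -> 'void *' is
   useless while 'void *' -> 'T *' is not; 'int' -> 'long' is never
   useless since the precision (and mode) changes; and
   'int *' -> 'volatile int *' is not useless because dropping it would
   alter the semantics of later accesses.  */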
bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;

      /* If the outer type is (void *) or a pointer to an incomplete
	 record type or a pointer to an unprototyped function,
	 then the conversion is not necessary.  */
      if (VOID_TYPE_P (TREE_TYPE (outer_type))
	  || ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	      && (TREE_CODE (TREE_TYPE (outer_type))
		  == TREE_CODE (TREE_TYPE (inner_type)))
	      && !TYPE_ARG_TYPES (TREE_TYPE (outer_type))
	      && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
					    TREE_TYPE (TREE_TYPE (inner_type)))))
	return true;

      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
	   != TYPE_RESTRICT (inner_type))
	  && TYPE_RESTRICT (outer_type))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions unless we
     deal with aggregate types in which case we defer to later checks.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
      && !AGGREGATE_TYPE_P (inner_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* We don't need to preserve changes in the types minimum or
	 maximum value in general as these do not generate code
	 unless the types precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
	 qualified types.  Doing so would result in changing the semantics
	 of later accesses.  For function types the volatile qualifier
	 is used to indicate noreturn functions.  */
      if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
	  && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
	  && (TYPE_VOLATILE (TREE_TYPE (outer_type))
	      != TYPE_VOLATILE (TREE_TYPE (inner_type)))
	  && TYPE_VOLATILE (TREE_TYPE (outer_type)))
	return false;

      /* We require explicit conversions from incomplete target types.  */
      if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
	  && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
	return false;

      /* Do not lose casts between pointers that when dereferenced access
	 memory with different alias sets.  */
      if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
	return false;

      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent if their pointed
	 to types are effectively the same.  We can strip qualifiers
	 on pointed-to types for further comparison, which is done in
	 the callee.  Note we have to use true compatibility here
	 because addresses are subject to propagation into dereferences
	 and thus might get the original type exposed which is equivalent
	 to a reverse conversion.  */
      return types_compatible_p (TREE_TYPE (outer_type),
				 TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!TYPE_ARG_TYPES (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return targetm.comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types involving to be structurally
     compared types.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
/* Return true if a conversion from either type of TYPE1 and TYPE2
   to the other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
	  || (useless_type_conversion_p (type1, type2)
	      && useless_type_conversion_p (type2, type1)));
}
/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
      infinite loops.  We used to have a bitmap for this to just mark
      SSA versions we had visited.  But non-sparse bitmaps are way too
      expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
      when visiting PHI nodes.  A DFS will visit each PHI argument and
      call FN after each one.  Otherwise, all the arguments are
      visited first and then FN is called with each of the visited
      arguments in a separate pass.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
		       struct pointer_set_t *visited, bool is_dfs)
{
  gimple def_stmt;

  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_code (def_stmt) != GIMPLE_PHI)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      size_t i;

      /* When doing a breadth-first search, call FN before following the
	 use-def links for each argument.  */
      if (!is_dfs)
	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
	    return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg_def (def_stmt, i);

	  /* ARG may be NULL for newly introduced PHI nodes.  */
	  if (arg
	      && TREE_CODE (arg) == SSA_NAME
	      && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
	    return true;
	}

      /* When doing a depth-first search, call FN after following the
	 use-def links for each argument.  */
      if (is_dfs)
	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
	    return true;
    }

  return false;
}
/* Walk use-def chains starting at the SSA variable VAR.  Call
   function FN at each reaching definition found.  FN takes three
   arguments: VAR, its defining statement (DEF_STMT) and a generic
   pointer to whatever state information that FN may want to maintain
   (DATA).  FN is able to stop the walk by returning true, otherwise
   in order to continue the walk, FN should return false.

   Note, that if DEF_STMT is a PHI node, the semantics are slightly
   different.  The first argument to FN is no longer the original
   variable VAR, but the PHI argument currently being examined.  If FN
   wants to get at VAR, it should call PHI_RESULT (PHI).

   If IS_DFS is true, this function will:

	1- walk the use-def chains for all the PHI arguments, and,

	2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.

   If IS_DFS is false, the two steps above are done in reverse order
   (i.e., a breadth-first search).  */

void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
		     bool is_dfs)
{
  gimple def_stmt;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  def_stmt = SSA_NAME_DEF_STMT (var);

  /* We only need to recurse if the reaching definition comes from a PHI
     node.  */
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    (*fn) (var, def_stmt, data);
  else
    {
      struct pointer_set_t *visited = pointer_set_create ();
      walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
      pointer_set_destroy (visited);
    }
}
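
/* A minimal sketch of a caller (hypothetical, for illustration only):

     static bool
     found_default_def (tree var, gimple def_stmt, void *data ATTRIBUTE_UNUSED)
     {
       return (TREE_CODE (var) == SSA_NAME
	       && SSA_NAME_IS_DEFAULT_DEF (var)
	       && gimple_nop_p (def_stmt));
     }

     ...
     walk_use_def_chains (name, found_default_def, NULL, true);

   The callback matches walk_use_def_chains_fn: returning true stops the
   walk, returning false continues it.  Note the SSA_NAME check, since a
   PHI argument passed to the callback may be a constant.  */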
/* Emit warnings for uninitialized variables.  This is done in two passes.

   The first pass notices real uses of SSA names with undefined values.
   Such uses are unconditionally uninitialized, and we can be certain that
   such a use is a mistake.  This pass is run before most optimizations,
   so that we catch as many as we can.

   The second pass follows PHI nodes to find uses that are potentially
   uninitialized.  In this case we can't necessarily prove that the use
   is really uninitialized.  This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
   code as possible, in order to reduce false positives.  We also look
   again for plain uninitialized variables, since optimization may have
   changed conditionally uninitialized to unconditionally uninitialized.  */
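
/* For instance (an illustrative example, not from the original sources):
   given

     int f (int flag)
     {
       int x;
       if (flag)
	 x = 1;
       return x;
     }

   the use of X is only uninitialized on the !FLAG path, so it is the
   second, PHI-following pass that reports "'x' may be used uninitialized
   in this function"; an unconditional 'int y; return y;' is already
   caught by the first pass as "'y' is used uninitialized".  */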
/* Emit a warning for T, an SSA_NAME, being uninitialized.  The exact
   warning text is in MSGID and LOCUS may contain a location or be null.  */

void
warn_uninit (tree t, const char *gmsgid, void *data)
{
  tree var = SSA_NAME_VAR (t);
  gimple context = (gimple) data;
  location_t location;
  expanded_location xloc, floc;

  if (!ssa_undefined_value_p (t))
    return;

  /* TREE_NO_WARNING either means we already warned, or the front end
     wishes to suppress the warning.  */
  if (TREE_NO_WARNING (var))
    return;

  /* Do not warn if it can be initialized outside this module.  */
  if (is_global_var (var))
    return;

  location = (context != NULL && gimple_has_location (context))
	     ? gimple_location (context)
	     : DECL_SOURCE_LOCATION (var);
  xloc = expand_location (location);
  floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
  if (warning_at (location, OPT_Wuninitialized, gmsgid, var))
    {
      TREE_NO_WARNING (var) = 1;

      if (xloc.file != floc.file
	  || xloc.line < floc.line
	  || xloc.line > LOCATION_LINE (cfun->function_end_locus))
	inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
    }
}

struct walk_data {
  gimple stmt;
  bool always_executed;
  bool warn_possibly_uninitialized;
};
/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
   and warn about them.  */

static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
  struct walk_data *data = (struct walk_data *) wi->info;
  tree t = *tp;

  /* We do not care about LHS.  */
  if (wi->is_lhs)
    {
      /* Except for operands of INDIRECT_REF.  */
      if (!INDIRECT_REF_P (t))
	return NULL_TREE;
      t = TREE_OPERAND (t, 0);
    }

  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Taking the address of an uninitialized variable does not
	 count as using it.  */
      *walk_subtrees = 0;
      break;

    case VAR_DECL:
      {
	/* A VAR_DECL in the RHS of a gimple statement may mean that
	   this variable is loaded from memory.  */
	use_operand_p vuse;
	tree op;

	/* If there is not gimple stmt,
	   or alias information has not been computed,
	   then we cannot check VUSE ops.  */
	if (data->stmt == NULL)
	  return NULL_TREE;

	/* If the load happens as part of a call do not warn about it.  */
	if (is_gimple_call (data->stmt))
	  return NULL_TREE;

	vuse = gimple_vuse_op (data->stmt);
	if (vuse == NULL_USE_OPERAND_P)
	  return NULL_TREE;

	op = USE_FROM_PTR (vuse);
	if (t != SSA_NAME_VAR (op)
	    || !SSA_NAME_IS_DEFAULT_DEF (op))
	  return NULL_TREE;
	/* If this is a VUSE of t and it is the default definition,
	   then warn about op.  */
	t = op;
	/* Fall through into SSA_NAME.  */
      }

    case SSA_NAME:
      /* We only do data flow with SSA_NAMEs, so that's all we
	 can warn about.  */
      if (data->always_executed)
	warn_uninit (t, "%qD is used uninitialized in this function",
		     data->stmt);
      else if (data->warn_possibly_uninitialized)
	warn_uninit (t, "%qD may be used uninitialized in this function",
		     data->stmt);
      *walk_subtrees = 0;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* The total store transformation performed during gimplification
	 creates uninitialized variable uses.  If all is well, these will
	 be optimized away, so don't warn now.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
	*walk_subtrees = 0;
      break;

    default:
      /* We do not care about other tree nodes.  */
      if (IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 0;
      break;
    }

  return NULL_TREE;
}
/* Warn about uninitialized variables in the current function.  */

static unsigned int
warn_uninitialized_vars (bool warn_possibly_uninitialized)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  struct walk_data data;

  data.warn_possibly_uninitialized = warn_possibly_uninitialized;

  FOR_EACH_BB (bb)
    {
      data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
					     single_succ (ENTRY_BLOCK_PTR), bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  struct walk_stmt_info wi;
	  data.stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (data.stmt))
	    continue;
	  memset (&wi, 0, sizeof (wi));
	  wi.info = &data;
	  walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
	}
    }

  return 0;
}
static unsigned int
execute_early_warn_uninitialized (void)
{
  /* Currently, this pass runs always but
     execute_late_warn_uninitialized only runs with optimization.  With
     optimization we want to warn about possible uninitialized as late
     as possible, thus don't do it here.  However, without
     optimization we need to warn here about "may be uninitialized".  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);

  /* Post-dominator information can not be reliably updated.  Free it
     after the use.  */

  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}
static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}

struct gimple_opt_pass pass_early_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  "*early_warn_uninitialized",		/* name */
  gate_warn_uninitialized,		/* gate */
  execute_early_warn_uninitialized,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (bool do_optimize)
{
  tree var;
  referenced_var_iterator rvi;
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bool update_vops = false;

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);

	  /* Note all addresses taken by the stmt.  */
	  gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that requires not to be a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
	      tree lhs = gimple_get_lhs (stmt);

	      /* We may not rewrite TMR_SYMBOL to SSA.  */
	      if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
		  && TMR_SYMBOL (lhs))
		bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));

	      /* A plain decl does not need it set.  */
	      else if (lhs && handled_component_p (lhs))
		{
		  var = get_base_address (lhs);
		  if (DECL_P (var))
		    bitmap_set_bit (not_reg_needs, DECL_UID (var));
		}
	    }
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  size_t i;
	  gimple phi = gsi_stmt (gsi);

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* When possible, clear ADDRESSABLE bit or set the REGISTER bit
     and mark variable for conversion into SSA.  */
  if (optimize && do_optimize)
    FOR_EACH_REFERENCED_VAR (var, rvi)
      {
	/* Global Variables, result decls cannot be changed.  */
	if (is_global_var (var)
	    || TREE_CODE (var) == RESULT_DECL
	    || bitmap_bit_p (addresses_taken, DECL_UID (var)))
	  continue;

	if (TREE_ADDRESSABLE (var)
	    /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	       a non-register.  Otherwise we are confused and forget to
	       add virtual operands for it.  */
	    && (!is_gimple_reg_type (TREE_TYPE (var))
		|| !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
	  {
	    TREE_ADDRESSABLE (var) = 0;
	    if (is_gimple_reg (var))
	      mark_sym_for_renaming (var);
	    update_vops = true;
	    if (dump_file)
	      {
		fprintf (dump_file, "No longer having address taken ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, "\n");
	      }
	  }
	if (!DECL_GIMPLE_REG_P (var)
	    && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
	    && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
		|| TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
	    && !TREE_THIS_VOLATILE (var)
	    && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
	  {
	    DECL_GIMPLE_REG_P (var) = 1;
	    mark_sym_for_renaming (var);
	    update_vops = true;
	    if (dump_file)
	      {
		fprintf (dump_file, "Decl is now a gimple register ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, "\n");
	      }
	  }
      }

  /* Operand caches need to be recomputed for operands referencing the updated
     variables.  */
  if (update_vops)
    {
      FOR_EACH_BB (bb)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple stmt = gsi_stmt (gsi);

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
}
struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",			/* name */
  NULL,					/* gate */
  NULL,					/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_address_taken
  | TODO_dump_func			/* todo_flags_finish */
 }
};