/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-inline.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;
/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def)
{
  void **slot;
  edge_var_map_vector old_head, head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  old_head = head = (edge_var_map_vector) *slot;
  if (!head)
    {
      head = VEC_alloc (edge_var_map, heap, 5);
      *slot = head;
    }
  new_node.def = def;
  new_node.result = result;

  VEC_safe_push (edge_var_map, heap, head, &new_node);
  if (old_head != head)
    {
      /* The push did some reallocation.  Update the pointer map.  */
      *slot = head;
    }
}
/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  void **slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  slot = pointer_map_contains (edge_var_maps, e);

  if (slot)
    {
      head = (edge_var_map_vector) *slot;
      VEC_free (edge_var_map, heap, head);
      *slot = NULL;
    }
}
/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector) *old_slot;

  if (head)
    *new_slot = VEC_copy (edge_var_map, heap, head);
  else
    *new_slot = VEC_alloc (edge_var_map, heap, 5);
}
/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector) *slot;
}
/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  tree phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree def = PHI_ARG_DEF (phi, e->dest_idx);

      if (def == NULL_TREE)
	continue;

      redirect_edge_var_map_add (e, PHI_RESULT (phi), def);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  tree phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (phi = phi_nodes (e->dest), i = 0;
       phi && VEC_iterate (edge_var_map, v, i, vm);
       phi = PHI_CHAIN (phi), i++)
    {
      tree def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e);
    }

  redirect_edge_var_map_clear (e);
}
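
/* For illustration only: a minimal sketch of how the per-edge map recorded
   by ssa_redirect_edge can be inspected.  The helper name debug_edge_var_map
   is hypothetical (it is not part of this file's interface); it merely
   demonstrates the redirect_edge_var_map_vector / VEC_iterate protocol that
   flush_pending_stmts uses above.  */

static void ATTRIBUTE_UNUSED
debug_edge_var_map (edge e)
{
  edge_var_map_vector v = redirect_edge_var_map_vector (e);
  edge_var_map *vm;
  unsigned i;

  if (!v)
    return;

  /* Each entry pairs a PHI result in E->dest with the argument that used
     to flow across E.  */
  for (i = 0; VEC_iterate (edge_var_map, v, i, vm); i++)
    {
      print_generic_expr (stderr, vm->result, 0);
      fprintf (stderr, " <- ");
      print_generic_expr (stderr, vm->def, 0);
      fprintf (stderr, "\n");
    }
}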
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (is_virtual && var_ann (SSA_NAME_VAR (ssa_name))
      && get_subvars_for_var (SSA_NAME_VAR (ssa_name)) != NULL)
    {
      error ("found real variable when subvariables should have appeared");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    tree stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_generic_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_generic_stmt (stderr, stmt, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_generic_stmt (stderr, stmt, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
      that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->stmt;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_generic_stmt (stderr, stmt, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  unsigned i, phi_num_args = PHI_NUM_ARGS (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
		 e->src->index, e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
		 e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify the consistency of flow-insensitive alias information: every
   variable in a memory tag's alias set must itself be aliasable.  */

static void
verify_flow_insensitive_alias_info (void)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      unsigned int j;
      bitmap aliases;
      tree alias;
      bitmap_iterator bi;

      if (!MTAG_P (var) || !MTAG_ALIASES (var))
	continue;

      aliases = MTAG_ALIASES (var);

      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
	{
	  alias = referenced_var (j);

	  if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
	      && !may_be_aliased (alias))
	    {
	      error ("non-addressable variable inside an alias set");
	      debug_variable (alias);
	      goto err;
	    }
	}
    }

  return;

err:
  debug_variable (var);
  internal_error ("verify_flow_insensitive_alias_info failed");
}
/* Verify the consistency of flow-sensitive (points-to) information
   attached to pointer SSA names.  */

static void
verify_flow_sensitive_alias_info (void)
{
  size_t i;
  tree ptr;

  for (i = 1; i < num_ssa_names; i++)
    {
      tree var;
      var_ann_t ann;
      struct ptr_info_def *pi;

      ptr = ssa_name (i);
      if (!ptr)
	continue;

      /* We only care for pointers that are actually referenced in the
	 program.  */
      if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
	continue;

      /* RESULT_DECL is special.  If it's a GIMPLE register, then it
	 is written to only once in the return statement.
	 Otherwise, aggregate RESULT_DECLs may be written to more than
	 once in virtual operands.  */
      var = SSA_NAME_VAR (ptr);
      if (TREE_CODE (var) == RESULT_DECL
	  && is_gimple_reg (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi == NULL)
	continue;

      ann = var_ann (var);
      if (pi->is_dereferenced && !pi->name_mem_tag && !ann->symbol_mem_tag)
	{
	  error ("dereferenced pointers should have a name or a symbol tag");
	  goto err;
	}

      if (pi->name_mem_tag
	  && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
	{
	  error ("pointers with a memory tag, should have points-to sets");
	  goto err;
	}

      if (pi->value_escapes_p && pi->name_mem_tag)
	{
	  tree t = memory_partition (pi->name_mem_tag);
	  if (t == NULL_TREE)
	    t = pi->name_mem_tag;

	  if (!is_call_clobbered (t))
	    {
	      error ("pointer escapes but its name tag is not call-clobbered");
	      goto err;
	    }
	}
    }

  return;

err:
  debug_variable (ptr);
  internal_error ("verify_flow_sensitive_alias_info failed");
}
/* Verify the consistency of call clobbering information.  */

static void
verify_call_clobbering (void)
{
  unsigned int i;
  bitmap_iterator bi;
  tree var;
  referenced_var_iterator rvi;

  /* At all times, the result of the call_clobbered flag should
     match the result of the call_clobbered_vars bitmap.  Verify both
     that everything in call_clobbered_vars is marked
     call_clobbered, and that everything marked
     call_clobbered is in call_clobbered_vars.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      var = referenced_var (i);

      if (memory_partition (var))
	var = memory_partition (var);

      if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
	{
	  error ("variable in call_clobbered_vars but not marked "
		 "call_clobbered");
	  debug_variable (var);
	  goto err;
	}
    }

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_gimple_reg (var))
	continue;

      if (memory_partition (var))
	var = memory_partition (var);

      if (!MTAG_P (var)
	  && var_ann (var)->call_clobbered
	  && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
	{
	  error ("variable marked call_clobbered but not in "
		 "call_clobbered_vars bitmap.");
	  debug_variable (var);
	  goto err;
	}
    }

  return;

err:
  internal_error ("verify_call_clobbering failed");
}
/* Verify invariants in memory partitions.  */

static void
verify_memory_partitions (void)
{
  unsigned i;
  tree mpt;
  VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  struct pointer_set_t *partitioned_syms = pointer_set_create ();

  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      unsigned j;
      bitmap_iterator bj;

      if (MPT_SYMBOLS (mpt) == NULL)
	{
	  error ("Memory partitions should have at least one symbol");
	  debug_variable (mpt);
	  goto err;
	}

      EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
	{
	  tree var = referenced_var (j);
	  if (pointer_set_insert (partitioned_syms, var))
	    {
	      error ("Partitioned symbols should belong to exactly one "
		     "partition");
	      debug_variable (var);
	      goto err;
	    }
	}
    }

  pointer_set_destroy (partitioned_syms);

  return;

err:
  internal_error ("verify_memory_partitions failed");
}
/* Verify the consistency of aliasing information.  */

static void
verify_alias_info (void)
{
  verify_flow_sensitive_alias_info ();
  verify_call_clobbering ();
  verify_flow_insensitive_alias_info ();
  verify_memory_partitions ();
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p ());

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  tree stmt;
	  TREE_VISITED (name) = 0;

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!IS_EMPTY_STMT (stmt))
	    {
	      basic_block bb = bb_for_stmt (stmt);
	      verify_def (bb, definition_block,
			  name, stmt, !is_gimple_reg (name));
	    }
	}
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      tree phi;
      edge_iterator ei;
      block_stmt_iterator bsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		     e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (PHI_RESULT (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && stmt_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *) stmt);
	      print_generic_stmt (stderr, stmt, TDF_VOPS);
	      goto err;
	    }

	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
	    {
	      tree lhs, base_address;

	      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	      base_address = get_base_address (lhs);

	      if (base_address
		  && gimple_aliases_computed_p (cfun)
		  && SSA_VAR_P (base_address)
		  && !stmt_ann (stmt)->has_volatile_ops
		  && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
		{
		  error ("statement makes a memory store, but has no VDEFS");
		  print_generic_stmt (stderr, stmt, TDF_VOPS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
	    {
	      if (verify_ssa_name (op, true))
		{
		  error ("in statement");
		  print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
	    {
	      if (verify_ssa_name (op, false))
		{
		  error ("in statement");
		  print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	}

      bitmap_clear (names_defined_in_bb);
    }

  /* Finally, verify alias information.  */
  if (gimple_aliases_computed_p (cfun))
    verify_alias_info ();

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
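
/* Illustrative sketch only: a hypothetical wrapper showing how a pass that
   has just rewritten the IL would typically invoke the verifier in checking
   builds.  The name checking_verify_ssa does not exist elsewhere; only
   verify_ssa itself is real.  */

static void ATTRIBUTE_UNUSED
checking_verify_ssa (void)
{
#ifdef ENABLE_CHECKING
  /* Passing true also flags statements still marked modified after the
     pass finished.  */
  verify_ssa (true);
#endif
}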
/* Return true if the uids in both int tree maps are equal.  */

int
int_tree_map_eq (const void *va, const void *vb)
{
  const struct int_tree_map *a = (const struct int_tree_map *) va;
  const struct int_tree_map *b = (const struct int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in an int_tree_map.  */

unsigned int
int_tree_map_hash (const void *item)
{
  return ((const struct int_tree_map *) item)->uid;
}
/* Return true if the DECL_UIDs in both trees are equal.  */

int
uid_decl_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

unsigned int
uid_decl_map_hash (const void *item)
{
  return ((const_tree) item)->decl_minimal.uid;
}
/* Return true if the uid in the static var ann and the DECL_UID of the
   tree are equal.  */

int
var_ann_eq (const void *va, const void *vb)
{
  const struct static_var_ann_d *a = (const struct static_var_ann_d *) va;
  const_tree const b = (const_tree) vb;
  return (a->uid == DECL_UID (b));
}

/* Hash a UID in a static_var_ann_d.  */

unsigned int
var_ann_hash (const void *item)
{
  return ((const struct static_var_ann_d *) item)->uid;
}
/* Return true if the DECL_UIDs of the variables behind both SSA names are
   equal.  */

int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
}

/* Hash an SSA name in a uid_ssaname_map by its variable's DECL_UID.  */

unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree) item)->ssa_name.var->decl_minimal.uid;
}
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (void)
{
  cfun->gimple_df = GGC_CNEW (struct gimple_df);
  cfun->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
						      uid_decl_map_eq, NULL);
  cfun->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
						   uid_ssaname_map_eq, NULL);
  cfun->gimple_df->var_anns = htab_create_ggc (20, var_ann_hash,
					       var_ann_eq, NULL);
  cfun->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
  cfun->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
}
/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (void)
{
  size_t i;
  basic_block bb;
  block_stmt_iterator bsi;
  referenced_var_iterator rvi;
  tree var;

  /* Release any ssa_names still in use.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var && TREE_CODE (var) == SSA_NAME)
	{
	  SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
	  SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
	}
      release_ssa_name (var);
    }

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  stmt_ann_t ann = get_stmt_ann (stmt);

	  free_ssa_operands (&ann->operands);
	  ann->addresses_taken = 0;
	  mark_stmt_modified (stmt);
	}
      set_phi_nodes (bb, NULL);
    }

  /* Remove annotations from every referenced variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (var->base.ann)
	ggc_free (var->base.ann);
      var->base.ann = NULL;
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  cfun->gimple_df->global_var = NULL_TREE;

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  htab_delete (cfun->gimple_df->var_anns);
  cfun->gimple_df->var_anns = NULL;
  cfun->gimple_df->call_clobbered_vars = NULL;
  cfun->gimple_df->addressable_vars = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  if (gimple_aliases_computed_p (cfun))
    {
      delete_alias_heapvars ();
      gcc_assert (!need_ssa_update_p ());
    }
  cfun->gimple_df->aliases_computed_p = false;
  delete_mem_ref_stats (cfun);

  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}
/* Helper function for useless_type_conversion_p.  */

static bool
useless_type_conversion_p_1 (tree outer_type, tree inner_type)
{
  /* Qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Conversions from a non-base to a base type are not useless.
	 This way we preserve the invariant to do arithmetic in
	 base types only.  */
      if (TREE_TYPE (inner_type)
	  && TREE_TYPE (inner_type) != inner_type
	  && (TREE_TYPE (outer_type) == outer_type
	      || TREE_TYPE (outer_type) == NULL_TREE))
	return false;

      /* We don't need to preserve changes in the types' minimum or
	 maximum value in general as these do not generate code
	 unless the types' precisions are different.  */

      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
	 qualified types.  Doing so would result in changing the semantics
	 of later accesses.  */
      if ((TYPE_VOLATILE (TREE_TYPE (outer_type))
	   != TYPE_VOLATILE (TREE_TYPE (inner_type)))
	  && TYPE_VOLATILE (TREE_TYPE (outer_type)))
	return false;

      /* Do not lose casts between pointers with different
	 TYPE_REF_CAN_ALIAS_ALL setting or alias sets.  */
      if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
	   != TYPE_REF_CAN_ALIAS_ALL (outer_type))
	  || (get_alias_set (TREE_TYPE (inner_type))
	      != get_alias_set (TREE_TYPE (outer_type))))
	return false;

      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
	   != TYPE_RESTRICT (inner_type))
	  && TYPE_RESTRICT (outer_type))
	return false;

      /* Otherwise pointers/references are equivalent if their pointed
	 to types are effectively the same.  We can strip qualifiers
	 on pointed-to types for further comparison, which is done in
	 the callee.  */
      return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
					  TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));

  /* For aggregates we may need to fall back to structural equality
     checks.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && AGGREGATE_TYPE_P (outer_type))
    {
      /* Different types of aggregates are incompatible.  */
      if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
	return false;

      /* ??? Add structural equivalence check.  */

      /* ??? This should eventually just return false.  */
      return lang_hooks.types_compatible_p (inner_type, outer_type);
    }

  return false;
}
/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
	If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
	From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
	A type conversion is useless if the operations for the target type
	are a subset of the operations for the source type.  For example
	casts to void* are useless, casts from void* are not (void* can't
	be dereferenced or offsetted, but copied, hence its set of operations
	is a strict subset of that of all other data pointer types).  Casts
	to const T* are useless (can't be written to), casts from const T*
	to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* If the outer type is (void *), then the conversion is not
     necessary.  We have to make sure to not apply this while
     recursing though.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type)
      && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
    return true;

  return useless_type_conversion_p_1 (outer_type, inner_type);
}
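
/* For illustration, assuming the usual global type nodes (ptr_type_node is
   'void *', integer_type_node is 'int'): the asymmetry described above can
   be checked directly.  The helper name check_void_ptr_asymmetry is
   hypothetical and exists only as an example.  */

static void ATTRIBUTE_UNUSED
check_void_ptr_asymmetry (void)
{
  tree int_ptr = build_pointer_type (integer_type_node);

  /* Dropping operations (int * -> void *) is a useless conversion...  */
  gcc_assert (useless_type_conversion_p (ptr_type_node, int_ptr));

  /* ...but gaining operations (void * -> int *) is not.  */
  gcc_assert (!useless_type_conversion_p (int_ptr, ptr_type_node));
}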
/* Return true if a conversion from either type of TYPE1 and TYPE2
   to the other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
	  || (useless_type_conversion_p (type1, type2)
	      && useless_type_conversion_p (type2, type1)));
}
/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (TREE_CODE (expr) == NOP_EXPR || TREE_CODE (expr) == CONVERT_EXPR
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    /* FIXME: Use of GENERIC_TREE_TYPE here is a temporary measure to work
       around known bugs with GIMPLE_MODIFY_STMTs appearing in places
       they shouldn't.  See PR 30391.  */
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       GENERIC_TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
      infinite loops.  We used to have a bitmap for this to just mark
      SSA versions we had visited.  But non-sparse bitmaps are way too
      expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
      when visiting PHI nodes.  A DFS will visit each PHI argument and
      call FN after each one.  Otherwise, all the arguments are
      visited first and then FN is called with each of the visited
      arguments in a separate pass.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
		       struct pointer_set_t *visited, bool is_dfs)
{
  tree def_stmt;

  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (TREE_CODE (def_stmt) != PHI_NODE)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      int i;

      /* When doing a breadth-first search, call FN before following the
	 use-def links for each argument.  */
      if (!is_dfs)
	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
	    return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	{
	  tree arg = PHI_ARG_DEF (def_stmt, i);

	  /* ARG may be NULL for newly introduced PHI nodes.  */
	  if (arg
	      && TREE_CODE (arg) == SSA_NAME
	      && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
	    return true;
	}

      /* When doing a depth-first search, call FN after following the
	 use-def links for each argument.  */
      if (is_dfs)
	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
	    return true;
    }

  return false;
}
/* Walk use-def chains starting at the SSA variable VAR.  Call
   function FN at each reaching definition found.  FN takes three
   arguments: VAR, its defining statement (DEF_STMT) and a generic
   pointer to whatever state information that FN may want to maintain
   (DATA).  FN is able to stop the walk by returning true, otherwise
   in order to continue the walk, FN should return false.

   Note that if DEF_STMT is a PHI node, the semantics are slightly
   different.  The first argument to FN is no longer the original
   variable VAR, but the PHI argument currently being examined.  If FN
   wants to get at VAR, it should call PHI_RESULT (PHI).

   If IS_DFS is true, this function will:

	1- walk the use-def chains for all the PHI arguments, and,
	2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.

   If IS_DFS is false, the two steps above are done in reverse order
   (i.e., a breadth-first search).  */

void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
		     bool is_dfs)
{
  tree def_stmt;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  def_stmt = SSA_NAME_DEF_STMT (var);

  /* We only need to recurse if the reaching definition comes from a PHI
     node.  */
  if (TREE_CODE (def_stmt) != PHI_NODE)
    (*fn) (var, def_stmt, data);
  else
    {
      struct pointer_set_t *visited = pointer_set_create ();
      walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
      pointer_set_destroy (visited);
    }
}
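
/* Illustrative sketch only: a minimal walk_use_def_chains client that
   counts the reaching definitions of an SSA name.  The names
   count_reaching_defs_cb and count_reaching_defs are hypothetical; they
   only demonstrate the FN/DATA protocol documented above (FN returns false
   to keep walking, true to stop early).  */

static bool
count_reaching_defs_cb (tree var ATTRIBUTE_UNUSED,
			tree def_stmt ATTRIBUTE_UNUSED, void *data)
{
  unsigned *counter = (unsigned *) data;
  (*counter)++;
  return false;  /* Keep walking.  */
}

static unsigned ATTRIBUTE_UNUSED
count_reaching_defs (tree name)
{
  unsigned count = 0;

  /* Breadth-first walk; the callback runs once per reaching definition.  */
  walk_use_def_chains (name, count_reaching_defs_cb, &count, false);
  return count;
}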
/* Return true if T, an SSA_NAME, has an undefined value.  */

bool
ssa_undefined_value_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  /* Parameters get their initial value from the function entry.  */
  if (TREE_CODE (var) == PARM_DECL)
    return false;

  /* Hard register variables get their initial value from the ether.  */
  if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  return IS_EMPTY_STMT (SSA_NAME_DEF_STMT (t));
}
/* Emit warnings for uninitialized variables.  This is done in two passes.

   The first pass notices real uses of SSA names with undefined values.
   Such uses are unconditionally uninitialized, and we can be certain that
   such a use is a mistake.  This pass is run before most optimizations,
   so that we catch as many as we can.

   The second pass follows PHI nodes to find uses that are potentially
   uninitialized.  In this case we can't necessarily prove that the use
   is really uninitialized.  This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
   code as possible, in order to reduce false positives.  We also look
   again for plain uninitialized variables, since optimization may have
   changed conditionally uninitialized to unconditionally uninitialized.  */

/* Emit a warning for T, an SSA_NAME, being uninitialized.  The exact
   warning text is in GMSGID and LOCUS may contain a location or be null.  */

static void
warn_uninit (tree t, const char *gmsgid, void *data)
{
  tree var = SSA_NAME_VAR (t);
  tree context = (tree) data;
  location_t *locus;
  expanded_location xloc, floc;

  if (!ssa_undefined_value_p (t))
    return;

  /* TREE_NO_WARNING either means we already warned, or the front end
     wishes to suppress the warning.  */
  if (TREE_NO_WARNING (var))
    return;

  locus = (context != NULL && EXPR_HAS_LOCATION (context)
	   ? EXPR_LOCUS (context)
	   : &DECL_SOURCE_LOCATION (var));
  warning (OPT_Wuninitialized, gmsgid, locus, var);
  xloc = expand_location (*locus);
  floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
  if (xloc.file != floc.file
      || xloc.line < floc.line
      || xloc.line > LOCATION_LINE (cfun->function_end_locus))
    inform ("%J%qD was declared here", var, var);

  TREE_NO_WARNING (var) = 1;
}
/* Data passed to warn_uninitialized_var when walking statements with
   walk_tree.  */

struct walk_data
{
  tree stmt;
  bool always_executed;
};
/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
   and warn about them.  */

static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_data *data = (struct walk_data *) data_;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* We only do data flow with SSA_NAMEs, so that's all we
	 can warn about.  */
      if (data->always_executed)
	warn_uninit (t, "%H%qD is used uninitialized in this function",
		     data->stmt);
      else
	warn_uninit (t, "%H%qD may be used uninitialized in this function",
		     data->stmt);
      *walk_subtrees = 0;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* The total store transformation performed during gimplification
	 creates uninitialized variable uses.  If all is well, these will
	 be optimized away, so don't warn now.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
	*walk_subtrees = 0;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 0;
      break;
    }

  return NULL_TREE;
}
/* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
   and warn about them.  */

static void
warn_uninitialized_phi (tree phi)
{
  int i, n = PHI_NUM_ARGS (phi);

  /* Don't look at memory tags.  */
  if (!is_gimple_reg (PHI_RESULT (phi)))
    return;

  for (i = 0; i < n; ++i)
    {
      tree op = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (op) == SSA_NAME)
	warn_uninit (op, "%H%qD may be used uninitialized in this function",
		     NULL);
    }
}
static unsigned int
execute_early_warn_uninitialized (void)
{
  block_stmt_iterator bsi;
  basic_block bb;
  struct walk_data data;

  calculate_dominance_info (CDI_POST_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
					     single_succ (ENTRY_BLOCK_PTR), bb);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  data.stmt = bsi_stmt (bsi);
	  walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
		     &data, NULL);
	}
    }
  return 0;
}
static unsigned int
execute_late_warn_uninitialized (void)
{
  basic_block bb;
  tree phi;

  /* Re-do the plain uninitialized variable check, as optimization may have
     straightened control flow.  Do this first so that we don't accidentally
     get a "may be" warning when we'd have seen an "is" warning later.  */
  execute_early_warn_uninitialized ();

  FOR_EACH_BB (bb)
    for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
      warn_uninitialized_phi (phi);
  return 0;
}
static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}
struct gimple_opt_pass pass_early_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  gate_warn_uninitialized,		/* gate */
  execute_early_warn_uninitialized,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
struct gimple_opt_pass pass_late_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  gate_warn_uninitialized,		/* gate */
  execute_late_warn_uninitialized,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Compute TREE_ADDRESSABLE for local variables.  */

static unsigned int
execute_update_addresses_taken (void)
{
  tree var;
  referenced_var_iterator rvi;
  block_stmt_iterator bsi;
  basic_block bb;
  tree phi;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap vars_updated = BITMAP_ALLOC (NULL);
  bool update_vops = false;

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));

	  if (s_ann->addresses_taken)
	    bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
	}
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
	  for (i = 0; i < phi_num_args; i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* When possible, clear ADDRESSABLE bit and mark variable for conversion into
     SSA.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (!is_global_var (var)
	&& TREE_CODE (var) != RESULT_DECL
	&& TREE_ADDRESSABLE (var)
	&& !bitmap_bit_p (addresses_taken, DECL_UID (var)))
      {
	TREE_ADDRESSABLE (var) = 0;
	if (is_gimple_reg (var))
	  mark_sym_for_renaming (var);
	update_vops = true;
	bitmap_set_bit (vars_updated, DECL_UID (var));
	if (dump_file)
	  {
	    fprintf (dump_file, "No longer having address taken ");
	    print_generic_expr (dump_file, var, 0);
	    fprintf (dump_file, "\n");
	  }
      }

  /* Operand caches need to be recomputed for operands referencing the updated
     variables.  */
  if (update_vops)
    FOR_EACH_BB (bb)
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);

	  if ((LOADED_SYMS (stmt)
	       && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
	      || (STORED_SYMS (stmt)
		  && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
	    update_stmt (stmt);
	}

  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (vars_updated);
  return 0;
}
struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",			/* name */
  NULL,					/* gate */
  execute_update_addresses_taken,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa			/* todo_flags_finish */
 }
};