/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "target.h"
#include "ggc.h"
#include "langhooks.h"
#include "basic-block.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-inline.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "cfgloop.h"
/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;
51 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
54 redirect_edge_var_map_add (edge e
, tree result
, tree def
, source_location locus
)
57 edge_var_map_vector old_head
, head
;
58 edge_var_map new_node
;
60 if (edge_var_maps
== NULL
)
61 edge_var_maps
= pointer_map_create ();
63 slot
= pointer_map_insert (edge_var_maps
, e
);
64 old_head
= head
= (edge_var_map_vector
) *slot
;
67 head
= VEC_alloc (edge_var_map
, heap
, 5);
71 new_node
.result
= result
;
72 new_node
.locus
= locus
;
74 VEC_safe_push (edge_var_map
, heap
, head
, &new_node
);
77 /* The push did some reallocation. Update the pointer map. */
83 /* Clear the var mappings in edge E. */
86 redirect_edge_var_map_clear (edge e
)
89 edge_var_map_vector head
;
94 slot
= pointer_map_contains (edge_var_maps
, e
);
98 head
= (edge_var_map_vector
) *slot
;
99 VEC_free (edge_var_map
, heap
, head
);
105 /* Duplicate the redirected var mappings in OLDE in NEWE.
107 Since we can't remove a mapping, let's just duplicate it. This assumes a
108 pointer_map can have multiple edges mapping to the same var_map (many to
109 one mapping), since we don't remove the previous mappings. */
112 redirect_edge_var_map_dup (edge newe
, edge olde
)
114 void **new_slot
, **old_slot
;
115 edge_var_map_vector head
;
120 new_slot
= pointer_map_insert (edge_var_maps
, newe
);
121 old_slot
= pointer_map_contains (edge_var_maps
, olde
);
124 head
= (edge_var_map_vector
) *old_slot
;
127 *new_slot
= VEC_copy (edge_var_map
, heap
, head
);
129 *new_slot
= VEC_alloc (edge_var_map
, heap
, 5);
133 /* Return the variable mappings for a given edge. If there is none, return
137 redirect_edge_var_map_vector (edge e
)
141 /* Hey, what kind of idiot would... you'd be surprised. */
145 slot
= pointer_map_contains (edge_var_maps
, e
);
149 return (edge_var_map_vector
) *slot
;
152 /* Used by redirect_edge_var_map_destroy to free all memory. */
155 free_var_map_entry (const void *key ATTRIBUTE_UNUSED
,
157 void *data ATTRIBUTE_UNUSED
)
159 edge_var_map_vector head
= (edge_var_map_vector
) *value
;
160 VEC_free (edge_var_map
, heap
, head
);
164 /* Clear the edge variable mappings. */
167 redirect_edge_var_map_destroy (void)
171 pointer_map_traverse (edge_var_maps
, free_var_map_entry
, NULL
);
172 pointer_map_destroy (edge_var_maps
);
173 edge_var_maps
= NULL
;
178 /* Remove the corresponding arguments from the PHI nodes in E's
179 destination block and redirect it to DEST. Return redirected edge.
180 The list of removed arguments is stored in a vector accessed
181 through edge_var_maps. */
184 ssa_redirect_edge (edge e
, basic_block dest
)
186 gimple_stmt_iterator gsi
;
189 redirect_edge_var_map_clear (e
);
191 /* Remove the appropriate PHI arguments in E's destination block. */
192 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
195 source_location locus
;
197 phi
= gsi_stmt (gsi
);
198 def
= gimple_phi_arg_def (phi
, e
->dest_idx
);
199 locus
= gimple_phi_arg_location (phi
, e
->dest_idx
);
201 if (def
== NULL_TREE
)
204 redirect_edge_var_map_add (e
, gimple_phi_result (phi
), def
, locus
);
207 e
= redirect_edge_succ_nodup (e
, dest
);
213 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
217 flush_pending_stmts (edge e
)
220 edge_var_map_vector v
;
223 gimple_stmt_iterator gsi
;
225 v
= redirect_edge_var_map_vector (e
);
229 for (gsi
= gsi_start_phis (e
->dest
), i
= 0;
230 !gsi_end_p (gsi
) && VEC_iterate (edge_var_map
, v
, i
, vm
);
231 gsi_next (&gsi
), i
++)
235 phi
= gsi_stmt (gsi
);
236 def
= redirect_edge_var_map_def (vm
);
237 add_phi_arg (phi
, def
, e
, redirect_edge_var_map_location (vm
));
240 redirect_edge_var_map_clear (e
);
243 /* Given a tree for an expression for which we might want to emit
244 locations or values in debug information (generally a variable, but
245 we might deal with other kinds of trees in the future), return the
246 tree that should be used as the variable of a DEBUG_BIND STMT or
247 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
250 target_for_debug_bind (tree var
)
252 if (!MAY_HAVE_DEBUG_STMTS
)
255 if (TREE_CODE (var
) != VAR_DECL
256 && TREE_CODE (var
) != PARM_DECL
)
259 if (DECL_HAS_VALUE_EXPR_P (var
))
260 return target_for_debug_bind (DECL_VALUE_EXPR (var
));
262 if (DECL_IGNORED_P (var
))
265 if (!is_gimple_reg (var
))
267 if (is_gimple_reg_type (TREE_TYPE (var
))
268 && referenced_var_lookup (cfun
, DECL_UID (var
)) == NULL_TREE
)
276 /* Called via walk_tree, look for SSA_NAMEs that have already been
280 find_released_ssa_name (tree
*tp
, int *walk_subtrees
, void *data_
)
282 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data_
;
284 if (wi
&& wi
->is_lhs
)
287 if (TREE_CODE (*tp
) == SSA_NAME
)
289 if (SSA_NAME_IN_FREE_LIST (*tp
))
294 else if (IS_TYPE_OR_DECL_P (*tp
))
300 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
301 by other DEBUG stmts, and replace uses of the DEF with the
302 newly-created debug temp. */
305 insert_debug_temp_for_var_def (gimple_stmt_iterator
*gsi
, tree var
)
307 imm_use_iterator imm_iter
;
310 gimple def_stmt
= NULL
;
314 if (!MAY_HAVE_DEBUG_STMTS
)
317 /* If this name has already been registered for replacement, do nothing
318 as anything that uses this name isn't in SSA form. */
319 if (name_registered_for_update_p (var
))
322 /* Check whether there are debug stmts that reference this variable and,
323 if there are, decide whether we should use a debug temp. */
324 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, var
)
326 stmt
= USE_STMT (use_p
);
328 if (!gimple_debug_bind_p (stmt
))
334 if (gimple_debug_bind_get_value (stmt
) != var
)
336 /* Count this as an additional use, so as to make sure we
337 use a temp unless VAR's definition has a SINGLE_RHS that
348 def_stmt
= gsi_stmt (*gsi
);
350 def_stmt
= SSA_NAME_DEF_STMT (var
);
352 /* If we didn't get an insertion point, and the stmt has already
353 been removed, we won't be able to insert the debug bind stmt, so
354 we'll have to drop debug information. */
355 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
357 value
= degenerate_phi_result (def_stmt
);
358 if (value
&& walk_tree (&value
, find_released_ssa_name
, NULL
, NULL
))
360 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
362 else if (value
== error_mark_node
)
365 else if (is_gimple_assign (def_stmt
))
367 bool no_value
= false;
369 if (!dom_info_available_p (CDI_DOMINATORS
))
371 struct walk_stmt_info wi
;
373 memset (&wi
, 0, sizeof (wi
));
375 /* When removing blocks without following reverse dominance
376 order, we may sometimes encounter SSA_NAMEs that have
377 already been released, referenced in other SSA_DEFs that
378 we're about to release. Consider:
387 If we deleted BB X first, propagating the value of w_2
388 won't do us any good. It's too late to recover their
389 original definition of v_1: when it was deleted, it was
390 only referenced in other DEFs, it couldn't possibly know
391 it should have been retained, and propagating every
392 single DEF just in case it might have to be propagated
393 into a DEBUG STMT would probably be too wasteful.
395 When dominator information is not readily available, we
396 check for and accept some loss of debug information. But
397 if it is available, there's no excuse for us to remove
398 blocks in the wrong order, so we don't even check for
399 dead SSA NAMEs. SSA verification shall catch any
401 if ((!gsi
&& !gimple_bb (def_stmt
))
402 || walk_gimple_op (def_stmt
, find_released_ssa_name
, &wi
))
407 value
= gimple_assign_rhs_to_tree (def_stmt
);
412 /* If there's a single use of VAR, and VAR is the entire debug
413 expression (usecount would have been incremented again
414 otherwise), and the definition involves only constants and
415 SSA names, then we can propagate VALUE into this single use,
418 We can also avoid using a temp if VALUE can be shared and
419 propagated into all uses, without generating expressions that
420 wouldn't be valid gimple RHSs.
422 Other cases that would require unsharing or non-gimple RHSs
423 are deferred to a debug temp, although we could avoid temps
424 at the expense of duplication of expressions. */
426 if (CONSTANT_CLASS_P (value
)
427 || gimple_code (def_stmt
) == GIMPLE_PHI
429 && (!gimple_assign_single_p (def_stmt
)
430 || is_gimple_min_invariant (value
)))
431 || is_gimple_reg (value
))
432 value
= unshare_expr (value
);
436 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
438 def_temp
= gimple_build_debug_bind (vexpr
,
439 unshare_expr (value
),
442 DECL_ARTIFICIAL (vexpr
) = 1;
443 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
445 DECL_MODE (vexpr
) = DECL_MODE (value
);
447 DECL_MODE (vexpr
) = TYPE_MODE (TREE_TYPE (value
));
450 gsi_insert_before (gsi
, def_temp
, GSI_SAME_STMT
);
453 gimple_stmt_iterator ngsi
= gsi_for_stmt (def_stmt
);
454 gsi_insert_before (&ngsi
, def_temp
, GSI_SAME_STMT
);
461 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, var
)
463 if (!gimple_debug_bind_p (stmt
))
468 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
469 /* unshare_expr is not needed here. vexpr is either a
470 SINGLE_RHS, that can be safely shared, some other RHS
471 that was unshared when we found it had a single debug
472 use, or a DEBUG_EXPR_DECL, that can be safely
474 SET_USE (use_p
, value
);
475 /* If we didn't replace uses with a debug decl fold the
476 resulting expression. Otherwise we end up with invalid IL. */
477 if (TREE_CODE (value
) != DEBUG_EXPR_DECL
)
479 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
480 fold_stmt_inplace (&gsi
);
484 gimple_debug_bind_reset_value (stmt
);
491 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
492 other DEBUG stmts, and replace uses of the DEF with the
493 newly-created debug temp. */
496 insert_debug_temps_for_defs (gimple_stmt_iterator
*gsi
)
502 if (!MAY_HAVE_DEBUG_STMTS
)
505 stmt
= gsi_stmt (*gsi
);
507 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
509 tree var
= DEF_FROM_PTR (def_p
);
511 if (TREE_CODE (var
) != SSA_NAME
)
514 insert_debug_temp_for_var_def (gsi
, var
);
518 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
521 reset_debug_uses (gimple stmt
)
525 imm_use_iterator imm_iter
;
528 if (!MAY_HAVE_DEBUG_STMTS
)
531 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
533 tree var
= DEF_FROM_PTR (def_p
);
535 if (TREE_CODE (var
) != SSA_NAME
)
538 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, var
)
540 if (!gimple_debug_bind_p (use_stmt
))
543 gimple_debug_bind_reset_value (use_stmt
);
544 update_stmt (use_stmt
);
549 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
550 dominated stmts before their dominators, so that release_ssa_defs
551 stands a chance of propagating DEFs into debug bind stmts. */
554 release_defs_bitset (bitmap toremove
)
559 /* Performing a topological sort is probably overkill, this will
560 most likely run in slightly superlinear time, rather than the
561 pathological quadratic worst case. */
562 while (!bitmap_empty_p (toremove
))
563 EXECUTE_IF_SET_IN_BITMAP (toremove
, 0, j
, bi
)
565 bool remove_now
= true;
566 tree var
= ssa_name (j
);
568 imm_use_iterator uit
;
570 FOR_EACH_IMM_USE_STMT (stmt
, uit
, var
)
575 /* We can't propagate PHI nodes into debug stmts. */
576 if (gimple_code (stmt
) == GIMPLE_PHI
577 || is_gimple_debug (stmt
))
580 /* If we find another definition to remove that uses
581 the one we're looking at, defer the removal of this
582 one, so that it can be propagated into debug stmts
583 after the other is. */
584 FOR_EACH_SSA_DEF_OPERAND (def_p
, stmt
, dit
, SSA_OP_DEF
)
586 tree odef
= DEF_FROM_PTR (def_p
);
588 if (bitmap_bit_p (toremove
, SSA_NAME_VERSION (odef
)))
596 BREAK_FROM_IMM_USE_STMT (uit
);
601 gimple def
= SSA_NAME_DEF_STMT (var
);
602 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
604 if (gimple_code (def
) == GIMPLE_PHI
)
605 remove_phi_node (&gsi
, true);
608 gsi_remove (&gsi
, true);
612 bitmap_clear_bit (toremove
, j
);
617 /* Return true if SSA_NAME is malformed and mark it visited.
619 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
623 verify_ssa_name (tree ssa_name
, bool is_virtual
)
625 if (TREE_CODE (ssa_name
) != SSA_NAME
)
627 error ("expected an SSA_NAME object");
631 if (TREE_TYPE (ssa_name
) != TREE_TYPE (SSA_NAME_VAR (ssa_name
)))
633 error ("type mismatch between an SSA_NAME and its symbol");
637 if (SSA_NAME_IN_FREE_LIST (ssa_name
))
639 error ("found an SSA_NAME that had been released into the free pool");
643 if (is_virtual
&& is_gimple_reg (ssa_name
))
645 error ("found a virtual definition for a GIMPLE register");
649 if (is_virtual
&& SSA_NAME_VAR (ssa_name
) != gimple_vop (cfun
))
651 error ("virtual SSA name for non-VOP decl");
655 if (!is_virtual
&& !is_gimple_reg (ssa_name
))
657 error ("found a real definition for a non-register");
661 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name
)
662 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
)))
664 error ("found a default name with a non-empty defining statement");
672 /* Return true if the definition of SSA_NAME at block BB is malformed.
674 STMT is the statement where SSA_NAME is created.
676 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
677 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
678 it means that the block in that array slot contains the
679 definition of SSA_NAME.
681 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
684 verify_def (basic_block bb
, basic_block
*definition_block
, tree ssa_name
,
685 gimple stmt
, bool is_virtual
)
687 if (verify_ssa_name (ssa_name
, is_virtual
))
690 if (TREE_CODE (SSA_NAME_VAR (ssa_name
)) == RESULT_DECL
691 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name
)))
693 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
697 if (definition_block
[SSA_NAME_VERSION (ssa_name
)])
699 error ("SSA_NAME created in two different blocks %i and %i",
700 definition_block
[SSA_NAME_VERSION (ssa_name
)]->index
, bb
->index
);
704 definition_block
[SSA_NAME_VERSION (ssa_name
)] = bb
;
706 if (SSA_NAME_DEF_STMT (ssa_name
) != stmt
)
708 error ("SSA_NAME_DEF_STMT is wrong");
709 fprintf (stderr
, "Expected definition statement:\n");
710 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (ssa_name
), 4, TDF_VOPS
);
711 fprintf (stderr
, "\nActual definition statement:\n");
712 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
719 fprintf (stderr
, "while verifying SSA_NAME ");
720 print_generic_expr (stderr
, ssa_name
, 0);
721 fprintf (stderr
, " in statement\n");
722 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
728 /* Return true if the use of SSA_NAME at statement STMT in block BB is
731 DEF_BB is the block where SSA_NAME was found to be created.
733 IDOM contains immediate dominator information for the flowgraph.
735 CHECK_ABNORMAL is true if the caller wants to check whether this use
736 is flowing through an abnormal edge (only used when checking PHI
739 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
740 that are defined before STMT in basic block BB. */
743 verify_use (basic_block bb
, basic_block def_bb
, use_operand_p use_p
,
744 gimple stmt
, bool check_abnormal
, bitmap names_defined_in_bb
)
747 tree ssa_name
= USE_FROM_PTR (use_p
);
749 if (!TREE_VISITED (ssa_name
))
750 if (verify_imm_links (stderr
, ssa_name
))
753 TREE_VISITED (ssa_name
) = 1;
755 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
))
756 && SSA_NAME_IS_DEFAULT_DEF (ssa_name
))
757 ; /* Default definitions have empty statements. Nothing to do. */
760 error ("missing definition");
763 else if (bb
!= def_bb
764 && !dominated_by_p (CDI_DOMINATORS
, bb
, def_bb
))
766 error ("definition in block %i does not dominate use in block %i",
767 def_bb
->index
, bb
->index
);
770 else if (bb
== def_bb
771 && names_defined_in_bb
!= NULL
772 && !bitmap_bit_p (names_defined_in_bb
, SSA_NAME_VERSION (ssa_name
)))
774 error ("definition in block %i follows the use", def_bb
->index
);
779 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name
))
781 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
785 /* Make sure the use is in an appropriate list by checking the previous
786 element to make sure it's the same. */
787 if (use_p
->prev
== NULL
)
789 error ("no immediate_use list");
795 if (use_p
->prev
->use
== NULL
)
796 listvar
= use_p
->prev
->loc
.ssa_name
;
798 listvar
= USE_FROM_PTR (use_p
->prev
);
799 if (listvar
!= ssa_name
)
801 error ("wrong immediate use list");
808 fprintf (stderr
, "for SSA_NAME: ");
809 print_generic_expr (stderr
, ssa_name
, TDF_VOPS
);
810 fprintf (stderr
, " in statement:\n");
811 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
818 /* Return true if any of the arguments for PHI node PHI at block BB is
821 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
822 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
823 it means that the block in that array slot contains the
824 definition of SSA_NAME. */
827 verify_phi_args (gimple phi
, basic_block bb
, basic_block
*definition_block
)
831 size_t i
, phi_num_args
= gimple_phi_num_args (phi
);
833 if (EDGE_COUNT (bb
->preds
) != phi_num_args
)
835 error ("incoming edge count does not match number of PHI arguments");
840 for (i
= 0; i
< phi_num_args
; i
++)
842 use_operand_p op_p
= gimple_phi_arg_imm_use_ptr (phi
, i
);
843 tree op
= USE_FROM_PTR (op_p
);
845 e
= EDGE_PRED (bb
, i
);
849 error ("PHI argument is missing for edge %d->%d",
856 if (TREE_CODE (op
) != SSA_NAME
&& !is_gimple_min_invariant (op
))
858 error ("PHI argument is not SSA_NAME, or invariant");
862 if (TREE_CODE (op
) == SSA_NAME
)
864 err
= verify_ssa_name (op
, !is_gimple_reg (gimple_phi_result (phi
)));
865 err
|= verify_use (e
->src
, definition_block
[SSA_NAME_VERSION (op
)],
866 op_p
, phi
, e
->flags
& EDGE_ABNORMAL
, NULL
);
869 if (TREE_CODE (op
) == ADDR_EXPR
)
871 tree base
= TREE_OPERAND (op
, 0);
872 while (handled_component_p (base
))
873 base
= TREE_OPERAND (base
, 0);
874 if ((TREE_CODE (base
) == VAR_DECL
875 || TREE_CODE (base
) == PARM_DECL
876 || TREE_CODE (base
) == RESULT_DECL
)
877 && !TREE_ADDRESSABLE (base
))
879 error ("address taken, but ADDRESSABLE bit not set");
886 error ("wrong edge %d->%d for PHI argument",
887 e
->src
->index
, e
->dest
->index
);
893 fprintf (stderr
, "PHI argument\n");
894 print_generic_stmt (stderr
, op
, TDF_VOPS
);
902 fprintf (stderr
, "for PHI node\n");
903 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
|TDF_MEMSYMS
);
911 /* Verify common invariants in the SSA web.
912 TODO: verify the variable annotations. */
915 verify_ssa (bool check_modified_stmt
)
919 basic_block
*definition_block
= XCNEWVEC (basic_block
, num_ssa_names
);
922 enum dom_state orig_dom_state
= dom_info_state (CDI_DOMINATORS
);
923 bitmap names_defined_in_bb
= BITMAP_ALLOC (NULL
);
925 gcc_assert (!need_ssa_update_p (cfun
));
927 timevar_push (TV_TREE_SSA_VERIFY
);
929 /* Keep track of SSA names present in the IL. */
930 for (i
= 1; i
< num_ssa_names
; i
++)
932 tree name
= ssa_name (i
);
936 TREE_VISITED (name
) = 0;
938 verify_ssa_name (name
, !is_gimple_reg (name
));
940 stmt
= SSA_NAME_DEF_STMT (name
);
941 if (!gimple_nop_p (stmt
))
943 basic_block bb
= gimple_bb (stmt
);
944 verify_def (bb
, definition_block
,
945 name
, stmt
, !is_gimple_reg (name
));
951 calculate_dominance_info (CDI_DOMINATORS
);
953 /* Now verify all the uses and make sure they agree with the definitions
954 found in the previous pass. */
960 gimple_stmt_iterator gsi
;
962 /* Make sure that all edges have a clear 'aux' field. */
963 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
967 error ("AUX pointer initialized for edge %d->%d", e
->src
->index
,
973 /* Verify the arguments for every PHI node in the block. */
974 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
976 phi
= gsi_stmt (gsi
);
977 if (verify_phi_args (phi
, bb
, definition_block
))
980 bitmap_set_bit (names_defined_in_bb
,
981 SSA_NAME_VERSION (gimple_phi_result (phi
)));
984 /* Now verify all the uses and vuses in every statement of the block. */
985 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
987 gimple stmt
= gsi_stmt (gsi
);
990 if (check_modified_stmt
&& gimple_modified_p (stmt
))
992 error ("stmt (%p) marked modified after optimization pass: ",
994 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
998 if (verify_ssa_operands (stmt
))
1000 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1004 if (gimple_debug_bind_p (stmt
)
1005 && !gimple_debug_bind_has_value_p (stmt
))
1008 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1010 op
= USE_FROM_PTR (use_p
);
1011 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1012 use_p
, stmt
, false, names_defined_in_bb
))
1016 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1018 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1020 error ("SSA_NAME_DEF_STMT is wrong");
1021 fprintf (stderr
, "Expected definition statement:\n");
1022 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1023 fprintf (stderr
, "\nActual definition statement:\n");
1024 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1028 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1032 bitmap_clear (names_defined_in_bb
);
1035 free (definition_block
);
1037 /* Restore the dominance information to its prior known state, so
1038 that we do not perturb the compiler's subsequent behavior. */
1039 if (orig_dom_state
== DOM_NONE
)
1040 free_dominance_info (CDI_DOMINATORS
);
1042 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1044 BITMAP_FREE (names_defined_in_bb
);
1045 timevar_pop (TV_TREE_SSA_VERIFY
);
1049 internal_error ("verify_ssa failed");
1052 /* Return true if the uid in both int tree maps are equal. */
1055 int_tree_map_eq (const void *va
, const void *vb
)
1057 const struct int_tree_map
*a
= (const struct int_tree_map
*) va
;
1058 const struct int_tree_map
*b
= (const struct int_tree_map
*) vb
;
1059 return (a
->uid
== b
->uid
);
1062 /* Hash a UID in a int_tree_map. */
1065 int_tree_map_hash (const void *item
)
1067 return ((const struct int_tree_map
*)item
)->uid
;
1070 /* Return true if the DECL_UID in both trees are equal. */
1073 uid_decl_map_eq (const void *va
, const void *vb
)
1075 const_tree a
= (const_tree
) va
;
1076 const_tree b
= (const_tree
) vb
;
1077 return (a
->decl_minimal
.uid
== b
->decl_minimal
.uid
);
1080 /* Hash a tree in a uid_decl_map. */
1083 uid_decl_map_hash (const void *item
)
1085 return ((const_tree
)item
)->decl_minimal
.uid
;
1088 /* Return true if the DECL_UID in both trees are equal. */
1091 uid_ssaname_map_eq (const void *va
, const void *vb
)
1093 const_tree a
= (const_tree
) va
;
1094 const_tree b
= (const_tree
) vb
;
1095 return (a
->ssa_name
.var
->decl_minimal
.uid
== b
->ssa_name
.var
->decl_minimal
.uid
);
1098 /* Hash a tree in a uid_decl_map. */
1101 uid_ssaname_map_hash (const void *item
)
1103 return ((const_tree
)item
)->ssa_name
.var
->decl_minimal
.uid
;
1107 /* Initialize global DFA and SSA structures. */
1110 init_tree_ssa (struct function
*fn
)
1112 fn
->gimple_df
= ggc_alloc_cleared_gimple_df ();
1113 fn
->gimple_df
->referenced_vars
= htab_create_ggc (20, uid_decl_map_hash
,
1114 uid_decl_map_eq
, NULL
);
1115 fn
->gimple_df
->default_defs
= htab_create_ggc (20, uid_ssaname_map_hash
,
1116 uid_ssaname_map_eq
, NULL
);
1117 pt_solution_reset (&fn
->gimple_df
->escaped
);
1118 init_ssanames (fn
, 0);
1121 /* Do the actions required to initialize internal data structures used
1122 in tree-ssa optimization passes. */
1125 execute_init_datastructures (void)
1127 /* Allocate hash tables, arrays and other structures. */
1128 init_tree_ssa (cfun
);
1132 struct gimple_opt_pass pass_init_datastructures
=
1136 "*init_datastructures", /* name */
1138 execute_init_datastructures
, /* execute */
1141 0, /* static_pass_number */
1142 TV_NONE
, /* tv_id */
1143 PROP_cfg
, /* properties_required */
1144 0, /* properties_provided */
1145 0, /* properties_destroyed */
1146 0, /* todo_flags_start */
1147 0 /* todo_flags_finish */
1151 /* Deallocate memory associated with SSA data structures for FNDECL. */
1154 delete_tree_ssa (void)
1156 referenced_var_iterator rvi
;
1159 /* Remove annotations from every referenced local variable. */
1160 FOR_EACH_REFERENCED_VAR (cfun
, var
, rvi
)
1162 if (is_global_var (var
))
1166 ggc_free (var_ann (var
));
1167 *DECL_VAR_ANN_PTR (var
) = NULL
;
1170 htab_delete (gimple_referenced_vars (cfun
));
1171 cfun
->gimple_df
->referenced_vars
= NULL
;
1175 /* We no longer maintain the SSA operand cache at this point. */
1176 if (ssa_operands_active ())
1177 fini_ssa_operands ();
1179 htab_delete (cfun
->gimple_df
->default_defs
);
1180 cfun
->gimple_df
->default_defs
= NULL
;
1181 pt_solution_reset (&cfun
->gimple_df
->escaped
);
1182 if (cfun
->gimple_df
->decls_to_pointers
!= NULL
)
1183 pointer_map_destroy (cfun
->gimple_df
->decls_to_pointers
);
1184 cfun
->gimple_df
->decls_to_pointers
= NULL
;
1185 cfun
->gimple_df
->modified_noreturn_calls
= NULL
;
1186 cfun
->gimple_df
= NULL
;
1188 /* We no longer need the edge variable maps. */
1189 redirect_edge_var_map_destroy ();
1192 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1193 useless type conversion, otherwise return false.
1195 This function implicitly defines the middle-end type system. With
1196 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1197 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1198 the following invariants shall be fulfilled:
1200 1) useless_type_conversion_p is transitive.
1201 If a < b and b < c then a < c.
1203 2) useless_type_conversion_p is not symmetric.
1204 From a < b does not follow a > b.
1206 3) Types define the available set of operations applicable to values.
1207 A type conversion is useless if the operations for the target type
1208 is a subset of the operations for the source type. For example
1209 casts to void* are useless, casts from void* are not (void* can't
1210 be dereferenced or offsetted, but copied, hence its set of operations
1211 is a strict subset of that of all other data pointer types). Casts
1212 to const T* are useless (can't be written to), casts from const T*
1216 useless_type_conversion_p (tree outer_type
, tree inner_type
)
1218 /* Do the following before stripping toplevel qualifiers. */
1219 if (POINTER_TYPE_P (inner_type
)
1220 && POINTER_TYPE_P (outer_type
))
1222 /* Do not lose casts between pointers to different address spaces. */
1223 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
1224 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)))
1228 /* From now on qualifiers on value types do not matter. */
1229 inner_type
= TYPE_MAIN_VARIANT (inner_type
);
1230 outer_type
= TYPE_MAIN_VARIANT (outer_type
);
1232 if (inner_type
== outer_type
)
1235 /* If we know the canonical types, compare them. */
1236 if (TYPE_CANONICAL (inner_type
)
1237 && TYPE_CANONICAL (inner_type
) == TYPE_CANONICAL (outer_type
))
1240 /* Changes in machine mode are never useless conversions unless we
1241 deal with aggregate types in which case we defer to later checks. */
1242 if (TYPE_MODE (inner_type
) != TYPE_MODE (outer_type
)
1243 && !AGGREGATE_TYPE_P (inner_type
))
1246 /* If both the inner and outer types are integral types, then the
1247 conversion is not necessary if they have the same mode and
1248 signedness and precision, and both or neither are boolean. */
1249 if (INTEGRAL_TYPE_P (inner_type
)
1250 && INTEGRAL_TYPE_P (outer_type
))
1252 /* Preserve changes in signedness or precision. */
1253 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
1254 || TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
1257 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
1258 of precision one. */
1259 if (((TREE_CODE (inner_type
) == BOOLEAN_TYPE
)
1260 != (TREE_CODE (outer_type
) == BOOLEAN_TYPE
))
1261 && TYPE_PRECISION (outer_type
) != 1)
1264 /* We don't need to preserve changes in the types minimum or
1265 maximum value in general as these do not generate code
1266 unless the types precisions are different. */
1270 /* Scalar floating point types with the same mode are compatible. */
1271 else if (SCALAR_FLOAT_TYPE_P (inner_type
)
1272 && SCALAR_FLOAT_TYPE_P (outer_type
))
1275 /* Fixed point types with the same mode are compatible. */
1276 else if (FIXED_POINT_TYPE_P (inner_type
)
1277 && FIXED_POINT_TYPE_P (outer_type
))
1280 /* We need to take special care recursing to pointed-to types. */
1281 else if (POINTER_TYPE_P (inner_type
)
1282 && POINTER_TYPE_P (outer_type
))
1284 /* Do not lose casts to function pointer types. */
1285 if ((TREE_CODE (TREE_TYPE (outer_type
)) == FUNCTION_TYPE
1286 || TREE_CODE (TREE_TYPE (outer_type
)) == METHOD_TYPE
)
1287 && !(TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
1288 || TREE_CODE (TREE_TYPE (inner_type
)) == METHOD_TYPE
))
1291 /* We do not care for const qualification of the pointed-to types
1292 as const qualification has no semantic value to the middle-end. */
1294 /* Otherwise pointers/references are equivalent. */
1298 /* Recurse for complex types. */
1299 else if (TREE_CODE (inner_type
) == COMPLEX_TYPE
1300 && TREE_CODE (outer_type
) == COMPLEX_TYPE
)
1301 return useless_type_conversion_p (TREE_TYPE (outer_type
),
1302 TREE_TYPE (inner_type
));
1304 /* Recurse for vector types with the same number of subparts. */
1305 else if (TREE_CODE (inner_type
) == VECTOR_TYPE
1306 && TREE_CODE (outer_type
) == VECTOR_TYPE
1307 && TYPE_PRECISION (inner_type
) == TYPE_PRECISION (outer_type
))
1308 return useless_type_conversion_p (TREE_TYPE (outer_type
),
1309 TREE_TYPE (inner_type
));
1311 else if (TREE_CODE (inner_type
) == ARRAY_TYPE
1312 && TREE_CODE (outer_type
) == ARRAY_TYPE
)
1314 /* Preserve string attributes. */
1315 if (TYPE_STRING_FLAG (inner_type
) != TYPE_STRING_FLAG (outer_type
))
1318 /* Conversions from array types with unknown extent to
1319 array types with known extent are not useless. */
1320 if (!TYPE_DOMAIN (inner_type
)
1321 && TYPE_DOMAIN (outer_type
))
1324 /* Nor are conversions from array types with non-constant size to
1325 array types with constant size or to different size. */
1326 if (TYPE_SIZE (outer_type
)
1327 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
1328 && (!TYPE_SIZE (inner_type
)
1329 || TREE_CODE (TYPE_SIZE (inner_type
)) != INTEGER_CST
1330 || !tree_int_cst_equal (TYPE_SIZE (outer_type
),
1331 TYPE_SIZE (inner_type
))))
1334 /* Check conversions between arrays with partially known extents.
1335 If the array min/max values are constant they have to match.
1336 Otherwise allow conversions to unknown and variable extents.
1337 In particular this declares conversions that may change the
1338 mode to BLKmode as useless. */
1339 if (TYPE_DOMAIN (inner_type
)
1340 && TYPE_DOMAIN (outer_type
)
1341 && TYPE_DOMAIN (inner_type
) != TYPE_DOMAIN (outer_type
))
1343 tree inner_min
= TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type
));
1344 tree outer_min
= TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type
));
1345 tree inner_max
= TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type
));
1346 tree outer_max
= TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type
));
1348 /* After gimplification a variable min/max value carries no
1349 additional information compared to a NULL value. All that
1350 matters has been lowered to be part of the IL. */
1351 if (inner_min
&& TREE_CODE (inner_min
) != INTEGER_CST
)
1352 inner_min
= NULL_TREE
;
1353 if (outer_min
&& TREE_CODE (outer_min
) != INTEGER_CST
)
1354 outer_min
= NULL_TREE
;
1355 if (inner_max
&& TREE_CODE (inner_max
) != INTEGER_CST
)
1356 inner_max
= NULL_TREE
;
1357 if (outer_max
&& TREE_CODE (outer_max
) != INTEGER_CST
)
1358 outer_max
= NULL_TREE
;
1360 /* Conversions NULL / variable <- cst are useless, but not
1361 the other way around. */
1364 || !tree_int_cst_equal (inner_min
, outer_min
)))
1368 || !tree_int_cst_equal (inner_max
, outer_max
)))
1372 /* Recurse on the element check. */
1373 return useless_type_conversion_p (TREE_TYPE (outer_type
),
1374 TREE_TYPE (inner_type
));
1377 else if ((TREE_CODE (inner_type
) == FUNCTION_TYPE
1378 || TREE_CODE (inner_type
) == METHOD_TYPE
)
1379 && TREE_CODE (inner_type
) == TREE_CODE (outer_type
))
1381 tree outer_parm
, inner_parm
;
1383 /* If the return types are not compatible bail out. */
1384 if (!useless_type_conversion_p (TREE_TYPE (outer_type
),
1385 TREE_TYPE (inner_type
)))
1388 /* Method types should belong to a compatible base class. */
1389 if (TREE_CODE (inner_type
) == METHOD_TYPE
1390 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type
),
1391 TYPE_METHOD_BASETYPE (inner_type
)))
1394 /* A conversion to an unprototyped argument list is ok. */
1395 if (!prototype_p (outer_type
))
1398 /* If the unqualified argument types are compatible the conversion
1400 if (TYPE_ARG_TYPES (outer_type
) == TYPE_ARG_TYPES (inner_type
))
1403 for (outer_parm
= TYPE_ARG_TYPES (outer_type
),
1404 inner_parm
= TYPE_ARG_TYPES (inner_type
);
1405 outer_parm
&& inner_parm
;
1406 outer_parm
= TREE_CHAIN (outer_parm
),
1407 inner_parm
= TREE_CHAIN (inner_parm
))
1408 if (!useless_type_conversion_p
1409 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm
)),
1410 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm
))))
1413 /* If there is a mismatch in the number of arguments the functions
1414 are not compatible. */
1415 if (outer_parm
|| inner_parm
)
1418 /* Defer to the target if necessary. */
1419 if (TYPE_ATTRIBUTES (inner_type
) || TYPE_ATTRIBUTES (outer_type
))
1420 return comp_type_attributes (outer_type
, inner_type
) != 0;
1425 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
1426 explicit conversions for types involving to be structurally
1428 else if (AGGREGATE_TYPE_P (inner_type
)
1429 && TREE_CODE (inner_type
) == TREE_CODE (outer_type
))
1435 /* Return true if a conversion from either type of TYPE1 and TYPE2
1436 to the other is not required. Otherwise return false. */
1439 types_compatible_p (tree type1
, tree type2
)
1441 return (type1
== type2
1442 || (useless_type_conversion_p (type1
, type2
)
1443 && useless_type_conversion_p (type2
, type1
)));
1446 /* Return true if EXPR is a useless type conversion, otherwise return
1450 tree_ssa_useless_type_conversion (tree expr
)
1452 /* If we have an assignment that merely uses a NOP_EXPR to change
1453 the top of the RHS to the type of the LHS and the type conversion
1454 is "safe", then strip away the type conversion so that we can
1455 enter LHS = RHS into the const_and_copies table. */
1456 if (CONVERT_EXPR_P (expr
)
1457 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
1458 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
1459 return useless_type_conversion_p
1461 TREE_TYPE (TREE_OPERAND (expr
, 0)));
1466 /* Strip conversions from EXP according to
1467 tree_ssa_useless_type_conversion and return the resulting
1471 tree_ssa_strip_useless_type_conversions (tree exp
)
1473 while (tree_ssa_useless_type_conversion (exp
))
1474 exp
= TREE_OPERAND (exp
, 0);
1479 /* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
1480 described in walk_use_def_chains.
1482 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1483 infinite loops. We used to have a bitmap for this to just mark
1484 SSA versions we had visited. But non-sparse bitmaps are way too
1485 expensive, while sparse bitmaps may cause quadratic behavior.
1487 IS_DFS is true if the caller wants to perform a depth-first search
1488 when visiting PHI nodes. A DFS will visit each PHI argument and
1489 call FN after each one. Otherwise, all the arguments are
1490 visited first and then FN is called with each of the visited
1491 arguments in a separate pass. */
1494 walk_use_def_chains_1 (tree var
, walk_use_def_chains_fn fn
, void *data
,
1495 struct pointer_set_t
*visited
, bool is_dfs
)
1499 if (pointer_set_insert (visited
, var
))
1502 def_stmt
= SSA_NAME_DEF_STMT (var
);
1504 if (gimple_code (def_stmt
) != GIMPLE_PHI
)
1506 /* If we reached the end of the use-def chain, call FN. */
1507 return fn (var
, def_stmt
, data
);
1513 /* When doing a breadth-first search, call FN before following the
1514 use-def links for each argument. */
1516 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1517 if (fn (gimple_phi_arg_def (def_stmt
, i
), def_stmt
, data
))
1520 /* Follow use-def links out of each PHI argument. */
1521 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1523 tree arg
= gimple_phi_arg_def (def_stmt
, i
);
1525 /* ARG may be NULL for newly introduced PHI nodes. */
1527 && TREE_CODE (arg
) == SSA_NAME
1528 && walk_use_def_chains_1 (arg
, fn
, data
, visited
, is_dfs
))
1532 /* When doing a depth-first search, call FN after following the
1533 use-def links for each argument. */
1535 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1536 if (fn (gimple_phi_arg_def (def_stmt
, i
), def_stmt
, data
))
1545 /* Walk use-def chains starting at the SSA variable VAR. Call
1546 function FN at each reaching definition found. FN takes three
1547 arguments: VAR, its defining statement (DEF_STMT) and a generic
1548 pointer to whatever state information that FN may want to maintain
1549 (DATA). FN is able to stop the walk by returning true, otherwise
1550 in order to continue the walk, FN should return false.
1552 Note, that if DEF_STMT is a PHI node, the semantics are slightly
1553 different. The first argument to FN is no longer the original
1554 variable VAR, but the PHI argument currently being examined. If FN
1555 wants to get at VAR, it should call PHI_RESULT (PHI).
1557 If IS_DFS is true, this function will:
1559 1- walk the use-def chains for all the PHI arguments, and,
1560 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1562 If IS_DFS is false, the two steps above are done in reverse order
1563 (i.e., a breadth-first search). */
1566 walk_use_def_chains (tree var
, walk_use_def_chains_fn fn
, void *data
,
1571 gcc_assert (TREE_CODE (var
) == SSA_NAME
);
1573 def_stmt
= SSA_NAME_DEF_STMT (var
);
1575 /* We only need to recurse if the reaching definition comes from a PHI
1577 if (gimple_code (def_stmt
) != GIMPLE_PHI
)
1578 (*fn
) (var
, def_stmt
, data
);
1581 struct pointer_set_t
*visited
= pointer_set_create ();
1582 walk_use_def_chains_1 (var
, fn
, data
, visited
, is_dfs
);
1583 pointer_set_destroy (visited
);
1588 /* Emit warnings for uninitialized variables. This is done in two passes.
1590 The first pass notices real uses of SSA names with undefined values.
1591 Such uses are unconditionally uninitialized, and we can be certain that
1592 such a use is a mistake. This pass is run before most optimizations,
1593 so that we catch as many as we can.
1595 The second pass follows PHI nodes to find uses that are potentially
1596 uninitialized. In this case we can't necessarily prove that the use
1597 is really uninitialized. This pass is run after most optimizations,
1598 so that we thread as many jumps and possible, and delete as much dead
1599 code as possible, in order to reduce false positives. We also look
1600 again for plain uninitialized variables, since optimization may have
1601 changed conditionally uninitialized to unconditionally uninitialized. */
1603 /* Emit a warning for EXPR based on variable VAR at the point in the
1604 program T, an SSA_NAME, is used being uninitialized. The exact
1605 warning text is in MSGID and LOCUS may contain a location or be null.
1606 WC is the warning code. */
1609 warn_uninit (enum opt_code wc
, tree t
,
1610 tree expr
, tree var
, const char *gmsgid
, void *data
)
1612 gimple context
= (gimple
) data
;
1613 location_t location
, cfun_loc
;
1614 expanded_location xloc
, floc
;
1616 if (!ssa_undefined_value_p (t
))
1619 /* TREE_NO_WARNING either means we already warned, or the front end
1620 wishes to suppress the warning. */
1622 && (gimple_no_warning_p (context
)
1623 || (gimple_assign_single_p (context
)
1624 && TREE_NO_WARNING (gimple_assign_rhs1 (context
)))))
1625 || TREE_NO_WARNING (expr
))
1628 location
= (context
!= NULL
&& gimple_has_location (context
))
1629 ? gimple_location (context
)
1630 : DECL_SOURCE_LOCATION (var
);
1631 location
= linemap_resolve_location (line_table
, location
,
1632 LRK_SPELLING_LOCATION
,
1634 cfun_loc
= DECL_SOURCE_LOCATION (cfun
->decl
);
1635 xloc
= expand_location (location
);
1636 floc
= expand_location (cfun_loc
);
1637 if (warning_at (location
, wc
, gmsgid
, expr
))
1639 TREE_NO_WARNING (expr
) = 1;
1641 if (location
== DECL_SOURCE_LOCATION (var
))
1643 if (xloc
.file
!= floc
.file
1644 || linemap_location_before_p (line_table
,
1646 || linemap_location_before_p (line_table
,
1647 cfun
->function_end_locus
,
1649 inform (DECL_SOURCE_LOCATION (var
), "%qD was declared here", var
);
1654 warn_uninitialized_vars (bool warn_possibly_uninitialized
)
1656 gimple_stmt_iterator gsi
;
1661 bool always_executed
= dominated_by_p (CDI_POST_DOMINATORS
,
1662 single_succ (ENTRY_BLOCK_PTR
), bb
);
1663 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1665 gimple stmt
= gsi_stmt (gsi
);
1666 use_operand_p use_p
;
1667 ssa_op_iter op_iter
;
1670 if (is_gimple_debug (stmt
))
1673 /* We only do data flow with SSA_NAMEs, so that's all we
1675 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, op_iter
, SSA_OP_USE
)
1677 use
= USE_FROM_PTR (use_p
);
1678 if (always_executed
)
1679 warn_uninit (OPT_Wuninitialized
, use
,
1680 SSA_NAME_VAR (use
), SSA_NAME_VAR (use
),
1681 "%qD is used uninitialized in this function",
1683 else if (warn_possibly_uninitialized
)
1684 warn_uninit (OPT_Wuninitialized
, use
,
1685 SSA_NAME_VAR (use
), SSA_NAME_VAR (use
),
1686 "%qD may be used uninitialized in this function",
1690 /* For memory the only cheap thing we can do is see if we
1691 have a use of the default def of the virtual operand.
1692 ??? Note that at -O0 we do not have virtual operands.
1693 ??? Not so cheap would be to use the alias oracle via
1694 walk_aliased_vdefs, if we don't find any aliasing vdef
1695 warn as is-used-uninitialized, if we don't find an aliasing
1696 vdef that kills our use (stmt_kills_ref_p), warn as
1697 may-be-used-uninitialized. But this walk is quadratic and
1698 so must be limited which means we would miss warning
1700 use
= gimple_vuse (stmt
);
1702 && gimple_assign_single_p (stmt
)
1703 && !gimple_vdef (stmt
)
1704 && SSA_NAME_IS_DEFAULT_DEF (use
))
1706 tree rhs
= gimple_assign_rhs1 (stmt
);
1707 tree base
= get_base_address (rhs
);
1709 /* Do not warn if it can be initialized outside this function. */
1710 if (TREE_CODE (base
) != VAR_DECL
1711 || DECL_HARD_REGISTER (base
)
1712 || is_global_var (base
))
1715 if (always_executed
)
1716 warn_uninit (OPT_Wuninitialized
, use
, gimple_assign_rhs1 (stmt
),
1718 "%qE is used uninitialized in this function",
1720 else if (warn_possibly_uninitialized
)
1721 warn_uninit (OPT_Wuninitialized
, use
, gimple_assign_rhs1 (stmt
),
1723 "%qE may be used uninitialized in this function",
1733 execute_early_warn_uninitialized (void)
1735 /* Currently, this pass runs always but
1736 execute_late_warn_uninitialized only runs with optimization. With
1737 optimization we want to warn about possible uninitialized as late
1738 as possible, thus don't do it here. However, without
1739 optimization we need to warn here about "may be uninitialized".
1741 calculate_dominance_info (CDI_POST_DOMINATORS
);
1743 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize
);
1745 /* Post-dominator information can not be reliably updated. Free it
1748 free_dominance_info (CDI_POST_DOMINATORS
);
1753 gate_warn_uninitialized (void)
1755 return warn_uninitialized
!= 0;
1758 struct gimple_opt_pass pass_early_warn_uninitialized
=
1762 "*early_warn_uninitialized", /* name */
1763 gate_warn_uninitialized
, /* gate */
1764 execute_early_warn_uninitialized
, /* execute */
1767 0, /* static_pass_number */
1768 TV_TREE_UNINIT
, /* tv_id */
1769 PROP_ssa
, /* properties_required */
1770 0, /* properties_provided */
1771 0, /* properties_destroyed */
1772 0, /* todo_flags_start */
1773 0 /* todo_flags_finish */
1778 /* If necessary, rewrite the base of the reference tree *TP from
1779 a MEM_REF to a plain or converted symbol. */
1782 maybe_rewrite_mem_ref_base (tree
*tp
)
1786 while (handled_component_p (*tp
))
1787 tp
= &TREE_OPERAND (*tp
, 0);
1788 if (TREE_CODE (*tp
) == MEM_REF
1789 && TREE_CODE (TREE_OPERAND (*tp
, 0)) == ADDR_EXPR
1790 && (sym
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0))
1792 && !TREE_ADDRESSABLE (sym
)
1793 && symbol_marked_for_renaming (sym
))
1795 if (TREE_CODE (TREE_TYPE (sym
)) == VECTOR_TYPE
1796 && useless_type_conversion_p (TREE_TYPE (*tp
),
1797 TREE_TYPE (TREE_TYPE (sym
)))
1798 && multiple_of_p (sizetype
, TREE_OPERAND (*tp
, 1),
1799 TYPE_SIZE_UNIT (TREE_TYPE (*tp
))))
1801 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1802 TYPE_SIZE (TREE_TYPE (*tp
)),
1803 int_const_binop (MULT_EXPR
,
1804 bitsize_int (BITS_PER_UNIT
),
1805 TREE_OPERAND (*tp
, 1)));
1807 else if (TREE_CODE (TREE_TYPE (sym
)) == COMPLEX_TYPE
1808 && useless_type_conversion_p (TREE_TYPE (*tp
),
1809 TREE_TYPE (TREE_TYPE (sym
))))
1811 *tp
= build1 (integer_zerop (TREE_OPERAND (*tp
, 1))
1812 ? REALPART_EXPR
: IMAGPART_EXPR
,
1813 TREE_TYPE (*tp
), sym
);
1815 else if (integer_zerop (TREE_OPERAND (*tp
, 1)))
1817 if (!useless_type_conversion_p (TREE_TYPE (*tp
),
1819 *tp
= build1 (VIEW_CONVERT_EXPR
,
1820 TREE_TYPE (*tp
), sym
);
1827 /* For a tree REF return its base if it is the base of a MEM_REF
1828 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1831 non_rewritable_mem_ref_base (tree ref
)
1835 /* A plain decl does not need it set. */
1839 while (handled_component_p (base
))
1840 base
= TREE_OPERAND (base
, 0);
1842 /* But watch out for MEM_REFs we cannot lower to a
1843 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1844 if (TREE_CODE (base
) == MEM_REF
1845 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1847 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1848 if ((TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
1849 || TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
)
1850 && useless_type_conversion_p (TREE_TYPE (base
),
1851 TREE_TYPE (TREE_TYPE (decl
)))
1852 && double_int_fits_in_uhwi_p (mem_ref_offset (base
))
1854 (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl
))),
1855 mem_ref_offset (base
)) == 1
1856 && multiple_of_p (sizetype
, TREE_OPERAND (base
, 1),
1857 TYPE_SIZE_UNIT (TREE_TYPE (base
))))
1860 && (!integer_zerop (TREE_OPERAND (base
, 1))
1861 || (DECL_SIZE (decl
)
1862 != TYPE_SIZE (TREE_TYPE (base
)))
1863 || TREE_THIS_VOLATILE (decl
) != TREE_THIS_VOLATILE (base
)))
1870 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1871 Otherwise return true. */
1874 non_rewritable_lvalue_p (tree lhs
)
1876 /* A plain decl is always rewritable. */
1880 /* A decl that is wrapped inside a MEM-REF that covers
1881 it full is also rewritable.
1882 ??? The following could be relaxed allowing component
1883 references that do not change the access size. */
1884 if (TREE_CODE (lhs
) == MEM_REF
1885 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1886 && integer_zerop (TREE_OPERAND (lhs
, 1)))
1888 tree decl
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0);
1890 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (lhs
))
1891 && (TREE_THIS_VOLATILE (decl
) == TREE_THIS_VOLATILE (lhs
)))
1898 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1899 mark the variable VAR for conversion into SSA. Return true when updating
1900 stmts is required. */
1903 maybe_optimize_var (tree var
, bitmap addresses_taken
, bitmap not_reg_needs
)
1905 bool update_vops
= false;
1907 /* Global Variables, result decls cannot be changed. */
1908 if (is_global_var (var
)
1909 || TREE_CODE (var
) == RESULT_DECL
1910 || bitmap_bit_p (addresses_taken
, DECL_UID (var
)))
1913 /* If the variable is not in the list of referenced vars then we
1914 do not need to touch it nor can we rename it. */
1915 if (!referenced_var_lookup (cfun
, DECL_UID (var
)))
1918 if (TREE_ADDRESSABLE (var
)
1919 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1920 a non-register. Otherwise we are confused and forget to
1921 add virtual operands for it. */
1922 && (!is_gimple_reg_type (TREE_TYPE (var
))
1923 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
1924 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1925 || !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))))
1927 TREE_ADDRESSABLE (var
) = 0;
1928 if (is_gimple_reg (var
))
1929 mark_sym_for_renaming (var
);
1933 fprintf (dump_file
, "No longer having address taken: ");
1934 print_generic_expr (dump_file
, var
, 0);
1935 fprintf (dump_file
, "\n");
1939 if (!DECL_GIMPLE_REG_P (var
)
1940 && !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))
1941 && (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1942 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
1943 && !TREE_THIS_VOLATILE (var
)
1944 && (TREE_CODE (var
) != VAR_DECL
|| !DECL_HARD_REGISTER (var
)))
1946 DECL_GIMPLE_REG_P (var
) = 1;
1947 mark_sym_for_renaming (var
);
1951 fprintf (dump_file
, "Now a gimple register: ");
1952 print_generic_expr (dump_file
, var
, 0);
1953 fprintf (dump_file
, "\n");
1960 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1963 execute_update_addresses_taken (void)
1965 gimple_stmt_iterator gsi
;
1967 bitmap addresses_taken
= BITMAP_ALLOC (NULL
);
1968 bitmap not_reg_needs
= BITMAP_ALLOC (NULL
);
1969 bool update_vops
= false;
1973 timevar_push (TV_ADDRESS_TAKEN
);
1975 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1976 the function body. */
1979 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1981 gimple stmt
= gsi_stmt (gsi
);
1982 enum gimple_code code
= gimple_code (stmt
);
1985 /* Note all addresses taken by the stmt. */
1986 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1988 /* If we have a call or an assignment, see if the lhs contains
1989 a local decl that requires not to be a gimple register. */
1990 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
1992 tree lhs
= gimple_get_lhs (stmt
);
1994 && TREE_CODE (lhs
) != SSA_NAME
1995 && non_rewritable_lvalue_p (lhs
))
1997 decl
= get_base_address (lhs
);
1999 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
2003 if (gimple_assign_single_p (stmt
))
2005 tree rhs
= gimple_assign_rhs1 (stmt
);
2006 if ((decl
= non_rewritable_mem_ref_base (rhs
)))
2007 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
2010 else if (code
== GIMPLE_CALL
)
2012 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2014 tree arg
= gimple_call_arg (stmt
, i
);
2015 if ((decl
= non_rewritable_mem_ref_base (arg
)))
2016 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
2020 else if (code
== GIMPLE_ASM
)
2022 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
2024 tree link
= gimple_asm_output_op (stmt
, i
);
2025 tree lhs
= TREE_VALUE (link
);
2026 if (TREE_CODE (lhs
) != SSA_NAME
)
2028 decl
= get_base_address (lhs
);
2030 && (non_rewritable_lvalue_p (lhs
)
2031 /* We cannot move required conversions from
2032 the lhs to the rhs in asm statements, so
2033 require we do not need any. */
2034 || !useless_type_conversion_p
2035 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
2036 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
2039 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
2041 tree link
= gimple_asm_input_op (stmt
, i
);
2042 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
2043 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
2048 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2051 gimple phi
= gsi_stmt (gsi
);
2053 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2055 tree op
= PHI_ARG_DEF (phi
, i
), var
;
2056 if (TREE_CODE (op
) == ADDR_EXPR
2057 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
2059 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
2064 /* We cannot iterate over all referenced vars because that can contain
2065 unused vars from BLOCK trees, which causes code generation differences
2067 for (var
= DECL_ARGUMENTS (cfun
->decl
); var
; var
= DECL_CHAIN (var
))
2068 update_vops
|= maybe_optimize_var (var
, addresses_taken
, not_reg_needs
);
2070 FOR_EACH_VEC_ELT (tree
, cfun
->local_decls
, i
, var
)
2071 update_vops
|= maybe_optimize_var (var
, addresses_taken
, not_reg_needs
);
2073 /* Operand caches need to be recomputed for operands referencing the updated
2078 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
2080 gimple stmt
= gsi_stmt (gsi
);
2082 /* Re-write TARGET_MEM_REFs of symbols we want to
2083 rewrite into SSA form. */
2084 if (gimple_assign_single_p (stmt
))
2086 tree lhs
= gimple_assign_lhs (stmt
);
2087 tree rhs
, *rhsp
= gimple_assign_rhs1_ptr (stmt
);
2090 /* We shouldn't have any fancy wrapping of
2091 component-refs on the LHS, but look through
2092 VIEW_CONVERT_EXPRs as that is easy. */
2093 while (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
2094 lhs
= TREE_OPERAND (lhs
, 0);
2095 if (TREE_CODE (lhs
) == MEM_REF
2096 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
2097 && integer_zerop (TREE_OPERAND (lhs
, 1))
2098 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
2100 && !TREE_ADDRESSABLE (sym
)
2101 && symbol_marked_for_renaming (sym
))
2104 lhs
= gimple_assign_lhs (stmt
);
2106 /* Rewrite the RHS and make sure the resulting assignment
2107 is validly typed. */
2108 maybe_rewrite_mem_ref_base (rhsp
);
2109 rhs
= gimple_assign_rhs1 (stmt
);
2110 if (gimple_assign_lhs (stmt
) != lhs
2111 && !useless_type_conversion_p (TREE_TYPE (lhs
),
2113 rhs
= fold_build1 (VIEW_CONVERT_EXPR
,
2114 TREE_TYPE (lhs
), rhs
);
2116 if (gimple_assign_lhs (stmt
) != lhs
)
2117 gimple_assign_set_lhs (stmt
, lhs
);
2119 /* For var ={v} {CLOBBER}; where var lost
2120 TREE_ADDRESSABLE just remove the stmt. */
2122 && TREE_CLOBBER_P (rhs
)
2123 && symbol_marked_for_renaming (lhs
))
2125 unlink_stmt_vdef (stmt
);
2126 gsi_remove (&gsi
, true);
2127 release_defs (stmt
);
2131 if (gimple_assign_rhs1 (stmt
) != rhs
)
2133 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2134 gimple_assign_set_rhs_from_tree (&gsi
, rhs
);
2138 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2141 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2143 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
2144 maybe_rewrite_mem_ref_base (argp
);
2148 else if (gimple_code (stmt
) == GIMPLE_ASM
)
2151 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
2153 tree link
= gimple_asm_output_op (stmt
, i
);
2154 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
));
2156 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
2158 tree link
= gimple_asm_input_op (stmt
, i
);
2159 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
));
2163 else if (gimple_debug_bind_p (stmt
)
2164 && gimple_debug_bind_has_value_p (stmt
))
2166 tree
*valuep
= gimple_debug_bind_get_value_ptr (stmt
);
2168 maybe_rewrite_mem_ref_base (valuep
);
2169 decl
= non_rewritable_mem_ref_base (*valuep
);
2170 if (decl
&& symbol_marked_for_renaming (decl
))
2171 gimple_debug_bind_reset_value (stmt
);
2174 if (gimple_references_memory_p (stmt
)
2175 || is_gimple_debug (stmt
))
2181 /* Update SSA form here, we are called as non-pass as well. */
2182 if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA
))
2183 rewrite_into_loop_closed_ssa (NULL
, TODO_update_ssa
);
2185 update_ssa (TODO_update_ssa
);
2188 BITMAP_FREE (not_reg_needs
);
2189 BITMAP_FREE (addresses_taken
);
2190 timevar_pop (TV_ADDRESS_TAKEN
);
2193 struct gimple_opt_pass pass_update_address_taken
=
2197 "addressables", /* name */
2202 0, /* static_pass_number */
2203 TV_ADDRESS_TAKEN
, /* tv_id */
2204 PROP_ssa
, /* properties_required */
2205 0, /* properties_provided */
2206 0, /* properties_destroyed */
2207 0, /* todo_flags_start */
2208 TODO_update_address_taken
/* todo_flags_finish */