/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
32 #include "gimple-pretty-print.h"
34 #include "pointer-set.h"
37 #include "tree-inline.h"
39 #include "tree-pass.h"
40 #include "diagnostic-core.h"
/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;
/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  void **slot;
  edge_var_map_vector *head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  head = (edge_var_map_vector *) *slot;
  if (!head)
    vec_safe_reserve (head, 5);

  new_node.result = result;
  new_node.def = def;
  new_node.locus = locus;

  vec_safe_push (head, new_node);
  *slot = head;
}
/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  void **slot;
  edge_var_map_vector *head;

  if (!edge_var_maps)
    return;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return;

  head = (edge_var_map_vector *) *slot;
  vec_free (head);
  *slot = NULL;
}
/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector *head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector *) *old_slot;

  edge_var_map_vector *new_head = NULL;
  if (head)
    new_head = vec_safe_copy (head);
  else
    vec_safe_reserve (new_head, 5);
  *new_slot = new_head;
}
/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector *
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector *) *slot;
}
/* Used by redirect_edge_var_map_destroy to free all memory.  */

static bool
free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
                    void **value,
                    void *data ATTRIBUTE_UNUSED)
{
  edge_var_map_vector *head = (edge_var_map_vector *) *value;
  vec_free (head);
  return true;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gimple_stmt_iterator gsi;
  gimple phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi_stmt (gsi);
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gimple phi;
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator gsi;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
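
/* Illustrative pairing of the two helpers above (a sketch, not code from
   this file): a pass that retargets an edge typically does

     e = ssa_redirect_edge (e, new_dest);
     ...create or reuse the PHI nodes in NEW_DEST...
     flush_pending_stmts (e);

   ssa_redirect_edge queues the removed PHI arguments in EDGE_VAR_MAPS and
   flush_pending_stmts replays them as arguments on the redirected edge.  */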
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};


/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
        count_p->num_stores++;
      else
        count_p->num_loads++;
    }

  return NULL_TREE;
}


/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
                       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
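
/* Illustrative expected result: for the statement '*x_1 = foo (x_1, *x_1)'
   with PTR == x_1, the walk above yields *NUM_USES_P == 3,
   *NUM_STORES_P == 1 and *NUM_LOADS_P == 1, which satisfies the assertion
   that direct uses are at least as many as loads plus stores.  */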
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   the value the annotations expect.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((TREE_CODE (var) != VAR_DECL
       || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
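
/* For example, a user-visible PARM_DECL of scalar type is returned as-is
   and can be tracked, while a DECL_IGNORED_P compiler temporary or an
   aggregate-typed variable yields NULL_TREE and is not tracked.  */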
/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple stmt;
  gimple def_stmt = NULL;
  int usecount = 0;
  tree value = NULL_TREE;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (def_stmt);
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL_TREE;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL_TREE;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover their
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;
      else
        {
          gimple def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            DECL_MODE (vexpr) = DECL_MODE (value);
          else
            DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
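
/* A sketch of the typical caller pattern (illustrative, not verbatim from
   this file): a pass about to delete a dead definition first calls
   insert_debug_temp_for_var_def (NULL, lhs) so that debug bind stmts
   referencing LHS are rewritten to use the propagated value or a
   DEBUG_EXPR_DECL temp instead of the soon-to-be-released SSA name.  */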
/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple use_stmt;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
        bool remove_now = true;
        tree var = ssa_name (j);
        gimple stmt;
        imm_use_iterator uit;

        FOR_EACH_IMM_USE_STMT (stmt, uit, var)
          {
            ssa_op_iter dit;
            def_operand_p def_p;

            /* We can't propagate PHI nodes into debug stmts.  */
            if (gimple_code (stmt) == GIMPLE_PHI
                || is_gimple_debug (stmt))
              continue;

            /* If we find another definition to remove that uses
               the one we're looking at, defer the removal of this
               one, so that it can be propagated into debug stmts
               after the other is.  */
            FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
              {
                tree odef = DEF_FROM_PTR (def_p);

                if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                  {
                    remove_now = false;
                    break;
                  }
              }

            if (!remove_now)
              BREAK_FROM_IMM_USE_STMT (uit);
          }

        if (remove_now)
          {
            gimple def = SSA_NAME_DEF_STMT (var);
            gimple_stmt_iterator gsi = gsi_for_stmt (def);

            if (gimple_code (def) == GIMPLE_PHI)
              remove_phi_node (&gsi, true);
            else
              {
                gsi_remove (&gsi, true);
                release_defs (def);
              }

            bitmap_clear_bit (toremove, j);
          }
      }
}
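
/* Illustrative scenario for the deferral above: with a_1 = ...;
   b_2 = a_1 + 1; and both versions in TOREMOVE, b_2's definition is
   removed first, giving insert_debug_temp_for_var_def a chance to
   propagate 'a_1 + 1' into debug binds before a_1 itself is released.  */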
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do. */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index, e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((TREE_CODE (base) == VAR_DECL
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          gimple stmt;
          TREE_VISITED (name) = 0;

          verify_ssa_name (name, virtual_operand_p (name));

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              verify_def (bb, definition_block,
                          name, stmt, virtual_operand_p (name));
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      gimple phi;
      edge_iterator ei;
      gimple_stmt_iterator gsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *) stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (verify_ssa_operands (stmt))
            {
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (gimple_debug_bind_p (stmt)
              && !gimple_debug_bind_has_value_p (stmt))
            continue;

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
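
/* Note (illustrative): verify_ssa is meant for checking builds; a caller
   that may have left stale operand caches would typically invoke
   verify_ssa (true) so that any stmt still marked modified, or any use
   not dominated by its definition, is reported by the checks above.  */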
/* Return true if the DECL_UID in both trees are equal.  */

static int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

static unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree) item)->ssa_name.var->decl_minimal.uid;
}
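
/* These two callbacks let the DEFAULT_DEFS hash table created below key
   SSA names by the DECL_UID of their underlying variable, so looking up
   the default definition of a decl is a single hash probe on that UID.  */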
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_alloc_cleared_gimple_df ();
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
                                                 uid_ssaname_map_eq, NULL);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  gcc_assert (!cfun->gimple_df);
  init_tree_ssa (cfun);
  return 0;
}

/* Gate for IPCP optimization.  */

static bool
gate_init_datastructures (void)
{
  /* Do nothing for functions that were produced already in SSA form.  */
  return !(cfun->curr_properties & PROP_ssa);
}
const pass_data pass_data_init_datastructures =
{
  GIMPLE_PASS, /* type */
  "*init_datastructures", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_init_datastructures : public gimple_opt_pass
{
public:
  pass_init_datastructures (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_init_datastructures, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_init_datastructures (); }
  unsigned int execute () { return execute_init_datastructures (); }

}; // class pass_init_datastructures

gimple_opt_pass *
make_pass_init_datastructures (gcc::context *ctxt)
{
  return new pass_init_datastructures (ctxt);
}
/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (void)
{
  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (cfun))
    fini_ssa_operands ();

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  pt_solution_reset (&cfun->gimple_df->escaped);
  if (cfun->gimple_df->decls_to_pointers != NULL)
    pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
  cfun->gimple_df->decls_to_pointers = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}
/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
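
/* Illustrative example: given a nest like (int) (int) x where each
   conversion is useless under useless_type_conversion_p, the loop above
   peels them one at a time and returns the bare operand x.  */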
/* Return true if T, an SSA_NAME, has an undefined value.  */

bool
ssa_undefined_value_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return false;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return false;
  /* Hard register variables get their initial value from the ether.  */
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  return gimple_nop_p (SSA_NAME_DEF_STMT (t));
}
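
/* For example, in 'int foo (int p) { int x; return x; }' the default
   definition of x is undefined (its defining statement is a GIMPLE_NOP and
   x is a plain automatic), whereas the default definition of p is not,
   since parameters receive their value at function entry.  */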
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
    }
}
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base = ref;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && mem_ref_offset (base).fits_uhwi ()
          && tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
             .ugt (mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      if (DECL_P (decl)
          && (!integer_zerop (TREE_OPERAND (base, 1))
              || (DECL_SIZE (decl)
                  != TYPE_SIZE (TREE_TYPE (base)))
              || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
        return decl;
    }

  return NULL_TREE;
}
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* A decl that is wrapped inside a MEM-REF that covers
     it in full is also rewritable.
     ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (lhs, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
      if (DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;
    }

  return true;
}
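
/* Example (illustrative): for 'struct S s;', the lvalue
   MEM[(struct S *)&s] covering all of s is still rewritable, while a
   partial store such as MEM[(char *)&s + 1B] is not, so s has to keep
   being treated as a non-register.  */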
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Return true when updating
   stmts is required.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register.  Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }
}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          /* Note all addresses taken by the stmt.  */
          gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && non_rewritable_lvalue_p (lhs))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
                {
                  tree link = gimple_asm_output_op (stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree link = gimple_asm_input_op (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     with -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB (bb)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                     TREE_TYPE (lhs), rhs);

                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                /* For var ={v} {CLOBBER}; where var lost
                   TREE_ADDRESSABLE just remove the stmt.  */
                if (DECL_P (lhs)
                    && TREE_CLOBBER_P (rhs)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
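
/* Illustrative net effect (a sketch): given

     int x;  int *p = &x;  ...all uses of p later eliminated...

   once no address of x remains, x loses TREE_ADDRESSABLE, is recorded in
   SUITABLE_FOR_RENAMING, the remaining MEM_REFs based on &x are rewritten
   to refer to x directly, and update_ssa then puts x into SSA form.  */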
const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  false, /* has_execute */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}