1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
25 #include "stor-layout.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
32 #include "gimple-pretty-print.h"
33 #include "pointer-set.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-fold.h"
37 #include "gimple-expr.h"
41 #include "gimple-iterator.h"
42 #include "gimple-walk.h"
43 #include "gimple-ssa.h"
44 #include "tree-phinodes.h"
45 #include "ssa-iterators.h"
46 #include "stringpool.h"
47 #include "tree-ssanames.h"
48 #include "tree-ssa-loop-manip.h"
49 #include "tree-into-ssa.h"
51 #include "tree-inline.h"
53 #include "tree-pass.h"
54 #include "diagnostic-core.h"
57 #include "cfgexpand.h"
/* Pointer map of variable mappings, keyed by edge.  Lazily created by
   redirect_edge_var_map_add; destroyed by redirect_edge_var_map_destroy.  */
static struct pointer_map_t *edge_var_maps;
63 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
66 redirect_edge_var_map_add (edge e
, tree result
, tree def
, source_location locus
)
69 edge_var_map_vector
*head
;
70 edge_var_map new_node
;
72 if (edge_var_maps
== NULL
)
73 edge_var_maps
= pointer_map_create ();
75 slot
= pointer_map_insert (edge_var_maps
, e
);
76 head
= (edge_var_map_vector
*) *slot
;
78 vec_safe_reserve (head
, 5);
80 new_node
.result
= result
;
81 new_node
.locus
= locus
;
83 vec_safe_push (head
, new_node
);
88 /* Clear the var mappings in edge E. */
91 redirect_edge_var_map_clear (edge e
)
94 edge_var_map_vector
*head
;
99 slot
= pointer_map_contains (edge_var_maps
, e
);
103 head
= (edge_var_map_vector
*) *slot
;
110 /* Duplicate the redirected var mappings in OLDE in NEWE.
112 Since we can't remove a mapping, let's just duplicate it. This assumes a
113 pointer_map can have multiple edges mapping to the same var_map (many to
114 one mapping), since we don't remove the previous mappings. */
117 redirect_edge_var_map_dup (edge newe
, edge olde
)
119 void **new_slot
, **old_slot
;
120 edge_var_map_vector
*head
;
125 new_slot
= pointer_map_insert (edge_var_maps
, newe
);
126 old_slot
= pointer_map_contains (edge_var_maps
, olde
);
129 head
= (edge_var_map_vector
*) *old_slot
;
131 edge_var_map_vector
*new_head
= NULL
;
133 new_head
= vec_safe_copy (head
);
135 vec_safe_reserve (new_head
, 5);
136 *new_slot
= new_head
;
140 /* Return the variable mappings for a given edge. If there is none, return
143 edge_var_map_vector
*
144 redirect_edge_var_map_vector (edge e
)
148 /* Hey, what kind of idiot would... you'd be surprised. */
152 slot
= pointer_map_contains (edge_var_maps
, e
);
156 return (edge_var_map_vector
*) *slot
;
159 /* Used by redirect_edge_var_map_destroy to free all memory. */
162 free_var_map_entry (const void *key ATTRIBUTE_UNUSED
,
164 void *data ATTRIBUTE_UNUSED
)
166 edge_var_map_vector
*head
= (edge_var_map_vector
*) *value
;
171 /* Clear the edge variable mappings. */
174 redirect_edge_var_map_destroy (void)
178 pointer_map_traverse (edge_var_maps
, free_var_map_entry
, NULL
);
179 pointer_map_destroy (edge_var_maps
);
180 edge_var_maps
= NULL
;
185 /* Remove the corresponding arguments from the PHI nodes in E's
186 destination block and redirect it to DEST. Return redirected edge.
187 The list of removed arguments is stored in a vector accessed
188 through edge_var_maps. */
191 ssa_redirect_edge (edge e
, basic_block dest
)
193 gimple_stmt_iterator gsi
;
196 redirect_edge_var_map_clear (e
);
198 /* Remove the appropriate PHI arguments in E's destination block. */
199 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
202 source_location locus
;
204 phi
= gsi_stmt (gsi
);
205 def
= gimple_phi_arg_def (phi
, e
->dest_idx
);
206 locus
= gimple_phi_arg_location (phi
, e
->dest_idx
);
208 if (def
== NULL_TREE
)
211 redirect_edge_var_map_add (e
, gimple_phi_result (phi
), def
, locus
);
214 e
= redirect_edge_succ_nodup (e
, dest
);
220 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
224 flush_pending_stmts (edge e
)
227 edge_var_map_vector
*v
;
230 gimple_stmt_iterator gsi
;
232 v
= redirect_edge_var_map_vector (e
);
236 for (gsi
= gsi_start_phis (e
->dest
), i
= 0;
237 !gsi_end_p (gsi
) && v
->iterate (i
, &vm
);
238 gsi_next (&gsi
), i
++)
242 phi
= gsi_stmt (gsi
);
243 def
= redirect_edge_var_map_def (vm
);
244 add_phi_arg (phi
, def
, e
, redirect_edge_var_map_location (vm
));
247 redirect_edge_var_map_clear (e
);
250 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
251 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
252 expression with a different value.
254 This will update any annotations (say debug bind stmts) referring
255 to the original LHS, so that they use the RHS instead. This is
256 done even if NLHS and LHS are the same, for it is understood that
257 the RHS will be modified afterwards, and NLHS will not be assigned
260 Adjusting any non-annotation uses of the LHS, if needed, is a
261 responsibility of the caller.
263 The effect of this call should be pretty much the same as that of
264 inserting a copy of STMT before STMT, and then removing the
265 original stmt, at which time gsi_remove() would have update
266 annotations, but using this function saves all the inserting,
267 copying and removing. */
270 gimple_replace_ssa_lhs (gimple stmt
, tree nlhs
)
272 if (MAY_HAVE_DEBUG_STMTS
)
274 tree lhs
= gimple_get_lhs (stmt
);
276 gcc_assert (SSA_NAME_DEF_STMT (lhs
) == stmt
);
278 insert_debug_temp_for_var_def (NULL
, lhs
);
281 gimple_set_lhs (stmt
, nlhs
);
285 /* Given a tree for an expression for which we might want to emit
286 locations or values in debug information (generally a variable, but
287 we might deal with other kinds of trees in the future), return the
288 tree that should be used as the variable of a DEBUG_BIND STMT or
289 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
292 target_for_debug_bind (tree var
)
294 if (!MAY_HAVE_DEBUG_STMTS
)
297 if (TREE_CODE (var
) == SSA_NAME
)
299 var
= SSA_NAME_VAR (var
);
300 if (var
== NULL_TREE
)
304 if ((TREE_CODE (var
) != VAR_DECL
305 || VAR_DECL_IS_VIRTUAL_OPERAND (var
))
306 && TREE_CODE (var
) != PARM_DECL
)
309 if (DECL_HAS_VALUE_EXPR_P (var
))
310 return target_for_debug_bind (DECL_VALUE_EXPR (var
));
312 if (DECL_IGNORED_P (var
))
315 /* var-tracking only tracks registers. */
316 if (!is_gimple_reg_type (TREE_TYPE (var
)))
322 /* Called via walk_tree, look for SSA_NAMEs that have already been
326 find_released_ssa_name (tree
*tp
, int *walk_subtrees
, void *data_
)
328 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data_
;
330 if (wi
&& wi
->is_lhs
)
333 if (TREE_CODE (*tp
) == SSA_NAME
)
335 if (SSA_NAME_IN_FREE_LIST (*tp
))
340 else if (IS_TYPE_OR_DECL_P (*tp
))
346 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
347 by other DEBUG stmts, and replace uses of the DEF with the
348 newly-created debug temp. */
351 insert_debug_temp_for_var_def (gimple_stmt_iterator
*gsi
, tree var
)
353 imm_use_iterator imm_iter
;
356 gimple def_stmt
= NULL
;
360 if (!MAY_HAVE_DEBUG_STMTS
)
363 /* If this name has already been registered for replacement, do nothing
364 as anything that uses this name isn't in SSA form. */
365 if (name_registered_for_update_p (var
))
368 /* Check whether there are debug stmts that reference this variable and,
369 if there are, decide whether we should use a debug temp. */
370 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, var
)
372 stmt
= USE_STMT (use_p
);
374 if (!gimple_debug_bind_p (stmt
))
380 if (gimple_debug_bind_get_value (stmt
) != var
)
382 /* Count this as an additional use, so as to make sure we
383 use a temp unless VAR's definition has a SINGLE_RHS that
394 def_stmt
= gsi_stmt (*gsi
);
396 def_stmt
= SSA_NAME_DEF_STMT (var
);
398 /* If we didn't get an insertion point, and the stmt has already
399 been removed, we won't be able to insert the debug bind stmt, so
400 we'll have to drop debug information. */
401 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
403 value
= degenerate_phi_result (def_stmt
);
404 if (value
&& walk_tree (&value
, find_released_ssa_name
, NULL
, NULL
))
406 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
408 else if (value
== error_mark_node
)
411 else if (is_gimple_assign (def_stmt
))
413 bool no_value
= false;
415 if (!dom_info_available_p (CDI_DOMINATORS
))
417 struct walk_stmt_info wi
;
419 memset (&wi
, 0, sizeof (wi
));
421 /* When removing blocks without following reverse dominance
422 order, we may sometimes encounter SSA_NAMEs that have
423 already been released, referenced in other SSA_DEFs that
424 we're about to release. Consider:
433 If we deleted BB X first, propagating the value of w_2
434 won't do us any good. It's too late to recover their
435 original definition of v_1: when it was deleted, it was
436 only referenced in other DEFs, it couldn't possibly know
437 it should have been retained, and propagating every
438 single DEF just in case it might have to be propagated
439 into a DEBUG STMT would probably be too wasteful.
441 When dominator information is not readily available, we
442 check for and accept some loss of debug information. But
443 if it is available, there's no excuse for us to remove
444 blocks in the wrong order, so we don't even check for
445 dead SSA NAMEs. SSA verification shall catch any
447 if ((!gsi
&& !gimple_bb (def_stmt
))
448 || walk_gimple_op (def_stmt
, find_released_ssa_name
, &wi
))
453 value
= gimple_assign_rhs_to_tree (def_stmt
);
458 /* If there's a single use of VAR, and VAR is the entire debug
459 expression (usecount would have been incremented again
460 otherwise), and the definition involves only constants and
461 SSA names, then we can propagate VALUE into this single use,
464 We can also avoid using a temp if VALUE can be shared and
465 propagated into all uses, without generating expressions that
466 wouldn't be valid gimple RHSs.
468 Other cases that would require unsharing or non-gimple RHSs
469 are deferred to a debug temp, although we could avoid temps
470 at the expense of duplication of expressions. */
472 if (CONSTANT_CLASS_P (value
)
473 || gimple_code (def_stmt
) == GIMPLE_PHI
475 && (!gimple_assign_single_p (def_stmt
)
476 || is_gimple_min_invariant (value
)))
477 || is_gimple_reg (value
))
482 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
484 def_temp
= gimple_build_debug_bind (vexpr
,
485 unshare_expr (value
),
488 DECL_ARTIFICIAL (vexpr
) = 1;
489 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
491 DECL_MODE (vexpr
) = DECL_MODE (value
);
493 DECL_MODE (vexpr
) = TYPE_MODE (TREE_TYPE (value
));
496 gsi_insert_before (gsi
, def_temp
, GSI_SAME_STMT
);
499 gimple_stmt_iterator ngsi
= gsi_for_stmt (def_stmt
);
500 gsi_insert_before (&ngsi
, def_temp
, GSI_SAME_STMT
);
507 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, var
)
509 if (!gimple_debug_bind_p (stmt
))
514 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
515 /* unshare_expr is not needed here. vexpr is either a
516 SINGLE_RHS, that can be safely shared, some other RHS
517 that was unshared when we found it had a single debug
518 use, or a DEBUG_EXPR_DECL, that can be safely
520 SET_USE (use_p
, unshare_expr (value
));
521 /* If we didn't replace uses with a debug decl fold the
522 resulting expression. Otherwise we end up with invalid IL. */
523 if (TREE_CODE (value
) != DEBUG_EXPR_DECL
)
525 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
526 fold_stmt_inplace (&gsi
);
530 gimple_debug_bind_reset_value (stmt
);
537 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
538 other DEBUG stmts, and replace uses of the DEF with the
539 newly-created debug temp. */
542 insert_debug_temps_for_defs (gimple_stmt_iterator
*gsi
)
548 if (!MAY_HAVE_DEBUG_STMTS
)
551 stmt
= gsi_stmt (*gsi
);
553 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
555 tree var
= DEF_FROM_PTR (def_p
);
557 if (TREE_CODE (var
) != SSA_NAME
)
560 insert_debug_temp_for_var_def (gsi
, var
);
564 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
567 reset_debug_uses (gimple stmt
)
571 imm_use_iterator imm_iter
;
574 if (!MAY_HAVE_DEBUG_STMTS
)
577 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
579 tree var
= DEF_FROM_PTR (def_p
);
581 if (TREE_CODE (var
) != SSA_NAME
)
584 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, var
)
586 if (!gimple_debug_bind_p (use_stmt
))
589 gimple_debug_bind_reset_value (use_stmt
);
590 update_stmt (use_stmt
);
595 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
596 dominated stmts before their dominators, so that release_ssa_defs
597 stands a chance of propagating DEFs into debug bind stmts. */
600 release_defs_bitset (bitmap toremove
)
605 /* Performing a topological sort is probably overkill, this will
606 most likely run in slightly superlinear time, rather than the
607 pathological quadratic worst case. */
608 while (!bitmap_empty_p (toremove
))
609 EXECUTE_IF_SET_IN_BITMAP (toremove
, 0, j
, bi
)
611 bool remove_now
= true;
612 tree var
= ssa_name (j
);
614 imm_use_iterator uit
;
616 FOR_EACH_IMM_USE_STMT (stmt
, uit
, var
)
621 /* We can't propagate PHI nodes into debug stmts. */
622 if (gimple_code (stmt
) == GIMPLE_PHI
623 || is_gimple_debug (stmt
))
626 /* If we find another definition to remove that uses
627 the one we're looking at, defer the removal of this
628 one, so that it can be propagated into debug stmts
629 after the other is. */
630 FOR_EACH_SSA_DEF_OPERAND (def_p
, stmt
, dit
, SSA_OP_DEF
)
632 tree odef
= DEF_FROM_PTR (def_p
);
634 if (bitmap_bit_p (toremove
, SSA_NAME_VERSION (odef
)))
642 BREAK_FROM_IMM_USE_STMT (uit
);
647 gimple def
= SSA_NAME_DEF_STMT (var
);
648 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
650 if (gimple_code (def
) == GIMPLE_PHI
)
651 remove_phi_node (&gsi
, true);
654 gsi_remove (&gsi
, true);
658 bitmap_clear_bit (toremove
, j
);
663 /* Return true if SSA_NAME is malformed and mark it visited.
665 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
669 verify_ssa_name (tree ssa_name
, bool is_virtual
)
671 if (TREE_CODE (ssa_name
) != SSA_NAME
)
673 error ("expected an SSA_NAME object");
677 if (SSA_NAME_IN_FREE_LIST (ssa_name
))
679 error ("found an SSA_NAME that had been released into the free pool");
683 if (SSA_NAME_VAR (ssa_name
) != NULL_TREE
684 && TREE_TYPE (ssa_name
) != TREE_TYPE (SSA_NAME_VAR (ssa_name
)))
686 error ("type mismatch between an SSA_NAME and its symbol");
690 if (is_virtual
&& !virtual_operand_p (ssa_name
))
692 error ("found a virtual definition for a GIMPLE register");
696 if (is_virtual
&& SSA_NAME_VAR (ssa_name
) != gimple_vop (cfun
))
698 error ("virtual SSA name for non-VOP decl");
702 if (!is_virtual
&& virtual_operand_p (ssa_name
))
704 error ("found a real definition for a non-register");
708 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name
)
709 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
)))
711 error ("found a default name with a non-empty defining statement");
719 /* Return true if the definition of SSA_NAME at block BB is malformed.
721 STMT is the statement where SSA_NAME is created.
723 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
724 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
725 it means that the block in that array slot contains the
726 definition of SSA_NAME.
728 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
731 verify_def (basic_block bb
, basic_block
*definition_block
, tree ssa_name
,
732 gimple stmt
, bool is_virtual
)
734 if (verify_ssa_name (ssa_name
, is_virtual
))
737 if (SSA_NAME_VAR (ssa_name
)
738 && TREE_CODE (SSA_NAME_VAR (ssa_name
)) == RESULT_DECL
739 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name
)))
741 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
745 if (definition_block
[SSA_NAME_VERSION (ssa_name
)])
747 error ("SSA_NAME created in two different blocks %i and %i",
748 definition_block
[SSA_NAME_VERSION (ssa_name
)]->index
, bb
->index
);
752 definition_block
[SSA_NAME_VERSION (ssa_name
)] = bb
;
754 if (SSA_NAME_DEF_STMT (ssa_name
) != stmt
)
756 error ("SSA_NAME_DEF_STMT is wrong");
757 fprintf (stderr
, "Expected definition statement:\n");
758 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (ssa_name
), 4, TDF_VOPS
);
759 fprintf (stderr
, "\nActual definition statement:\n");
760 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
767 fprintf (stderr
, "while verifying SSA_NAME ");
768 print_generic_expr (stderr
, ssa_name
, 0);
769 fprintf (stderr
, " in statement\n");
770 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
776 /* Return true if the use of SSA_NAME at statement STMT in block BB is
779 DEF_BB is the block where SSA_NAME was found to be created.
781 IDOM contains immediate dominator information for the flowgraph.
783 CHECK_ABNORMAL is true if the caller wants to check whether this use
784 is flowing through an abnormal edge (only used when checking PHI
787 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
788 that are defined before STMT in basic block BB. */
791 verify_use (basic_block bb
, basic_block def_bb
, use_operand_p use_p
,
792 gimple stmt
, bool check_abnormal
, bitmap names_defined_in_bb
)
795 tree ssa_name
= USE_FROM_PTR (use_p
);
797 if (!TREE_VISITED (ssa_name
))
798 if (verify_imm_links (stderr
, ssa_name
))
801 TREE_VISITED (ssa_name
) = 1;
803 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
))
804 && SSA_NAME_IS_DEFAULT_DEF (ssa_name
))
805 ; /* Default definitions have empty statements. Nothing to do. */
808 error ("missing definition");
811 else if (bb
!= def_bb
812 && !dominated_by_p (CDI_DOMINATORS
, bb
, def_bb
))
814 error ("definition in block %i does not dominate use in block %i",
815 def_bb
->index
, bb
->index
);
818 else if (bb
== def_bb
819 && names_defined_in_bb
!= NULL
820 && !bitmap_bit_p (names_defined_in_bb
, SSA_NAME_VERSION (ssa_name
)))
822 error ("definition in block %i follows the use", def_bb
->index
);
827 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name
))
829 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
833 /* Make sure the use is in an appropriate list by checking the previous
834 element to make sure it's the same. */
835 if (use_p
->prev
== NULL
)
837 error ("no immediate_use list");
843 if (use_p
->prev
->use
== NULL
)
844 listvar
= use_p
->prev
->loc
.ssa_name
;
846 listvar
= USE_FROM_PTR (use_p
->prev
);
847 if (listvar
!= ssa_name
)
849 error ("wrong immediate use list");
856 fprintf (stderr
, "for SSA_NAME: ");
857 print_generic_expr (stderr
, ssa_name
, TDF_VOPS
);
858 fprintf (stderr
, " in statement:\n");
859 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
866 /* Return true if any of the arguments for PHI node PHI at block BB is
869 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
870 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
871 it means that the block in that array slot contains the
872 definition of SSA_NAME. */
875 verify_phi_args (gimple phi
, basic_block bb
, basic_block
*definition_block
)
879 size_t i
, phi_num_args
= gimple_phi_num_args (phi
);
881 if (EDGE_COUNT (bb
->preds
) != phi_num_args
)
883 error ("incoming edge count does not match number of PHI arguments");
888 for (i
= 0; i
< phi_num_args
; i
++)
890 use_operand_p op_p
= gimple_phi_arg_imm_use_ptr (phi
, i
);
891 tree op
= USE_FROM_PTR (op_p
);
893 e
= EDGE_PRED (bb
, i
);
897 error ("PHI argument is missing for edge %d->%d",
904 if (TREE_CODE (op
) != SSA_NAME
&& !is_gimple_min_invariant (op
))
906 error ("PHI argument is not SSA_NAME, or invariant");
910 if (TREE_CODE (op
) == SSA_NAME
)
912 err
= verify_ssa_name (op
, virtual_operand_p (gimple_phi_result (phi
)));
913 err
|= verify_use (e
->src
, definition_block
[SSA_NAME_VERSION (op
)],
914 op_p
, phi
, e
->flags
& EDGE_ABNORMAL
, NULL
);
917 if (TREE_CODE (op
) == ADDR_EXPR
)
919 tree base
= TREE_OPERAND (op
, 0);
920 while (handled_component_p (base
))
921 base
= TREE_OPERAND (base
, 0);
922 if ((TREE_CODE (base
) == VAR_DECL
923 || TREE_CODE (base
) == PARM_DECL
924 || TREE_CODE (base
) == RESULT_DECL
)
925 && !TREE_ADDRESSABLE (base
))
927 error ("address taken, but ADDRESSABLE bit not set");
934 error ("wrong edge %d->%d for PHI argument",
935 e
->src
->index
, e
->dest
->index
);
941 fprintf (stderr
, "PHI argument\n");
942 print_generic_stmt (stderr
, op
, TDF_VOPS
);
950 fprintf (stderr
, "for PHI node\n");
951 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
|TDF_MEMSYMS
);
959 /* Verify common invariants in the SSA web.
960 TODO: verify the variable annotations. */
963 verify_ssa (bool check_modified_stmt
)
967 basic_block
*definition_block
= XCNEWVEC (basic_block
, num_ssa_names
);
970 enum dom_state orig_dom_state
= dom_info_state (CDI_DOMINATORS
);
971 bitmap names_defined_in_bb
= BITMAP_ALLOC (NULL
);
973 gcc_assert (!need_ssa_update_p (cfun
));
975 timevar_push (TV_TREE_SSA_VERIFY
);
977 /* Keep track of SSA names present in the IL. */
978 for (i
= 1; i
< num_ssa_names
; i
++)
980 tree name
= ssa_name (i
);
984 TREE_VISITED (name
) = 0;
986 verify_ssa_name (name
, virtual_operand_p (name
));
988 stmt
= SSA_NAME_DEF_STMT (name
);
989 if (!gimple_nop_p (stmt
))
991 basic_block bb
= gimple_bb (stmt
);
992 if (verify_def (bb
, definition_block
,
993 name
, stmt
, virtual_operand_p (name
)))
999 calculate_dominance_info (CDI_DOMINATORS
);
1001 /* Now verify all the uses and make sure they agree with the definitions
1002 found in the previous pass. */
1003 FOR_EACH_BB_FN (bb
, cfun
)
1008 gimple_stmt_iterator gsi
;
1010 /* Make sure that all edges have a clear 'aux' field. */
1011 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1015 error ("AUX pointer initialized for edge %d->%d", e
->src
->index
,
1021 /* Verify the arguments for every PHI node in the block. */
1022 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1024 phi
= gsi_stmt (gsi
);
1025 if (verify_phi_args (phi
, bb
, definition_block
))
1028 bitmap_set_bit (names_defined_in_bb
,
1029 SSA_NAME_VERSION (gimple_phi_result (phi
)));
1032 /* Now verify all the uses and vuses in every statement of the block. */
1033 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1035 gimple stmt
= gsi_stmt (gsi
);
1036 use_operand_p use_p
;
1038 if (check_modified_stmt
&& gimple_modified_p (stmt
))
1040 error ("stmt (%p) marked modified after optimization pass: ",
1042 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1046 if (verify_ssa_operands (cfun
, stmt
))
1048 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1052 if (gimple_debug_bind_p (stmt
)
1053 && !gimple_debug_bind_has_value_p (stmt
))
1056 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1058 op
= USE_FROM_PTR (use_p
);
1059 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1060 use_p
, stmt
, false, names_defined_in_bb
))
1064 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1066 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1068 error ("SSA_NAME_DEF_STMT is wrong");
1069 fprintf (stderr
, "Expected definition statement:\n");
1070 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1071 fprintf (stderr
, "\nActual definition statement:\n");
1072 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1076 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1080 bitmap_clear (names_defined_in_bb
);
1083 free (definition_block
);
1085 /* Restore the dominance information to its prior known state, so
1086 that we do not perturb the compiler's subsequent behavior. */
1087 if (orig_dom_state
== DOM_NONE
)
1088 free_dominance_info (CDI_DOMINATORS
);
1090 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1092 BITMAP_FREE (names_defined_in_bb
);
1093 timevar_pop (TV_TREE_SSA_VERIFY
);
1097 internal_error ("verify_ssa failed");
1100 /* Return true if the DECL_UID in both trees are equal. */
1103 uid_ssaname_map_eq (const void *va
, const void *vb
)
1105 const_tree a
= (const_tree
) va
;
1106 const_tree b
= (const_tree
) vb
;
1107 return (a
->ssa_name
.var
->decl_minimal
.uid
== b
->ssa_name
.var
->decl_minimal
.uid
);
1110 /* Hash a tree in a uid_decl_map. */
1113 uid_ssaname_map_hash (const void *item
)
1115 return ((const_tree
)item
)->ssa_name
.var
->decl_minimal
.uid
;
1119 /* Initialize global DFA and SSA structures. */
1122 init_tree_ssa (struct function
*fn
)
1124 fn
->gimple_df
= ggc_alloc_cleared_gimple_df ();
1125 fn
->gimple_df
->default_defs
= htab_create_ggc (20, uid_ssaname_map_hash
,
1126 uid_ssaname_map_eq
, NULL
);
1127 pt_solution_reset (&fn
->gimple_df
->escaped
);
1128 init_ssanames (fn
, 0);
1131 /* Do the actions required to initialize internal data structures used
1132 in tree-ssa optimization passes. */
1135 execute_init_datastructures (void)
1137 /* Allocate hash tables, arrays and other structures. */
1138 gcc_assert (!cfun
->gimple_df
);
1139 init_tree_ssa (cfun
);
1143 /* Gate for IPCP optimization. */
1146 gate_init_datastructures (void)
1148 /* Do nothing for funcions that was produced already in SSA form. */
1149 return !(cfun
->curr_properties
& PROP_ssa
);
1154 const pass_data pass_data_init_datastructures
=
1156 GIMPLE_PASS
, /* type */
1157 "*init_datastructures", /* name */
1158 OPTGROUP_NONE
, /* optinfo_flags */
1159 true, /* has_gate */
1160 true, /* has_execute */
1161 TV_NONE
, /* tv_id */
1162 PROP_cfg
, /* properties_required */
1163 0, /* properties_provided */
1164 0, /* properties_destroyed */
1165 0, /* todo_flags_start */
1166 0, /* todo_flags_finish */
1169 class pass_init_datastructures
: public gimple_opt_pass
1172 pass_init_datastructures (gcc::context
*ctxt
)
1173 : gimple_opt_pass (pass_data_init_datastructures
, ctxt
)
1176 /* opt_pass methods: */
1177 bool gate () { return gate_init_datastructures (); }
1178 unsigned int execute () { return execute_init_datastructures (); }
1180 }; // class pass_init_datastructures
1185 make_pass_init_datastructures (gcc::context
*ctxt
)
1187 return new pass_init_datastructures (ctxt
);
1190 /* Deallocate memory associated with SSA data structures for FNDECL. */
1193 delete_tree_ssa (void)
1197 /* We no longer maintain the SSA operand cache at this point. */
1198 if (ssa_operands_active (cfun
))
1199 fini_ssa_operands (cfun
);
1201 htab_delete (cfun
->gimple_df
->default_defs
);
1202 cfun
->gimple_df
->default_defs
= NULL
;
1203 pt_solution_reset (&cfun
->gimple_df
->escaped
);
1204 if (cfun
->gimple_df
->decls_to_pointers
!= NULL
)
1205 pointer_map_destroy (cfun
->gimple_df
->decls_to_pointers
);
1206 cfun
->gimple_df
->decls_to_pointers
= NULL
;
1207 cfun
->gimple_df
->modified_noreturn_calls
= NULL
;
1208 cfun
->gimple_df
= NULL
;
1210 /* We no longer need the edge variable maps. */
1211 redirect_edge_var_map_destroy ();
1214 /* Return true if EXPR is a useless type conversion, otherwise return
1218 tree_ssa_useless_type_conversion (tree expr
)
1220 /* If we have an assignment that merely uses a NOP_EXPR to change
1221 the top of the RHS to the type of the LHS and the type conversion
1222 is "safe", then strip away the type conversion so that we can
1223 enter LHS = RHS into the const_and_copies table. */
1224 if (CONVERT_EXPR_P (expr
)
1225 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
1226 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
1227 return useless_type_conversion_p
1229 TREE_TYPE (TREE_OPERAND (expr
, 0)));
1234 /* Strip conversions from EXP according to
1235 tree_ssa_useless_type_conversion and return the resulting
1239 tree_ssa_strip_useless_type_conversions (tree exp
)
1241 while (tree_ssa_useless_type_conversion (exp
))
1242 exp
= TREE_OPERAND (exp
, 0);
1247 /* Return true if T, an SSA_NAME, has an undefined value. */
1250 ssa_undefined_value_p (tree t
)
1252 tree var
= SSA_NAME_VAR (t
);
1256 /* Parameters get their initial value from the function entry. */
1257 else if (TREE_CODE (var
) == PARM_DECL
)
1259 /* When returning by reference the return address is actually a hidden
1261 else if (TREE_CODE (var
) == RESULT_DECL
&& DECL_BY_REFERENCE (var
))
1263 /* Hard register variables get their initial value from the ether. */
1264 else if (TREE_CODE (var
) == VAR_DECL
&& DECL_HARD_REGISTER (var
))
1267 /* The value is undefined iff its definition statement is empty. */
1268 return gimple_nop_p (SSA_NAME_DEF_STMT (t
));
1272 /* If necessary, rewrite the base of the reference tree *TP from
1273 a MEM_REF to a plain or converted symbol. */
1276 maybe_rewrite_mem_ref_base (tree
*tp
, bitmap suitable_for_renaming
)
1280 while (handled_component_p (*tp
))
1281 tp
= &TREE_OPERAND (*tp
, 0);
1282 if (TREE_CODE (*tp
) == MEM_REF
1283 && TREE_CODE (TREE_OPERAND (*tp
, 0)) == ADDR_EXPR
1284 && (sym
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0))
1286 && !TREE_ADDRESSABLE (sym
)
1287 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1289 if (TREE_CODE (TREE_TYPE (sym
)) == VECTOR_TYPE
1290 && useless_type_conversion_p (TREE_TYPE (*tp
),
1291 TREE_TYPE (TREE_TYPE (sym
)))
1292 && multiple_of_p (sizetype
, TREE_OPERAND (*tp
, 1),
1293 TYPE_SIZE_UNIT (TREE_TYPE (*tp
))))
1295 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1296 TYPE_SIZE (TREE_TYPE (*tp
)),
1297 int_const_binop (MULT_EXPR
,
1298 bitsize_int (BITS_PER_UNIT
),
1299 TREE_OPERAND (*tp
, 1)));
1301 else if (TREE_CODE (TREE_TYPE (sym
)) == COMPLEX_TYPE
1302 && useless_type_conversion_p (TREE_TYPE (*tp
),
1303 TREE_TYPE (TREE_TYPE (sym
))))
1305 *tp
= build1 (integer_zerop (TREE_OPERAND (*tp
, 1))
1306 ? REALPART_EXPR
: IMAGPART_EXPR
,
1307 TREE_TYPE (*tp
), sym
);
1309 else if (integer_zerop (TREE_OPERAND (*tp
, 1)))
1311 if (!useless_type_conversion_p (TREE_TYPE (*tp
),
1313 *tp
= build1 (VIEW_CONVERT_EXPR
,
1314 TREE_TYPE (*tp
), sym
);
1321 /* For a tree REF return its base if it is the base of a MEM_REF
1322 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1325 non_rewritable_mem_ref_base (tree ref
)
1329 /* A plain decl does not need it set. */
1333 while (handled_component_p (base
))
1334 base
= TREE_OPERAND (base
, 0);
1336 /* But watch out for MEM_REFs we cannot lower to a
1337 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1338 if (TREE_CODE (base
) == MEM_REF
1339 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1341 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1342 if ((TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
1343 || TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
)
1344 && useless_type_conversion_p (TREE_TYPE (base
),
1345 TREE_TYPE (TREE_TYPE (decl
)))
1346 && mem_ref_offset (base
).fits_uhwi ()
1347 && tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl
)))
1348 .ugt (mem_ref_offset (base
))
1349 && multiple_of_p (sizetype
, TREE_OPERAND (base
, 1),
1350 TYPE_SIZE_UNIT (TREE_TYPE (base
))))
1353 && (!integer_zerop (TREE_OPERAND (base
, 1))
1354 || (DECL_SIZE (decl
)
1355 != TYPE_SIZE (TREE_TYPE (base
)))
1356 || TREE_THIS_VOLATILE (decl
) != TREE_THIS_VOLATILE (base
)))
1363 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1364 Otherwise return false. */
/* NOTE(review): extraction-garbled chunk; some original lines (return
   type, braces, part of the final condition) are missing -- compare
   with upstream gcc/tree-ssa.c.  Also fixed the original comment typo
   "Otherwise return true" above: per the first sentence, the false
   case is the rewritable one.  */
1367 non_rewritable_lvalue_p (tree lhs
)
1369 /* A plain decl is always rewritable. */
1373 /* A decl that is wrapped inside a MEM-REF that covers
1374 it full is also rewritable.
1375 ??? The following could be relaxed allowing component
1376 references that do not change the access size. */
1377 if (TREE_CODE (lhs
) == MEM_REF
1378 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1379 && integer_zerop (TREE_OPERAND (lhs
, 1)))
/* DECL is the declaration whose address the MEM_REF dereferences at
   offset zero.  */
1381 tree decl
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0);
/* Rewritable only when the access covers the whole decl and the
   volatility of access and decl agree (first conjunct elided by
   the extraction).  */
1383 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (lhs
))
1384 && (TREE_THIS_VOLATILE (decl
) == TREE_THIS_VOLATILE (lhs
)))
1391 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1392 mark the variable VAR for conversion into SSA. Return true when updating
1393 stmts is required. */
/* NOTE(review): extraction-garbled chunk; the return type, braces and
   some statements are missing from this view -- compare against
   upstream gcc/tree-ssa.c.
   VAR: candidate local variable.
   ADDRESSES_TAKEN: bitmap of DECL_UIDs whose address is taken.
   NOT_REG_NEEDS: bitmap of DECL_UIDs that must stay non-registers.
   SUITABLE_FOR_RENAMING: out-bitmap of DECL_UIDs to rename into SSA.  */
1396 maybe_optimize_var (tree var
, bitmap addresses_taken
, bitmap not_reg_needs
,
1397 bitmap suitable_for_renaming
)
1399 /* Global Variables, result decls cannot be changed. */
1400 if (is_global_var (var
)
1401 || TREE_CODE (var
) == RESULT_DECL
1402 || bitmap_bit_p (addresses_taken
, DECL_UID (var
)))
1405 if (TREE_ADDRESSABLE (var
)
1406 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1407 a non-register. Otherwise we are confused and forget to
1408 add virtual operands for it. */
1409 && (!is_gimple_reg_type (TREE_TYPE (var
))
1410 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
1411 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1412 || !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))))
/* The address is no longer taken: drop addressability and, if VAR
   now qualifies as a gimple register, queue it for SSA renaming.  */
1414 TREE_ADDRESSABLE (var
) = 0;
1415 if (is_gimple_reg (var
))
1416 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
/* Dump-file diagnostics (the guarding `if (dump_file)' appears to
   have been dropped by the extraction).  */
1419 fprintf (dump_file
, "No longer having address taken: ");
1420 print_generic_expr (dump_file
, var
, 0);
1421 fprintf (dump_file
, "\n");
/* Promote complex/vector decls to DECL_GIMPLE_REG_P when nothing
   requires them to stay in memory: not volatile, not a hard
   register, and no partial (non-register) uses recorded.  */
1425 if (!DECL_GIMPLE_REG_P (var
)
1426 && !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))
1427 && (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1428 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
1429 && !TREE_THIS_VOLATILE (var
)
1430 && (TREE_CODE (var
) != VAR_DECL
|| !DECL_HARD_REGISTER (var
)))
1432 DECL_GIMPLE_REG_P (var
) = 1;
1433 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
/* Dump-file diagnostics for the promotion case.  */
1436 fprintf (dump_file
, "Now a gimple register: ");
1437 print_generic_expr (dump_file
, var
, 0);
1438 fprintf (dump_file
, "\n");
1443 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
/* NOTE(review): extraction-garbled chunk -- statements are split across
   lines and many original lines (declarations of bb/i/var/decl/sym,
   braces, several `if' headers, `return') are missing, per the gaps in
   the embedded numbering.  Compare with upstream gcc/tree-ssa.c.
   Two phases: (1) scan all stmts/PHIs to collect ADDRESSES_TAKEN and
   NOT_REG_NEEDS, then decide per-variable via maybe_optimize_var;
   (2) if anything became renamable, rewrite affected operands and
   update SSA form.  */
1446 execute_update_addresses_taken (void)
1448 gimple_stmt_iterator gsi
;
1450 bitmap addresses_taken
= BITMAP_ALLOC (NULL
);
1451 bitmap not_reg_needs
= BITMAP_ALLOC (NULL
);
1452 bitmap suitable_for_renaming
= BITMAP_ALLOC (NULL
);
1456 timevar_push (TV_ADDRESS_TAKEN
);
1458 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1459 the function body. */
1460 FOR_EACH_BB_FN (bb
, cfun
)
1462 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1464 gimple stmt
= gsi_stmt (gsi
);
1465 enum gimple_code code
= gimple_code (stmt
);
1468 /* Note all addresses taken by the stmt. */
1469 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1471 /* If we have a call or an assignment, see if the lhs contains
1472 a local decl that requires not to be a gimple register. */
1473 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
1475 tree lhs
= gimple_get_lhs (stmt
);
/* Non-SSA, non-rewritable lvalue: its base decl must stay a
   non-register (leading conjunct elided by the extraction).  */
1477 && TREE_CODE (lhs
) != SSA_NAME
1478 && non_rewritable_lvalue_p (lhs
))
1480 decl
= get_base_address (lhs
);
1482 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* Likewise check the single RHS of a plain assignment.  */
1486 if (gimple_assign_single_p (stmt
))
1488 tree rhs
= gimple_assign_rhs1 (stmt
);
1489 if ((decl
= non_rewritable_mem_ref_base (rhs
)))
1490 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* Calls: every argument may contain a non-rewritable MEM_REF.  */
1493 else if (code
== GIMPLE_CALL
)
1495 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1497 tree arg
= gimple_call_arg (stmt
, i
);
1498 if ((decl
= non_rewritable_mem_ref_base (arg
)))
1499 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* Asm statements: check outputs and inputs separately.  */
1503 else if (code
== GIMPLE_ASM
)
1505 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1507 tree link
= gimple_asm_output_op (stmt
, i
);
1508 tree lhs
= TREE_VALUE (link
);
1509 if (TREE_CODE (lhs
) != SSA_NAME
)
1511 decl
= get_base_address (lhs
);
1513 && (non_rewritable_lvalue_p (lhs
)
1514 /* We cannot move required conversions from
1515 the lhs to the rhs in asm statements, so
1516 require we do not need any. */
1517 || !useless_type_conversion_p
1518 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
1519 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1522 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1524 tree link
= gimple_asm_input_op (stmt
, i
);
1525 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
1526 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* PHI arguments can also take addresses (ADDR_EXPR operands).  */
1531 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1534 gimple phi
= gsi_stmt (gsi
);
1536 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1538 tree op
= PHI_ARG_DEF (phi
, i
), var
;
1539 if (TREE_CODE (op
) == ADDR_EXPR
1540 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
1542 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
1547 /* We cannot iterate over all referenced vars because that can contain
1548 unused vars from BLOCK trees, which causes code generation differences
/* ... so walk only DECL_ARGUMENTS and local_decls instead (rest of
   this comment elided by the extraction).  */
1550 for (var
= DECL_ARGUMENTS (cfun
->decl
); var
; var
= DECL_CHAIN (var
))
1551 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1552 suitable_for_renaming
);
1554 FOR_EACH_VEC_SAFE_ELT (cfun
->local_decls
, i
, var
)
1555 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1556 suitable_for_renaming
);
1558 /* Operand caches need to be recomputed for operands referencing the updated
1559 variables and operands need to be rewritten to expose bare symbols. */
1560 if (!bitmap_empty_p (suitable_for_renaming
))
1562 FOR_EACH_BB_FN (bb
, cfun
)
1563 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
1565 gimple stmt
= gsi_stmt (gsi
);
1567 /* Re-write TARGET_MEM_REFs of symbols we want to
1568 rewrite into SSA form. */
1569 if (gimple_assign_single_p (stmt
))
1571 tree lhs
= gimple_assign_lhs (stmt
);
1572 tree rhs
, *rhsp
= gimple_assign_rhs1_ptr (stmt
);
1575 /* We shouldn't have any fancy wrapping of
1576 component-refs on the LHS, but look through
1577 VIEW_CONVERT_EXPRs as that is easy. */
1578 while (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
1579 lhs
= TREE_OPERAND (lhs
, 0);
/* LHS is a zero-offset MEM_REF of a symbol that just became
   renamable: replace it by the bare symbol (the replacement
   statement itself is elided by the extraction).  */
1580 if (TREE_CODE (lhs
) == MEM_REF
1581 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1582 && integer_zerop (TREE_OPERAND (lhs
, 1))
1583 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
1585 && !TREE_ADDRESSABLE (sym
)
1586 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1589 lhs
= gimple_assign_lhs (stmt
);
1591 /* Rewrite the RHS and make sure the resulting assignment
1592 is validly typed. */
1593 maybe_rewrite_mem_ref_base (rhsp
, suitable_for_renaming
);
1594 rhs
= gimple_assign_rhs1 (stmt
);
1595 if (gimple_assign_lhs (stmt
) != lhs
1596 && !useless_type_conversion_p (TREE_TYPE (lhs
),
/* Wrap the RHS in a VIEW_CONVERT_EXPR when the rewritten
   LHS type no longer matches.  */
1598 rhs
= fold_build1 (VIEW_CONVERT_EXPR
,
1599 TREE_TYPE (lhs
), rhs
);
1601 if (gimple_assign_lhs (stmt
) != lhs
)
1602 gimple_assign_set_lhs (stmt
, lhs
);
1604 /* For var ={v} {CLOBBER}; where var lost
1605 TREE_ADDRESSABLE just remove the stmt. */
1607 && TREE_CLOBBER_P (rhs
)
1608 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (lhs
)))
1610 unlink_stmt_vdef (stmt
);
1611 gsi_remove (&gsi
, true);
1612 release_defs (stmt
);
/* Otherwise push the possibly-rewritten RHS back into the
   statement.  */
1616 if (gimple_assign_rhs1 (stmt
) != rhs
)
1618 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1619 gimple_assign_set_rhs_from_tree (&gsi
, rhs
);
/* Calls: rewrite MEM_REF bases in every argument in place.  */
1623 else if (gimple_code (stmt
) == GIMPLE_CALL
)
1626 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1628 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
1629 maybe_rewrite_mem_ref_base (argp
, suitable_for_renaming
);
/* Asm statements: rewrite outputs then inputs.  */
1633 else if (gimple_code (stmt
) == GIMPLE_ASM
)
1636 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1638 tree link
= gimple_asm_output_op (stmt
, i
);
1639 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
1640 suitable_for_renaming
);
1642 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1644 tree link
= gimple_asm_input_op (stmt
, i
);
1645 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
1646 suitable_for_renaming
);
/* Debug binds: rewrite the bound value; if it still contains a
   non-rewritable ref to a renamed decl, reset the value.  */
1650 else if (gimple_debug_bind_p (stmt
)
1651 && gimple_debug_bind_has_value_p (stmt
))
1653 tree
*valuep
= gimple_debug_bind_get_value_ptr (stmt
);
1655 maybe_rewrite_mem_ref_base (valuep
, suitable_for_renaming
);
1656 decl
= non_rewritable_mem_ref_base (*valuep
);
1658 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (decl
)))
1659 gimple_debug_bind_reset_value (stmt
);
/* Force operand re-scan for statements touching memory (the
   update call appears elided by the extraction).  */
1662 if (gimple_references_memory_p (stmt
)
1663 || is_gimple_debug (stmt
))
1669 /* Update SSA form here, we are called as non-pass as well. */
1670 if (number_of_loops (cfun
) > 1
1671 && loops_state_satisfies_p (LOOP_CLOSED_SSA
))
1672 rewrite_into_loop_closed_ssa (NULL
, TODO_update_ssa
);
1674 update_ssa (TODO_update_ssa
);
1677 BITMAP_FREE (not_reg_needs
);
1678 BITMAP_FREE (addresses_taken
);
1679 BITMAP_FREE (suitable_for_renaming
);
1680 timevar_pop (TV_ADDRESS_TAKEN
);
/* Static pass descriptor for the "addressables" pass; consumed by the
   pass_update_address_taken constructor below.  NOTE(review): the
   opening and closing braces of the initializer were dropped by the
   extraction -- the field list itself matches the pass_data layout.  */
1685 const pass_data pass_data_update_address_taken
=
1687 GIMPLE_PASS
, /* type */
1688 "addressables", /* name */
1689 OPTGROUP_NONE
, /* optinfo_flags */
1690 false, /* has_gate */
1691 false, /* has_execute */
1692 TV_ADDRESS_TAKEN
, /* tv_id */
1693 PROP_ssa
, /* properties_required */
1694 0, /* properties_provided */
1695 0, /* properties_destroyed */
1696 0, /* todo_flags_start */
1697 TODO_update_address_taken
, /* todo_flags_finish */
/* Trivial pass wrapper: all real work is driven through the
   TODO_update_address_taken finish flag in the pass_data above (the
   pass has no gate or execute of its own).  NOTE(review): the class
   body's braces and access specifier were dropped by the extraction.  */
1700 class pass_update_address_taken
: public gimple_opt_pass
1703 pass_update_address_taken (gcc::context
*ctxt
)
1704 : gimple_opt_pass (pass_data_update_address_taken
, ctxt
)
1707 /* opt_pass methods: */
1709 }; // class pass_update_address_taken
1714 make_pass_update_address_taken (gcc::context
*ctxt
)
1716 return new pass_update_address_taken (ctxt
);