1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
32 #include "gimple-pretty-print.h"
33 #include "pointer-set.h"
36 #include "gimple-iterator.h"
37 #include "gimple-walk.h"
38 #include "gimple-ssa.h"
39 #include "tree-phinodes.h"
40 #include "ssa-iterators.h"
41 #include "tree-ssanames.h"
42 #include "tree-ssa-loop-manip.h"
43 #include "tree-into-ssa.h"
45 #include "tree-inline.h"
47 #include "tree-pass.h"
48 #include "diagnostic-core.h"
/* Pointer map of variable mappings, keyed by edge.  Lazily created by
   redirect_edge_var_map_add and destroyed by
   redirect_edge_var_map_destroy.  */
static struct pointer_map_t *edge_var_maps;
55 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
58 redirect_edge_var_map_add (edge e
, tree result
, tree def
, source_location locus
)
61 edge_var_map_vector
*head
;
62 edge_var_map new_node
;
64 if (edge_var_maps
== NULL
)
65 edge_var_maps
= pointer_map_create ();
67 slot
= pointer_map_insert (edge_var_maps
, e
);
68 head
= (edge_var_map_vector
*) *slot
;
70 vec_safe_reserve (head
, 5);
72 new_node
.result
= result
;
73 new_node
.locus
= locus
;
75 vec_safe_push (head
, new_node
);
80 /* Clear the var mappings in edge E. */
83 redirect_edge_var_map_clear (edge e
)
86 edge_var_map_vector
*head
;
91 slot
= pointer_map_contains (edge_var_maps
, e
);
95 head
= (edge_var_map_vector
*) *slot
;
102 /* Duplicate the redirected var mappings in OLDE in NEWE.
104 Since we can't remove a mapping, let's just duplicate it. This assumes a
105 pointer_map can have multiple edges mapping to the same var_map (many to
106 one mapping), since we don't remove the previous mappings. */
109 redirect_edge_var_map_dup (edge newe
, edge olde
)
111 void **new_slot
, **old_slot
;
112 edge_var_map_vector
*head
;
117 new_slot
= pointer_map_insert (edge_var_maps
, newe
);
118 old_slot
= pointer_map_contains (edge_var_maps
, olde
);
121 head
= (edge_var_map_vector
*) *old_slot
;
123 edge_var_map_vector
*new_head
= NULL
;
125 new_head
= vec_safe_copy (head
);
127 vec_safe_reserve (new_head
, 5);
128 *new_slot
= new_head
;
132 /* Return the variable mappings for a given edge. If there is none, return
135 edge_var_map_vector
*
136 redirect_edge_var_map_vector (edge e
)
140 /* Hey, what kind of idiot would... you'd be surprised. */
144 slot
= pointer_map_contains (edge_var_maps
, e
);
148 return (edge_var_map_vector
*) *slot
;
151 /* Used by redirect_edge_var_map_destroy to free all memory. */
154 free_var_map_entry (const void *key ATTRIBUTE_UNUSED
,
156 void *data ATTRIBUTE_UNUSED
)
158 edge_var_map_vector
*head
= (edge_var_map_vector
*) *value
;
163 /* Clear the edge variable mappings. */
166 redirect_edge_var_map_destroy (void)
170 pointer_map_traverse (edge_var_maps
, free_var_map_entry
, NULL
);
171 pointer_map_destroy (edge_var_maps
);
172 edge_var_maps
= NULL
;
177 /* Remove the corresponding arguments from the PHI nodes in E's
178 destination block and redirect it to DEST. Return redirected edge.
179 The list of removed arguments is stored in a vector accessed
180 through edge_var_maps. */
183 ssa_redirect_edge (edge e
, basic_block dest
)
185 gimple_stmt_iterator gsi
;
188 redirect_edge_var_map_clear (e
);
190 /* Remove the appropriate PHI arguments in E's destination block. */
191 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
194 source_location locus
;
196 phi
= gsi_stmt (gsi
);
197 def
= gimple_phi_arg_def (phi
, e
->dest_idx
);
198 locus
= gimple_phi_arg_location (phi
, e
->dest_idx
);
200 if (def
== NULL_TREE
)
203 redirect_edge_var_map_add (e
, gimple_phi_result (phi
), def
, locus
);
206 e
= redirect_edge_succ_nodup (e
, dest
);
212 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
216 flush_pending_stmts (edge e
)
219 edge_var_map_vector
*v
;
222 gimple_stmt_iterator gsi
;
224 v
= redirect_edge_var_map_vector (e
);
228 for (gsi
= gsi_start_phis (e
->dest
), i
= 0;
229 !gsi_end_p (gsi
) && v
->iterate (i
, &vm
);
230 gsi_next (&gsi
), i
++)
234 phi
= gsi_stmt (gsi
);
235 def
= redirect_edge_var_map_def (vm
);
236 add_phi_arg (phi
, def
, e
, redirect_edge_var_map_location (vm
));
239 redirect_edge_var_map_clear (e
);
242 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
243 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
244 expression with a different value.
246 This will update any annotations (say debug bind stmts) referring
247 to the original LHS, so that they use the RHS instead. This is
248 done even if NLHS and LHS are the same, for it is understood that
249 the RHS will be modified afterwards, and NLHS will not be assigned
252 Adjusting any non-annotation uses of the LHS, if needed, is a
253 responsibility of the caller.
255 The effect of this call should be pretty much the same as that of
256 inserting a copy of STMT before STMT, and then removing the
257 original stmt, at which time gsi_remove() would have update
258 annotations, but using this function saves all the inserting,
259 copying and removing. */
262 gimple_replace_ssa_lhs (gimple stmt
, tree nlhs
)
264 if (MAY_HAVE_DEBUG_STMTS
)
266 tree lhs
= gimple_get_lhs (stmt
);
268 gcc_assert (SSA_NAME_DEF_STMT (lhs
) == stmt
);
270 insert_debug_temp_for_var_def (NULL
, lhs
);
273 gimple_set_lhs (stmt
, nlhs
);
277 /* Given a tree for an expression for which we might want to emit
278 locations or values in debug information (generally a variable, but
279 we might deal with other kinds of trees in the future), return the
280 tree that should be used as the variable of a DEBUG_BIND STMT or
281 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
284 target_for_debug_bind (tree var
)
286 if (!MAY_HAVE_DEBUG_STMTS
)
289 if (TREE_CODE (var
) == SSA_NAME
)
291 var
= SSA_NAME_VAR (var
);
292 if (var
== NULL_TREE
)
296 if ((TREE_CODE (var
) != VAR_DECL
297 || VAR_DECL_IS_VIRTUAL_OPERAND (var
))
298 && TREE_CODE (var
) != PARM_DECL
)
301 if (DECL_HAS_VALUE_EXPR_P (var
))
302 return target_for_debug_bind (DECL_VALUE_EXPR (var
));
304 if (DECL_IGNORED_P (var
))
307 /* var-tracking only tracks registers. */
308 if (!is_gimple_reg_type (TREE_TYPE (var
)))
314 /* Called via walk_tree, look for SSA_NAMEs that have already been
318 find_released_ssa_name (tree
*tp
, int *walk_subtrees
, void *data_
)
320 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data_
;
322 if (wi
&& wi
->is_lhs
)
325 if (TREE_CODE (*tp
) == SSA_NAME
)
327 if (SSA_NAME_IN_FREE_LIST (*tp
))
332 else if (IS_TYPE_OR_DECL_P (*tp
))
338 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
339 by other DEBUG stmts, and replace uses of the DEF with the
340 newly-created debug temp. */
343 insert_debug_temp_for_var_def (gimple_stmt_iterator
*gsi
, tree var
)
345 imm_use_iterator imm_iter
;
348 gimple def_stmt
= NULL
;
352 if (!MAY_HAVE_DEBUG_STMTS
)
355 /* If this name has already been registered for replacement, do nothing
356 as anything that uses this name isn't in SSA form. */
357 if (name_registered_for_update_p (var
))
360 /* Check whether there are debug stmts that reference this variable and,
361 if there are, decide whether we should use a debug temp. */
362 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, var
)
364 stmt
= USE_STMT (use_p
);
366 if (!gimple_debug_bind_p (stmt
))
372 if (gimple_debug_bind_get_value (stmt
) != var
)
374 /* Count this as an additional use, so as to make sure we
375 use a temp unless VAR's definition has a SINGLE_RHS that
386 def_stmt
= gsi_stmt (*gsi
);
388 def_stmt
= SSA_NAME_DEF_STMT (var
);
390 /* If we didn't get an insertion point, and the stmt has already
391 been removed, we won't be able to insert the debug bind stmt, so
392 we'll have to drop debug information. */
393 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
395 value
= degenerate_phi_result (def_stmt
);
396 if (value
&& walk_tree (&value
, find_released_ssa_name
, NULL
, NULL
))
398 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
400 else if (value
== error_mark_node
)
403 else if (is_gimple_assign (def_stmt
))
405 bool no_value
= false;
407 if (!dom_info_available_p (CDI_DOMINATORS
))
409 struct walk_stmt_info wi
;
411 memset (&wi
, 0, sizeof (wi
));
413 /* When removing blocks without following reverse dominance
414 order, we may sometimes encounter SSA_NAMEs that have
415 already been released, referenced in other SSA_DEFs that
416 we're about to release. Consider:
425 If we deleted BB X first, propagating the value of w_2
426 won't do us any good. It's too late to recover their
427 original definition of v_1: when it was deleted, it was
428 only referenced in other DEFs, it couldn't possibly know
429 it should have been retained, and propagating every
430 single DEF just in case it might have to be propagated
431 into a DEBUG STMT would probably be too wasteful.
433 When dominator information is not readily available, we
434 check for and accept some loss of debug information. But
435 if it is available, there's no excuse for us to remove
436 blocks in the wrong order, so we don't even check for
437 dead SSA NAMEs. SSA verification shall catch any
439 if ((!gsi
&& !gimple_bb (def_stmt
))
440 || walk_gimple_op (def_stmt
, find_released_ssa_name
, &wi
))
445 value
= gimple_assign_rhs_to_tree (def_stmt
);
450 /* If there's a single use of VAR, and VAR is the entire debug
451 expression (usecount would have been incremented again
452 otherwise), and the definition involves only constants and
453 SSA names, then we can propagate VALUE into this single use,
456 We can also avoid using a temp if VALUE can be shared and
457 propagated into all uses, without generating expressions that
458 wouldn't be valid gimple RHSs.
460 Other cases that would require unsharing or non-gimple RHSs
461 are deferred to a debug temp, although we could avoid temps
462 at the expense of duplication of expressions. */
464 if (CONSTANT_CLASS_P (value
)
465 || gimple_code (def_stmt
) == GIMPLE_PHI
467 && (!gimple_assign_single_p (def_stmt
)
468 || is_gimple_min_invariant (value
)))
469 || is_gimple_reg (value
))
474 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
476 def_temp
= gimple_build_debug_bind (vexpr
,
477 unshare_expr (value
),
480 DECL_ARTIFICIAL (vexpr
) = 1;
481 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
483 DECL_MODE (vexpr
) = DECL_MODE (value
);
485 DECL_MODE (vexpr
) = TYPE_MODE (TREE_TYPE (value
));
488 gsi_insert_before (gsi
, def_temp
, GSI_SAME_STMT
);
491 gimple_stmt_iterator ngsi
= gsi_for_stmt (def_stmt
);
492 gsi_insert_before (&ngsi
, def_temp
, GSI_SAME_STMT
);
499 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, var
)
501 if (!gimple_debug_bind_p (stmt
))
506 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
507 /* unshare_expr is not needed here. vexpr is either a
508 SINGLE_RHS, that can be safely shared, some other RHS
509 that was unshared when we found it had a single debug
510 use, or a DEBUG_EXPR_DECL, that can be safely
512 SET_USE (use_p
, unshare_expr (value
));
513 /* If we didn't replace uses with a debug decl fold the
514 resulting expression. Otherwise we end up with invalid IL. */
515 if (TREE_CODE (value
) != DEBUG_EXPR_DECL
)
517 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
518 fold_stmt_inplace (&gsi
);
522 gimple_debug_bind_reset_value (stmt
);
529 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
530 other DEBUG stmts, and replace uses of the DEF with the
531 newly-created debug temp. */
534 insert_debug_temps_for_defs (gimple_stmt_iterator
*gsi
)
540 if (!MAY_HAVE_DEBUG_STMTS
)
543 stmt
= gsi_stmt (*gsi
);
545 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
547 tree var
= DEF_FROM_PTR (def_p
);
549 if (TREE_CODE (var
) != SSA_NAME
)
552 insert_debug_temp_for_var_def (gsi
, var
);
556 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
559 reset_debug_uses (gimple stmt
)
563 imm_use_iterator imm_iter
;
566 if (!MAY_HAVE_DEBUG_STMTS
)
569 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
571 tree var
= DEF_FROM_PTR (def_p
);
573 if (TREE_CODE (var
) != SSA_NAME
)
576 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, var
)
578 if (!gimple_debug_bind_p (use_stmt
))
581 gimple_debug_bind_reset_value (use_stmt
);
582 update_stmt (use_stmt
);
587 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
588 dominated stmts before their dominators, so that release_ssa_defs
589 stands a chance of propagating DEFs into debug bind stmts. */
592 release_defs_bitset (bitmap toremove
)
597 /* Performing a topological sort is probably overkill, this will
598 most likely run in slightly superlinear time, rather than the
599 pathological quadratic worst case. */
600 while (!bitmap_empty_p (toremove
))
601 EXECUTE_IF_SET_IN_BITMAP (toremove
, 0, j
, bi
)
603 bool remove_now
= true;
604 tree var
= ssa_name (j
);
606 imm_use_iterator uit
;
608 FOR_EACH_IMM_USE_STMT (stmt
, uit
, var
)
613 /* We can't propagate PHI nodes into debug stmts. */
614 if (gimple_code (stmt
) == GIMPLE_PHI
615 || is_gimple_debug (stmt
))
618 /* If we find another definition to remove that uses
619 the one we're looking at, defer the removal of this
620 one, so that it can be propagated into debug stmts
621 after the other is. */
622 FOR_EACH_SSA_DEF_OPERAND (def_p
, stmt
, dit
, SSA_OP_DEF
)
624 tree odef
= DEF_FROM_PTR (def_p
);
626 if (bitmap_bit_p (toremove
, SSA_NAME_VERSION (odef
)))
634 BREAK_FROM_IMM_USE_STMT (uit
);
639 gimple def
= SSA_NAME_DEF_STMT (var
);
640 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
642 if (gimple_code (def
) == GIMPLE_PHI
)
643 remove_phi_node (&gsi
, true);
646 gsi_remove (&gsi
, true);
650 bitmap_clear_bit (toremove
, j
);
655 /* Return true if SSA_NAME is malformed and mark it visited.
657 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
661 verify_ssa_name (tree ssa_name
, bool is_virtual
)
663 if (TREE_CODE (ssa_name
) != SSA_NAME
)
665 error ("expected an SSA_NAME object");
669 if (SSA_NAME_IN_FREE_LIST (ssa_name
))
671 error ("found an SSA_NAME that had been released into the free pool");
675 if (SSA_NAME_VAR (ssa_name
) != NULL_TREE
676 && TREE_TYPE (ssa_name
) != TREE_TYPE (SSA_NAME_VAR (ssa_name
)))
678 error ("type mismatch between an SSA_NAME and its symbol");
682 if (is_virtual
&& !virtual_operand_p (ssa_name
))
684 error ("found a virtual definition for a GIMPLE register");
688 if (is_virtual
&& SSA_NAME_VAR (ssa_name
) != gimple_vop (cfun
))
690 error ("virtual SSA name for non-VOP decl");
694 if (!is_virtual
&& virtual_operand_p (ssa_name
))
696 error ("found a real definition for a non-register");
700 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name
)
701 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
)))
703 error ("found a default name with a non-empty defining statement");
711 /* Return true if the definition of SSA_NAME at block BB is malformed.
713 STMT is the statement where SSA_NAME is created.
715 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
716 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
717 it means that the block in that array slot contains the
718 definition of SSA_NAME.
720 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
723 verify_def (basic_block bb
, basic_block
*definition_block
, tree ssa_name
,
724 gimple stmt
, bool is_virtual
)
726 if (verify_ssa_name (ssa_name
, is_virtual
))
729 if (SSA_NAME_VAR (ssa_name
)
730 && TREE_CODE (SSA_NAME_VAR (ssa_name
)) == RESULT_DECL
731 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name
)))
733 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
737 if (definition_block
[SSA_NAME_VERSION (ssa_name
)])
739 error ("SSA_NAME created in two different blocks %i and %i",
740 definition_block
[SSA_NAME_VERSION (ssa_name
)]->index
, bb
->index
);
744 definition_block
[SSA_NAME_VERSION (ssa_name
)] = bb
;
746 if (SSA_NAME_DEF_STMT (ssa_name
) != stmt
)
748 error ("SSA_NAME_DEF_STMT is wrong");
749 fprintf (stderr
, "Expected definition statement:\n");
750 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (ssa_name
), 4, TDF_VOPS
);
751 fprintf (stderr
, "\nActual definition statement:\n");
752 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
759 fprintf (stderr
, "while verifying SSA_NAME ");
760 print_generic_expr (stderr
, ssa_name
, 0);
761 fprintf (stderr
, " in statement\n");
762 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
768 /* Return true if the use of SSA_NAME at statement STMT in block BB is
771 DEF_BB is the block where SSA_NAME was found to be created.
773 IDOM contains immediate dominator information for the flowgraph.
775 CHECK_ABNORMAL is true if the caller wants to check whether this use
776 is flowing through an abnormal edge (only used when checking PHI
779 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
780 that are defined before STMT in basic block BB. */
783 verify_use (basic_block bb
, basic_block def_bb
, use_operand_p use_p
,
784 gimple stmt
, bool check_abnormal
, bitmap names_defined_in_bb
)
787 tree ssa_name
= USE_FROM_PTR (use_p
);
789 if (!TREE_VISITED (ssa_name
))
790 if (verify_imm_links (stderr
, ssa_name
))
793 TREE_VISITED (ssa_name
) = 1;
795 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
))
796 && SSA_NAME_IS_DEFAULT_DEF (ssa_name
))
797 ; /* Default definitions have empty statements. Nothing to do. */
800 error ("missing definition");
803 else if (bb
!= def_bb
804 && !dominated_by_p (CDI_DOMINATORS
, bb
, def_bb
))
806 error ("definition in block %i does not dominate use in block %i",
807 def_bb
->index
, bb
->index
);
810 else if (bb
== def_bb
811 && names_defined_in_bb
!= NULL
812 && !bitmap_bit_p (names_defined_in_bb
, SSA_NAME_VERSION (ssa_name
)))
814 error ("definition in block %i follows the use", def_bb
->index
);
819 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name
))
821 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
825 /* Make sure the use is in an appropriate list by checking the previous
826 element to make sure it's the same. */
827 if (use_p
->prev
== NULL
)
829 error ("no immediate_use list");
835 if (use_p
->prev
->use
== NULL
)
836 listvar
= use_p
->prev
->loc
.ssa_name
;
838 listvar
= USE_FROM_PTR (use_p
->prev
);
839 if (listvar
!= ssa_name
)
841 error ("wrong immediate use list");
848 fprintf (stderr
, "for SSA_NAME: ");
849 print_generic_expr (stderr
, ssa_name
, TDF_VOPS
);
850 fprintf (stderr
, " in statement:\n");
851 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
858 /* Return true if any of the arguments for PHI node PHI at block BB is
861 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
862 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
863 it means that the block in that array slot contains the
864 definition of SSA_NAME. */
867 verify_phi_args (gimple phi
, basic_block bb
, basic_block
*definition_block
)
871 size_t i
, phi_num_args
= gimple_phi_num_args (phi
);
873 if (EDGE_COUNT (bb
->preds
) != phi_num_args
)
875 error ("incoming edge count does not match number of PHI arguments");
880 for (i
= 0; i
< phi_num_args
; i
++)
882 use_operand_p op_p
= gimple_phi_arg_imm_use_ptr (phi
, i
);
883 tree op
= USE_FROM_PTR (op_p
);
885 e
= EDGE_PRED (bb
, i
);
889 error ("PHI argument is missing for edge %d->%d",
896 if (TREE_CODE (op
) != SSA_NAME
&& !is_gimple_min_invariant (op
))
898 error ("PHI argument is not SSA_NAME, or invariant");
902 if (TREE_CODE (op
) == SSA_NAME
)
904 err
= verify_ssa_name (op
, virtual_operand_p (gimple_phi_result (phi
)));
905 err
|= verify_use (e
->src
, definition_block
[SSA_NAME_VERSION (op
)],
906 op_p
, phi
, e
->flags
& EDGE_ABNORMAL
, NULL
);
909 if (TREE_CODE (op
) == ADDR_EXPR
)
911 tree base
= TREE_OPERAND (op
, 0);
912 while (handled_component_p (base
))
913 base
= TREE_OPERAND (base
, 0);
914 if ((TREE_CODE (base
) == VAR_DECL
915 || TREE_CODE (base
) == PARM_DECL
916 || TREE_CODE (base
) == RESULT_DECL
)
917 && !TREE_ADDRESSABLE (base
))
919 error ("address taken, but ADDRESSABLE bit not set");
926 error ("wrong edge %d->%d for PHI argument",
927 e
->src
->index
, e
->dest
->index
);
933 fprintf (stderr
, "PHI argument\n");
934 print_generic_stmt (stderr
, op
, TDF_VOPS
);
942 fprintf (stderr
, "for PHI node\n");
943 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
|TDF_MEMSYMS
);
951 /* Verify common invariants in the SSA web.
952 TODO: verify the variable annotations. */
955 verify_ssa (bool check_modified_stmt
)
959 basic_block
*definition_block
= XCNEWVEC (basic_block
, num_ssa_names
);
962 enum dom_state orig_dom_state
= dom_info_state (CDI_DOMINATORS
);
963 bitmap names_defined_in_bb
= BITMAP_ALLOC (NULL
);
965 gcc_assert (!need_ssa_update_p (cfun
));
967 timevar_push (TV_TREE_SSA_VERIFY
);
969 /* Keep track of SSA names present in the IL. */
970 for (i
= 1; i
< num_ssa_names
; i
++)
972 tree name
= ssa_name (i
);
976 TREE_VISITED (name
) = 0;
978 verify_ssa_name (name
, virtual_operand_p (name
));
980 stmt
= SSA_NAME_DEF_STMT (name
);
981 if (!gimple_nop_p (stmt
))
983 basic_block bb
= gimple_bb (stmt
);
984 verify_def (bb
, definition_block
,
985 name
, stmt
, virtual_operand_p (name
));
991 calculate_dominance_info (CDI_DOMINATORS
);
993 /* Now verify all the uses and make sure they agree with the definitions
994 found in the previous pass. */
1000 gimple_stmt_iterator gsi
;
1002 /* Make sure that all edges have a clear 'aux' field. */
1003 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1007 error ("AUX pointer initialized for edge %d->%d", e
->src
->index
,
1013 /* Verify the arguments for every PHI node in the block. */
1014 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1016 phi
= gsi_stmt (gsi
);
1017 if (verify_phi_args (phi
, bb
, definition_block
))
1020 bitmap_set_bit (names_defined_in_bb
,
1021 SSA_NAME_VERSION (gimple_phi_result (phi
)));
1024 /* Now verify all the uses and vuses in every statement of the block. */
1025 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1027 gimple stmt
= gsi_stmt (gsi
);
1028 use_operand_p use_p
;
1030 if (check_modified_stmt
&& gimple_modified_p (stmt
))
1032 error ("stmt (%p) marked modified after optimization pass: ",
1034 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1038 if (verify_ssa_operands (stmt
))
1040 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1044 if (gimple_debug_bind_p (stmt
)
1045 && !gimple_debug_bind_has_value_p (stmt
))
1048 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1050 op
= USE_FROM_PTR (use_p
);
1051 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1052 use_p
, stmt
, false, names_defined_in_bb
))
1056 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1058 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1060 error ("SSA_NAME_DEF_STMT is wrong");
1061 fprintf (stderr
, "Expected definition statement:\n");
1062 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1063 fprintf (stderr
, "\nActual definition statement:\n");
1064 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1068 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1072 bitmap_clear (names_defined_in_bb
);
1075 free (definition_block
);
1077 /* Restore the dominance information to its prior known state, so
1078 that we do not perturb the compiler's subsequent behavior. */
1079 if (orig_dom_state
== DOM_NONE
)
1080 free_dominance_info (CDI_DOMINATORS
);
1082 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1084 BITMAP_FREE (names_defined_in_bb
);
1085 timevar_pop (TV_TREE_SSA_VERIFY
);
1089 internal_error ("verify_ssa failed");
1092 /* Return true if the DECL_UID in both trees are equal. */
1095 uid_ssaname_map_eq (const void *va
, const void *vb
)
1097 const_tree a
= (const_tree
) va
;
1098 const_tree b
= (const_tree
) vb
;
1099 return (a
->ssa_name
.var
->decl_minimal
.uid
== b
->ssa_name
.var
->decl_minimal
.uid
);
1102 /* Hash a tree in a uid_decl_map. */
1105 uid_ssaname_map_hash (const void *item
)
1107 return ((const_tree
)item
)->ssa_name
.var
->decl_minimal
.uid
;
1111 /* Initialize global DFA and SSA structures. */
1114 init_tree_ssa (struct function
*fn
)
1116 fn
->gimple_df
= ggc_alloc_cleared_gimple_df ();
1117 fn
->gimple_df
->default_defs
= htab_create_ggc (20, uid_ssaname_map_hash
,
1118 uid_ssaname_map_eq
, NULL
);
1119 pt_solution_reset (&fn
->gimple_df
->escaped
);
1120 init_ssanames (fn
, 0);
1123 /* Do the actions required to initialize internal data structures used
1124 in tree-ssa optimization passes. */
1127 execute_init_datastructures (void)
1129 /* Allocate hash tables, arrays and other structures. */
1130 gcc_assert (!cfun
->gimple_df
);
1131 init_tree_ssa (cfun
);
1135 /* Gate for IPCP optimization. */
1138 gate_init_datastructures (void)
1140 /* Do nothing for funcions that was produced already in SSA form. */
1141 return !(cfun
->curr_properties
& PROP_ssa
);
1146 const pass_data pass_data_init_datastructures
=
1148 GIMPLE_PASS
, /* type */
1149 "*init_datastructures", /* name */
1150 OPTGROUP_NONE
, /* optinfo_flags */
1151 true, /* has_gate */
1152 true, /* has_execute */
1153 TV_NONE
, /* tv_id */
1154 PROP_cfg
, /* properties_required */
1155 0, /* properties_provided */
1156 0, /* properties_destroyed */
1157 0, /* todo_flags_start */
1158 0, /* todo_flags_finish */
1161 class pass_init_datastructures
: public gimple_opt_pass
1164 pass_init_datastructures (gcc::context
*ctxt
)
1165 : gimple_opt_pass (pass_data_init_datastructures
, ctxt
)
1168 /* opt_pass methods: */
1169 bool gate () { return gate_init_datastructures (); }
1170 unsigned int execute () { return execute_init_datastructures (); }
1172 }; // class pass_init_datastructures
1177 make_pass_init_datastructures (gcc::context
*ctxt
)
1179 return new pass_init_datastructures (ctxt
);
1182 /* Deallocate memory associated with SSA data structures for FNDECL. */
1185 delete_tree_ssa (void)
1189 /* We no longer maintain the SSA operand cache at this point. */
1190 if (ssa_operands_active (cfun
))
1191 fini_ssa_operands ();
1193 htab_delete (cfun
->gimple_df
->default_defs
);
1194 cfun
->gimple_df
->default_defs
= NULL
;
1195 pt_solution_reset (&cfun
->gimple_df
->escaped
);
1196 if (cfun
->gimple_df
->decls_to_pointers
!= NULL
)
1197 pointer_map_destroy (cfun
->gimple_df
->decls_to_pointers
);
1198 cfun
->gimple_df
->decls_to_pointers
= NULL
;
1199 cfun
->gimple_df
->modified_noreturn_calls
= NULL
;
1200 cfun
->gimple_df
= NULL
;
1202 /* We no longer need the edge variable maps. */
1203 redirect_edge_var_map_destroy ();
1206 /* Return true if EXPR is a useless type conversion, otherwise return
1210 tree_ssa_useless_type_conversion (tree expr
)
1212 /* If we have an assignment that merely uses a NOP_EXPR to change
1213 the top of the RHS to the type of the LHS and the type conversion
1214 is "safe", then strip away the type conversion so that we can
1215 enter LHS = RHS into the const_and_copies table. */
1216 if (CONVERT_EXPR_P (expr
)
1217 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
1218 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
1219 return useless_type_conversion_p
1221 TREE_TYPE (TREE_OPERAND (expr
, 0)));
1226 /* Strip conversions from EXP according to
1227 tree_ssa_useless_type_conversion and return the resulting
1231 tree_ssa_strip_useless_type_conversions (tree exp
)
1233 while (tree_ssa_useless_type_conversion (exp
))
1234 exp
= TREE_OPERAND (exp
, 0);
1239 /* Return true if T, an SSA_NAME, has an undefined value. */
1242 ssa_undefined_value_p (tree t
)
1244 tree var
= SSA_NAME_VAR (t
);
1248 /* Parameters get their initial value from the function entry. */
1249 else if (TREE_CODE (var
) == PARM_DECL
)
1251 /* When returning by reference the return address is actually a hidden
1253 else if (TREE_CODE (var
) == RESULT_DECL
&& DECL_BY_REFERENCE (var
))
1255 /* Hard register variables get their initial value from the ether. */
1256 else if (TREE_CODE (var
) == VAR_DECL
&& DECL_HARD_REGISTER (var
))
1259 /* The value is undefined iff its definition statement is empty. */
1260 return gimple_nop_p (SSA_NAME_DEF_STMT (t
));
1264 /* If necessary, rewrite the base of the reference tree *TP from
1265 a MEM_REF to a plain or converted symbol. */
1268 maybe_rewrite_mem_ref_base (tree
*tp
, bitmap suitable_for_renaming
)
1272 while (handled_component_p (*tp
))
1273 tp
= &TREE_OPERAND (*tp
, 0);
1274 if (TREE_CODE (*tp
) == MEM_REF
1275 && TREE_CODE (TREE_OPERAND (*tp
, 0)) == ADDR_EXPR
1276 && (sym
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0))
1278 && !TREE_ADDRESSABLE (sym
)
1279 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1281 if (TREE_CODE (TREE_TYPE (sym
)) == VECTOR_TYPE
1282 && useless_type_conversion_p (TREE_TYPE (*tp
),
1283 TREE_TYPE (TREE_TYPE (sym
)))
1284 && multiple_of_p (sizetype
, TREE_OPERAND (*tp
, 1),
1285 TYPE_SIZE_UNIT (TREE_TYPE (*tp
))))
1287 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1288 TYPE_SIZE (TREE_TYPE (*tp
)),
1289 int_const_binop (MULT_EXPR
,
1290 bitsize_int (BITS_PER_UNIT
),
1291 TREE_OPERAND (*tp
, 1)));
1293 else if (TREE_CODE (TREE_TYPE (sym
)) == COMPLEX_TYPE
1294 && useless_type_conversion_p (TREE_TYPE (*tp
),
1295 TREE_TYPE (TREE_TYPE (sym
))))
1297 *tp
= build1 (integer_zerop (TREE_OPERAND (*tp
, 1))
1298 ? REALPART_EXPR
: IMAGPART_EXPR
,
1299 TREE_TYPE (*tp
), sym
);
1301 else if (integer_zerop (TREE_OPERAND (*tp
, 1)))
1303 if (!useless_type_conversion_p (TREE_TYPE (*tp
),
1305 *tp
= build1 (VIEW_CONVERT_EXPR
,
1306 TREE_TYPE (*tp
), sym
);
1313 /* For a tree REF return its base if it is the base of a MEM_REF
1314 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1317 non_rewritable_mem_ref_base (tree ref
)
1321 /* A plain decl does not need it set. */
1325 while (handled_component_p (base
))
1326 base
= TREE_OPERAND (base
, 0);
1328 /* But watch out for MEM_REFs we cannot lower to a
1329 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1330 if (TREE_CODE (base
) == MEM_REF
1331 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1333 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1334 if ((TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
1335 || TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
)
1336 && useless_type_conversion_p (TREE_TYPE (base
),
1337 TREE_TYPE (TREE_TYPE (decl
)))
1338 && mem_ref_offset (base
).fits_uhwi ()
1339 && tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl
)))
1340 .ugt (mem_ref_offset (base
))
1341 && multiple_of_p (sizetype
, TREE_OPERAND (base
, 1),
1342 TYPE_SIZE_UNIT (TREE_TYPE (base
))))
1345 && (!integer_zerop (TREE_OPERAND (base
, 1))
1346 || (DECL_SIZE (decl
)
1347 != TYPE_SIZE (TREE_TYPE (base
)))
1348 || TREE_THIS_VOLATILE (decl
) != TREE_THIS_VOLATILE (base
)))
1355 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1356 Otherwise return true. */
1359 non_rewritable_lvalue_p (tree lhs
)
1361 /* A plain decl is always rewritable. */
1365 /* A decl that is wrapped inside a MEM-REF that covers
1366 it full is also rewritable.
1367 ??? The following could be relaxed allowing component
1368 references that do not change the access size. */
1369 if (TREE_CODE (lhs
) == MEM_REF
1370 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1371 && integer_zerop (TREE_OPERAND (lhs
, 1)))
1373 tree decl
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0);
1375 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (lhs
))
1376 && (TREE_THIS_VOLATILE (decl
) == TREE_THIS_VOLATILE (lhs
)))
1383 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1384 mark the variable VAR for conversion into SSA. Return true when updating
1385 stmts is required. */
1388 maybe_optimize_var (tree var
, bitmap addresses_taken
, bitmap not_reg_needs
,
1389 bitmap suitable_for_renaming
)
1391 /* Global Variables, result decls cannot be changed. */
1392 if (is_global_var (var
)
1393 || TREE_CODE (var
) == RESULT_DECL
1394 || bitmap_bit_p (addresses_taken
, DECL_UID (var
)))
1397 if (TREE_ADDRESSABLE (var
)
1398 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1399 a non-register. Otherwise we are confused and forget to
1400 add virtual operands for it. */
1401 && (!is_gimple_reg_type (TREE_TYPE (var
))
1402 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
1403 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1404 || !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))))
1406 TREE_ADDRESSABLE (var
) = 0;
1407 if (is_gimple_reg (var
))
1408 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
1411 fprintf (dump_file
, "No longer having address taken: ");
1412 print_generic_expr (dump_file
, var
, 0);
1413 fprintf (dump_file
, "\n");
1417 if (!DECL_GIMPLE_REG_P (var
)
1418 && !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))
1419 && (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1420 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
1421 && !TREE_THIS_VOLATILE (var
)
1422 && (TREE_CODE (var
) != VAR_DECL
|| !DECL_HARD_REGISTER (var
)))
1424 DECL_GIMPLE_REG_P (var
) = 1;
1425 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
1428 fprintf (dump_file
, "Now a gimple register: ");
1429 print_generic_expr (dump_file
, var
, 0);
1430 fprintf (dump_file
, "\n");
1435 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1438 execute_update_addresses_taken (void)
1440 gimple_stmt_iterator gsi
;
1442 bitmap addresses_taken
= BITMAP_ALLOC (NULL
);
1443 bitmap not_reg_needs
= BITMAP_ALLOC (NULL
);
1444 bitmap suitable_for_renaming
= BITMAP_ALLOC (NULL
);
1448 timevar_push (TV_ADDRESS_TAKEN
);
1450 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1451 the function body. */
1454 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1456 gimple stmt
= gsi_stmt (gsi
);
1457 enum gimple_code code
= gimple_code (stmt
);
1460 /* Note all addresses taken by the stmt. */
1461 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1463 /* If we have a call or an assignment, see if the lhs contains
1464 a local decl that requires not to be a gimple register. */
1465 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
1467 tree lhs
= gimple_get_lhs (stmt
);
1469 && TREE_CODE (lhs
) != SSA_NAME
1470 && non_rewritable_lvalue_p (lhs
))
1472 decl
= get_base_address (lhs
);
1474 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1478 if (gimple_assign_single_p (stmt
))
1480 tree rhs
= gimple_assign_rhs1 (stmt
);
1481 if ((decl
= non_rewritable_mem_ref_base (rhs
)))
1482 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1485 else if (code
== GIMPLE_CALL
)
1487 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1489 tree arg
= gimple_call_arg (stmt
, i
);
1490 if ((decl
= non_rewritable_mem_ref_base (arg
)))
1491 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1495 else if (code
== GIMPLE_ASM
)
1497 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1499 tree link
= gimple_asm_output_op (stmt
, i
);
1500 tree lhs
= TREE_VALUE (link
);
1501 if (TREE_CODE (lhs
) != SSA_NAME
)
1503 decl
= get_base_address (lhs
);
1505 && (non_rewritable_lvalue_p (lhs
)
1506 /* We cannot move required conversions from
1507 the lhs to the rhs in asm statements, so
1508 require we do not need any. */
1509 || !useless_type_conversion_p
1510 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
1511 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1514 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1516 tree link
= gimple_asm_input_op (stmt
, i
);
1517 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
1518 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1523 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1526 gimple phi
= gsi_stmt (gsi
);
1528 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1530 tree op
= PHI_ARG_DEF (phi
, i
), var
;
1531 if (TREE_CODE (op
) == ADDR_EXPR
1532 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
1534 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
1539 /* We cannot iterate over all referenced vars because that can contain
1540 unused vars from BLOCK trees, which causes code generation differences
1542 for (var
= DECL_ARGUMENTS (cfun
->decl
); var
; var
= DECL_CHAIN (var
))
1543 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1544 suitable_for_renaming
);
1546 FOR_EACH_VEC_SAFE_ELT (cfun
->local_decls
, i
, var
)
1547 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1548 suitable_for_renaming
);
1550 /* Operand caches need to be recomputed for operands referencing the updated
1551 variables and operands need to be rewritten to expose bare symbols. */
1552 if (!bitmap_empty_p (suitable_for_renaming
))
1555 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
1557 gimple stmt
= gsi_stmt (gsi
);
1559 /* Re-write TARGET_MEM_REFs of symbols we want to
1560 rewrite into SSA form. */
1561 if (gimple_assign_single_p (stmt
))
1563 tree lhs
= gimple_assign_lhs (stmt
);
1564 tree rhs
, *rhsp
= gimple_assign_rhs1_ptr (stmt
);
1567 /* We shouldn't have any fancy wrapping of
1568 component-refs on the LHS, but look through
1569 VIEW_CONVERT_EXPRs as that is easy. */
1570 while (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
1571 lhs
= TREE_OPERAND (lhs
, 0);
1572 if (TREE_CODE (lhs
) == MEM_REF
1573 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1574 && integer_zerop (TREE_OPERAND (lhs
, 1))
1575 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
1577 && !TREE_ADDRESSABLE (sym
)
1578 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1581 lhs
= gimple_assign_lhs (stmt
);
1583 /* Rewrite the RHS and make sure the resulting assignment
1584 is validly typed. */
1585 maybe_rewrite_mem_ref_base (rhsp
, suitable_for_renaming
);
1586 rhs
= gimple_assign_rhs1 (stmt
);
1587 if (gimple_assign_lhs (stmt
) != lhs
1588 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1590 rhs
= fold_build1 (VIEW_CONVERT_EXPR
,
1591 TREE_TYPE (lhs
), rhs
);
1593 if (gimple_assign_lhs (stmt
) != lhs
)
1594 gimple_assign_set_lhs (stmt
, lhs
);
1596 /* For var ={v} {CLOBBER}; where var lost
1597 TREE_ADDRESSABLE just remove the stmt. */
1599 && TREE_CLOBBER_P (rhs
)
1600 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (lhs
)))
1602 unlink_stmt_vdef (stmt
);
1603 gsi_remove (&gsi
, true);
1604 release_defs (stmt
);
1608 if (gimple_assign_rhs1 (stmt
) != rhs
)
1610 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1611 gimple_assign_set_rhs_from_tree (&gsi
, rhs
);
1615 else if (gimple_code (stmt
) == GIMPLE_CALL
)
1618 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1620 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
1621 maybe_rewrite_mem_ref_base (argp
, suitable_for_renaming
);
1625 else if (gimple_code (stmt
) == GIMPLE_ASM
)
1628 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1630 tree link
= gimple_asm_output_op (stmt
, i
);
1631 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
1632 suitable_for_renaming
);
1634 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1636 tree link
= gimple_asm_input_op (stmt
, i
);
1637 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
1638 suitable_for_renaming
);
1642 else if (gimple_debug_bind_p (stmt
)
1643 && gimple_debug_bind_has_value_p (stmt
))
1645 tree
*valuep
= gimple_debug_bind_get_value_ptr (stmt
);
1647 maybe_rewrite_mem_ref_base (valuep
, suitable_for_renaming
);
1648 decl
= non_rewritable_mem_ref_base (*valuep
);
1650 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (decl
)))
1651 gimple_debug_bind_reset_value (stmt
);
1654 if (gimple_references_memory_p (stmt
)
1655 || is_gimple_debug (stmt
))
1661 /* Update SSA form here, we are called as non-pass as well. */
1662 if (number_of_loops (cfun
) > 1
1663 && loops_state_satisfies_p (LOOP_CLOSED_SSA
))
1664 rewrite_into_loop_closed_ssa (NULL
, TODO_update_ssa
);
1666 update_ssa (TODO_update_ssa
);
1669 BITMAP_FREE (not_reg_needs
);
1670 BITMAP_FREE (addresses_taken
);
1671 BITMAP_FREE (suitable_for_renaming
);
1672 timevar_pop (TV_ADDRESS_TAKEN
);
1677 const pass_data pass_data_update_address_taken
=
1679 GIMPLE_PASS
, /* type */
1680 "addressables", /* name */
1681 OPTGROUP_NONE
, /* optinfo_flags */
1682 false, /* has_gate */
1683 false, /* has_execute */
1684 TV_ADDRESS_TAKEN
, /* tv_id */
1685 PROP_ssa
, /* properties_required */
1686 0, /* properties_provided */
1687 0, /* properties_destroyed */
1688 0, /* todo_flags_start */
1689 TODO_update_address_taken
, /* todo_flags_finish */
1692 class pass_update_address_taken
: public gimple_opt_pass
1695 pass_update_address_taken (gcc::context
*ctxt
)
1696 : gimple_opt_pass (pass_data_update_address_taken
, ctxt
)
1699 /* opt_pass methods: */
1701 }; // class pass_update_address_taken
1706 make_pass_update_address_taken (gcc::context
*ctxt
)
1708 return new pass_update_address_taken (ctxt
);