/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "cfgexpand.h"

/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}
/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}
/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous
   mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}
/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}
/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_empty (void)
{
  if (edge_var_maps)
    edge_var_maps->empty ();
}
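
/* Note on lifetime: EDGE_VAR_MAPS is created lazily by
   redirect_edge_var_map_add, individual edges are dropped with
   redirect_edge_var_map_clear, and the whole table is flushed by
   redirect_edge_var_map_empty (called from delete_tree_ssa below),
   so recorded mappings do not outlive the SSA data of the function.  */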
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi.phi ();
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
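
/* Illustrative only (not part of this file): the two functions above are
   normally used as a pair.  A caller redirecting an edge in SSA form does
   roughly

     e = ssa_redirect_edge (e, new_dest);   // queues removed PHI arguments
     ... create or reuse PHI nodes in NEW_DEST ...
     flush_pending_stmts (e);               // replays them as new PHI args

   so the PHI argument/location pairs recorded by redirect_edge_var_map_add
   survive the redirection.  */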
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
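
/* A rough sketch of the intended effect (hypothetical IL, for
   illustration only): given

     x_1 = a_2 + b_3;
     # DEBUG x => x_1

   calling gimple_replace_ssa_lhs on the assignment first rebinds the
   debug annotation to the old RHS (or to a debug temp for it), so that
   modifying the statement's RHS afterwards does not silently change
   what the DEBUG bind describes.  */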
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             <bb X>:
             v_1 = foo;

             <bb Y>:
             w_2 = v_1 + bar;
             # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover their
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }
  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;
      else
        {
          gdebug *def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            SET_DECL_MODE (vexpr, DECL_MODE (value));
          else
            SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
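
/* A rough picture of the transformation above (hypothetical IL, for
   illustration only): if x_1 is about to lose its definition and is
   referenced by several debug binds, a debug temp

     # DEBUG D#1 => <x_1's RHS>

   is inserted before the definition and the binds are rewritten from

     # DEBUG a => x_1        to        # DEBUG a => D#1

   When propagation is safe and cheap (constants, a single debug use,
   plain registers), the value is substituted directly and no debug
   temp is created.  */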
/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple *stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple *use_stmt;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}
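
/* Note the contrast with insert_debug_temp_for_var_def above: this
   function makes no attempt to preserve the bound values.  Every debug
   bind that uses a name defined by STMT simply has its value dropped
   (gimple_debug_bind_reset_value), which is the conservative choice for
   callers that are about to change STMT in a way that would make the
   bound expressions wrong.  */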
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    {
      unsigned to_remove_bit = -1U;
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
        {
          if (to_remove_bit != -1U)
            {
              bitmap_clear_bit (toremove, to_remove_bit);
              to_remove_bit = -1U;
            }

          bool remove_now = true;
          tree var = ssa_name (j);
          gimple *stmt;
          imm_use_iterator uit;

          FOR_EACH_IMM_USE_STMT (stmt, uit, var)
            {
              ssa_op_iter dit;
              def_operand_p def_p;

              /* We can't propagate PHI nodes into debug stmts.  */
              if (gimple_code (stmt) == GIMPLE_PHI
                  || is_gimple_debug (stmt))
                continue;

              /* If we find another definition to remove that uses
                 the one we're looking at, defer the removal of this
                 one, so that it can be propagated into debug stmts
                 after the other is.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
                {
                  tree odef = DEF_FROM_PTR (def_p);

                  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                    {
                      remove_now = false;
                      break;
                    }
                }

              if (!remove_now)
                BREAK_FROM_IMM_USE_STMT (uit);
            }

          if (remove_now)
            {
              gimple *def = SSA_NAME_DEF_STMT (var);
              gimple_stmt_iterator gsi = gsi_for_stmt (def);

              if (gimple_code (def) == GIMPLE_PHI)
                remove_phi_node (&gsi, true);
              else
                {
                  gsi_remove (&gsi, true);
                  release_defs (def);
                }

              to_remove_bit = j;
            }
        }
      if (to_remove_bit != -1U)
        bitmap_clear_bit (toremove, to_remove_bit);
    }
}
/* Verify virtual SSA form.  */

static bool
verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
{
  bool err = false;

  if (bitmap_bit_p (visited, bb->index))
    return false;

  bitmap_set_bit (visited, bb->index);

  /* Pick up the single virtual PHI def.  */
  gphi *phi = NULL;
  for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
       gsi_next (&si))
    {
      tree res = gimple_phi_result (si.phi ());
      if (virtual_operand_p (res))
        {
          if (phi)
            {
              error ("multiple virtual PHI nodes in BB %d", bb->index);
              print_gimple_stmt (stderr, phi, 0, 0);
              print_gimple_stmt (stderr, si.phi (), 0, 0);
              err = true;
            }
          else
            phi = si.phi ();
        }
    }
  if (phi)
    {
      current_vdef = gimple_phi_result (phi);
      if (TREE_CODE (current_vdef) != SSA_NAME)
        {
          error ("virtual definition is not an SSA name");
          print_gimple_stmt (stderr, phi, 0, 0);
          err = true;
        }
    }

  /* Verify stmts.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree vuse = gimple_vuse (stmt);
      if (vuse)
        {
          if (vuse != current_vdef)
            {
              error ("stmt with wrong VUSE");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              fprintf (stderr, "expected ");
              print_generic_expr (stderr, current_vdef, 0);
              fprintf (stderr, "\n");
              err = true;
            }
          tree vdef = gimple_vdef (stmt);
          if (vdef)
            {
              current_vdef = vdef;
              if (TREE_CODE (current_vdef) != SSA_NAME)
                {
                  error ("virtual definition is not an SSA name");
                  print_gimple_stmt (stderr, phi, 0, 0);
                  err = true;
                }
            }
        }
    }

  /* Verify destination PHI uses and recurse.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      gphi *phi = get_virtual_phi (e->dest);
      if (phi
          && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
        {
          error ("PHI node with wrong VUSE on edge from BB %d",
                 e->src->index);
          print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
          fprintf (stderr, "expected ");
          print_generic_expr (stderr, current_vdef, 0);
          fprintf (stderr, "\n");
          err = true;
        }

      /* Recurse.  */
      err |= verify_vssa (e->dest, current_vdef, visited);
    }

  return err;
}
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index, e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

    {
      /* Keep track of SSA names present in the IL.  */
      size_t i;
      tree name;
      hash_map <void *, tree> ssa_info;

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          gimple *stmt;
          TREE_VISITED (name) = 0;

          verify_ssa_name (name, virtual_operand_p (name));

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              if (verify_def (bb, definition_block,
                              name, stmt, virtual_operand_p (name)))
                goto err;
            }

          void *info = NULL;
          if (POINTER_TYPE_P (TREE_TYPE (name)))
            info = SSA_NAME_PTR_INFO (name);
          else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
            info = SSA_NAME_RANGE_INFO (name);
          if (info)
            {
              bool existed;
              tree &val = ssa_info.get_or_insert (info, &existed);
              if (existed)
                {
                  error ("shared SSA name info");
                  print_generic_expr (stderr, val, 0);
                  fprintf (stderr, " and ");
                  print_generic_expr (stderr, name, 0);
                  fprintf (stderr, "\n");
                  goto err;
                }
              else
                val = name;
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }
1108 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1111 gimple
*stmt
= gsi_stmt (gsi
);
1112 use_operand_p use_p
;
1114 if (check_modified_stmt
&& gimple_modified_p (stmt
))
1116 error ("stmt (%p) marked modified after optimization pass: ",
1118 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1122 if (check_ssa_operands
&& verify_ssa_operands (cfun
, stmt
))
1124 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1128 if (gimple_debug_bind_p (stmt
)
1129 && !gimple_debug_bind_has_value_p (stmt
))
1132 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1134 op
= USE_FROM_PTR (use_p
);
1135 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1136 use_p
, stmt
, false, names_defined_in_bb
))
1140 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1142 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1144 error ("SSA_NAME_DEF_STMT is wrong");
1145 fprintf (stderr
, "Expected definition statement:\n");
1146 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1147 fprintf (stderr
, "\nActual definition statement:\n");
1148 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1152 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1156 bitmap_clear (names_defined_in_bb
);
1159 free (definition_block
);
1161 if (gimple_vop (cfun
)
1162 && ssa_default_def (cfun
, gimple_vop (cfun
)))
1164 auto_sbitmap
visited (last_basic_block_for_fn (cfun
) + 1);
1165 bitmap_clear (visited
);
1166 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
1167 ssa_default_def (cfun
, gimple_vop (cfun
)), visited
))
1171 /* Restore the dominance information to its prior known state, so
1172 that we do not perturb the compiler's subsequent behavior. */
1173 if (orig_dom_state
== DOM_NONE
)
1174 free_dominance_info (CDI_DOMINATORS
);
1176 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1178 BITMAP_FREE (names_defined_in_bb
);
1179 timevar_pop (TV_TREE_SSA_VERIFY
);
1183 internal_error ("verify_ssa failed");
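
/* Usage note (not prescriptive): like the other verifiers in this file,
   verify_ssa reports problems via error ()/fprintf to stderr and then
   stops compilation through internal_error, so it is intended for
   checking and debugging builds rather than for normal operation.  */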
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
/* Deallocate memory associated with SSA data structures for FN.  */

void
delete_tree_ssa (struct function *fn)
{
  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}
/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
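
/* For illustration (not an exhaustive rule): a conversion is "useless"
   when useless_type_conversion_p considers the outer and inner types
   interchangeable for code generation, e.g. a cast between two integer
   types of the same precision and signedness, or between compatible
   pointer types; such wrappers are peeled off here so the bare operand
   can be propagated.  */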
/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return false;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return false;
  /* Hard register variables get their initial value from the ether.  */
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check if the complex was not only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
             || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }

  return false;
}
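
/* A small example (hypothetical GIMPLE, for illustration only):

     int x;               // no initializer
     ...
     y_2 = x_1(D) + 1;

   Here x_1(D) is a default definition whose defining statement is a
   GIMPLE_NOP, and its underlying variable is neither a PARM_DECL, a
   by-reference RESULT_DECL, nor a hard register, so
   ssa_undefined_value_p returns true for it.  */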
/* Return TRUE iff STMT, a gimple statement, references an undefined
   SSA name.  */

bool
gimple_uses_undefined_value_p (gimple *stmt)
{
  ssa_op_iter iter;
  tree op;

  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (ssa_undefined_value_p (op))
      return true;

  return false;
}
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
               && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
      else if (DECL_SIZE (sym)
               && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
               && mem_ref_offset (*tp) >= 0
               && wi::leu_p (mem_ref_offset (*tp)
                             + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
                             wi::to_offset (DECL_SIZE_UNIT (sym)))
               && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
                   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
                       == TYPE_PRECISION (TREE_TYPE (*tp))))
               && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
                                  BITS_PER_UNIT) == 0)
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (*tp)
                                          << LOG2_BITS_PER_UNIT));
        }
    }
}
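
/* Roughly speaking (illustration only), for a variable v that is about
   to be rewritten into SSA form this turns references like

     MEM[(T *)&v, 0]

   into plain  v  (or VIEW_CONVERT_EXPR <T> (v) when the types differ
   but the sizes match), and offsetted accesses into BIT_FIELD_REFs or
   REALPART_EXPR/IMAGPART_EXPR for complex decls, so that no address of
   v remains in the IL.  */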
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
        return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
        return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
          || VOID_TYPE_P (TREE_TYPE (base)))
        return decl;
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && wi::fits_uhwi_p (mem_ref_offset (base))
          && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                        mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
        return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
          && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
          && mem_ref_offset (base) >= 0
          && wi::leu_p (mem_ref_offset (base)
                        + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
                        wi::to_offset (DECL_SIZE_UNIT (decl)))
          /* ???  We can't handle bitfield precision extracts without
             either using an alternate type for the BIT_FIELD_REF and
             then doing a conversion or possibly adjusting the offset
             according to endianness.  */
          && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
              || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
                  == TYPE_PRECISION (TREE_TYPE (base))))
          && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
                             BITS_PER_UNIT) == 0)
        return NULL_TREE;
      return decl;
    }

  return NULL_TREE;
}
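
/* Example of a base this function flags (illustration only): given a
   scalar decl v, an access such as  MEM[(struct S *)&v, 0]  has an
   aggregate (non-register) type, so none of the BIT_FIELD_REF /
   VIEW_CONVERT_EXPR lowerings above apply; V is returned and the
   caller will keep it addressable rather than rewriting it into SSA
   form.  */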
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
         it full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
          && DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          /* If the dynamic type of the decl has larger precision than
             the decl itself we can't use the decls type for SSA rewriting.  */
          && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
               || compare_tree_int (DECL_SIZE (decl),
                                    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
              || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                  && (TYPE_PRECISION (TREE_TYPE (decl))
                      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
          /* Make sure we are not re-writing non-float copying into float
             copying as that can incur normalization.  */
          && (! FLOAT_TYPE_P (TREE_TYPE (decl))
              || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
         using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
          && VECTOR_TYPE_P (TREE_TYPE (decl))
          && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
          && types_compatible_p (TREE_TYPE (lhs),
                                 TREE_TYPE (TREE_TYPE (decl)))
          && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
          && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                              TYPE_SIZE_UNIT (TREE_TYPE (decl)))
          && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
              % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
        return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
                             TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
          % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables that become
   suitable for renaming are recorded in SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register.  Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }
}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          if (code == GIMPLE_CALL
              && optimize_atomic_compare_exchange_p (stmt))
            {
              /* For __atomic_compare_exchange_N if the second argument
                 is &var, don't mark var addressable;
                 if it becomes non-addressable, we'll rewrite it into
                 ATOMIC_COMPARE_EXCHANGE call.  */
              tree arg = gimple_call_arg (stmt, 1);
              gimple_call_set_arg (stmt, 1, null_pointer_node);
              gimple_ior_addresses_taken (addresses_taken, stmt);
              gimple_call_set_arg (stmt, 1, arg);
            }
          else
            /* Note all addresses taken by the stmt.  */
            gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
                      || non_rewritable_lvalue_p (lhs)))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
== GIMPLE_ASM
)
1654 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1655 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
1657 tree link
= gimple_asm_output_op (asm_stmt
, i
);
1658 tree lhs
= TREE_VALUE (link
);
1659 if (TREE_CODE (lhs
) != SSA_NAME
)
1661 decl
= get_base_address (lhs
);
1663 && (non_rewritable_lvalue_p (lhs
)
1664 /* We cannot move required conversions from
1665 the lhs to the rhs in asm statements, so
1666 require we do not need any. */
1667 || !useless_type_conversion_p
1668 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
1669 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1672 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
1674 tree link
= gimple_asm_input_op (asm_stmt
, i
);
1675 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
1676 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1681 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
1685 gphi
*phi
= gsi
.phi ();
1687 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1689 tree op
= PHI_ARG_DEF (phi
, i
), var
;
1690 if (TREE_CODE (op
) == ADDR_EXPR
1691 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
1693 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);
  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
        for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* Rewrite LHS IMAG/REALPART_EXPR similar to
                   gimplify_modify_expr_complex_part.  */
                if ((TREE_CODE (lhs) == IMAGPART_EXPR
                     || TREE_CODE (lhs) == REALPART_EXPR)
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0))))
                  {
                    tree other = make_ssa_name (TREE_TYPE (lhs));
                    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
                                        ? REALPART_EXPR : IMAGPART_EXPR,
                                        TREE_TYPE (other),
                                        TREE_OPERAND (lhs, 0));
                    gimple *load = gimple_build_assign (other, lrhs);
                    location_t loc = gimple_location (stmt);
                    gimple_set_location (load, loc);
                    gimple_set_vuse (load, gimple_vuse (stmt));
                    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
                    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
                    gimple_assign_set_rhs_with_ops
                      (&gsi, COMPLEX_EXPR,
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? other : gimple_assign_rhs1 (stmt),
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }
                /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == BIT_FIELD_REF
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0)))
                    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
                    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
                    && types_compatible_p (TREE_TYPE (lhs),
                                           TREE_TYPE (TREE_TYPE
                                                       (TREE_OPERAND (lhs, 0))))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
                        % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
                  {
                    tree var = TREE_OPERAND (lhs, 0);
                    tree val = gimple_assign_rhs1 (stmt);
                    tree bitpos = TREE_OPERAND (lhs, 2);
                    gimple_assign_set_lhs (stmt, var);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* Rewrite a vector insert using a MEM_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
                    && VECTOR_TYPE_P (TREE_TYPE (sym))
                    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
                    && types_compatible_p (TREE_TYPE (lhs),
                                           TREE_TYPE (TREE_TYPE (sym)))
                    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
                    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                                        TYPE_SIZE_UNIT (TREE_TYPE (sym)))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
                        % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
                  {
                    tree val = gimple_assign_rhs1 (stmt);
                    tree bitpos
                      = wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (lhs) * BITS_PER_UNIT);
                    gimple_assign_set_lhs (stmt, sym);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }
                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  {
                    if (gimple_clobber_p (stmt))
                      {
                        rhs = build_constructor (TREE_TYPE (lhs), NULL);
                        TREE_THIS_VOLATILE (rhs) = 1;
                      }
                    else
                      rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                         TREE_TYPE (lhs), rhs);
                  }
                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }
            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                if (optimize_atomic_compare_exchange_p (stmt))
                  {
                    tree expected = gimple_call_arg (stmt, 1);
                    if (bitmap_bit_p (suitable_for_renaming,
                                      DECL_UID (TREE_OPERAND (expected, 0))))
                      {
                        fold_builtin_atomic_compare_exchange (&gsi);
                        continue;
                      }
                  }
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}