/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "cfgexpand.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "asan.h"

/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;

/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}

/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}

/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}

/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_empty (void)
{
  if (!edge_var_maps)
    return;

  edge_var_maps->empty ();
}

/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi.phi ();
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}

/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}

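/* Illustrative sketch (not part of the original file): the usual calling
   protocol a CFG transformation follows with the helpers above.  The edge
   and destination block are assumed to be supplied by the caller; only the
   call order is shown, not a real transformation.  */

static ATTRIBUTE_UNUSED edge
example_ssa_edge_redirection (edge edge_to_move, basic_block new_dest)
{
  /* ssa_redirect_edge records the PHI arguments living on EDGE_TO_MOVE in
     edge_var_maps and then redirects the edge.  */
  edge_to_move = ssa_redirect_edge (edge_to_move, new_dest);

  /* Once the new destination has the PHI nodes it needs, the queued
     arguments are replayed onto the redirected edge.  */
  flush_pending_stmts (edge_to_move);
  return edge_to_move;
}
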
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}

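/* Illustrative sketch (not part of the original file): replacing the LHS of
   a GIMPLE_ASSIGN before rewriting its RHS, so that debug bind stmts keep a
   usable value for the old LHS.  STMT is assumed to be a single-rhs
   assignment and NEW_LHS a fresh SSA name supplied by the caller.  */

static ATTRIBUTE_UNUSED void
example_replace_lhs (gimple *stmt, tree new_lhs, tree new_rhs)
{
  /* Preserve debug info referring to the old LHS, then install NEW_LHS.  */
  gimple_replace_ssa_lhs (stmt, new_lhs);
  /* The caller is now free to give STMT a different value.  */
  gimple_assign_set_rhs1 (stmt, new_rhs);
  update_stmt (stmt);
}
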
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}

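/* Illustrative sketch (not part of the original file): deciding whether a
   decl should receive a debug bind at all.  DECL is assumed to be a local
   variable or parameter supplied by the caller.  */

static ATTRIBUTE_UNUSED bool
example_wants_debug_bind (tree decl)
{
  /* target_for_debug_bind returns NULL_TREE for ignored decls, virtual
     operands and non-register types.  */
  return target_for_debug_bind (decl) != NULL_TREE;
}
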
/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             <bb X>:
             v_1 = foo;

             <bb Y>:
             w_2 = v_1 + bar;
             # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover their
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;
      else
        {
          gdebug *def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            SET_DECL_MODE (vexpr, DECL_MODE (value));
          else
            SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}

/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}

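/* Illustrative sketch (not part of the original file): the typical sequence
   a pass performs before deleting a statement so that debug binds keep a
   usable value.  GSI is assumed to point at the statement being removed.  */

static ATTRIBUTE_UNUSED void
example_remove_stmt_keeping_debug_info (gimple_stmt_iterator *gsi)
{
  /* Bind the defs of the doomed statement to debug temps first...  */
  insert_debug_temps_for_defs (gsi);
  /* ... then it is safe to delete the statement itself.  */
  gsi_remove (gsi, true);
}
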
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple *stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple *use_stmt;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}

/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    {
      unsigned to_remove_bit = -1U;
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
        {
          if (to_remove_bit != -1U)
            {
              bitmap_clear_bit (toremove, to_remove_bit);
              to_remove_bit = -1U;
            }

          bool remove_now = true;
          tree var = ssa_name (j);
          gimple *stmt;
          imm_use_iterator uit;

          FOR_EACH_IMM_USE_STMT (stmt, uit, var)
            {
              ssa_op_iter dit;
              def_operand_p def_p;

              /* We can't propagate PHI nodes into debug stmts.  */
              if (gimple_code (stmt) == GIMPLE_PHI
                  || is_gimple_debug (stmt))
                continue;

              /* If we find another definition to remove that uses
                 the one we're looking at, defer the removal of this
                 one, so that it can be propagated into debug stmts
                 after the other is.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
                {
                  tree odef = DEF_FROM_PTR (def_p);

                  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                    {
                      remove_now = false;
                      break;
                    }
                }

              if (!remove_now)
                BREAK_FROM_IMM_USE_STMT (uit);
            }

          if (remove_now)
            {
              gimple *def = SSA_NAME_DEF_STMT (var);
              gimple_stmt_iterator gsi = gsi_for_stmt (def);

              if (gimple_code (def) == GIMPLE_PHI)
                remove_phi_node (&gsi, true);
              else
                {
                  gsi_remove (&gsi, true);
                  release_defs (def);
                }

              to_remove_bit = j;
            }
        }
      if (to_remove_bit != -1U)
        bitmap_clear_bit (toremove, to_remove_bit);
    }
}

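/* Illustrative sketch (not part of the original file): collecting SSA
   versions a pass has proven dead and releasing them in one go.  The two
   SSA names are assumptions standing in for whatever the caller collected.  */

static ATTRIBUTE_UNUSED void
example_release_dead_names (tree dead_name1, tree dead_name2)
{
  bitmap to_remove = BITMAP_ALLOC (NULL);
  bitmap_set_bit (to_remove, SSA_NAME_VERSION (dead_name1));
  bitmap_set_bit (to_remove, SSA_NAME_VERSION (dead_name2));
  /* release_defs_bitset removes the defining statements, handling
     dependent defs in an order that still lets their values be
     propagated into debug bind stmts.  */
  release_defs_bitset (to_remove);
  BITMAP_FREE (to_remove);
}
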
/* Verify virtual SSA form.  */

static bool
verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
{
  bool err = false;

  if (bitmap_bit_p (visited, bb->index))
    return false;

  bitmap_set_bit (visited, bb->index);

  /* Pick up the single virtual PHI def.  */
  gphi *phi = NULL;
  for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
       gsi_next (&si))
    {
      tree res = gimple_phi_result (si.phi ());
      if (virtual_operand_p (res))
        {
          if (phi)
            {
              error ("multiple virtual PHI nodes in BB %d", bb->index);
              print_gimple_stmt (stderr, phi, 0, 0);
              print_gimple_stmt (stderr, si.phi (), 0, 0);
              err = true;
            }
          else
            phi = si.phi ();
        }
    }
  if (phi)
    {
      current_vdef = gimple_phi_result (phi);
      if (TREE_CODE (current_vdef) != SSA_NAME)
        {
          error ("virtual definition is not an SSA name");
          print_gimple_stmt (stderr, phi, 0, 0);
          err = true;
        }
    }

  /* Verify stmts.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree vuse = gimple_vuse (stmt);
      if (vuse)
        {
          if (vuse != current_vdef)
            {
              error ("stmt with wrong VUSE");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              fprintf (stderr, "expected ");
              print_generic_expr (stderr, current_vdef, 0);
              fprintf (stderr, "\n");
              err = true;
            }
          tree vdef = gimple_vdef (stmt);
          if (vdef)
            {
              current_vdef = vdef;
              if (TREE_CODE (current_vdef) != SSA_NAME)
                {
                  error ("virtual definition is not an SSA name");
                  print_gimple_stmt (stderr, phi, 0, 0);
                  err = true;
                }
            }
        }
    }

  /* Verify destination PHI uses and recurse.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      gphi *phi = get_virtual_phi (e->dest);
      if (phi
          && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
        {
          error ("PHI node with wrong VUSE on edge from BB %d",
                 e->src->index);
          print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
          fprintf (stderr, "expected ");
          print_generic_expr (stderr, current_vdef, 0);
          fprintf (stderr, "\n");
          err = true;
        }

      /* Recurse.  */
      err |= verify_vssa (e->dest, current_vdef, visited);
    }

  return err;
}

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}

/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}

/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}

/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          /* PHI nodes may not have NULL arguments.  */
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index, e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}

/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

    {
      /* Keep track of SSA names present in the IL.  */
      size_t i;
      tree name;
      hash_map <void *, tree> ssa_info;

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          gimple *stmt;
          TREE_VISITED (name) = 0;

          verify_ssa_name (name, virtual_operand_p (name));

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              if (verify_def (bb, definition_block,
                              name, stmt, virtual_operand_p (name)))
                goto err;
            }

          void *info = NULL;
          if (POINTER_TYPE_P (TREE_TYPE (name)))
            info = SSA_NAME_PTR_INFO (name);
          else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
            info = SSA_NAME_RANGE_INFO (name);
          if (info)
            {
              bool existed;
              tree &val = ssa_info.get_or_insert (info, &existed);
              if (existed)
                {
                  error ("shared SSA name info");
                  print_generic_expr (stderr, val, 0);
                  fprintf (stderr, " and ");
                  print_generic_expr (stderr, name, 0);
                  fprintf (stderr, "\n");
                  goto err;
                }
              else
                val = name;
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *) stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
            {
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (gimple_debug_bind_p (stmt)
              && !gimple_debug_bind_has_value_p (stmt))
            continue;

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                       ssa_default_def (cfun, gimple_vop (cfun)), visited))
        goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}

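/* Illustrative sketch (not part of the original file): how a pass would
   typically request SSA verification in checking builds only.  */

static ATTRIBUTE_UNUSED void
example_checking_verify_ssa (void)
{
  if (flag_checking)
    verify_ssa (true, true);
}
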
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}

/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (struct function *fn)
{
  fini_ssanames (fn);

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

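/* Illustrative sketch (not part of the original file): looking through
   useless conversions before inspecting an operand.  EXPR is assumed to be
   an expression supplied by the caller.  */

static ATTRIBUTE_UNUSED bool
example_is_ssa_name_behind_conversions (tree expr)
{
  expr = tree_ssa_strip_useless_type_conversions (expr);
  return TREE_CODE (expr) == SSA_NAME;
}
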
/* Return true if T, an SSA_NAME, has an implicit default defined value.  */

bool
ssa_defined_default_def_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return true;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return true;
  /* Hard register variables get their initial value from the ether.  */
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    return true;

  return false;
}


/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;

  if (ssa_defined_default_def_p (t))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check if the complex was not only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
             || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}


/* Return TRUE iff STMT, a gimple statement, references an undefined
   SSA_NAME.  */

bool
gimple_uses_undefined_value_p (gimple *stmt)
{
  ssa_op_iter iter;
  tree op;

  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (ssa_undefined_value_p (op))
      return true;

  return false;
}

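/* Illustrative sketch (not part of the original file): a client that wants
   to treat partially undefined complex values as undefined.  OP is assumed
   to be an operand supplied by the caller.  */

static ATTRIBUTE_UNUSED bool
example_operand_may_be_uninitialized (tree op)
{
  /* Passing true for PARTIAL also flags a COMPLEX_EXPR whose real or
     imaginary part is undefined.  */
  return TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op, true);
}
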
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
               && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
      else if (DECL_SIZE (sym)
               && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
               && mem_ref_offset (*tp) >= 0
               && wi::leu_p (mem_ref_offset (*tp)
                             + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
                             wi::to_offset (DECL_SIZE_UNIT (sym)))
               && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
                   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
                       == TYPE_PRECISION (TREE_TYPE (*tp))))
               && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
                                  BITS_PER_UNIT) == 0)
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (*tp)
                                          << LOG2_BITS_PER_UNIT));
        }
    }
}

/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
        return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
        return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
          || VOID_TYPE_P (TREE_TYPE (base)))
        return decl;
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && wi::fits_uhwi_p (mem_ref_offset (base))
          && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                        mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
        return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
          && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
          && mem_ref_offset (base) >= 0
          && wi::leu_p (mem_ref_offset (base)
                        + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
                        wi::to_offset (DECL_SIZE_UNIT (decl)))
          /* ??? We can't handle bitfield precision extracts without
             either using an alternate type for the BIT_FIELD_REF and
             then doing a conversion or possibly adjusting the offset
             according to endianness.  */
          && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
              || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
                  == TYPE_PRECISION (TREE_TYPE (base))))
          && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
                             BITS_PER_UNIT) == 0)
        return NULL_TREE;
      return decl;
    }

  return NULL_TREE;
}

/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
         it full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
          && DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          /* If the dynamic type of the decl has larger precision than
             the decl itself we can't use the decls type for SSA rewriting.  */
          && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
               || compare_tree_int (DECL_SIZE (decl),
                                    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
              || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                  && (TYPE_PRECISION (TREE_TYPE (decl))
                      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
          /* Make sure we are not re-writing non-float copying into float
             copying as that can incur normalization.  */
          && (! FLOAT_TYPE_P (TREE_TYPE (decl))
              || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
         using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
          && VECTOR_TYPE_P (TREE_TYPE (decl))
          && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
          && types_compatible_p (TREE_TYPE (lhs),
                                 TREE_TYPE (TREE_TYPE (decl)))
          && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
          && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                              TYPE_SIZE_UNIT (TREE_TYPE (decl)))
          && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
              % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
        return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
                             TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
          % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}

/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables that become SSA
   candidates are recorded in SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register.  Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }
}

/* Return true when STMT is ASAN mark where second argument is an address
   of a local variable.  */

static bool
is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
                            DECL_ATTRIBUTES (var)))
        return false;

      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}

/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          if (code == GIMPLE_CALL)
            {
              if (optimize_atomic_compare_exchange_p (stmt))
                {
                  /* For __atomic_compare_exchange_N if the second argument
                     is &var, don't mark var addressable;
                     if it becomes non-addressable, we'll rewrite it into
                     ATOMIC_COMPARE_EXCHANGE call.  */
                  tree arg = gimple_call_arg (stmt, 1);
                  gimple_call_set_arg (stmt, 1, null_pointer_node);
                  gimple_ior_addresses_taken (addresses_taken, stmt);
                  gimple_call_set_arg (stmt, 1, arg);
                }
              else if (is_asan_mark_p (stmt))
                ;
              else
                gimple_ior_addresses_taken (addresses_taken, stmt);
            }
          else
            /* Note all addresses taken by the stmt.  */
            gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
                      || non_rewritable_lvalue_p (lhs)))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              gasm *asm_stmt = as_a <gasm *> (stmt);
              for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_output_op (asm_stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_input_op (asm_stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          size_t i;
          gphi *phi = gsi.phi ();

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
        for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* Rewrite LHS IMAG/REALPART_EXPR similar to
                   gimplify_modify_expr_complex_part.  */
                if ((TREE_CODE (lhs) == IMAGPART_EXPR
                     || TREE_CODE (lhs) == REALPART_EXPR)
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0))))
                  {
                    tree other = make_ssa_name (TREE_TYPE (lhs));
                    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
                                        ? REALPART_EXPR : IMAGPART_EXPR,
                                        TREE_TYPE (other),
                                        TREE_OPERAND (lhs, 0));
                    gimple *load = gimple_build_assign (other, lrhs);
                    location_t loc = gimple_location (stmt);
                    gimple_set_location (load, loc);
                    gimple_set_vuse (load, gimple_vuse (stmt));
                    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
                    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
                    gimple_assign_set_rhs_with_ops
                      (&gsi, COMPLEX_EXPR,
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? other : gimple_assign_rhs1 (stmt),
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == BIT_FIELD_REF
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0)))
                    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
                    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
                    && types_compatible_p (TREE_TYPE (lhs),
                                           TREE_TYPE (TREE_TYPE
                                                        (TREE_OPERAND (lhs, 0))))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
                        % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
                  {
                    tree var = TREE_OPERAND (lhs, 0);
                    tree val = gimple_assign_rhs1 (stmt);
                    tree bitpos = TREE_OPERAND (lhs, 2);
                    gimple_assign_set_lhs (stmt, var);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* Rewrite a vector insert using a MEM_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
                    && VECTOR_TYPE_P (TREE_TYPE (sym))
                    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
                    && types_compatible_p (TREE_TYPE (lhs),
                                           TREE_TYPE (TREE_TYPE (sym)))
                    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
                    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                                        TYPE_SIZE_UNIT (TREE_TYPE (sym)))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
                        % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
                  {
                    tree val = gimple_assign_rhs1 (stmt);
                    tree bitpos
                      = wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (lhs) * BITS_PER_UNIT);
                    gimple_assign_set_lhs (stmt, sym);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  {
                    if (gimple_clobber_p (stmt))
                      {
                        rhs = build_constructor (TREE_TYPE (lhs), NULL);
                        TREE_THIS_VOLATILE (rhs) = 1;
                      }
                    else
                      rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                         TREE_TYPE (lhs), rhs);
                  }
                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                unsigned i;
                if (optimize_atomic_compare_exchange_p (stmt))
                  {
                    tree expected = gimple_call_arg (stmt, 1);
                    if (bitmap_bit_p (suitable_for_renaming,
                                      DECL_UID (TREE_OPERAND (expected, 0))))
                      {
                        fold_builtin_atomic_compare_exchange (&gsi);
                        continue;
                      }
                  }
                else if (is_asan_mark_p (stmt))
                  {
                    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
                    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
                      {
                        unlink_stmt_vdef (stmt);
                        if (asan_mark_p (stmt, ASAN_MARK_POISON))
                          {
                            gcall *call
                              = gimple_build_call_internal (IFN_ASAN_POISON, 0);
                            gimple_call_set_lhs (call, var);
                            gsi_replace (&gsi, call, GSI_SAME_STMT);
                          }
                        else
                          {
                            /* In ASAN_MARK (UNPOISON, &b, ...) the variable
                               is uninitialized.  Avoid dependencies on
                               previous out of scope value.  */
                            tree clobber
                              = build_constructor (TREE_TYPE (var), NULL);
                            TREE_THIS_VOLATILE (clobber) = 1;
                            gimple *g = gimple_build_assign (var, clobber);
                            gsi_replace (&gsi, g, GSI_SAME_STMT);
                          }
                        continue;
                      }
                  }
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                unsigned i;
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}

namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}