1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
41 #include "cfgexpand.h"
44 #include "stringpool.h"
48 /* Pointer map of variable mappings, keyed by edge. */
49 static hash_map
<edge
, auto_vec
<edge_var_map
> > *edge_var_maps
;
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
55 redirect_edge_var_map_add (edge e
, tree result
, tree def
, location_t locus
)
57 edge_var_map new_node
;
59 if (edge_var_maps
== NULL
)
60 edge_var_maps
= new hash_map
<edge
, auto_vec
<edge_var_map
> >;
62 auto_vec
<edge_var_map
> &slot
= edge_var_maps
->get_or_insert (e
);
64 new_node
.result
= result
;
65 new_node
.locus
= locus
;
67 slot
.safe_push (new_node
);
71 /* Clear the var mappings in edge E. */
74 redirect_edge_var_map_clear (edge e
)
79 auto_vec
<edge_var_map
> *head
= edge_var_maps
->get (e
);
86 /* Duplicate the redirected var mappings in OLDE in NEWE.
88 This assumes a hash_map can have multiple edges mapping to the same
89 var_map (many to one mapping), since we don't remove the previous mappings.
93 redirect_edge_var_map_dup (edge newe
, edge olde
)
98 auto_vec
<edge_var_map
> *new_head
= &edge_var_maps
->get_or_insert (newe
);
99 auto_vec
<edge_var_map
> *old_head
= edge_var_maps
->get (olde
);
103 new_head
->safe_splice (*old_head
);
107 /* Return the variable mappings for a given edge. If there is none, return
111 redirect_edge_var_map_vector (edge e
)
113 /* Hey, what kind of idiot would... you'd be surprised. */
117 auto_vec
<edge_var_map
> *slot
= edge_var_maps
->get (e
);
124 /* Clear the edge variable mappings. */
127 redirect_edge_var_map_empty (void)
130 edge_var_maps
->empty ();
134 /* Remove the corresponding arguments from the PHI nodes in E's
135 destination block and redirect it to DEST. Return redirected edge.
136 The list of removed arguments is stored in a vector accessed
137 through edge_var_maps. */
140 ssa_redirect_edge (edge e
, basic_block dest
)
145 redirect_edge_var_map_clear (e
);
147 /* Remove the appropriate PHI arguments in E's destination block.
148 If we are redirecting a copied edge the destination has not
149 got PHI argument space reserved nor an interesting argument. */
150 if (! (e
->dest
->flags
& BB_DUPLICATED
))
151 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
157 def
= gimple_phi_arg_def (phi
, e
->dest_idx
);
158 locus
= gimple_phi_arg_location (phi
, e
->dest_idx
);
160 if (def
== NULL_TREE
)
163 redirect_edge_var_map_add (e
, gimple_phi_result (phi
), def
, locus
);
166 e
= redirect_edge_succ_nodup (e
, dest
);
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
176 flush_pending_stmts (edge e
)
183 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (e
);
187 for (gsi
= gsi_start_phis (e
->dest
), i
= 0;
188 !gsi_end_p (gsi
) && v
->iterate (i
, &vm
);
189 gsi_next (&gsi
), i
++)
194 def
= redirect_edge_var_map_def (vm
);
195 add_phi_arg (phi
, def
, e
, redirect_edge_var_map_location (vm
));
198 redirect_edge_var_map_clear (e
);
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203 expression with a different value.
205 This will update any annotations (say debug bind stmts) referring
206 to the original LHS, so that they use the RHS instead. This is
207 done even if NLHS and LHS are the same, for it is understood that
208 the RHS will be modified afterwards, and NLHS will not be assigned
211 Adjusting any non-annotation uses of the LHS, if needed, is a
212 responsibility of the caller.
214 The effect of this call should be pretty much the same as that of
215 inserting a copy of STMT before STMT, and then removing the
216 original stmt, at which time gsi_remove() would have update
217 annotations, but using this function saves all the inserting,
218 copying and removing. */
221 gimple_replace_ssa_lhs (gimple
*stmt
, tree nlhs
)
223 if (MAY_HAVE_DEBUG_BIND_STMTS
)
225 tree lhs
= gimple_get_lhs (stmt
);
227 gcc_assert (SSA_NAME_DEF_STMT (lhs
) == stmt
);
229 insert_debug_temp_for_var_def (NULL
, lhs
);
232 gimple_set_lhs (stmt
, nlhs
);
236 /* Given a tree for an expression for which we might want to emit
237 locations or values in debug information (generally a variable, but
238 we might deal with other kinds of trees in the future), return the
239 tree that should be used as the variable of a DEBUG_BIND STMT or
240 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
243 target_for_debug_bind (tree var
)
245 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
248 if (TREE_CODE (var
) == SSA_NAME
)
250 var
= SSA_NAME_VAR (var
);
251 if (var
== NULL_TREE
)
255 if ((!VAR_P (var
) || VAR_DECL_IS_VIRTUAL_OPERAND (var
))
256 && TREE_CODE (var
) != PARM_DECL
)
259 if (DECL_HAS_VALUE_EXPR_P (var
))
260 return target_for_debug_bind (DECL_VALUE_EXPR (var
));
262 if (DECL_IGNORED_P (var
))
265 /* var-tracking only tracks registers. */
266 if (!is_gimple_reg_type (TREE_TYPE (var
)))
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
276 find_released_ssa_name (tree
*tp
, int *walk_subtrees
, void *data_
)
278 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data_
;
280 if (wi
&& wi
->is_lhs
)
283 if (TREE_CODE (*tp
) == SSA_NAME
)
285 if (SSA_NAME_IN_FREE_LIST (*tp
))
290 else if (IS_TYPE_OR_DECL_P (*tp
))
296 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
297 by other DEBUG stmts, and replace uses of the DEF with the
298 newly-created debug temp. */
301 insert_debug_temp_for_var_def (gimple_stmt_iterator
*gsi
, tree var
)
303 imm_use_iterator imm_iter
;
306 gimple
*def_stmt
= NULL
;
310 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
313 /* If this name has already been registered for replacement, do nothing
314 as anything that uses this name isn't in SSA form. */
315 if (name_registered_for_update_p (var
))
318 /* Check whether there are debug stmts that reference this variable and,
319 if there are, decide whether we should use a debug temp. */
320 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, var
)
322 stmt
= USE_STMT (use_p
);
324 if (!gimple_debug_bind_p (stmt
))
330 if (gimple_debug_bind_get_value (stmt
) != var
)
332 /* Count this as an additional use, so as to make sure we
333 use a temp unless VAR's definition has a SINGLE_RHS that
344 def_stmt
= gsi_stmt (*gsi
);
346 def_stmt
= SSA_NAME_DEF_STMT (var
);
348 /* If we didn't get an insertion point, and the stmt has already
349 been removed, we won't be able to insert the debug bind stmt, so
350 we'll have to drop debug information. */
351 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
353 value
= degenerate_phi_result (as_a
<gphi
*> (def_stmt
));
354 if (value
&& walk_tree (&value
, find_released_ssa_name
, NULL
, NULL
))
356 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
358 else if (value
== error_mark_node
)
361 else if (gimple_clobber_p (def_stmt
))
362 /* We can end up here when rewriting a decl into SSA and coming
363 along a clobber for the original decl. Turn that into
364 # DEBUG decl => NULL */
366 else if (is_gimple_assign (def_stmt
))
368 bool no_value
= false;
370 if (!dom_info_available_p (CDI_DOMINATORS
))
372 struct walk_stmt_info wi
;
374 memset (&wi
, 0, sizeof (wi
));
376 /* When removing blocks without following reverse dominance
377 order, we may sometimes encounter SSA_NAMEs that have
378 already been released, referenced in other SSA_DEFs that
379 we're about to release. Consider:
388 If we deleted BB X first, propagating the value of w_2
389 won't do us any good. It's too late to recover their
390 original definition of v_1: when it was deleted, it was
391 only referenced in other DEFs, it couldn't possibly know
392 it should have been retained, and propagating every
393 single DEF just in case it might have to be propagated
394 into a DEBUG STMT would probably be too wasteful.
396 When dominator information is not readily available, we
397 check for and accept some loss of debug information. But
398 if it is available, there's no excuse for us to remove
399 blocks in the wrong order, so we don't even check for
400 dead SSA NAMEs. SSA verification shall catch any
402 if ((!gsi
&& !gimple_bb (def_stmt
))
403 || walk_gimple_op (def_stmt
, find_released_ssa_name
, &wi
))
408 value
= gimple_assign_rhs_to_tree (def_stmt
);
413 /* If there's a single use of VAR, and VAR is the entire debug
414 expression (usecount would have been incremented again
415 otherwise), and the definition involves only constants and
416 SSA names, then we can propagate VALUE into this single use,
419 We can also avoid using a temp if VALUE can be shared and
420 propagated into all uses, without generating expressions that
421 wouldn't be valid gimple RHSs.
423 Other cases that would require unsharing or non-gimple RHSs
424 are deferred to a debug temp, although we could avoid temps
425 at the expense of duplication of expressions. */
427 if (CONSTANT_CLASS_P (value
)
428 || gimple_code (def_stmt
) == GIMPLE_PHI
430 && (!gimple_assign_single_p (def_stmt
)
431 || is_gimple_min_invariant (value
)))
432 || is_gimple_reg (value
))
437 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
439 def_temp
= gimple_build_debug_bind (vexpr
,
440 unshare_expr (value
),
443 DECL_ARTIFICIAL (vexpr
) = 1;
444 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
446 SET_DECL_MODE (vexpr
, DECL_MODE (value
));
448 SET_DECL_MODE (vexpr
, TYPE_MODE (TREE_TYPE (value
)));
451 gsi_insert_before (gsi
, def_temp
, GSI_SAME_STMT
);
454 gimple_stmt_iterator ngsi
= gsi_for_stmt (def_stmt
);
455 gsi_insert_before (&ngsi
, def_temp
, GSI_SAME_STMT
);
462 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, var
)
464 if (!gimple_debug_bind_p (stmt
))
469 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
470 /* unshare_expr is not needed here. vexpr is either a
471 SINGLE_RHS, that can be safely shared, some other RHS
472 that was unshared when we found it had a single debug
473 use, or a DEBUG_EXPR_DECL, that can be safely
475 SET_USE (use_p
, unshare_expr (value
));
476 /* If we didn't replace uses with a debug decl fold the
477 resulting expression. Otherwise we end up with invalid IL. */
478 if (TREE_CODE (value
) != DEBUG_EXPR_DECL
)
480 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
481 fold_stmt_inplace (&gsi
);
485 gimple_debug_bind_reset_value (stmt
);
492 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
493 other DEBUG stmts, and replace uses of the DEF with the
494 newly-created debug temp. */
497 insert_debug_temps_for_defs (gimple_stmt_iterator
*gsi
)
503 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
506 stmt
= gsi_stmt (*gsi
);
508 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
510 tree var
= DEF_FROM_PTR (def_p
);
512 if (TREE_CODE (var
) != SSA_NAME
)
515 insert_debug_temp_for_var_def (gsi
, var
);
519 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
522 reset_debug_uses (gimple
*stmt
)
526 imm_use_iterator imm_iter
;
529 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
532 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
534 tree var
= DEF_FROM_PTR (def_p
);
536 if (TREE_CODE (var
) != SSA_NAME
)
539 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, var
)
541 if (!gimple_debug_bind_p (use_stmt
))
544 gimple_debug_bind_reset_value (use_stmt
);
545 update_stmt (use_stmt
);
550 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
551 dominated stmts before their dominators, so that release_ssa_defs
552 stands a chance of propagating DEFs into debug bind stmts. */
555 release_defs_bitset (bitmap toremove
)
560 /* Performing a topological sort is probably overkill, this will
561 most likely run in slightly superlinear time, rather than the
562 pathological quadratic worst case. */
563 while (!bitmap_empty_p (toremove
))
565 unsigned to_remove_bit
= -1U;
566 EXECUTE_IF_SET_IN_BITMAP (toremove
, 0, j
, bi
)
568 if (to_remove_bit
!= -1U)
570 bitmap_clear_bit (toremove
, to_remove_bit
);
574 bool remove_now
= true;
575 tree var
= ssa_name (j
);
577 imm_use_iterator uit
;
579 FOR_EACH_IMM_USE_STMT (stmt
, uit
, var
)
584 /* We can't propagate PHI nodes into debug stmts. */
585 if (gimple_code (stmt
) == GIMPLE_PHI
586 || is_gimple_debug (stmt
))
589 /* If we find another definition to remove that uses
590 the one we're looking at, defer the removal of this
591 one, so that it can be propagated into debug stmts
592 after the other is. */
593 FOR_EACH_SSA_DEF_OPERAND (def_p
, stmt
, dit
, SSA_OP_DEF
)
595 tree odef
= DEF_FROM_PTR (def_p
);
597 if (bitmap_bit_p (toremove
, SSA_NAME_VERSION (odef
)))
605 BREAK_FROM_IMM_USE_STMT (uit
);
610 gimple
*def
= SSA_NAME_DEF_STMT (var
);
611 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
613 if (gimple_code (def
) == GIMPLE_PHI
)
614 remove_phi_node (&gsi
, true);
617 gsi_remove (&gsi
, true);
624 if (to_remove_bit
!= -1U)
625 bitmap_clear_bit (toremove
, to_remove_bit
);
630 /* Disable warnings about missing quoting in GCC diagnostics for
631 the verification errors. Their format strings don't follow GCC
632 diagnostic conventions and the calls are ultimately followed by
633 one to internal_error. */
635 # pragma GCC diagnostic push
636 # pragma GCC diagnostic ignored "-Wformat-diag"
639 /* Verify virtual SSA form. */
642 verify_vssa (basic_block bb
, tree current_vdef
, sbitmap visited
)
646 if (bitmap_bit_p (visited
, bb
->index
))
649 bitmap_set_bit (visited
, bb
->index
);
651 /* Pick up the single virtual PHI def. */
653 for (gphi_iterator si
= gsi_start_phis (bb
); !gsi_end_p (si
);
656 tree res
= gimple_phi_result (si
.phi ());
657 if (virtual_operand_p (res
))
661 error ("multiple virtual PHI nodes in BB %d", bb
->index
);
662 print_gimple_stmt (stderr
, phi
, 0);
663 print_gimple_stmt (stderr
, si
.phi (), 0);
672 current_vdef
= gimple_phi_result (phi
);
673 if (TREE_CODE (current_vdef
) != SSA_NAME
)
675 error ("virtual definition is not an SSA name");
676 print_gimple_stmt (stderr
, phi
, 0);
682 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
685 gimple
*stmt
= gsi_stmt (gsi
);
686 tree vuse
= gimple_vuse (stmt
);
689 if (vuse
!= current_vdef
)
691 error ("stmt with wrong VUSE");
692 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
693 fprintf (stderr
, "expected ");
694 print_generic_expr (stderr
, current_vdef
);
695 fprintf (stderr
, "\n");
698 tree vdef
= gimple_vdef (stmt
);
702 if (TREE_CODE (current_vdef
) != SSA_NAME
)
704 error ("virtual definition is not an SSA name");
705 print_gimple_stmt (stderr
, phi
, 0);
712 /* Verify destination PHI uses and recurse. */
715 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
717 gphi
*phi
= get_virtual_phi (e
->dest
);
719 && PHI_ARG_DEF_FROM_EDGE (phi
, e
) != current_vdef
)
721 error ("PHI node with wrong VUSE on edge from BB %d",
723 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
);
724 fprintf (stderr
, "expected ");
725 print_generic_expr (stderr
, current_vdef
);
726 fprintf (stderr
, "\n");
731 err
|= verify_vssa (e
->dest
, current_vdef
, visited
);
737 /* Return true if SSA_NAME is malformed and mark it visited.
739 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
743 verify_ssa_name (tree ssa_name
, bool is_virtual
)
745 if (TREE_CODE (ssa_name
) != SSA_NAME
)
747 error ("expected an SSA_NAME object");
751 if (SSA_NAME_IN_FREE_LIST (ssa_name
))
753 error ("found an SSA_NAME that had been released into the free pool");
757 if (SSA_NAME_VAR (ssa_name
) != NULL_TREE
758 && TREE_TYPE (ssa_name
) != TREE_TYPE (SSA_NAME_VAR (ssa_name
)))
760 error ("type mismatch between an SSA_NAME and its symbol");
764 if (is_virtual
&& !virtual_operand_p (ssa_name
))
766 error ("found a virtual definition for a GIMPLE register");
770 if (is_virtual
&& SSA_NAME_VAR (ssa_name
) != gimple_vop (cfun
))
772 error ("virtual SSA name for non-VOP decl");
776 if (!is_virtual
&& virtual_operand_p (ssa_name
))
778 error ("found a real definition for a non-register");
782 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name
)
783 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
)))
785 error ("found a default name with a non-empty defining statement");
793 /* Return true if the definition of SSA_NAME at block BB is malformed.
795 STMT is the statement where SSA_NAME is created.
797 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
798 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
799 it means that the block in that array slot contains the
800 definition of SSA_NAME.
802 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
805 verify_def (basic_block bb
, basic_block
*definition_block
, tree ssa_name
,
806 gimple
*stmt
, bool is_virtual
)
808 if (verify_ssa_name (ssa_name
, is_virtual
))
811 if (SSA_NAME_VAR (ssa_name
)
812 && TREE_CODE (SSA_NAME_VAR (ssa_name
)) == RESULT_DECL
813 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name
)))
815 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
819 if (definition_block
[SSA_NAME_VERSION (ssa_name
)])
821 error ("SSA_NAME created in two different blocks %i and %i",
822 definition_block
[SSA_NAME_VERSION (ssa_name
)]->index
, bb
->index
);
826 definition_block
[SSA_NAME_VERSION (ssa_name
)] = bb
;
828 if (SSA_NAME_DEF_STMT (ssa_name
) != stmt
)
830 error ("SSA_NAME_DEF_STMT is wrong");
831 fprintf (stderr
, "Expected definition statement:\n");
832 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (ssa_name
), 4, TDF_VOPS
);
833 fprintf (stderr
, "\nActual definition statement:\n");
834 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
841 fprintf (stderr
, "while verifying SSA_NAME ");
842 print_generic_expr (stderr
, ssa_name
);
843 fprintf (stderr
, " in statement\n");
844 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
850 /* Return true if the use of SSA_NAME at statement STMT in block BB is
853 DEF_BB is the block where SSA_NAME was found to be created.
855 IDOM contains immediate dominator information for the flowgraph.
857 CHECK_ABNORMAL is true if the caller wants to check whether this use
858 is flowing through an abnormal edge (only used when checking PHI
861 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
862 that are defined before STMT in basic block BB. */
865 verify_use (basic_block bb
, basic_block def_bb
, use_operand_p use_p
,
866 gimple
*stmt
, bool check_abnormal
, bitmap names_defined_in_bb
)
869 tree ssa_name
= USE_FROM_PTR (use_p
);
871 if (!TREE_VISITED (ssa_name
))
872 if (verify_imm_links (stderr
, ssa_name
))
875 TREE_VISITED (ssa_name
) = 1;
877 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
))
878 && SSA_NAME_IS_DEFAULT_DEF (ssa_name
))
879 ; /* Default definitions have empty statements. Nothing to do. */
882 error ("missing definition");
885 else if (bb
!= def_bb
886 && !dominated_by_p (CDI_DOMINATORS
, bb
, def_bb
))
888 error ("definition in block %i does not dominate use in block %i",
889 def_bb
->index
, bb
->index
);
892 else if (bb
== def_bb
893 && names_defined_in_bb
!= NULL
894 && !bitmap_bit_p (names_defined_in_bb
, SSA_NAME_VERSION (ssa_name
)))
896 error ("definition in block %i follows the use", def_bb
->index
);
901 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name
))
903 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
907 /* Make sure the use is in an appropriate list by checking the previous
908 element to make sure it's the same. */
909 if (use_p
->prev
== NULL
)
911 error ("no immediate_use list");
917 if (use_p
->prev
->use
== NULL
)
918 listvar
= use_p
->prev
->loc
.ssa_name
;
920 listvar
= USE_FROM_PTR (use_p
->prev
);
921 if (listvar
!= ssa_name
)
923 error ("wrong immediate use list");
930 fprintf (stderr
, "for SSA_NAME: ");
931 print_generic_expr (stderr
, ssa_name
, TDF_VOPS
);
932 fprintf (stderr
, " in statement:\n");
933 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
940 /* Return true if any of the arguments for PHI node PHI at block BB is
943 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
944 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
945 it means that the block in that array slot contains the
946 definition of SSA_NAME. */
949 verify_phi_args (gphi
*phi
, basic_block bb
, basic_block
*definition_block
)
953 size_t i
, phi_num_args
= gimple_phi_num_args (phi
);
955 if (EDGE_COUNT (bb
->preds
) != phi_num_args
)
957 error ("incoming edge count does not match number of PHI arguments");
962 for (i
= 0; i
< phi_num_args
; i
++)
964 use_operand_p op_p
= gimple_phi_arg_imm_use_ptr (phi
, i
);
965 tree op
= USE_FROM_PTR (op_p
);
967 e
= EDGE_PRED (bb
, i
);
971 error ("PHI argument is missing for edge %d->%d",
978 if (TREE_CODE (op
) != SSA_NAME
&& !is_gimple_min_invariant (op
))
980 error ("PHI argument is not SSA_NAME, or invariant");
984 if (TREE_CODE (op
) == SSA_NAME
)
986 err
= verify_ssa_name (op
, virtual_operand_p (gimple_phi_result (phi
)));
987 err
|= verify_use (e
->src
, definition_block
[SSA_NAME_VERSION (op
)],
988 op_p
, phi
, e
->flags
& EDGE_ABNORMAL
, NULL
);
991 if (TREE_CODE (op
) == ADDR_EXPR
)
993 tree base
= TREE_OPERAND (op
, 0);
994 while (handled_component_p (base
))
995 base
= TREE_OPERAND (base
, 0);
997 || TREE_CODE (base
) == PARM_DECL
998 || TREE_CODE (base
) == RESULT_DECL
)
999 && !TREE_ADDRESSABLE (base
))
1001 error ("address taken, but ADDRESSABLE bit not set");
1008 error ("wrong edge %d->%d for PHI argument",
1009 e
->src
->index
, e
->dest
->index
);
1015 fprintf (stderr
, "PHI argument\n");
1016 print_generic_stmt (stderr
, op
, TDF_VOPS
);
1024 fprintf (stderr
, "for PHI node\n");
1025 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1033 /* Verify common invariants in the SSA web.
1034 TODO: verify the variable annotations. */
1037 verify_ssa (bool check_modified_stmt
, bool check_ssa_operands
)
1040 basic_block
*definition_block
= XCNEWVEC (basic_block
, num_ssa_names
);
1043 enum dom_state orig_dom_state
= dom_info_state (CDI_DOMINATORS
);
1044 auto_bitmap names_defined_in_bb
;
1046 gcc_assert (!need_ssa_update_p (cfun
));
1048 timevar_push (TV_TREE_SSA_VERIFY
);
1051 /* Keep track of SSA names present in the IL. */
1054 hash_map
<void *, tree
> ssa_info
;
1056 FOR_EACH_SSA_NAME (i
, name
, cfun
)
1059 TREE_VISITED (name
) = 0;
1061 verify_ssa_name (name
, virtual_operand_p (name
));
1063 stmt
= SSA_NAME_DEF_STMT (name
);
1064 if (!gimple_nop_p (stmt
))
1066 basic_block bb
= gimple_bb (stmt
);
1067 if (verify_def (bb
, definition_block
,
1068 name
, stmt
, virtual_operand_p (name
)))
1073 if (POINTER_TYPE_P (TREE_TYPE (name
)))
1074 info
= SSA_NAME_PTR_INFO (name
);
1075 else if (INTEGRAL_TYPE_P (TREE_TYPE (name
)))
1076 info
= SSA_NAME_RANGE_INFO (name
);
1080 tree
&val
= ssa_info
.get_or_insert (info
, &existed
);
1083 error ("shared SSA name info");
1084 print_generic_expr (stderr
, val
);
1085 fprintf (stderr
, " and ");
1086 print_generic_expr (stderr
, name
);
1087 fprintf (stderr
, "\n");
1096 calculate_dominance_info (CDI_DOMINATORS
);
1098 /* Now verify all the uses and make sure they agree with the definitions
1099 found in the previous pass. */
1100 FOR_EACH_BB_FN (bb
, cfun
)
1105 /* Make sure that all edges have a clear 'aux' field. */
1106 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1110 error ("AUX pointer initialized for edge %d->%d", e
->src
->index
,
1116 /* Verify the arguments for every PHI node in the block. */
1117 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1119 gphi
*phi
= gsi
.phi ();
1120 if (verify_phi_args (phi
, bb
, definition_block
))
1123 bitmap_set_bit (names_defined_in_bb
,
1124 SSA_NAME_VERSION (gimple_phi_result (phi
)));
1127 /* Now verify all the uses and vuses in every statement of the block. */
1128 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1131 gimple
*stmt
= gsi_stmt (gsi
);
1132 use_operand_p use_p
;
1134 if (check_modified_stmt
&& gimple_modified_p (stmt
))
1136 error ("stmt (%p) marked modified after optimization pass: ",
1138 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1142 if (check_ssa_operands
&& verify_ssa_operands (cfun
, stmt
))
1144 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1148 if (gimple_debug_bind_p (stmt
)
1149 && !gimple_debug_bind_has_value_p (stmt
))
1152 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1154 op
= USE_FROM_PTR (use_p
);
1155 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1156 use_p
, stmt
, false, names_defined_in_bb
))
1160 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1162 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1164 error ("SSA_NAME_DEF_STMT is wrong");
1165 fprintf (stderr
, "Expected definition statement:\n");
1166 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1167 fprintf (stderr
, "\nActual definition statement:\n");
1168 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1172 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1176 bitmap_clear (names_defined_in_bb
);
1179 free (definition_block
);
1181 if (gimple_vop (cfun
)
1182 && ssa_default_def (cfun
, gimple_vop (cfun
)))
1184 auto_sbitmap
visited (last_basic_block_for_fn (cfun
) + 1);
1185 bitmap_clear (visited
);
1186 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
1187 ssa_default_def (cfun
, gimple_vop (cfun
)), visited
))
1191 /* Restore the dominance information to its prior known state, so
1192 that we do not perturb the compiler's subsequent behavior. */
1193 if (orig_dom_state
== DOM_NONE
)
1194 free_dominance_info (CDI_DOMINATORS
);
1196 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1198 timevar_pop (TV_TREE_SSA_VERIFY
);
1202 internal_error ("verify_ssa failed");
1206 # pragma GCC diagnostic pop
1209 /* Initialize global DFA and SSA structures. */
1212 init_tree_ssa (struct function
*fn
)
1214 fn
->gimple_df
= ggc_cleared_alloc
<gimple_df
> ();
1215 fn
->gimple_df
->default_defs
= hash_table
<ssa_name_hasher
>::create_ggc (20);
1216 pt_solution_reset (&fn
->gimple_df
->escaped
);
1217 init_ssanames (fn
, 0);
1220 /* Deallocate memory associated with SSA data structures for FNDECL. */
1223 delete_tree_ssa (struct function
*fn
)
1227 /* We no longer maintain the SSA operand cache at this point. */
1228 if (ssa_operands_active (fn
))
1229 fini_ssa_operands (fn
);
1231 fn
->gimple_df
->default_defs
->empty ();
1232 fn
->gimple_df
->default_defs
= NULL
;
1233 pt_solution_reset (&fn
->gimple_df
->escaped
);
1234 if (fn
->gimple_df
->decls_to_pointers
!= NULL
)
1235 delete fn
->gimple_df
->decls_to_pointers
;
1236 fn
->gimple_df
->decls_to_pointers
= NULL
;
1237 fn
->gimple_df
= NULL
;
1239 /* We no longer need the edge variable maps. */
1240 redirect_edge_var_map_empty ();
1243 /* Return true if EXPR is a useless type conversion, otherwise return
1247 tree_ssa_useless_type_conversion (tree expr
)
1249 /* If we have an assignment that merely uses a NOP_EXPR to change
1250 the top of the RHS to the type of the LHS and the type conversion
1251 is "safe", then strip away the type conversion so that we can
1252 enter LHS = RHS into the const_and_copies table. */
1253 if (CONVERT_EXPR_P (expr
)
1254 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
1255 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
1256 return useless_type_conversion_p
1258 TREE_TYPE (TREE_OPERAND (expr
, 0)));
1263 /* Strip conversions from EXP according to
1264 tree_ssa_useless_type_conversion and return the resulting
1268 tree_ssa_strip_useless_type_conversions (tree exp
)
1270 while (tree_ssa_useless_type_conversion (exp
))
1271 exp
= TREE_OPERAND (exp
, 0);
1275 /* Return true if T, as SSA_NAME, has an implicit default defined value. */
1278 ssa_defined_default_def_p (tree t
)
1280 tree var
= SSA_NAME_VAR (t
);
1284 /* Parameters get their initial value from the function entry. */
1285 else if (TREE_CODE (var
) == PARM_DECL
)
1287 /* When returning by reference the return address is actually a hidden
1289 else if (TREE_CODE (var
) == RESULT_DECL
&& DECL_BY_REFERENCE (var
))
1291 /* Hard register variables get their initial value from the ether. */
1292 else if (VAR_P (var
) && DECL_HARD_REGISTER (var
))
1299 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1300 should be returned if the value is only partially undefined. */
1303 ssa_undefined_value_p (tree t
, bool partial
)
1307 if (ssa_defined_default_def_p (t
))
1310 /* The value is undefined iff its definition statement is empty. */
1311 def_stmt
= SSA_NAME_DEF_STMT (t
);
1312 if (gimple_nop_p (def_stmt
))
1315 /* Check if the complex was not only partially defined. */
1316 if (partial
&& is_gimple_assign (def_stmt
)
1317 && gimple_assign_rhs_code (def_stmt
) == COMPLEX_EXPR
)
1321 rhs1
= gimple_assign_rhs1 (def_stmt
);
1322 rhs2
= gimple_assign_rhs2 (def_stmt
);
1323 return (TREE_CODE (rhs1
) == SSA_NAME
&& ssa_undefined_value_p (rhs1
))
1324 || (TREE_CODE (rhs2
) == SSA_NAME
&& ssa_undefined_value_p (rhs2
));
1330 /* Return TRUE iff STMT, a gimple statement, references an undefined
1334 gimple_uses_undefined_value_p (gimple
*stmt
)
1339 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
1340 if (ssa_undefined_value_p (op
))
1348 /* If necessary, rewrite the base of the reference tree *TP from
1349 a MEM_REF to a plain or converted symbol. */
/* NOTE(review): this extracted chunk has lost several original source
   lines (the declaration of SYM, braces, and at least one condition);
   verify any edit against the upstream copy of this file.  */
/* TP points at the reference to rewrite in place; SUITABLE_FOR_RENAMING
   is the set of DECL_UIDs selected for rewriting into SSA form.  */
1352 maybe_rewrite_mem_ref_base (tree
*tp
, bitmap suitable_for_renaming
)
/* Strip handled components so TP points at the innermost base.  */
1356 while (handled_component_p (*tp
))
1357 tp
= &TREE_OPERAND (*tp
, 0);
/* Only rewrite a MEM_REF of the address of a non-addressable symbol
   that was marked suitable for renaming and has a usable type.  */
1358 if (TREE_CODE (*tp
) == MEM_REF
1359 && TREE_CODE (TREE_OPERAND (*tp
, 0)) == ADDR_EXPR
1360 && (sym
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0))
1362 && !TREE_ADDRESSABLE (sym
)
1363 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
))
1364 && is_gimple_reg_type (TREE_TYPE (*tp
))
1365 && ! VOID_TYPE_P (TREE_TYPE (*tp
)))
/* Vector element access at an element-aligned offset: lower to a
   BIT_FIELD_REF of the symbol.  */
1367 if (TREE_CODE (TREE_TYPE (sym
)) == VECTOR_TYPE
1368 && useless_type_conversion_p (TREE_TYPE (*tp
),
1369 TREE_TYPE (TREE_TYPE (sym
)))
1370 && multiple_of_p (sizetype
, TREE_OPERAND (*tp
, 1),
1371 TYPE_SIZE_UNIT (TREE_TYPE (*tp
))))
1373 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1374 TYPE_SIZE (TREE_TYPE (*tp
)),
1375 int_const_binop (MULT_EXPR
,
1376 bitsize_int (BITS_PER_UNIT
),
1377 TREE_OPERAND (*tp
, 1)));
/* Complex part access: lower to REALPART_EXPR / IMAGPART_EXPR
   depending on whether the byte offset is zero.  */
1379 else if (TREE_CODE (TREE_TYPE (sym
)) == COMPLEX_TYPE
1380 && useless_type_conversion_p (TREE_TYPE (*tp
),
1381 TREE_TYPE (TREE_TYPE (sym
))))
1383 *tp
= build1 (integer_zerop (TREE_OPERAND (*tp
, 1))
1384 ? REALPART_EXPR
: IMAGPART_EXPR
,
1385 TREE_TYPE (*tp
), sym
);
/* Whole-symbol access of matching size: use the symbol directly,
   wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
1387 else if (integer_zerop (TREE_OPERAND (*tp
, 1))
1388 && DECL_SIZE (sym
) == TYPE_SIZE (TREE_TYPE (*tp
)))
1390 if (!useless_type_conversion_p (TREE_TYPE (*tp
),
1392 *tp
= build1 (VIEW_CONVERT_EXPR
,
1393 TREE_TYPE (*tp
), sym
);
/* Byte-aligned sub-object extract fully inside the decl: lower to a
   BIT_FIELD_REF at the byte offset scaled to bits.  */
1397 else if (DECL_SIZE (sym
)
1398 && TREE_CODE (DECL_SIZE (sym
)) == INTEGER_CST
1399 && (known_subrange_p
1400 (mem_ref_offset (*tp
),
1401 wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp
))),
1402 0, wi::to_offset (DECL_SIZE_UNIT (sym
))))
1403 && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
1404 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp
)))
1405 == TYPE_PRECISION (TREE_TYPE (*tp
))))
1406 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp
))),
1407 BITS_PER_UNIT
) == 0)
1409 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1410 TYPE_SIZE (TREE_TYPE (*tp
)),
1411 wide_int_to_tree (bitsizetype
,
1412 mem_ref_offset (*tp
)
1413 << LOG2_BITS_PER_UNIT
));
1418 /* For a tree REF return its base if it is the base of a MEM_REF
1419 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
/* NOTE(review): extraction dropped some original lines here (braces and
   early returns); verify against the upstream copy before editing.  */
1422 non_rewritable_mem_ref_base (tree ref
)
1426 /* A plain decl does not need it set. */
/* Find the base of REF, stripping invariant handled components first.  */
1430 if (! (base
= CONST_CAST_TREE (strip_invariant_refs (ref
))))
1432 base
= get_base_address (ref
);
1438 /* But watch out for MEM_REFs we cannot lower to a
1439 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
/* Only a MEM_REF of the address of a decl can be problematic here.  */
1440 if (TREE_CODE (base
) == MEM_REF
1441 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1443 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1444 if (! DECL_P (decl
))
/* Non-register types, void accesses and volatility mismatches cannot
   be lowered.  */
1446 if (! is_gimple_reg_type (TREE_TYPE (base
))
1447 || VOID_TYPE_P (TREE_TYPE (base
))
1448 || TREE_THIS_VOLATILE (decl
) != TREE_THIS_VOLATILE (base
))
/* Element accesses of a vector or complex decl that stay inside the
   decl can be lowered, so they do not block rewriting.  */
1450 if ((TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
1451 || TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
)
1452 && useless_type_conversion_p (TREE_TYPE (base
),
1453 TREE_TYPE (TREE_TYPE (decl
)))
1454 && known_ge (mem_ref_offset (base
), 0)
1455 && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl
))),
1456 mem_ref_offset (base
))
1457 && multiple_of_p (sizetype
, TREE_OPERAND (base
, 1),
1458 TYPE_SIZE_UNIT (TREE_TYPE (base
))))
1460 /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR. */
1461 if (integer_zerop (TREE_OPERAND (base
, 1))
1462 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (base
)))
1464 /* For integral typed extracts we can use a BIT_FIELD_REF. */
1465 if (DECL_SIZE (decl
)
1466 && TREE_CODE (DECL_SIZE_UNIT (decl
)) == INTEGER_CST
1467 && (known_subrange_p
1468 (mem_ref_offset (base
),
1469 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base
))),
1470 0, wi::to_poly_offset (DECL_SIZE_UNIT (decl
))))
1471 /* ??? We can't handle bitfield precision extracts without
1472 either using an alternate type for the BIT_FIELD_REF and
1473 then doing a conversion or possibly adjusting the offset
1474 according to endianness. */
1475 && (! INTEGRAL_TYPE_P (TREE_TYPE (base
))
1476 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base
)))
1477 == TYPE_PRECISION (TREE_TYPE (base
))))
1478 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base
))),
1479 BITS_PER_UNIT
) == 0)
1487 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1488 Otherwise return false. */
/* NOTE(review): extraction dropped some original lines here, including
   two comment terminators which are restored below; verify against the
   upstream copy before editing.  */
1491 non_rewritable_lvalue_p (tree lhs
)
1493 /* A plain decl is always rewritable. */
1497 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1498 a reasonably efficient manner... */
1499 if ((TREE_CODE (lhs
) == REALPART_EXPR
1500 || TREE_CODE (lhs
) == IMAGPART_EXPR
)
1501 && DECL_P (TREE_OPERAND (lhs
, 0)))
1504 /* ??? The following could be relaxed allowing component
1505 references that do not change the access size. */
1506 if (TREE_CODE (lhs
) == MEM_REF
1507 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
)
1509 tree decl
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0);
1511 /* A decl that is wrapped inside a MEM-REF that covers
1512 it full is also rewritable. */
1513 if (integer_zerop (TREE_OPERAND (lhs
, 1))
1515 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (lhs
))
1516 /* If the dynamic type of the decl has larger precision than
1517 the decl itself we can't use the decls type for SSA rewriting. */
1518 && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl
))
1519 || compare_tree_int (DECL_SIZE (decl
),
1520 TYPE_PRECISION (TREE_TYPE (decl
))) == 0)
1521 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
1522 && (TYPE_PRECISION (TREE_TYPE (decl
))
1523 >= TYPE_PRECISION (TREE_TYPE (lhs
)))))
1524 /* Make sure we are not re-writing non-float copying into float
1525 copying as that can incur normalization. */
1526 && (! FLOAT_TYPE_P (TREE_TYPE (decl
))
1527 || types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (decl
)))
1528 && (TREE_THIS_VOLATILE (decl
) == TREE_THIS_VOLATILE (lhs
)))
1531 /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1532 using a BIT_INSERT_EXPR. */
1534 && VECTOR_TYPE_P (TREE_TYPE (decl
))
1535 && TYPE_MODE (TREE_TYPE (decl
)) != BLKmode
1536 && known_ge (mem_ref_offset (lhs
), 0)
1537 && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl
))),
1538 mem_ref_offset (lhs
))
1539 && multiple_of_p (sizetype
, TREE_OPERAND (lhs
, 1),
1540 TYPE_SIZE_UNIT (TREE_TYPE (lhs
))))
1542 poly_uint64 lhs_bits
, nelts
;
1543 if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs
)), &lhs_bits
)
1544 && multiple_p (lhs_bits
,
1546 (TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl
)))),
1549 if (known_eq (nelts
, 1u))
1551 /* For sub-vector inserts the insert vector mode has to be
   supported.  (NOTE(review): comment terminator restored; the
   original continuation line was lost in extraction.)  */
1553 tree vtype
= build_vector_type (TREE_TYPE (TREE_TYPE (decl
)),
1555 if (TYPE_MODE (vtype
) != BLKmode
)
1561 /* A vector-insert using a BIT_FIELD_REF is rewritable using
   a BIT_INSERT_EXPR.  (NOTE(review): comment terminator restored;
   the original continuation line was lost in extraction.)  */
1563 if (TREE_CODE (lhs
) == BIT_FIELD_REF
1564 && DECL_P (TREE_OPERAND (lhs
, 0))
1565 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs
, 0)))
1566 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs
, 0))) != BLKmode
1567 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs
)),
1569 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs
, 0)))), 0)
1570 && (tree_to_uhwi (TREE_OPERAND (lhs
, 2))
1571 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))) == 0)
1577 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1578 mark the variable VAR for conversion into SSA. Return true when updating
1579 stmts is required. */
/* NOTE(review): no return statement is visible in this extract, so the
   "Return true" sentence above may be stale — confirm upstream.  */
/* ADDRESSES_TAKEN is the set of decls whose address is taken;
   NOT_REG_NEEDS the set of decls that must stay non-registers;
   SUITABLE_FOR_RENAMING receives the UIDs of decls to rename.  */
1582 maybe_optimize_var (tree var
, bitmap addresses_taken
, bitmap not_reg_needs
,
1583 bitmap suitable_for_renaming
)
1585 /* Global Variables, result decls cannot be changed. */
1586 if (is_global_var (var
)
1587 || TREE_CODE (var
) == RESULT_DECL
1588 || bitmap_bit_p (addresses_taken
, DECL_UID (var
)))
1591 if (TREE_ADDRESSABLE (var
)
1592 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1593 a non-register. Otherwise we are confused and forget to
1594 add virtual operands for it. */
1595 && (!is_gimple_reg_type (TREE_TYPE (var
))
1596 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
1597 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1598 || !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))))
1600 TREE_ADDRESSABLE (var
) = 0;
1601 /* If we cleared TREE_ADDRESSABLE make sure DECL_GIMPLE_REG_P
1602 is unset if we cannot rewrite the var into SSA. */
1603 if ((TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
1604 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
)
1605 && bitmap_bit_p (not_reg_needs
, DECL_UID (var
)))
1606 DECL_GIMPLE_REG_P (var
) = 0;
1607 if (is_gimple_reg (var
))
1608 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
/* Dump-file note for the cleared-addressable case.  */
1611 fprintf (dump_file
, "No longer having address taken: ");
1612 print_generic_expr (dump_file
, var
);
1613 fprintf (dump_file
, "\n");
/* Otherwise a complex/vector var that has no non-register need can be
   promoted to a gimple register.  */
1617 if (!DECL_GIMPLE_REG_P (var
)
1618 && !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))
1619 && (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1620 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
1621 && !TREE_THIS_VOLATILE (var
)
1622 && (!VAR_P (var
) || !DECL_HARD_REGISTER (var
)))
1624 DECL_GIMPLE_REG_P (var
) = 1;
1625 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
1628 fprintf (dump_file
, "Now a gimple register: ");
1629 print_generic_expr (dump_file
, var
);
1630 fprintf (dump_file
, "\n");
1635 /* Return true when STMT is ASAN mark where second argument is an address
1636 of a local variable. */
1639 is_asan_mark_p (gimple
*stmt
)
/* Only internal IFN_ASAN_MARK calls qualify.  */
1641 if (!gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1644 tree addr
= get_base_address (gimple_call_arg (stmt
, 1));
1645 if (TREE_CODE (addr
) == ADDR_EXPR
1646 && VAR_P (TREE_OPERAND (addr
, 0)))
1648 tree var
= TREE_OPERAND (addr
, 0);
1649 if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1650 DECL_ATTRIBUTES (var
)))
/* Temporarily clear TREE_ADDRESSABLE to test whether VAR would be a
   gimple register without the address-taken bit, then restore it.  */
1653 unsigned addressable
= TREE_ADDRESSABLE (var
);
1654 TREE_ADDRESSABLE (var
) = 0;
1655 bool r
= is_gimple_reg (var
);
1656 TREE_ADDRESSABLE (var
) = addressable
;
1663 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
/* NOTE(review): this extracted chunk has lost many original lines
   (braces, declarations and loop increments); verify against the
   upstream copy before editing.  Structure: phase 1 collects
   ADDRESSES_TAKEN and NOT_REG_NEEDS over all stmts and PHIs, phase 2
   runs maybe_optimize_var over parameters and locals, phase 3 rewrites
   operands referencing renamed decls and updates SSA form.  */
1666 execute_update_addresses_taken (void)
1669 auto_bitmap addresses_taken
;
1670 auto_bitmap not_reg_needs
;
1671 auto_bitmap suitable_for_renaming
;
1675 timevar_push (TV_ADDRESS_TAKEN
);
1677 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1678 the function body. */
1679 FOR_EACH_BB_FN (bb
, cfun
)
1681 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1684 gimple
*stmt
= gsi_stmt (gsi
);
1685 enum gimple_code code
= gimple_code (stmt
);
1688 if (code
== GIMPLE_CALL
)
1690 if (optimize_atomic_compare_exchange_p (stmt
))
1692 /* For __atomic_compare_exchange_N if the second argument
1693 is &var, don't mark var addressable;
1694 if it becomes non-addressable, we'll rewrite it into
1695 ATOMIC_COMPARE_EXCHANGE call. */
1696 tree arg
= gimple_call_arg (stmt
, 1);
1697 gimple_call_set_arg (stmt
, 1, null_pointer_node
);
1698 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1699 gimple_call_set_arg (stmt
, 1, arg
);
1701 else if (is_asan_mark_p (stmt
)
1702 || gimple_call_internal_p (stmt
, IFN_GOMP_SIMT_ENTER
))
1705 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1708 /* Note all addresses taken by the stmt. */
1709 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1711 /* If we have a call or an assignment, see if the lhs contains
1712 a local decl that requires not to be a gimple register. */
1713 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
1715 tree lhs
= gimple_get_lhs (stmt
);
1717 && TREE_CODE (lhs
) != SSA_NAME
1718 && ((code
== GIMPLE_CALL
&& ! DECL_P (lhs
))
1719 || non_rewritable_lvalue_p (lhs
)))
1721 decl
= get_base_address (lhs
);
1723 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* RHS of a plain copy may contain a non-rewritable MEM_REF base.  */
1727 if (gimple_assign_single_p (stmt
))
1729 tree rhs
= gimple_assign_rhs1 (stmt
);
1730 if ((decl
= non_rewritable_mem_ref_base (rhs
)))
1731 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* Likewise every call argument.  */
1734 else if (code
== GIMPLE_CALL
)
1736 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1738 tree arg
= gimple_call_arg (stmt
, i
);
1739 if ((decl
= non_rewritable_mem_ref_base (arg
)))
1740 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* Asm outputs and inputs get the same treatment.  */
1744 else if (code
== GIMPLE_ASM
)
1746 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1747 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
1749 tree link
= gimple_asm_output_op (asm_stmt
, i
);
1750 tree lhs
= TREE_VALUE (link
);
1751 if (TREE_CODE (lhs
) != SSA_NAME
)
1753 decl
= get_base_address (lhs
);
1755 && (non_rewritable_lvalue_p (lhs
)
1756 /* We cannot move required conversions from
1757 the lhs to the rhs in asm statements, so
1758 require we do not need any. */
1759 || !useless_type_conversion_p
1760 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
1761 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1764 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
1766 tree link
= gimple_asm_input_op (asm_stmt
, i
);
1767 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
1768 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
/* PHI arguments can take addresses as well.  */
1773 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
1777 gphi
*phi
= gsi
.phi ();
1779 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1781 tree op
= PHI_ARG_DEF (phi
, i
), var
;
1782 if (TREE_CODE (op
) == ADDR_EXPR
1783 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
1785 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
1790 /* We cannot iterate over all referenced vars because that can contain
1791 unused vars from BLOCK trees, which causes code generation differences
   with -g vs. -g0.  (NOTE(review): comment terminator restored; the
   original continuation line was lost in extraction.)  */
1793 for (var
= DECL_ARGUMENTS (cfun
->decl
); var
; var
= DECL_CHAIN (var
))
1794 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1795 suitable_for_renaming
);
1797 FOR_EACH_VEC_SAFE_ELT (cfun
->local_decls
, i
, var
)
1798 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1799 suitable_for_renaming
);
1801 /* Operand caches need to be recomputed for operands referencing the updated
1802 variables and operands need to be rewritten to expose bare symbols. */
1803 if (!bitmap_empty_p (suitable_for_renaming
))
1805 FOR_EACH_BB_FN (bb
, cfun
)
1806 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
1808 gimple
*stmt
= gsi_stmt (gsi
);
1810 /* Re-write TARGET_MEM_REFs of symbols we want to
1811 rewrite into SSA form. */
1812 if (gimple_assign_single_p (stmt
))
1814 tree lhs
= gimple_assign_lhs (stmt
);
1815 tree rhs
, *rhsp
= gimple_assign_rhs1_ptr (stmt
);
1818 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1819 gimplify_modify_expr_complex_part. */
1820 if ((TREE_CODE (lhs
) == IMAGPART_EXPR
1821 || TREE_CODE (lhs
) == REALPART_EXPR
)
1822 && DECL_P (TREE_OPERAND (lhs
, 0))
1823 && bitmap_bit_p (suitable_for_renaming
,
1824 DECL_UID (TREE_OPERAND (lhs
, 0))))
/* Load the untouched part and combine it with the new part into a
   COMPLEX_EXPR assigned to the whole decl.  */
1826 tree other
= make_ssa_name (TREE_TYPE (lhs
));
1827 tree lrhs
= build1 (TREE_CODE (lhs
) == IMAGPART_EXPR
1828 ? REALPART_EXPR
: IMAGPART_EXPR
,
1830 TREE_OPERAND (lhs
, 0));
1831 gimple
*load
= gimple_build_assign (other
, lrhs
);
1832 location_t loc
= gimple_location (stmt
);
1833 gimple_set_location (load
, loc
);
1834 gimple_set_vuse (load
, gimple_vuse (stmt
));
1835 gsi_insert_before (&gsi
, load
, GSI_SAME_STMT
);
1836 gimple_assign_set_lhs (stmt
, TREE_OPERAND (lhs
, 0));
1837 gimple_assign_set_rhs_with_ops
1838 (&gsi
, COMPLEX_EXPR
,
1839 TREE_CODE (lhs
) == IMAGPART_EXPR
1840 ? other
: gimple_assign_rhs1 (stmt
),
1841 TREE_CODE (lhs
) == IMAGPART_EXPR
1842 ? gimple_assign_rhs1 (stmt
) : other
, NULL_TREE
);
1843 stmt
= gsi_stmt (gsi
);
1844 unlink_stmt_vdef (stmt
);
1849 /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1850 into a BIT_INSERT_EXPR. */
1851 if (TREE_CODE (lhs
) == BIT_FIELD_REF
1852 && DECL_P (TREE_OPERAND (lhs
, 0))
1853 && bitmap_bit_p (suitable_for_renaming
,
1854 DECL_UID (TREE_OPERAND (lhs
, 0)))
1855 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs
, 0)))
1856 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs
, 0))) != BLKmode
1857 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs
)),
1858 TYPE_SIZE_UNIT (TREE_TYPE
1859 (TREE_TYPE (TREE_OPERAND (lhs
, 0)))),
1861 && (tree_to_uhwi (TREE_OPERAND (lhs
, 2))
1862 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
))) == 0))
1864 tree var
= TREE_OPERAND (lhs
, 0);
1865 tree val
= gimple_assign_rhs1 (stmt
);
/* Punning via VIEW_CONVERT_EXPR when the value type does not match
   the vector element type.  */
1866 if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var
)),
1869 tree tem
= make_ssa_name (TREE_TYPE (TREE_TYPE (var
)));
1871 = gimple_build_assign (tem
,
1872 build1 (VIEW_CONVERT_EXPR
,
1873 TREE_TYPE (tem
), val
));
1874 gsi_insert_before (&gsi
, pun
, GSI_SAME_STMT
);
1877 tree bitpos
= TREE_OPERAND (lhs
, 2);
1878 gimple_assign_set_lhs (stmt
, var
);
1879 gimple_assign_set_rhs_with_ops
1880 (&gsi
, BIT_INSERT_EXPR
, var
, val
, bitpos
);
1881 stmt
= gsi_stmt (gsi
);
1882 unlink_stmt_vdef (stmt
);
1887 /* Rewrite a vector insert using a MEM_REF on the LHS
1888 into a BIT_INSERT_EXPR. */
1889 if (TREE_CODE (lhs
) == MEM_REF
1890 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1891 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
1893 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
))
1894 && VECTOR_TYPE_P (TREE_TYPE (sym
))
1895 && TYPE_MODE (TREE_TYPE (sym
)) != BLKmode
1896 && known_ge (mem_ref_offset (lhs
), 0)
1897 && known_gt (wi::to_poly_offset
1898 (TYPE_SIZE_UNIT (TREE_TYPE (sym
))),
1899 mem_ref_offset (lhs
))
1900 && multiple_of_p (sizetype
,
1901 TREE_OPERAND (lhs
, 1),
1902 TYPE_SIZE_UNIT (TREE_TYPE (lhs
))))
1904 tree val
= gimple_assign_rhs1 (stmt
);
1905 if (! types_compatible_p (TREE_TYPE (val
),
1906 TREE_TYPE (TREE_TYPE (sym
))))
1908 poly_uint64 lhs_bits
, nelts
;
1909 tree temtype
= TREE_TYPE (TREE_TYPE (sym
));
1910 if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs
)),
1912 && multiple_p (lhs_bits
,
1914 (TYPE_SIZE (TREE_TYPE
1915 (TREE_TYPE (sym
)))),
1917 && maybe_ne (nelts
, 1u))
1918 temtype
= build_vector_type (temtype
, nelts
);
1919 tree tem
= make_ssa_name (temtype
);
1921 = gimple_build_assign (tem
,
1922 build1 (VIEW_CONVERT_EXPR
,
1923 TREE_TYPE (tem
), val
));
1924 gsi_insert_before (&gsi
, pun
, GSI_SAME_STMT
);
/* The MEM_REF byte offset scaled to bits.  */
1928 = wide_int_to_tree (bitsizetype
,
1929 mem_ref_offset (lhs
) * BITS_PER_UNIT
);
1930 gimple_assign_set_lhs (stmt
, sym
);
1931 gimple_assign_set_rhs_with_ops
1932 (&gsi
, BIT_INSERT_EXPR
, sym
, val
, bitpos
);
1933 stmt
= gsi_stmt (gsi
);
1934 unlink_stmt_vdef (stmt
);
1939 /* We shouldn't have any fancy wrapping of
1940 component-refs on the LHS, but look through
1941 VIEW_CONVERT_EXPRs as that is easy. */
1942 while (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
1943 lhs
= TREE_OPERAND (lhs
, 0);
1944 if (TREE_CODE (lhs
) == MEM_REF
1945 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1946 && integer_zerop (TREE_OPERAND (lhs
, 1))
1947 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
1949 && !TREE_ADDRESSABLE (sym
)
1950 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1953 lhs
= gimple_assign_lhs (stmt
);
1955 /* Rewrite the RHS and make sure the resulting assignment
1956 is validly typed. */
1957 maybe_rewrite_mem_ref_base (rhsp
, suitable_for_renaming
);
1958 rhs
= gimple_assign_rhs1 (stmt
);
1959 if (gimple_assign_lhs (stmt
) != lhs
1960 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1963 if (gimple_clobber_p (stmt
))
/* Rebuild the clobber with the LHS type.  */
1965 rhs
= build_constructor (TREE_TYPE (lhs
), NULL
);
1966 TREE_THIS_VOLATILE (rhs
) = 1;
1969 rhs
= fold_build1 (VIEW_CONVERT_EXPR
,
1970 TREE_TYPE (lhs
), rhs
);
1972 if (gimple_assign_lhs (stmt
) != lhs
)
1973 gimple_assign_set_lhs (stmt
, lhs
);
1975 if (gimple_assign_rhs1 (stmt
) != rhs
)
1977 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1978 gimple_assign_set_rhs_from_tree (&gsi
, rhs
);
1982 else if (gimple_code (stmt
) == GIMPLE_CALL
)
1985 if (optimize_atomic_compare_exchange_p (stmt
))
/* Fold __atomic_compare_exchange_N into the internal form when the
   expected-value decl becomes a register.  */
1987 tree expected
= gimple_call_arg (stmt
, 1);
1988 if (bitmap_bit_p (suitable_for_renaming
,
1989 DECL_UID (TREE_OPERAND (expected
, 0))))
1991 fold_builtin_atomic_compare_exchange (&gsi
);
/* Rewrite ASAN_MARK calls whose variable becomes a register.  */
1995 else if (is_asan_mark_p (stmt
))
1997 tree var
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
1998 if (bitmap_bit_p (suitable_for_renaming
, DECL_UID (var
)))
2000 unlink_stmt_vdef (stmt
);
2001 if (asan_mark_p (stmt
, ASAN_MARK_POISON
))
2004 = gimple_build_call_internal (IFN_ASAN_POISON
, 0);
2005 gimple_call_set_lhs (call
, var
);
2006 gsi_replace (&gsi
, call
, GSI_SAME_STMT
);
2010 /* In ASAN_MARK (UNPOISON, &b, ...) the variable
2011 is uninitialized. Avoid dependencies on
2012 previous out of scope value. */
2014 = build_constructor (TREE_TYPE (var
), NULL
);
2015 TREE_THIS_VOLATILE (clobber
) = 1;
2016 gimple
*g
= gimple_build_assign (var
, clobber
);
2017 gsi_replace (&gsi
, g
, GSI_SAME_STMT
);
2022 else if (gimple_call_internal_p (stmt
, IFN_GOMP_SIMT_ENTER
))
2023 for (i
= 1; i
< gimple_call_num_args (stmt
); i
++)
2025 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
2026 if (*argp
== null_pointer_node
)
2028 gcc_assert (TREE_CODE (*argp
) == ADDR_EXPR
2029 && VAR_P (TREE_OPERAND (*argp
, 0)));
2030 tree var
= TREE_OPERAND (*argp
, 0);
2031 if (bitmap_bit_p (suitable_for_renaming
, DECL_UID (var
)))
2032 *argp
= null_pointer_node
;
2034 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2036 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
2037 maybe_rewrite_mem_ref_base (argp
, suitable_for_renaming
);
2041 else if (gimple_code (stmt
) == GIMPLE_ASM
)
2043 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
2045 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
2047 tree link
= gimple_asm_output_op (asm_stmt
, i
);
2048 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
2049 suitable_for_renaming
);
2051 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
2053 tree link
= gimple_asm_input_op (asm_stmt
, i
);
2054 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
2055 suitable_for_renaming
);
2059 else if (gimple_debug_bind_p (stmt
)
2060 && gimple_debug_bind_has_value_p (stmt
))
2062 tree
*valuep
= gimple_debug_bind_get_value_ptr (stmt
);
2064 maybe_rewrite_mem_ref_base (valuep
, suitable_for_renaming
);
2065 decl
= non_rewritable_mem_ref_base (*valuep
);
2067 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (decl
)))
2068 gimple_debug_bind_reset_value (stmt
);
2071 if (gimple_references_memory_p (stmt
)
2072 || is_gimple_debug (stmt
))
2078 /* Update SSA form here, we are called as non-pass as well. */
2079 if (number_of_loops (cfun
) > 1
2080 && loops_state_satisfies_p (LOOP_CLOSED_SSA
))
2081 rewrite_into_loop_closed_ssa (NULL
, TODO_update_ssa
);
2083 update_ssa (TODO_update_ssa
);
2086 timevar_pop (TV_ADDRESS_TAKEN
);
/* Pass descriptor for the "addressables" IL-update pass.  */
2091 const pass_data pass_data_update_address_taken
=
2093 GIMPLE_PASS
, /* type */
2094 "addressables", /* name */
2095 OPTGROUP_NONE
, /* optinfo_flags */
2096 TV_ADDRESS_TAKEN
, /* tv_id */
2097 PROP_ssa
, /* properties_required */
2098 0, /* properties_provided */
2099 0, /* properties_destroyed */
2100 0, /* todo_flags_start */
2101 TODO_update_address_taken
, /* todo_flags_finish */
/* Thin gimple_opt_pass wrapper around the pass descriptor above; the
   actual work is requested via TODO_update_address_taken.  */
2104 class pass_update_address_taken
: public gimple_opt_pass
2107 pass_update_address_taken (gcc::context
*ctxt
)
2108 : gimple_opt_pass (pass_data_update_address_taken
, ctxt
)
2111 /* opt_pass methods: */
2113 }; // class pass_update_address_taken
2118 make_pass_update_address_taken (gcc::context
*ctxt
)
2120 return new pass_update_address_taken (ctxt
);