/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-iterator.h"
34 #include "gimple-fold.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
41 #include "cfgexpand.h"
44 #include "stringpool.h"
48 /* Pointer map of variable mappings, keyed by edge. */
49 static hash_map
<edge
, auto_vec
<edge_var_map
> > *edge_var_maps
;
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
55 redirect_edge_var_map_add (edge e
, tree result
, tree def
, location_t locus
)
57 edge_var_map new_node
;
59 if (edge_var_maps
== NULL
)
60 edge_var_maps
= new hash_map
<edge
, auto_vec
<edge_var_map
> >;
62 auto_vec
<edge_var_map
> &slot
= edge_var_maps
->get_or_insert (e
);
64 new_node
.result
= result
;
65 new_node
.locus
= locus
;
67 slot
.safe_push (new_node
);
71 /* Clear the var mappings in edge E. */
74 redirect_edge_var_map_clear (edge e
)
79 auto_vec
<edge_var_map
> *head
= edge_var_maps
->get (e
);
86 /* Duplicate the redirected var mappings in OLDE in NEWE.
88 This assumes a hash_map can have multiple edges mapping to the same
89 var_map (many to one mapping), since we don't remove the previous mappings.
93 redirect_edge_var_map_dup (edge newe
, edge olde
)
98 auto_vec
<edge_var_map
> *new_head
= &edge_var_maps
->get_or_insert (newe
);
99 auto_vec
<edge_var_map
> *old_head
= edge_var_maps
->get (olde
);
103 new_head
->safe_splice (*old_head
);
107 /* Return the variable mappings for a given edge. If there is none, return
111 redirect_edge_var_map_vector (edge e
)
113 /* Hey, what kind of idiot would... you'd be surprised. */
117 auto_vec
<edge_var_map
> *slot
= edge_var_maps
->get (e
);
124 /* Clear the edge variable mappings. */
127 redirect_edge_var_map_empty (void)
130 edge_var_maps
->empty ();
134 /* Remove the corresponding arguments from the PHI nodes in E's
135 destination block and redirect it to DEST. Return redirected edge.
136 The list of removed arguments is stored in a vector accessed
137 through edge_var_maps. */
140 ssa_redirect_edge (edge e
, basic_block dest
)
145 redirect_edge_var_map_clear (e
);
147 /* Remove the appropriate PHI arguments in E's destination block.
148 If we are redirecting a copied edge the destination has not
149 got PHI argument space reserved nor an interesting argument. */
150 if (! (e
->dest
->flags
& BB_DUPLICATED
))
151 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
157 def
= gimple_phi_arg_def (phi
, e
->dest_idx
);
158 locus
= gimple_phi_arg_location (phi
, e
->dest_idx
);
160 if (def
== NULL_TREE
)
163 redirect_edge_var_map_add (e
, gimple_phi_result (phi
), def
, locus
);
166 e
= redirect_edge_succ_nodup (e
, dest
);
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
176 flush_pending_stmts (edge e
)
183 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (e
);
187 for (gsi
= gsi_start_phis (e
->dest
), i
= 0;
188 !gsi_end_p (gsi
) && v
->iterate (i
, &vm
);
189 gsi_next (&gsi
), i
++)
194 def
= redirect_edge_var_map_def (vm
);
195 add_phi_arg (phi
, def
, e
, redirect_edge_var_map_location (vm
));
198 redirect_edge_var_map_clear (e
);
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203 expression with a different value.
205 This will update any annotations (say debug bind stmts) referring
206 to the original LHS, so that they use the RHS instead. This is
207 done even if NLHS and LHS are the same, for it is understood that
208 the RHS will be modified afterwards, and NLHS will not be assigned
211 Adjusting any non-annotation uses of the LHS, if needed, is a
212 responsibility of the caller.
214 The effect of this call should be pretty much the same as that of
215 inserting a copy of STMT before STMT, and then removing the
216 original stmt, at which time gsi_remove() would have update
217 annotations, but using this function saves all the inserting,
218 copying and removing. */
221 gimple_replace_ssa_lhs (gimple
*stmt
, tree nlhs
)
223 if (MAY_HAVE_DEBUG_BIND_STMTS
)
225 tree lhs
= gimple_get_lhs (stmt
);
227 gcc_assert (SSA_NAME_DEF_STMT (lhs
) == stmt
);
229 insert_debug_temp_for_var_def (NULL
, lhs
);
232 gimple_set_lhs (stmt
, nlhs
);
236 /* Given a tree for an expression for which we might want to emit
237 locations or values in debug information (generally a variable, but
238 we might deal with other kinds of trees in the future), return the
239 tree that should be used as the variable of a DEBUG_BIND STMT or
240 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
243 target_for_debug_bind (tree var
)
245 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
248 if (TREE_CODE (var
) == SSA_NAME
)
250 var
= SSA_NAME_VAR (var
);
251 if (var
== NULL_TREE
)
255 if ((!VAR_P (var
) || VAR_DECL_IS_VIRTUAL_OPERAND (var
))
256 && TREE_CODE (var
) != PARM_DECL
)
259 if (DECL_HAS_VALUE_EXPR_P (var
))
260 return target_for_debug_bind (DECL_VALUE_EXPR (var
));
262 if (DECL_IGNORED_P (var
))
265 /* var-tracking only tracks registers. */
266 if (!is_gimple_reg_type (TREE_TYPE (var
)))
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
276 find_released_ssa_name (tree
*tp
, int *walk_subtrees
, void *data_
)
278 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data_
;
280 if (wi
&& wi
->is_lhs
)
283 if (TREE_CODE (*tp
) == SSA_NAME
)
285 if (SSA_NAME_IN_FREE_LIST (*tp
))
290 else if (IS_TYPE_OR_DECL_P (*tp
))
296 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
297 by other DEBUG stmts, and replace uses of the DEF with the
298 newly-created debug temp. */
301 insert_debug_temp_for_var_def (gimple_stmt_iterator
*gsi
, tree var
)
303 imm_use_iterator imm_iter
;
306 gimple
*def_stmt
= NULL
;
310 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
313 /* If this name has already been registered for replacement, do nothing
314 as anything that uses this name isn't in SSA form. */
315 if (name_registered_for_update_p (var
))
318 /* Check whether there are debug stmts that reference this variable and,
319 if there are, decide whether we should use a debug temp. */
320 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, var
)
322 stmt
= USE_STMT (use_p
);
324 if (!gimple_debug_bind_p (stmt
))
330 if (gimple_debug_bind_get_value (stmt
) != var
)
332 /* Count this as an additional use, so as to make sure we
333 use a temp unless VAR's definition has a SINGLE_RHS that
344 def_stmt
= gsi_stmt (*gsi
);
346 def_stmt
= SSA_NAME_DEF_STMT (var
);
348 /* If we didn't get an insertion point, and the stmt has already
349 been removed, we won't be able to insert the debug bind stmt, so
350 we'll have to drop debug information. */
351 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
353 value
= degenerate_phi_result (as_a
<gphi
*> (def_stmt
));
354 if (value
&& walk_tree (&value
, find_released_ssa_name
, NULL
, NULL
))
356 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
358 else if (value
== error_mark_node
)
361 else if (gimple_clobber_p (def_stmt
))
362 /* We can end up here when rewriting a decl into SSA and coming
363 along a clobber for the original decl. Turn that into
364 # DEBUG decl => NULL */
366 else if (is_gimple_assign (def_stmt
))
368 bool no_value
= false;
370 if (!dom_info_available_p (CDI_DOMINATORS
))
372 struct walk_stmt_info wi
;
374 memset (&wi
, 0, sizeof (wi
));
376 /* When removing blocks without following reverse dominance
377 order, we may sometimes encounter SSA_NAMEs that have
378 already been released, referenced in other SSA_DEFs that
379 we're about to release. Consider:
388 If we deleted BB X first, propagating the value of w_2
389 won't do us any good. It's too late to recover their
390 original definition of v_1: when it was deleted, it was
391 only referenced in other DEFs, it couldn't possibly know
392 it should have been retained, and propagating every
393 single DEF just in case it might have to be propagated
394 into a DEBUG STMT would probably be too wasteful.
396 When dominator information is not readily available, we
397 check for and accept some loss of debug information. But
398 if it is available, there's no excuse for us to remove
399 blocks in the wrong order, so we don't even check for
400 dead SSA NAMEs. SSA verification shall catch any
402 if ((!gsi
&& !gimple_bb (def_stmt
))
403 || walk_gimple_op (def_stmt
, find_released_ssa_name
, &wi
))
408 value
= gimple_assign_rhs_to_tree (def_stmt
);
413 /* If there's a single use of VAR, and VAR is the entire debug
414 expression (usecount would have been incremented again
415 otherwise), then we can propagate VALUE into this single use,
418 We can also avoid using a temp if VALUE can be shared and
419 propagated into all uses, without generating expressions that
420 wouldn't be valid gimple RHSs.
422 Other cases that would require unsharing or non-gimple RHSs
423 are deferred to a debug temp, although we could avoid temps
424 at the expense of duplication of expressions. */
427 || gimple_code (def_stmt
) == GIMPLE_PHI
428 || CONSTANT_CLASS_P (value
)
429 || is_gimple_reg (value
))
434 tree vexpr
= build_debug_expr_decl (TREE_TYPE (value
));
436 def_temp
= gimple_build_debug_bind (vexpr
,
437 unshare_expr (value
),
440 /* FIXME: Is setting the mode really necessary? */
442 SET_DECL_MODE (vexpr
, DECL_MODE (value
));
444 SET_DECL_MODE (vexpr
, TYPE_MODE (TREE_TYPE (value
)));
447 gsi_insert_before (gsi
, def_temp
, GSI_SAME_STMT
);
450 gimple_stmt_iterator ngsi
= gsi_for_stmt (def_stmt
);
451 gsi_insert_before (&ngsi
, def_temp
, GSI_SAME_STMT
);
458 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, var
)
460 if (!gimple_debug_bind_p (stmt
))
465 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
466 SET_USE (use_p
, unshare_expr (value
));
467 /* If we didn't replace uses with a debug decl fold the
468 resulting expression. Otherwise we end up with invalid IL. */
469 if (TREE_CODE (value
) != DEBUG_EXPR_DECL
)
471 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
472 fold_stmt_inplace (&gsi
);
476 gimple_debug_bind_reset_value (stmt
);
483 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
484 other DEBUG stmts, and replace uses of the DEF with the
485 newly-created debug temp. */
488 insert_debug_temps_for_defs (gimple_stmt_iterator
*gsi
)
494 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
497 stmt
= gsi_stmt (*gsi
);
499 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
501 tree var
= DEF_FROM_PTR (def_p
);
503 if (TREE_CODE (var
) != SSA_NAME
)
506 insert_debug_temp_for_var_def (gsi
, var
);
510 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
513 reset_debug_uses (gimple
*stmt
)
517 imm_use_iterator imm_iter
;
520 if (!MAY_HAVE_DEBUG_BIND_STMTS
)
523 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
525 tree var
= DEF_FROM_PTR (def_p
);
527 if (TREE_CODE (var
) != SSA_NAME
)
530 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, var
)
532 if (!gimple_debug_bind_p (use_stmt
))
535 gimple_debug_bind_reset_value (use_stmt
);
536 update_stmt (use_stmt
);
541 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
542 dominated stmts before their dominators, so that release_ssa_defs
543 stands a chance of propagating DEFs into debug bind stmts. */
/* NOTE(review): this block is garbled by extraction -- statements are
   split across physical lines and several original lines (the return
   type, local declarations, loop bodies, the worklist-compaction logic
   and closing braces) are missing entirely.  The text below is kept
   byte-identical; restore it from the upstream implementation rather
   than attempting to compile it as-is.  */
546 release_defs_bitset (bitmap toremove
)
551 /* Performing a topological sort is probably overkill, this will
552 most likely run in slightly superlinear time, rather than the
553 pathological quadratic worst case.
554 But iterate from max SSA name version to min one because
555 that mimics allocation order during code generation behavior best.
556 Use an array for this which we compact on-the-fly with a NULL
557 marker moving towards the end of the vector. */
558 auto_vec
<tree
, 16> names
;
559 names
.reserve (bitmap_count_bits (toremove
) + 1);
560 names
.quick_push (NULL_TREE
);
561 EXECUTE_IF_SET_IN_BITMAP (toremove
, 0, j
, bi
)
562 names
.quick_push (ssa_name (j
));
/* NOTE(review): presumably the bitmap is switched to tree view here for
   the membership tests in the loop below -- confirm against upstream.  */
564 bitmap_tree_view (toremove
);
565 while (!bitmap_empty_p (toremove
))
567 j
= names
.length () - 1;
568 for (unsigned i
= names
.length () - 1; names
[i
];)
570 bool remove_now
= true;
573 imm_use_iterator uit
;
575 FOR_EACH_IMM_USE_STMT (stmt
, uit
, var
)
580 /* We can't propagate PHI nodes into debug stmts. */
581 if (gimple_code (stmt
) == GIMPLE_PHI
582 || is_gimple_debug (stmt
))
585 /* If we find another definition to remove that uses
586 the one we're looking at, defer the removal of this
587 one, so that it can be propagated into debug stmts
588 after the other is. */
589 FOR_EACH_SSA_DEF_OPERAND (def_p
, stmt
, dit
, SSA_OP_DEF
)
591 tree odef
= DEF_FROM_PTR (def_p
);
593 if (bitmap_bit_p (toremove
, SSA_NAME_VERSION (odef
)))
606 gimple
*def
= SSA_NAME_DEF_STMT (var
);
607 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
609 if (gimple_code (def
) == GIMPLE_PHI
)
610 remove_phi_node (&gsi
, true);
613 gsi_remove (&gsi
, true);
616 bitmap_clear_bit (toremove
, SSA_NAME_VERSION (var
));
624 bitmap_list_view (toremove
);
627 /* Disable warnings about missing quoting in GCC diagnostics for
628 the verification errors. Their format strings don't follow GCC
629 diagnostic conventions and the calls are ultimately followed by
630 one to internal_error. */
632 # pragma GCC diagnostic push
633 # pragma GCC diagnostic ignored "-Wformat-diag"
636 /* Verify virtual SSA form. */
639 verify_vssa (basic_block bb
, tree current_vdef
, sbitmap visited
)
643 if (!bitmap_set_bit (visited
, bb
->index
))
646 /* Pick up the single virtual PHI def. */
648 for (gphi_iterator si
= gsi_start_phis (bb
); !gsi_end_p (si
);
651 tree res
= gimple_phi_result (si
.phi ());
652 if (virtual_operand_p (res
))
656 error ("multiple virtual PHI nodes in BB %d", bb
->index
);
657 print_gimple_stmt (stderr
, phi
, 0);
658 print_gimple_stmt (stderr
, si
.phi (), 0);
667 current_vdef
= gimple_phi_result (phi
);
668 if (TREE_CODE (current_vdef
) != SSA_NAME
)
670 error ("virtual definition is not an SSA name");
671 print_gimple_stmt (stderr
, phi
, 0);
677 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
680 gimple
*stmt
= gsi_stmt (gsi
);
681 tree vuse
= gimple_vuse (stmt
);
684 if (vuse
!= current_vdef
)
686 error ("stmt with wrong VUSE");
687 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
688 fprintf (stderr
, "expected ");
689 print_generic_expr (stderr
, current_vdef
);
690 fprintf (stderr
, "\n");
693 tree vdef
= gimple_vdef (stmt
);
697 if (TREE_CODE (current_vdef
) != SSA_NAME
)
699 error ("virtual definition is not an SSA name");
700 print_gimple_stmt (stderr
, phi
, 0);
707 /* Verify destination PHI uses and recurse. */
710 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
712 gphi
*phi
= get_virtual_phi (e
->dest
);
714 && PHI_ARG_DEF_FROM_EDGE (phi
, e
) != current_vdef
)
716 error ("PHI node with wrong VUSE on edge from BB %d",
718 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
);
719 fprintf (stderr
, "expected ");
720 print_generic_expr (stderr
, current_vdef
);
721 fprintf (stderr
, "\n");
726 err
|= verify_vssa (e
->dest
, current_vdef
, visited
);
732 /* Return true if SSA_NAME is malformed and mark it visited.
734 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
738 verify_ssa_name (tree ssa_name
, bool is_virtual
)
740 if (TREE_CODE (ssa_name
) != SSA_NAME
)
742 error ("expected an SSA_NAME object");
746 if (SSA_NAME_IN_FREE_LIST (ssa_name
))
748 error ("found an SSA_NAME that had been released into the free pool");
752 if (SSA_NAME_VAR (ssa_name
) != NULL_TREE
753 && TREE_TYPE (ssa_name
) != TREE_TYPE (SSA_NAME_VAR (ssa_name
)))
755 error ("type mismatch between an SSA_NAME and its symbol");
759 if (is_virtual
&& !virtual_operand_p (ssa_name
))
761 error ("found a virtual definition for a GIMPLE register");
765 if (is_virtual
&& SSA_NAME_VAR (ssa_name
) != gimple_vop (cfun
))
767 error ("virtual SSA name for non-VOP decl");
771 if (!is_virtual
&& virtual_operand_p (ssa_name
))
773 error ("found a real definition for a non-register");
777 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name
)
778 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
)))
780 error ("found a default name with a non-empty defining statement");
788 /* Return true if the definition of SSA_NAME at block BB is malformed.
790 STMT is the statement where SSA_NAME is created.
792 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
793 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
794 it means that the block in that array slot contains the
795 definition of SSA_NAME.
797 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
800 verify_def (basic_block bb
, basic_block
*definition_block
, tree ssa_name
,
801 gimple
*stmt
, bool is_virtual
)
803 if (verify_ssa_name (ssa_name
, is_virtual
))
806 if (SSA_NAME_VAR (ssa_name
)
807 && TREE_CODE (SSA_NAME_VAR (ssa_name
)) == RESULT_DECL
808 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name
)))
810 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
814 if (definition_block
[SSA_NAME_VERSION (ssa_name
)])
816 error ("SSA_NAME created in two different blocks %i and %i",
817 definition_block
[SSA_NAME_VERSION (ssa_name
)]->index
, bb
->index
);
821 definition_block
[SSA_NAME_VERSION (ssa_name
)] = bb
;
823 if (SSA_NAME_DEF_STMT (ssa_name
) != stmt
)
825 error ("SSA_NAME_DEF_STMT is wrong");
826 fprintf (stderr
, "Expected definition statement:\n");
827 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (ssa_name
), 4, TDF_VOPS
);
828 fprintf (stderr
, "\nActual definition statement:\n");
829 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
836 fprintf (stderr
, "while verifying SSA_NAME ");
837 print_generic_expr (stderr
, ssa_name
);
838 fprintf (stderr
, " in statement\n");
839 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
845 /* Return true if the use of SSA_NAME at statement STMT in block BB is
848 DEF_BB is the block where SSA_NAME was found to be created.
850 IDOM contains immediate dominator information for the flowgraph.
852 CHECK_ABNORMAL is true if the caller wants to check whether this use
853 is flowing through an abnormal edge (only used when checking PHI
856 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
857 that are defined before STMT in basic block BB. */
860 verify_use (basic_block bb
, basic_block def_bb
, use_operand_p use_p
,
861 gimple
*stmt
, bool check_abnormal
, bitmap names_defined_in_bb
)
864 tree ssa_name
= USE_FROM_PTR (use_p
);
866 if (!TREE_VISITED (ssa_name
))
867 if (verify_imm_links (stderr
, ssa_name
))
870 TREE_VISITED (ssa_name
) = 1;
872 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
))
873 && SSA_NAME_IS_DEFAULT_DEF (ssa_name
))
874 ; /* Default definitions have empty statements. Nothing to do. */
877 error ("missing definition");
880 else if (bb
!= def_bb
881 && !dominated_by_p (CDI_DOMINATORS
, bb
, def_bb
))
883 error ("definition in block %i does not dominate use in block %i",
884 def_bb
->index
, bb
->index
);
887 else if (bb
== def_bb
888 && names_defined_in_bb
!= NULL
889 && !bitmap_bit_p (names_defined_in_bb
, SSA_NAME_VERSION (ssa_name
)))
891 error ("definition in block %i follows the use", def_bb
->index
);
896 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name
))
898 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
902 /* Make sure the use is in an appropriate list by checking the previous
903 element to make sure it's the same. */
904 if (use_p
->prev
== NULL
)
906 error ("no immediate_use list");
912 if (use_p
->prev
->use
== NULL
)
913 listvar
= use_p
->prev
->loc
.ssa_name
;
915 listvar
= USE_FROM_PTR (use_p
->prev
);
916 if (listvar
!= ssa_name
)
918 error ("wrong immediate use list");
925 fprintf (stderr
, "for SSA_NAME: ");
926 print_generic_expr (stderr
, ssa_name
, TDF_VOPS
);
927 fprintf (stderr
, " in statement:\n");
928 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
935 /* Return true if any of the arguments for PHI node PHI at block BB is
938 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
939 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
940 it means that the block in that array slot contains the
941 definition of SSA_NAME. */
944 verify_phi_args (gphi
*phi
, basic_block bb
, basic_block
*definition_block
)
948 size_t i
, phi_num_args
= gimple_phi_num_args (phi
);
950 if (EDGE_COUNT (bb
->preds
) != phi_num_args
)
952 error ("incoming edge count does not match number of PHI arguments");
957 for (i
= 0; i
< phi_num_args
; i
++)
959 use_operand_p op_p
= gimple_phi_arg_imm_use_ptr (phi
, i
);
960 tree op
= USE_FROM_PTR (op_p
);
962 e
= EDGE_PRED (bb
, i
);
966 error ("PHI argument is missing for edge %d->%d",
973 if (TREE_CODE (op
) != SSA_NAME
&& !is_gimple_min_invariant (op
))
975 error ("PHI argument is not SSA_NAME, or invariant");
979 if ((e
->flags
& EDGE_ABNORMAL
) && TREE_CODE (op
) != SSA_NAME
)
981 error ("PHI argument on abnormal edge is not SSA_NAME");
985 if (TREE_CODE (op
) == SSA_NAME
)
987 err
= verify_ssa_name (op
, virtual_operand_p (gimple_phi_result (phi
)));
988 err
|= verify_use (e
->src
, definition_block
[SSA_NAME_VERSION (op
)],
989 op_p
, phi
, e
->flags
& EDGE_ABNORMAL
, NULL
);
992 if (TREE_CODE (op
) == ADDR_EXPR
)
994 tree base
= TREE_OPERAND (op
, 0);
995 while (handled_component_p (base
))
996 base
= TREE_OPERAND (base
, 0);
998 || TREE_CODE (base
) == PARM_DECL
999 || TREE_CODE (base
) == RESULT_DECL
)
1000 && !TREE_ADDRESSABLE (base
))
1002 error ("address taken, but ADDRESSABLE bit not set");
1009 error ("wrong edge %d->%d for PHI argument",
1010 e
->src
->index
, e
->dest
->index
);
1016 fprintf (stderr
, "PHI argument\n");
1017 print_generic_stmt (stderr
, op
, TDF_VOPS
);
1025 fprintf (stderr
, "for PHI node\n");
1026 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1034 /* Verify common invariants in the SSA web.
1035 TODO: verify the variable annotations. */
1038 verify_ssa (bool check_modified_stmt
, bool check_ssa_operands
)
1041 basic_block
*definition_block
= XCNEWVEC (basic_block
, num_ssa_names
);
1044 enum dom_state orig_dom_state
= dom_info_state (CDI_DOMINATORS
);
1045 auto_bitmap names_defined_in_bb
;
1047 gcc_assert (!need_ssa_update_p (cfun
));
1049 timevar_push (TV_TREE_SSA_VERIFY
);
1052 /* Keep track of SSA names present in the IL. */
1055 hash_map
<void *, tree
> ssa_info
;
1057 FOR_EACH_SSA_NAME (i
, name
, cfun
)
1060 TREE_VISITED (name
) = 0;
1062 verify_ssa_name (name
, virtual_operand_p (name
));
1064 stmt
= SSA_NAME_DEF_STMT (name
);
1065 if (!gimple_nop_p (stmt
))
1067 basic_block bb
= gimple_bb (stmt
);
1068 if (verify_def (bb
, definition_block
,
1069 name
, stmt
, virtual_operand_p (name
)))
1074 if (POINTER_TYPE_P (TREE_TYPE (name
)))
1075 info
= SSA_NAME_PTR_INFO (name
);
1076 else if (INTEGRAL_TYPE_P (TREE_TYPE (name
)))
1077 info
= SSA_NAME_RANGE_INFO (name
);
1081 tree
&val
= ssa_info
.get_or_insert (info
, &existed
);
1084 error ("shared SSA name info");
1085 print_generic_expr (stderr
, val
);
1086 fprintf (stderr
, " and ");
1087 print_generic_expr (stderr
, name
);
1088 fprintf (stderr
, "\n");
1097 calculate_dominance_info (CDI_DOMINATORS
);
1099 /* Now verify all the uses and make sure they agree with the definitions
1100 found in the previous pass. */
1101 FOR_EACH_BB_FN (bb
, cfun
)
1106 /* Make sure that all edges have a clear 'aux' field. */
1107 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1111 error ("AUX pointer initialized for edge %d->%d", e
->src
->index
,
1117 /* Verify the arguments for every PHI node in the block. */
1118 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1120 gphi
*phi
= gsi
.phi ();
1121 if (verify_phi_args (phi
, bb
, definition_block
))
1124 bitmap_set_bit (names_defined_in_bb
,
1125 SSA_NAME_VERSION (gimple_phi_result (phi
)));
1128 /* Now verify all the uses and vuses in every statement of the block. */
1129 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1132 gimple
*stmt
= gsi_stmt (gsi
);
1133 use_operand_p use_p
;
1135 if (check_modified_stmt
&& gimple_modified_p (stmt
))
1137 error ("stmt (%p) marked modified after optimization pass: ",
1139 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1143 if (check_ssa_operands
&& verify_ssa_operands (cfun
, stmt
))
1145 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1149 if (gimple_debug_bind_p (stmt
)
1150 && !gimple_debug_bind_has_value_p (stmt
))
1153 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1155 op
= USE_FROM_PTR (use_p
);
1156 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1157 use_p
, stmt
, false, names_defined_in_bb
))
1161 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1163 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1165 error ("SSA_NAME_DEF_STMT is wrong");
1166 fprintf (stderr
, "Expected definition statement:\n");
1167 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1168 fprintf (stderr
, "\nActual definition statement:\n");
1169 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1173 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1177 bitmap_clear (names_defined_in_bb
);
1180 free (definition_block
);
1182 if (gimple_vop (cfun
)
1183 && ssa_default_def (cfun
, gimple_vop (cfun
)))
1185 auto_sbitmap
visited (last_basic_block_for_fn (cfun
) + 1);
1186 bitmap_clear (visited
);
1187 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
1188 ssa_default_def (cfun
, gimple_vop (cfun
)), visited
))
1192 /* Restore the dominance information to its prior known state, so
1193 that we do not perturb the compiler's subsequent behavior. */
1194 if (orig_dom_state
== DOM_NONE
)
1195 free_dominance_info (CDI_DOMINATORS
);
1197 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1199 timevar_pop (TV_TREE_SSA_VERIFY
);
1203 internal_error ("verify_ssa failed");
1207 # pragma GCC diagnostic pop
1210 /* Initialize global DFA and SSA structures.
1211 If SIZE is non-zero allocated ssa names array of a given size. */
1214 init_tree_ssa (struct function
*fn
, int size
)
1216 fn
->gimple_df
= ggc_cleared_alloc
<gimple_df
> ();
1217 fn
->gimple_df
->default_defs
= hash_table
<ssa_name_hasher
>::create_ggc (20);
1218 pt_solution_reset (&fn
->gimple_df
->escaped
);
1219 init_ssanames (fn
, size
);
1222 /* Deallocate memory associated with SSA data structures for FNDECL. */
1225 delete_tree_ssa (struct function
*fn
)
1229 /* We no longer maintain the SSA operand cache at this point. */
1230 if (ssa_operands_active (fn
))
1231 fini_ssa_operands (fn
);
1233 fn
->gimple_df
->default_defs
->empty ();
1234 fn
->gimple_df
->default_defs
= NULL
;
1235 pt_solution_reset (&fn
->gimple_df
->escaped
);
1236 if (fn
->gimple_df
->decls_to_pointers
!= NULL
)
1237 delete fn
->gimple_df
->decls_to_pointers
;
1238 fn
->gimple_df
->decls_to_pointers
= NULL
;
1239 fn
->gimple_df
= NULL
;
1241 /* We no longer need the edge variable maps. */
1242 redirect_edge_var_map_empty ();
1245 /* Return true if EXPR is a useless type conversion, otherwise return
1249 tree_ssa_useless_type_conversion (tree expr
)
1251 tree outer_type
, inner_type
;
1253 /* If we have an assignment that merely uses a NOP_EXPR to change
1254 the top of the RHS to the type of the LHS and the type conversion
1255 is "safe", then strip away the type conversion so that we can
1256 enter LHS = RHS into the const_and_copies table. */
1257 if (!CONVERT_EXPR_P (expr
)
1258 && TREE_CODE (expr
) != VIEW_CONVERT_EXPR
1259 && TREE_CODE (expr
) != NON_LVALUE_EXPR
)
1262 outer_type
= TREE_TYPE (expr
);
1263 inner_type
= TREE_TYPE (TREE_OPERAND (expr
, 0));
1265 if (inner_type
== error_mark_node
)
1268 return useless_type_conversion_p (outer_type
, inner_type
);
1271 /* Strip conversions from EXP according to
1272 tree_ssa_useless_type_conversion and return the resulting
1276 tree_ssa_strip_useless_type_conversions (tree exp
)
1278 while (tree_ssa_useless_type_conversion (exp
))
1279 exp
= TREE_OPERAND (exp
, 0);
1283 /* Return true if T, as SSA_NAME, has an implicit default defined value. */
1286 ssa_defined_default_def_p (tree t
)
1288 tree var
= SSA_NAME_VAR (t
);
1292 /* Parameters get their initial value from the function entry. */
1293 else if (TREE_CODE (var
) == PARM_DECL
)
1295 /* When returning by reference the return address is actually a hidden
1297 else if (TREE_CODE (var
) == RESULT_DECL
&& DECL_BY_REFERENCE (var
))
1299 /* Hard register variables get their initial value from the ether. */
1300 else if (VAR_P (var
) && DECL_HARD_REGISTER (var
))
1307 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1308 should be returned if the value is only partially undefined. */
1311 ssa_undefined_value_p (tree t
, bool partial
)
1315 gcc_checking_assert (!virtual_operand_p (t
));
1317 if (ssa_defined_default_def_p (t
))
1320 /* The value is undefined iff its definition statement is empty. */
1321 def_stmt
= SSA_NAME_DEF_STMT (t
);
1322 if (gimple_nop_p (def_stmt
))
1325 /* The value is undefined if the definition statement is a call
1326 to .DEFERRED_INIT function. */
1327 if (gimple_call_internal_p (def_stmt
, IFN_DEFERRED_INIT
))
1330 /* The value is partially undefined if the definition statement is
1331 a REALPART_EXPR or IMAGPART_EXPR and its operand is defined by
1332 the call to .DEFERRED_INIT function. This is for handling the
1335 1 typedef _Complex float C;
1348 with -ftrivial-auto-var-init, compiler will insert the following
1349 artificial initialization:
1350 f = .DEFERRED_INIT (f, 2);
1351 _1 = REALPART_EXPR <f>;
1353 we should treat the definition _1 = REALPART_EXPR <f> as undefined. */
1354 if (partial
&& is_gimple_assign (def_stmt
)
1355 && (gimple_assign_rhs_code (def_stmt
) == REALPART_EXPR
1356 || gimple_assign_rhs_code (def_stmt
) == IMAGPART_EXPR
))
1358 tree real_imag_part
= TREE_OPERAND (gimple_assign_rhs1 (def_stmt
), 0);
1359 if (TREE_CODE (real_imag_part
) == SSA_NAME
1360 && gimple_call_internal_p (SSA_NAME_DEF_STMT (real_imag_part
),
1365 /* Check if the complex was not only partially defined. */
1366 if (partial
&& is_gimple_assign (def_stmt
)
1367 && gimple_assign_rhs_code (def_stmt
) == COMPLEX_EXPR
)
1371 rhs1
= gimple_assign_rhs1 (def_stmt
);
1372 rhs2
= gimple_assign_rhs2 (def_stmt
);
1373 return (TREE_CODE (rhs1
) == SSA_NAME
&& ssa_undefined_value_p (rhs1
))
1374 || (TREE_CODE (rhs2
) == SSA_NAME
&& ssa_undefined_value_p (rhs2
));
1380 /* Return TRUE iff STMT, a gimple statement, references an undefined
1384 gimple_uses_undefined_value_p (gimple
*stmt
)
1389 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
1390 if (ssa_undefined_value_p (op
))
1397 /* Return TRUE iff there are any non-PHI uses of VAR that dominate the
1398 end of BB. If we return TRUE and BB is a loop header, then VAR we
1399 be assumed to be defined within the loop, even if it is marked as
1403 ssa_name_any_use_dominates_bb_p (tree var
, basic_block bb
)
1405 imm_use_iterator iter
;
1406 use_operand_p use_p
;
1407 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
1409 if (is_a
<gphi
*> (USE_STMT (use_p
))
1410 || is_gimple_debug (USE_STMT (use_p
)))
1412 basic_block dombb
= gimple_bb (USE_STMT (use_p
));
1413 if (dominated_by_p (CDI_DOMINATORS
, bb
, dombb
))
1420 /* Mark as maybe_undef any SSA_NAMEs that are unsuitable as ivopts
1421 candidates for potentially involving undefined behavior. */
1424 mark_ssa_maybe_undefs (void)
1426 auto_vec
<tree
> queue
;
1428 /* Scan all SSA_NAMEs, marking the definitely-undefined ones as
1429 maybe-undefined and queuing them for propagation, while clearing
1430 the mark on others. */
1433 FOR_EACH_SSA_NAME (i
, var
, cfun
)
1435 if (SSA_NAME_IS_VIRTUAL_OPERAND (var
)
1436 || !ssa_undefined_value_p (var
, false))
1437 ssa_name_set_maybe_undef (var
, false);
1440 ssa_name_set_maybe_undef (var
);
1441 queue
.safe_push (var
);
1442 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1443 fprintf (dump_file
, "marking _%i as maybe-undef\n",
1444 SSA_NAME_VERSION (var
));
1448 /* Now propagate maybe-undefined from a DEF to any other PHI that
1449 uses it, as long as there isn't any intervening use of DEF. */
1450 while (!queue
.is_empty ())
1453 imm_use_iterator iter
;
1454 use_operand_p use_p
;
1455 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
1457 /* Any uses of VAR that aren't PHI args imply VAR must be
1458 defined, otherwise undefined behavior would have been
1459 definitely invoked. Only PHI args may hold
1460 maybe-undefined values without invoking undefined
1461 behavior for that reason alone. */
1462 if (!is_a
<gphi
*> (USE_STMT (use_p
)))
1464 gphi
*phi
= as_a
<gphi
*> (USE_STMT (use_p
));
1466 tree def
= gimple_phi_result (phi
);
1467 if (ssa_name_maybe_undef_p (def
))
1470 /* Look for any uses of the maybe-unused SSA_NAME that
1471 dominates the block that reaches the incoming block
1472 corresponding to the PHI arg in which it is mentioned.
1473 That means we can assume the SSA_NAME is defined in that
1474 path, so we only mark a PHI result as maybe-undef if we
1475 find an unused reaching SSA_NAME. */
1476 int idx
= phi_arg_index_from_use (use_p
);
1477 basic_block bb
= gimple_phi_arg_edge (phi
, idx
)->src
;
1478 if (ssa_name_any_use_dominates_bb_p (var
, bb
))
1481 ssa_name_set_maybe_undef (def
);
1482 queue
.safe_push (def
);
1483 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1484 fprintf (dump_file
, "marking _%i as maybe-undef because of _%i\n",
1485 SSA_NAME_VERSION (def
), SSA_NAME_VERSION (var
));
1491 /* If necessary, rewrite the base of the reference tree *TP from
1492 a MEM_REF to a plain or converted symbol. */
1495 maybe_rewrite_mem_ref_base (tree
*tp
, bitmap suitable_for_renaming
)
1499 while (handled_component_p (*tp
))
1500 tp
= &TREE_OPERAND (*tp
, 0);
1501 if (TREE_CODE (*tp
) == MEM_REF
1502 && TREE_CODE (TREE_OPERAND (*tp
, 0)) == ADDR_EXPR
1503 && (sym
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0))
1505 && !TREE_ADDRESSABLE (sym
)
1506 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
))
1507 && is_gimple_reg_type (TREE_TYPE (*tp
))
1508 && ! VOID_TYPE_P (TREE_TYPE (*tp
)))
1510 if (VECTOR_TYPE_P (TREE_TYPE (sym
))
1511 && useless_type_conversion_p (TREE_TYPE (*tp
),
1512 TREE_TYPE (TREE_TYPE (sym
)))
1513 && multiple_p (mem_ref_offset (*tp
),
1514 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp
)))))
1516 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1517 TYPE_SIZE (TREE_TYPE (*tp
)),
1518 int_const_binop (MULT_EXPR
,
1519 bitsize_int (BITS_PER_UNIT
),
1520 TREE_OPERAND (*tp
, 1)));
1522 else if (TREE_CODE (TREE_TYPE (sym
)) == COMPLEX_TYPE
1523 && useless_type_conversion_p (TREE_TYPE (*tp
),
1524 TREE_TYPE (TREE_TYPE (sym
))))
1526 *tp
= build1 (integer_zerop (TREE_OPERAND (*tp
, 1))
1527 ? REALPART_EXPR
: IMAGPART_EXPR
,
1528 TREE_TYPE (*tp
), sym
);
1530 else if (integer_zerop (TREE_OPERAND (*tp
, 1))
1531 && DECL_SIZE (sym
) == TYPE_SIZE (TREE_TYPE (*tp
)))
1533 if (!useless_type_conversion_p (TREE_TYPE (*tp
),
1535 *tp
= build1 (VIEW_CONVERT_EXPR
,
1536 TREE_TYPE (*tp
), sym
);
1540 else if (DECL_SIZE (sym
)
1541 && TREE_CODE (DECL_SIZE (sym
)) == INTEGER_CST
1542 && (known_subrange_p
1543 (mem_ref_offset (*tp
),
1544 wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp
))),
1545 0, wi::to_offset (DECL_SIZE_UNIT (sym
))))
1546 && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
1547 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp
)))
1548 == TYPE_PRECISION (TREE_TYPE (*tp
))))
1549 && (! INTEGRAL_TYPE_P (TREE_TYPE (sym
))
1550 || type_has_mode_precision_p (TREE_TYPE (sym
)))
1551 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp
))),
1552 BITS_PER_UNIT
) == 0)
1554 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1555 TYPE_SIZE (TREE_TYPE (*tp
)),
1556 wide_int_to_tree (bitsizetype
,
1557 mem_ref_offset (*tp
)
1558 << LOG2_BITS_PER_UNIT
));
1563 /* For a tree REF return its base if it is the base of a MEM_REF
1564 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1567 non_rewritable_mem_ref_base (tree ref
)
1571 /* A plain decl does not need it set. */
1575 switch (TREE_CODE (ref
))
1580 if (DECL_P (TREE_OPERAND (ref
, 0)))
1583 case VIEW_CONVERT_EXPR
:
1584 if (DECL_P (TREE_OPERAND (ref
, 0)))
1586 if (TYPE_SIZE (TREE_TYPE (ref
))
1587 != TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 0))))
1588 return TREE_OPERAND (ref
, 0);
1592 /* We would need to rewrite ARRAY_REFs or COMPONENT_REFs and even
1593 more so multiple levels of handled components. */
1599 /* But watch out for MEM_REFs we cannot lower to a
1600 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1601 if (TREE_CODE (base
) == MEM_REF
1602 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1604 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1605 if (! DECL_P (decl
))
1607 if (! is_gimple_reg_type (TREE_TYPE (base
))
1608 || VOID_TYPE_P (TREE_TYPE (base
))
1609 || TREE_THIS_VOLATILE (decl
) != TREE_THIS_VOLATILE (base
))
1611 if ((VECTOR_TYPE_P (TREE_TYPE (decl
))
1612 || TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
)
1613 && useless_type_conversion_p (TREE_TYPE (base
),
1614 TREE_TYPE (TREE_TYPE (decl
)))
1615 && known_ge (mem_ref_offset (base
), 0)
1616 && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl
))),
1617 mem_ref_offset (base
))
1618 && multiple_p (mem_ref_offset (base
),
1619 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base
)))))
1621 /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR. */
1622 if (integer_zerop (TREE_OPERAND (base
, 1))
1623 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (base
)))
1625 /* For integral typed extracts we can use a BIT_FIELD_REF. */
1626 if (DECL_SIZE (decl
)
1627 && TREE_CODE (DECL_SIZE_UNIT (decl
)) == INTEGER_CST
1628 && (known_subrange_p
1629 (mem_ref_offset (base
),
1630 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base
))),
1631 0, wi::to_poly_offset (DECL_SIZE_UNIT (decl
))))
1632 /* ??? We can't handle bitfield precision extracts without
1633 either using an alternate type for the BIT_FIELD_REF and
1634 then doing a conversion or possibly adjusting the offset
1635 according to endianness. */
1636 && (! INTEGRAL_TYPE_P (TREE_TYPE (base
))
1637 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base
)))
1638 == TYPE_PRECISION (TREE_TYPE (base
))))
1639 /* ??? Likewise for extracts from bitfields, we'd have
1640 to pun the base object to a size precision mode first. */
1641 && (! INTEGRAL_TYPE_P (TREE_TYPE (decl
))
1642 || type_has_mode_precision_p (TREE_TYPE (decl
)))
1643 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base
))),
1644 BITS_PER_UNIT
) == 0)
1649 /* We cannot rewrite a decl in the base. */
1650 base
= get_base_address (ref
);
1654 /* We cannot rewrite TARGET_MEM_REFs. */
1655 else if (TREE_CODE (base
) == TARGET_MEM_REF
1656 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1658 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1659 if (! DECL_P (decl
))
1667 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1668 Otherwise return true. */
1671 non_rewritable_lvalue_p (tree lhs
)
1673 /* A plain decl is always rewritable. */
1677 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1678 a reasonably efficient manner... */
1679 if ((TREE_CODE (lhs
) == REALPART_EXPR
1680 || TREE_CODE (lhs
) == IMAGPART_EXPR
)
1681 && DECL_P (TREE_OPERAND (lhs
, 0)))
1684 /* ??? The following could be relaxed allowing component
1685 references that do not change the access size. */
1686 if (TREE_CODE (lhs
) == MEM_REF
1687 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
)
1689 tree decl
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0);
1691 /* A decl that is wrapped inside a MEM-REF that covers
1692 it full is also rewritable. */
1693 if (integer_zerop (TREE_OPERAND (lhs
, 1))
1695 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (lhs
))
1696 /* If the dynamic type of the decl has larger precision than
1697 the decl itself we can't use the decls type for SSA rewriting. */
1698 && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl
))
1699 || compare_tree_int (DECL_SIZE (decl
),
1700 TYPE_PRECISION (TREE_TYPE (decl
))) == 0)
1701 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
1702 && (TYPE_PRECISION (TREE_TYPE (decl
))
1703 >= TYPE_PRECISION (TREE_TYPE (lhs
)))))
1704 /* Make sure we are not re-writing non-float copying into float
1705 copying as that can incur normalization. */
1706 && (! FLOAT_TYPE_P (TREE_TYPE (decl
))
1707 || types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (decl
)))
1708 && (TREE_THIS_VOLATILE (decl
) == TREE_THIS_VOLATILE (lhs
)))
1711 /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1712 using a BIT_INSERT_EXPR. */
1714 && VECTOR_TYPE_P (TREE_TYPE (decl
))
1715 && TYPE_MODE (TREE_TYPE (decl
)) != BLKmode
1716 && known_ge (mem_ref_offset (lhs
), 0)
1717 && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl
))),
1718 mem_ref_offset (lhs
))
1719 && multiple_p (mem_ref_offset (lhs
),
1720 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (lhs
))))
1721 && known_ge (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (decl
))),
1722 wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (lhs
)))))
1724 poly_uint64 lhs_bits
, nelts
;
1725 if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs
)), &lhs_bits
)
1726 && multiple_p (lhs_bits
,
1728 (TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl
)))),
1730 && valid_vector_subparts_p (nelts
))
1732 if (known_eq (nelts
, 1u))
1734 /* For sub-vector inserts the insert vector mode has to be
1736 tree vtype
= build_vector_type (TREE_TYPE (TREE_TYPE (decl
)),
1738 if (TYPE_MODE (vtype
) != BLKmode
)
1744 /* A vector-insert using a BIT_FIELD_REF is rewritable using
1746 if (TREE_CODE (lhs
) == BIT_FIELD_REF
1747 && DECL_P (TREE_OPERAND (lhs
, 0))
1748 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs
, 0)))
1749 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs
, 0))) != BLKmode
1750 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs
)),
1752 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs
, 0)))), 0)
1753 && (tree_to_uhwi (TREE_OPERAND (lhs
, 2))
1754 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))) == 0)
1760 /* When possible, clear TREE_ADDRESSABLE bit, set or clear DECL_NOT_GIMPLE_REG_P
1761 and mark the variable VAR for conversion into SSA. Return true when updating
1762 stmts is required. */
1765 maybe_optimize_var (tree var
, bitmap addresses_taken
, bitmap not_reg_needs
,
1766 bitmap suitable_for_renaming
)
1768 /* Global Variables, result decls cannot be changed. */
1769 if (is_global_var (var
)
1770 || TREE_CODE (var
) == RESULT_DECL
1771 || bitmap_bit_p (addresses_taken
, DECL_UID (var
)))
1774 bool maybe_reg
= false;
1775 if (TREE_ADDRESSABLE (var
))
1777 TREE_ADDRESSABLE (var
) = 0;
1781 fprintf (dump_file
, "No longer having address taken: ");
1782 print_generic_expr (dump_file
, var
);
1783 fprintf (dump_file
, "\n");
1787 /* For register type decls if we do not have any partial defs
1788 we cannot express in SSA form mark them as DECL_NOT_GIMPLE_REG_P
1789 as to avoid SSA rewrite. For the others go ahead and mark
1790 them for renaming. */
1791 if (is_gimple_reg_type (TREE_TYPE (var
)))
1793 if (bitmap_bit_p (not_reg_needs
, DECL_UID (var
)))
1795 DECL_NOT_GIMPLE_REG_P (var
) = 1;
1798 fprintf (dump_file
, "Has partial defs: ");
1799 print_generic_expr (dump_file
, var
);
1800 fprintf (dump_file
, "\n");
1803 else if (DECL_NOT_GIMPLE_REG_P (var
))
1806 DECL_NOT_GIMPLE_REG_P (var
) = 0;
1808 if (maybe_reg
&& is_gimple_reg (var
))
1812 fprintf (dump_file
, "Now a gimple register: ");
1813 print_generic_expr (dump_file
, var
);
1814 fprintf (dump_file
, "\n");
1816 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
1821 /* Return true when STMT is ASAN mark where second argument is an address
1822 of a local variable. */
1825 is_asan_mark_p (gimple
*stmt
)
1827 if (!gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1830 tree addr
= get_base_address (gimple_call_arg (stmt
, 1));
1831 if (TREE_CODE (addr
) == ADDR_EXPR
1832 && VAR_P (TREE_OPERAND (addr
, 0)))
1834 tree var
= TREE_OPERAND (addr
, 0);
1835 if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1836 DECL_ATTRIBUTES (var
)))
1839 unsigned addressable
= TREE_ADDRESSABLE (var
);
1840 TREE_ADDRESSABLE (var
) = 0;
1841 bool r
= is_gimple_reg (var
);
1842 TREE_ADDRESSABLE (var
) = addressable
;
1849 /* Compute TREE_ADDRESSABLE and whether we have unhandled partial defs
1850 for local variables. */
1853 execute_update_addresses_taken (void)
1856 auto_bitmap addresses_taken
;
1857 auto_bitmap not_reg_needs
;
1858 auto_bitmap suitable_for_renaming
;
1859 bool optimistic_not_addressable
= false;
1863 timevar_push (TV_ADDRESS_TAKEN
);
1865 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1866 the function body. */
1867 FOR_EACH_BB_FN (bb
, cfun
)
1869 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
1872 gimple
*stmt
= gsi_stmt (gsi
);
1873 enum gimple_code code
= gimple_code (stmt
);
1876 if (code
== GIMPLE_CALL
)
1878 if (optimize_atomic_compare_exchange_p (stmt
))
1880 /* For __atomic_compare_exchange_N if the second argument
1881 is &var, don't mark var addressable;
1882 if it becomes non-addressable, we'll rewrite it into
1883 ATOMIC_COMPARE_EXCHANGE call. */
1884 tree arg
= gimple_call_arg (stmt
, 1);
1885 gimple_call_set_arg (stmt
, 1, null_pointer_node
);
1886 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1887 gimple_call_set_arg (stmt
, 1, arg
);
1888 /* Remember we have to check again below. */
1889 optimistic_not_addressable
= true;
1891 else if (is_asan_mark_p (stmt
)
1892 || gimple_call_internal_p (stmt
, IFN_GOMP_SIMT_ENTER
))
1895 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1898 /* Note all addresses taken by the stmt. */
1899 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1901 /* If we have a call or an assignment, see if the lhs contains
1902 a local decl that requires not to be a gimple register. */
1903 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
1905 tree lhs
= gimple_get_lhs (stmt
);
1907 && TREE_CODE (lhs
) != SSA_NAME
1908 && ((code
== GIMPLE_CALL
&& ! DECL_P (lhs
))
1909 || non_rewritable_lvalue_p (lhs
)))
1911 decl
= get_base_address (lhs
);
1913 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1917 if (gimple_assign_single_p (stmt
))
1919 tree rhs
= gimple_assign_rhs1 (stmt
);
1920 if ((decl
= non_rewritable_mem_ref_base (rhs
)))
1921 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1924 else if (code
== GIMPLE_CALL
)
1926 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1928 tree arg
= gimple_call_arg (stmt
, i
);
1929 if ((decl
= non_rewritable_mem_ref_base (arg
)))
1930 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1934 else if (code
== GIMPLE_ASM
)
1936 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1937 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
1939 tree link
= gimple_asm_output_op (asm_stmt
, i
);
1940 tree lhs
= TREE_VALUE (link
);
1941 if (TREE_CODE (lhs
) != SSA_NAME
)
1943 decl
= get_base_address (lhs
);
1945 && (non_rewritable_lvalue_p (lhs
)
1946 /* We cannot move required conversions from
1947 the lhs to the rhs in asm statements, so
1948 require we do not need any. */
1949 || !useless_type_conversion_p
1950 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
1951 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1954 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
1956 tree link
= gimple_asm_input_op (asm_stmt
, i
);
1957 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
1958 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1963 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
1967 gphi
*phi
= gsi
.phi ();
1969 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1971 tree op
= PHI_ARG_DEF (phi
, i
), var
;
1972 if (TREE_CODE (op
) == ADDR_EXPR
1973 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
1975 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
1980 /* We cannot iterate over all referenced vars because that can contain
1981 unused vars from BLOCK trees, which causes code generation differences
1983 for (var
= DECL_ARGUMENTS (cfun
->decl
); var
; var
= DECL_CHAIN (var
))
1984 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1985 suitable_for_renaming
);
1987 FOR_EACH_VEC_SAFE_ELT (cfun
->local_decls
, i
, var
)
1988 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1989 suitable_for_renaming
);
1991 /* Operand caches need to be recomputed for operands referencing the updated
1992 variables and operands need to be rewritten to expose bare symbols. */
1993 if (!bitmap_empty_p (suitable_for_renaming
)
1994 || optimistic_not_addressable
)
1996 FOR_EACH_BB_FN (bb
, cfun
)
1997 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
1999 gimple
*stmt
= gsi_stmt (gsi
);
2001 /* Re-write TARGET_MEM_REFs of symbols we want to
2002 rewrite into SSA form. */
2003 if (gimple_assign_single_p (stmt
))
2005 tree lhs
= gimple_assign_lhs (stmt
);
2006 tree rhs
, *rhsp
= gimple_assign_rhs1_ptr (stmt
);
2009 /* Rewrite LHS IMAG/REALPART_EXPR similar to
2010 gimplify_modify_expr_complex_part. */
2011 if ((TREE_CODE (lhs
) == IMAGPART_EXPR
2012 || TREE_CODE (lhs
) == REALPART_EXPR
)
2013 && DECL_P (TREE_OPERAND (lhs
, 0))
2014 && bitmap_bit_p (suitable_for_renaming
,
2015 DECL_UID (TREE_OPERAND (lhs
, 0))))
2017 tree other
= make_ssa_name (TREE_TYPE (lhs
));
2018 tree lrhs
= build1 (TREE_CODE (lhs
) == IMAGPART_EXPR
2019 ? REALPART_EXPR
: IMAGPART_EXPR
,
2021 TREE_OPERAND (lhs
, 0));
2022 suppress_warning (lrhs
);
2023 gimple
*load
= gimple_build_assign (other
, lrhs
);
2024 location_t loc
= gimple_location (stmt
);
2025 gimple_set_location (load
, loc
);
2026 gimple_set_vuse (load
, gimple_vuse (stmt
));
2027 gsi_insert_before (&gsi
, load
, GSI_SAME_STMT
);
2028 gimple_assign_set_lhs (stmt
, TREE_OPERAND (lhs
, 0));
2029 gimple_assign_set_rhs_with_ops
2030 (&gsi
, COMPLEX_EXPR
,
2031 TREE_CODE (lhs
) == IMAGPART_EXPR
2032 ? other
: gimple_assign_rhs1 (stmt
),
2033 TREE_CODE (lhs
) == IMAGPART_EXPR
2034 ? gimple_assign_rhs1 (stmt
) : other
, NULL_TREE
);
2035 stmt
= gsi_stmt (gsi
);
2036 unlink_stmt_vdef (stmt
);
2041 /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
2042 into a BIT_INSERT_EXPR. */
2043 if (TREE_CODE (lhs
) == BIT_FIELD_REF
2044 && DECL_P (TREE_OPERAND (lhs
, 0))
2045 && bitmap_bit_p (suitable_for_renaming
,
2046 DECL_UID (TREE_OPERAND (lhs
, 0)))
2047 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs
, 0)))
2048 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs
, 0))) != BLKmode
2049 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs
)),
2050 TYPE_SIZE_UNIT (TREE_TYPE
2051 (TREE_TYPE (TREE_OPERAND (lhs
, 0)))),
2053 && (tree_to_uhwi (TREE_OPERAND (lhs
, 2))
2054 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
))) == 0))
2056 tree var
= TREE_OPERAND (lhs
, 0);
2057 tree val
= gimple_assign_rhs1 (stmt
);
2058 if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var
)),
2061 tree tem
= make_ssa_name (TREE_TYPE (TREE_TYPE (var
)));
2063 = gimple_build_assign (tem
,
2064 build1 (VIEW_CONVERT_EXPR
,
2065 TREE_TYPE (tem
), val
));
2066 gsi_insert_before (&gsi
, pun
, GSI_SAME_STMT
);
2069 tree bitpos
= TREE_OPERAND (lhs
, 2);
2070 gimple_assign_set_lhs (stmt
, var
);
2071 gimple_assign_set_rhs_with_ops
2072 (&gsi
, BIT_INSERT_EXPR
, var
, val
, bitpos
);
2073 stmt
= gsi_stmt (gsi
);
2074 unlink_stmt_vdef (stmt
);
2079 /* Rewrite a vector insert using a MEM_REF on the LHS
2080 into a BIT_INSERT_EXPR. */
2081 if (TREE_CODE (lhs
) == MEM_REF
2082 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
2083 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
2085 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
))
2086 && VECTOR_TYPE_P (TREE_TYPE (sym
))
2087 && TYPE_MODE (TREE_TYPE (sym
)) != BLKmode
2088 /* If it is a full replacement we can do better below. */
2089 && maybe_ne (wi::to_poly_offset
2090 (TYPE_SIZE_UNIT (TREE_TYPE (lhs
))),
2092 (TYPE_SIZE_UNIT (TREE_TYPE (sym
))))
2093 && known_ge (mem_ref_offset (lhs
), 0)
2094 && known_gt (wi::to_poly_offset
2095 (TYPE_SIZE_UNIT (TREE_TYPE (sym
))),
2096 mem_ref_offset (lhs
))
2097 && multiple_p (mem_ref_offset (lhs
),
2099 (TYPE_SIZE_UNIT (TREE_TYPE (lhs
)))))
2101 tree val
= gimple_assign_rhs1 (stmt
);
2102 if (! types_compatible_p (TREE_TYPE (val
),
2103 TREE_TYPE (TREE_TYPE (sym
))))
2105 poly_uint64 lhs_bits
, nelts
;
2106 tree temtype
= TREE_TYPE (TREE_TYPE (sym
));
2107 if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs
)),
2109 && multiple_p (lhs_bits
,
2111 (TYPE_SIZE (TREE_TYPE
2112 (TREE_TYPE (sym
)))),
2114 && maybe_ne (nelts
, 1u)
2115 && valid_vector_subparts_p (nelts
))
2116 temtype
= build_vector_type (temtype
, nelts
);
2117 tree tem
= make_ssa_name (temtype
);
2119 = gimple_build_assign (tem
,
2120 build1 (VIEW_CONVERT_EXPR
,
2121 TREE_TYPE (tem
), val
));
2122 gsi_insert_before (&gsi
, pun
, GSI_SAME_STMT
);
2126 = wide_int_to_tree (bitsizetype
,
2127 mem_ref_offset (lhs
) * BITS_PER_UNIT
);
2128 gimple_assign_set_lhs (stmt
, sym
);
2129 gimple_assign_set_rhs_with_ops
2130 (&gsi
, BIT_INSERT_EXPR
, sym
, val
, bitpos
);
2131 stmt
= gsi_stmt (gsi
);
2132 unlink_stmt_vdef (stmt
);
2137 /* We shouldn't have any fancy wrapping of
2138 component-refs on the LHS, but look through
2139 VIEW_CONVERT_EXPRs as that is easy. */
2140 while (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
2141 lhs
= TREE_OPERAND (lhs
, 0);
2142 if (TREE_CODE (lhs
) == MEM_REF
2143 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
2144 && integer_zerop (TREE_OPERAND (lhs
, 1))
2145 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
2147 && !TREE_ADDRESSABLE (sym
)
2148 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
2151 lhs
= gimple_assign_lhs (stmt
);
2153 /* Rewrite the RHS and make sure the resulting assignment
2154 is validly typed. */
2155 maybe_rewrite_mem_ref_base (rhsp
, suitable_for_renaming
);
2156 rhs
= gimple_assign_rhs1 (stmt
);
2157 if (gimple_assign_lhs (stmt
) != lhs
2158 && !useless_type_conversion_p (TREE_TYPE (lhs
),
2161 if (gimple_clobber_p (stmt
))
2163 rhs
= build_constructor (TREE_TYPE (lhs
), NULL
);
2164 TREE_THIS_VOLATILE (rhs
) = 1;
2167 rhs
= fold_build1 (VIEW_CONVERT_EXPR
,
2168 TREE_TYPE (lhs
), rhs
);
2170 if (gimple_assign_lhs (stmt
) != lhs
)
2171 gimple_assign_set_lhs (stmt
, lhs
);
2173 if (gimple_assign_rhs1 (stmt
) != rhs
)
2175 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2176 gimple_assign_set_rhs_from_tree (&gsi
, rhs
);
2180 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2183 if (optimize_atomic_compare_exchange_p (stmt
))
2185 tree expected
= gimple_call_arg (stmt
, 1);
2186 tree decl
= TREE_OPERAND (expected
, 0);
2187 if (bitmap_bit_p (suitable_for_renaming
, DECL_UID (decl
)))
2189 fold_builtin_atomic_compare_exchange (&gsi
);
2192 else if (!TREE_ADDRESSABLE (decl
))
2193 /* If there are partial defs of the decl we may
2194 have cleared the addressable bit but set
2195 DECL_NOT_GIMPLE_REG_P. We have to restore
2196 TREE_ADDRESSABLE here. */
2197 TREE_ADDRESSABLE (decl
) = 1;
2199 else if (is_asan_mark_p (stmt
))
2201 tree var
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
2202 if (bitmap_bit_p (suitable_for_renaming
, DECL_UID (var
)))
2204 unlink_stmt_vdef (stmt
);
2205 if (asan_mark_p (stmt
, ASAN_MARK_POISON
))
2208 = gimple_build_call_internal (IFN_ASAN_POISON
, 0);
2209 gimple_call_set_lhs (call
, var
);
2210 gsi_replace (&gsi
, call
, true);
2214 /* In ASAN_MARK (UNPOISON, &b, ...) the variable
2215 is uninitialized. Avoid dependencies on
2216 previous out of scope value. */
2217 tree clobber
= build_clobber (TREE_TYPE (var
));
2218 gimple
*g
= gimple_build_assign (var
, clobber
);
2219 gsi_replace (&gsi
, g
, true);
2224 else if (gimple_call_internal_p (stmt
, IFN_GOMP_SIMT_ENTER
))
2225 for (i
= 1; i
< gimple_call_num_args (stmt
); i
++)
2227 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
2228 if (*argp
== null_pointer_node
)
2230 gcc_assert (TREE_CODE (*argp
) == ADDR_EXPR
2231 && VAR_P (TREE_OPERAND (*argp
, 0)));
2232 tree var
= TREE_OPERAND (*argp
, 0);
2233 if (bitmap_bit_p (suitable_for_renaming
, DECL_UID (var
)))
2234 *argp
= null_pointer_node
;
2236 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2238 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
2239 maybe_rewrite_mem_ref_base (argp
, suitable_for_renaming
);
2243 else if (gimple_code (stmt
) == GIMPLE_ASM
)
2245 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
2247 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
2249 tree link
= gimple_asm_output_op (asm_stmt
, i
);
2250 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
2251 suitable_for_renaming
);
2253 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
2255 tree link
= gimple_asm_input_op (asm_stmt
, i
);
2256 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
2257 suitable_for_renaming
);
2261 else if (gimple_debug_bind_p (stmt
)
2262 && gimple_debug_bind_has_value_p (stmt
))
2264 tree
*valuep
= gimple_debug_bind_get_value_ptr (stmt
);
2266 maybe_rewrite_mem_ref_base (valuep
, suitable_for_renaming
);
2267 decl
= non_rewritable_mem_ref_base (*valuep
);
2269 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (decl
)))
2270 gimple_debug_bind_reset_value (stmt
);
2273 if (gimple_references_memory_p (stmt
)
2274 || is_gimple_debug (stmt
))
2280 /* Update SSA form here, we are called as non-pass as well. */
2281 if (number_of_loops (cfun
) > 1
2282 && loops_state_satisfies_p (LOOP_CLOSED_SSA
))
2283 rewrite_into_loop_closed_ssa (NULL
, TODO_update_ssa
);
2285 update_ssa (TODO_update_ssa
);
2288 timevar_pop (TV_ADDRESS_TAKEN
);
2293 const pass_data pass_data_update_address_taken
=
2295 GIMPLE_PASS
, /* type */
2296 "addressables", /* name */
2297 OPTGROUP_NONE
, /* optinfo_flags */
2298 TV_ADDRESS_TAKEN
, /* tv_id */
2299 PROP_ssa
, /* properties_required */
2300 0, /* properties_provided */
2301 0, /* properties_destroyed */
2302 0, /* todo_flags_start */
2303 TODO_update_address_taken
, /* todo_flags_finish */
2306 class pass_update_address_taken
: public gimple_opt_pass
2309 pass_update_address_taken (gcc::context
*ctxt
)
2310 : gimple_opt_pass (pass_data_update_address_taken
, ctxt
)
2313 /* opt_pass methods: */
2315 }; // class pass_update_address_taken
2320 make_pass_update_address_taken (gcc::context
*ctxt
)
2322 return new pass_update_address_taken (ctxt
);