1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
48 /* Pointer map of variable mappings, keyed by edge. */
49 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus)
57 edge_var_map new_node;
59 if (edge_var_maps == NULL)
60 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
62 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
63 new_node.def = def;
64 new_node.result = result;
65 new_node.locus = locus;
67 slot.safe_push (new_node);
71 /* Clear the var mappings in edge E. */
73 void
74 redirect_edge_var_map_clear (edge e)
76 if (!edge_var_maps)
77 return;
79 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
81 if (head)
82 head->release ();
86 /* Duplicate the redirected var mappings from OLDE into NEWE.
88 This assumes a hash_map can have multiple edges mapping to the same
89 var_map (many to one mapping), since we don't remove the previous mappings.
92 void
93 redirect_edge_var_map_dup (edge newe, edge olde)
95 if (!edge_var_maps)
96 return;
98 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
99 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
100 if (!old_head)
101 return;
103 new_head->safe_splice (*old_head);
107 /* Return the variable mappings for a given edge. If there is none, return
108 NULL. */
110 vec<edge_var_map> *
111 redirect_edge_var_map_vector (edge e)
113 /* Hey, what kind of idiot would... you'd be surprised. */
114 if (!edge_var_maps)
115 return NULL;
117 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
118 if (!slot)
119 return NULL;
121 return slot;
124 /* Clear the edge variable mappings. */
126 void
127 redirect_edge_var_map_empty (void)
129 if (edge_var_maps)
130 edge_var_maps->empty ();
134 /* Remove the corresponding arguments from the PHI nodes in E's
135 destination block and redirect it to DEST. Return redirected edge.
136 The list of removed arguments is stored in a vector accessed
137 through edge_var_maps. */
139 edge
140 ssa_redirect_edge (edge e, basic_block dest)
142 gphi_iterator gsi;
143 gphi *phi;
145 redirect_edge_var_map_clear (e);
147 /* Remove the appropriate PHI arguments in E's destination block.
148      If we are redirecting a copied edge, the destination has neither
149      PHI argument space reserved nor an interesting argument.
150 if (! (e->dest->flags & BB_DUPLICATED))
151 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
153 tree def;
154 location_t locus;
156 phi = gsi.phi ();
157 def = gimple_phi_arg_def (phi, e->dest_idx);
158 locus = gimple_phi_arg_location (phi, e->dest_idx);
160 if (def == NULL_TREE)
161 continue;
163 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
166 e = redirect_edge_succ_nodup (e, dest);
168 return e;
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173 E->dest. */
175 void
176 flush_pending_stmts (edge e)
178 gphi *phi;
179 edge_var_map *vm;
180 int i;
181 gphi_iterator gsi;
183 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184 if (!v)
185 return;
187 for (gsi = gsi_start_phis (e->dest), i = 0;
188 !gsi_end_p (gsi) && v->iterate (i, &vm);
189 gsi_next (&gsi), i++)
191 tree def;
193 phi = gsi.phi ();
194 def = redirect_edge_var_map_def (vm);
195 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
198 redirect_edge_var_map_clear (e);
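/* A typical caller pattern, sketched here with a hypothetical edge E and
   destination block DEST, is to redirect the edge and later recreate the
   PHI arguments on the new destination from the recorded mappings:

     e = ssa_redirect_edge (e, dest);
     ... create or copy code on the redirected path ...
     flush_pending_stmts (e);

   flush_pending_stmts consumes the vector that ssa_redirect_edge filled
   in via redirect_edge_var_map_add.  */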
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203 expression with a different value.
205 This will update any annotations (say debug bind stmts) referring
206 to the original LHS, so that they use the RHS instead. This is
207 done even if NLHS and LHS are the same, for it is understood that
208 the RHS will be modified afterwards, and NLHS will not be assigned
209 an equivalent value.
211 Adjusting any non-annotation uses of the LHS, if needed, is a
212 responsibility of the caller.
214 The effect of this call should be pretty much the same as that of
215 inserting a copy of STMT before STMT, and then removing the
216    original stmt, at which time gsi_remove() would have updated the
217 annotations, but using this function saves all the inserting,
218 copying and removing. */
220 void
221 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
223 if (MAY_HAVE_DEBUG_BIND_STMTS)
225 tree lhs = gimple_get_lhs (stmt);
227 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
229 insert_debug_temp_for_var_def (NULL, lhs);
232 gimple_set_lhs (stmt, nlhs);
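/* As a rough illustration: assuming a statement  x_1 = a_2 + b_3;  whose
   RHS is about to be rewritten to compute a different value, a call like

     gimple_replace_ssa_lhs (stmt, x_1);

   lets insert_debug_temp_for_var_def emit something along the lines of
   # DEBUG D#1 => a_2 + b_3  (or propagate the old value directly) for
   any debug uses of x_1, so that the later RHS change does not corrupt
   the debug information.  */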
236 /* Given a tree for an expression for which we might want to emit
237 locations or values in debug information (generally a variable, but
238 we might deal with other kinds of trees in the future), return the
239 tree that should be used as the variable of a DEBUG_BIND STMT or
240 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
242 tree
243 target_for_debug_bind (tree var)
245 if (!MAY_HAVE_DEBUG_BIND_STMTS)
246 return NULL_TREE;
248 if (TREE_CODE (var) == SSA_NAME)
250 var = SSA_NAME_VAR (var);
251 if (var == NULL_TREE)
252 return NULL_TREE;
255 if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
256 && TREE_CODE (var) != PARM_DECL)
257 return NULL_TREE;
259 if (DECL_HAS_VALUE_EXPR_P (var))
260 return target_for_debug_bind (DECL_VALUE_EXPR (var));
262 if (DECL_IGNORED_P (var))
263 return NULL_TREE;
265 /* var-tracking only tracks registers. */
266 if (!is_gimple_reg_type (TREE_TYPE (var)))
267 return NULL_TREE;
269 return var;
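/* For example, for an SSA name  i_4  whose SSA_NAME_VAR is a user-visible
   PARM_DECL  i  of register type this would return  i,  whereas a name
   based on a DECL_IGNORED_P compiler temporary yields NULL_TREE and thus
   no debug bind.  */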
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273 released. */
275 static tree
276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
278 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
280 if (wi && wi->is_lhs)
281 return NULL_TREE;
283 if (TREE_CODE (*tp) == SSA_NAME)
285 if (SSA_NAME_IN_FREE_LIST (*tp))
286 return *tp;
288 *walk_subtrees = 0;
290 else if (IS_TYPE_OR_DECL_P (*tp))
291 *walk_subtrees = 0;
293 return NULL_TREE;
296 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
297 by other DEBUG stmts, and replace uses of the DEF with the
298 newly-created debug temp. */
300 void
301 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
303 imm_use_iterator imm_iter;
304 use_operand_p use_p;
305 gimple *stmt;
306 gimple *def_stmt = NULL;
307 int usecount = 0;
308 tree value = NULL;
310 if (!MAY_HAVE_DEBUG_BIND_STMTS)
311 return;
313 /* If this name has already been registered for replacement, do nothing
314 as anything that uses this name isn't in SSA form. */
315 if (name_registered_for_update_p (var))
316 return;
318 /* Check whether there are debug stmts that reference this variable and,
319 if there are, decide whether we should use a debug temp. */
320 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
322 stmt = USE_STMT (use_p);
324 if (!gimple_debug_bind_p (stmt))
325 continue;
327 if (usecount++)
328 break;
330 if (gimple_debug_bind_get_value (stmt) != var)
332 /* Count this as an additional use, so as to make sure we
333 use a temp unless VAR's definition has a SINGLE_RHS that
334 can be shared. */
335 usecount++;
336 break;
340 if (!usecount)
341 return;
343 if (gsi)
344 def_stmt = gsi_stmt (*gsi);
345 else
346 def_stmt = SSA_NAME_DEF_STMT (var);
348 /* If we didn't get an insertion point, and the stmt has already
349 been removed, we won't be able to insert the debug bind stmt, so
350 we'll have to drop debug information. */
351 if (gimple_code (def_stmt) == GIMPLE_PHI)
353 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
354 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
355 value = NULL;
356 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
357 to. */
358 else if (value == error_mark_node)
359 value = NULL;
361 else if (gimple_clobber_p (def_stmt))
362 /* We can end up here when rewriting a decl into SSA and coming
363 along a clobber for the original decl. Turn that into
364 # DEBUG decl => NULL */
365 value = NULL;
366 else if (is_gimple_assign (def_stmt))
368 bool no_value = false;
370 if (!dom_info_available_p (CDI_DOMINATORS))
372 struct walk_stmt_info wi;
374 memset (&wi, 0, sizeof (wi));
376 /* When removing blocks without following reverse dominance
377 order, we may sometimes encounter SSA_NAMEs that have
378 already been released, referenced in other SSA_DEFs that
379 we're about to release. Consider:
381 <bb X>:
382 v_1 = foo;
384 <bb Y>:
385 w_2 = v_1 + bar;
386 # DEBUG w => w_2
388 If we deleted BB X first, propagating the value of w_2
389 won't do us any good. It's too late to recover their
390 original definition of v_1: when it was deleted, it was
391 only referenced in other DEFs, it couldn't possibly know
392 it should have been retained, and propagating every
393 single DEF just in case it might have to be propagated
394 into a DEBUG STMT would probably be too wasteful.
396 When dominator information is not readily available, we
397 check for and accept some loss of debug information. But
398 if it is available, there's no excuse for us to remove
399 blocks in the wrong order, so we don't even check for
400 dead SSA NAMEs. SSA verification shall catch any
401 errors. */
402 if ((!gsi && !gimple_bb (def_stmt))
403 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
404 no_value = true;
407 if (!no_value)
408 value = gimple_assign_rhs_to_tree (def_stmt);
411 if (value)
413 /* If there's a single use of VAR, and VAR is the entire debug
414 expression (usecount would have been incremented again
415 otherwise), and the definition involves only constants and
416 SSA names, then we can propagate VALUE into this single use,
417 avoiding the temp.
419 We can also avoid using a temp if VALUE can be shared and
420 propagated into all uses, without generating expressions that
421 wouldn't be valid gimple RHSs.
423 Other cases that would require unsharing or non-gimple RHSs
424 are deferred to a debug temp, although we could avoid temps
425 at the expense of duplication of expressions. */
427 if (CONSTANT_CLASS_P (value)
428 || gimple_code (def_stmt) == GIMPLE_PHI
429 || (usecount == 1
430 && (!gimple_assign_single_p (def_stmt)
431 || is_gimple_min_invariant (value)))
432 || is_gimple_reg (value))
434 else
436 gdebug *def_temp;
437 tree vexpr = make_node (DEBUG_EXPR_DECL);
439 def_temp = gimple_build_debug_bind (vexpr,
440 unshare_expr (value),
441 def_stmt);
443 DECL_ARTIFICIAL (vexpr) = 1;
444 TREE_TYPE (vexpr) = TREE_TYPE (value);
445 if (DECL_P (value))
446 SET_DECL_MODE (vexpr, DECL_MODE (value));
447 else
448 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));
450 if (gsi)
451 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
452 else
454 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
455 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
458 value = vexpr;
462 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
464 if (!gimple_debug_bind_p (stmt))
465 continue;
467 if (value)
469 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
470 /* unshare_expr is not needed here. vexpr is either a
471 SINGLE_RHS, that can be safely shared, some other RHS
472 that was unshared when we found it had a single debug
473 use, or a DEBUG_EXPR_DECL, that can be safely
474 shared. */
475 SET_USE (use_p, unshare_expr (value));
476 	  /* If we didn't replace uses with a debug decl, fold the
477 resulting expression. Otherwise we end up with invalid IL. */
478 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
480 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
481 fold_stmt_inplace (&gsi);
484 else
485 gimple_debug_bind_reset_value (stmt);
487 update_stmt (stmt);
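/* A rough before/after example, assuming x_1 is about to be removed and
   is referenced by a debug stmt:

     before:  x_1 = a_2 * 4;
              # DEBUG y => x_1 + 1

     after:   # DEBUG D#1 => a_2 * 4
              x_1 = a_2 * 4;
              # DEBUG y => D#1 + 1

   When x_1 is the entire debug value and its definition can be propagated
   safely, the temporary is skipped and  a_2 * 4  goes directly into the
   debug stmt.  */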
492 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
493 other DEBUG stmts, and replace uses of the DEF with the
494 newly-created debug temp. */
496 void
497 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
499 gimple *stmt;
500 ssa_op_iter op_iter;
501 def_operand_p def_p;
503 if (!MAY_HAVE_DEBUG_BIND_STMTS)
504 return;
506 stmt = gsi_stmt (*gsi);
508 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
510 tree var = DEF_FROM_PTR (def_p);
512 if (TREE_CODE (var) != SSA_NAME)
513 continue;
515 insert_debug_temp_for_var_def (gsi, var);
519 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
521 void
522 reset_debug_uses (gimple *stmt)
524 ssa_op_iter op_iter;
525 def_operand_p def_p;
526 imm_use_iterator imm_iter;
527 gimple *use_stmt;
529 if (!MAY_HAVE_DEBUG_BIND_STMTS)
530 return;
532 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
534 tree var = DEF_FROM_PTR (def_p);
536 if (TREE_CODE (var) != SSA_NAME)
537 continue;
539 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
541 if (!gimple_debug_bind_p (use_stmt))
542 continue;
544 gimple_debug_bind_reset_value (use_stmt);
545 update_stmt (use_stmt);
550 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
551 dominated stmts before their dominators, so that release_ssa_defs
552 stands a chance of propagating DEFs into debug bind stmts. */
554 void
555 release_defs_bitset (bitmap toremove)
557 unsigned j;
558 bitmap_iterator bi;
560   /* Performing a topological sort is probably overkill; this will
561 most likely run in slightly superlinear time, rather than the
562 pathological quadratic worst case. */
563 while (!bitmap_empty_p (toremove))
565 unsigned to_remove_bit = -1U;
566 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
568 if (to_remove_bit != -1U)
570 bitmap_clear_bit (toremove, to_remove_bit);
571 to_remove_bit = -1U;
574 bool remove_now = true;
575 tree var = ssa_name (j);
576 gimple *stmt;
577 imm_use_iterator uit;
579 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
581 ssa_op_iter dit;
582 def_operand_p def_p;
584 /* We can't propagate PHI nodes into debug stmts. */
585 if (gimple_code (stmt) == GIMPLE_PHI
586 || is_gimple_debug (stmt))
587 continue;
589 /* If we find another definition to remove that uses
590 the one we're looking at, defer the removal of this
591 one, so that it can be propagated into debug stmts
592 after the other is. */
593 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
595 tree odef = DEF_FROM_PTR (def_p);
597 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
599 remove_now = false;
600 break;
604 if (!remove_now)
605 BREAK_FROM_IMM_USE_STMT (uit);
608 if (remove_now)
610 gimple *def = SSA_NAME_DEF_STMT (var);
611 gimple_stmt_iterator gsi = gsi_for_stmt (def);
613 if (gimple_code (def) == GIMPLE_PHI)
614 remove_phi_node (&gsi, true);
615 else
617 gsi_remove (&gsi, true);
618 release_defs (def);
621 to_remove_bit = j;
624 if (to_remove_bit != -1U)
625 bitmap_clear_bit (toremove, to_remove_bit);
630 /* Verify virtual SSA form. */
632 bool
633 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
635 bool err = false;
637 if (bitmap_bit_p (visited, bb->index))
638 return false;
640 bitmap_set_bit (visited, bb->index);
642 /* Pick up the single virtual PHI def. */
643 gphi *phi = NULL;
644 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
645 gsi_next (&si))
647 tree res = gimple_phi_result (si.phi ());
648 if (virtual_operand_p (res))
650 if (phi)
652 error ("multiple virtual PHI nodes in BB %d", bb->index);
653 print_gimple_stmt (stderr, phi, 0);
654 print_gimple_stmt (stderr, si.phi (), 0);
655 err = true;
657 else
658 phi = si.phi ();
661 if (phi)
663 current_vdef = gimple_phi_result (phi);
664 if (TREE_CODE (current_vdef) != SSA_NAME)
666 error ("virtual definition is not an SSA name");
667 print_gimple_stmt (stderr, phi, 0);
668 err = true;
672 /* Verify stmts. */
673 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
674 gsi_next (&gsi))
676 gimple *stmt = gsi_stmt (gsi);
677 tree vuse = gimple_vuse (stmt);
678 if (vuse)
680 if (vuse != current_vdef)
682 error ("stmt with wrong VUSE");
683 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
684 fprintf (stderr, "expected ");
685 print_generic_expr (stderr, current_vdef);
686 fprintf (stderr, "\n");
687 err = true;
689 tree vdef = gimple_vdef (stmt);
690 if (vdef)
692 current_vdef = vdef;
693 if (TREE_CODE (current_vdef) != SSA_NAME)
695 error ("virtual definition is not an SSA name");
696 print_gimple_stmt (stderr, phi, 0);
697 err = true;
703 /* Verify destination PHI uses and recurse. */
704 edge_iterator ei;
705 edge e;
706 FOR_EACH_EDGE (e, ei, bb->succs)
708 gphi *phi = get_virtual_phi (e->dest);
709 if (phi
710 && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
712 error ("PHI node with wrong VUSE on edge from BB %d",
713 e->src->index);
714 print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
715 fprintf (stderr, "expected ");
716 print_generic_expr (stderr, current_vdef);
717 fprintf (stderr, "\n");
718 err = true;
721 /* Recurse. */
722 err |= verify_vssa (e->dest, current_vdef, visited);
725 return err;
728 /* Return true if SSA_NAME is malformed and mark it visited.
730 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
731 operand. */
733 static bool
734 verify_ssa_name (tree ssa_name, bool is_virtual)
736 if (TREE_CODE (ssa_name) != SSA_NAME)
738 error ("expected an SSA_NAME object");
739 return true;
742 if (SSA_NAME_IN_FREE_LIST (ssa_name))
744 error ("found an SSA_NAME that had been released into the free pool");
745 return true;
748 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
749 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
751 error ("type mismatch between an SSA_NAME and its symbol");
752 return true;
755 if (is_virtual && !virtual_operand_p (ssa_name))
757 error ("found a virtual definition for a GIMPLE register");
758 return true;
761 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
763 error ("virtual SSA name for non-VOP decl");
764 return true;
767 if (!is_virtual && virtual_operand_p (ssa_name))
769 error ("found a real definition for a non-register");
770 return true;
773 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
774 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
776 error ("found a default name with a non-empty defining statement");
777 return true;
780 return false;
784 /* Return true if the definition of SSA_NAME at block BB is malformed.
786 STMT is the statement where SSA_NAME is created.
788 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
789 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
790 it means that the block in that array slot contains the
791 definition of SSA_NAME.
793 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
795 static bool
796 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
797 gimple *stmt, bool is_virtual)
799 if (verify_ssa_name (ssa_name, is_virtual))
800 goto err;
802 if (SSA_NAME_VAR (ssa_name)
803 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
804 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
806 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
807 goto err;
810 if (definition_block[SSA_NAME_VERSION (ssa_name)])
812 error ("SSA_NAME created in two different blocks %i and %i",
813 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
814 goto err;
817 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
819 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
821 error ("SSA_NAME_DEF_STMT is wrong");
822 fprintf (stderr, "Expected definition statement:\n");
823 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
824 fprintf (stderr, "\nActual definition statement:\n");
825 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
826 goto err;
829 return false;
831 err:
832 fprintf (stderr, "while verifying SSA_NAME ");
833 print_generic_expr (stderr, ssa_name);
834 fprintf (stderr, " in statement\n");
835 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
837 return true;
841 /* Return true if the use of SSA_NAME at statement STMT in block BB is
842 malformed.
844 DEF_BB is the block where SSA_NAME was found to be created.
846 IDOM contains immediate dominator information for the flowgraph.
848 CHECK_ABNORMAL is true if the caller wants to check whether this use
849 is flowing through an abnormal edge (only used when checking PHI
850 arguments).
852 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
853 that are defined before STMT in basic block BB. */
855 static bool
856 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
857 gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
859 bool err = false;
860 tree ssa_name = USE_FROM_PTR (use_p);
862 if (!TREE_VISITED (ssa_name))
863 if (verify_imm_links (stderr, ssa_name))
864 err = true;
866 TREE_VISITED (ssa_name) = 1;
868 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
869 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
870 ; /* Default definitions have empty statements. Nothing to do. */
871 else if (!def_bb)
873 error ("missing definition");
874 err = true;
876 else if (bb != def_bb
877 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
879 error ("definition in block %i does not dominate use in block %i",
880 def_bb->index, bb->index);
881 err = true;
883 else if (bb == def_bb
884 && names_defined_in_bb != NULL
885 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
887 error ("definition in block %i follows the use", def_bb->index);
888 err = true;
891 if (check_abnormal
892 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
894 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
895 err = true;
898   /* Make sure the use is on the right immediate-use list by checking
899      that the previous element refers to the same SSA name.  */
900 if (use_p->prev == NULL)
902 error ("no immediate_use list");
903 err = true;
905 else
907 tree listvar;
908 if (use_p->prev->use == NULL)
909 listvar = use_p->prev->loc.ssa_name;
910 else
911 listvar = USE_FROM_PTR (use_p->prev);
912 if (listvar != ssa_name)
914 error ("wrong immediate use list");
915 err = true;
919 if (err)
921 fprintf (stderr, "for SSA_NAME: ");
922 print_generic_expr (stderr, ssa_name, TDF_VOPS);
923 fprintf (stderr, " in statement:\n");
924 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
927 return err;
931 /* Return true if any of the arguments for PHI node PHI at block BB is
932 malformed.
934 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
935 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
936 it means that the block in that array slot contains the
937 definition of SSA_NAME. */
939 static bool
940 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
942 edge e;
943 bool err = false;
944 size_t i, phi_num_args = gimple_phi_num_args (phi);
946 if (EDGE_COUNT (bb->preds) != phi_num_args)
948 error ("incoming edge count does not match number of PHI arguments");
949 err = true;
950 goto error;
953 for (i = 0; i < phi_num_args; i++)
955 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
956 tree op = USE_FROM_PTR (op_p);
958 e = EDGE_PRED (bb, i);
960 if (op == NULL_TREE)
962 error ("PHI argument is missing for edge %d->%d",
963 e->src->index,
964 e->dest->index);
965 err = true;
966 goto error;
969 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
971 error ("PHI argument is not SSA_NAME, or invariant");
972 err = true;
975 if (TREE_CODE (op) == SSA_NAME)
977 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
978 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
979 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
982 if (TREE_CODE (op) == ADDR_EXPR)
984 tree base = TREE_OPERAND (op, 0);
985 while (handled_component_p (base))
986 base = TREE_OPERAND (base, 0);
987 if ((VAR_P (base)
988 || TREE_CODE (base) == PARM_DECL
989 || TREE_CODE (base) == RESULT_DECL)
990 && !TREE_ADDRESSABLE (base))
992 error ("address taken, but ADDRESSABLE bit not set");
993 err = true;
997 if (e->dest != bb)
999 error ("wrong edge %d->%d for PHI argument",
1000 e->src->index, e->dest->index);
1001 err = true;
1004 if (err)
1006 fprintf (stderr, "PHI argument\n");
1007 print_generic_stmt (stderr, op, TDF_VOPS);
1008 goto error;
1012 error:
1013 if (err)
1015 fprintf (stderr, "for PHI node\n");
1016 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1020 return err;
1024 /* Verify common invariants in the SSA web.
1025 TODO: verify the variable annotations. */
1027 DEBUG_FUNCTION void
1028 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
1030 basic_block bb;
1031 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1032 ssa_op_iter iter;
1033 tree op;
1034 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1035 auto_bitmap names_defined_in_bb;
1037 gcc_assert (!need_ssa_update_p (cfun));
1039 timevar_push (TV_TREE_SSA_VERIFY);
1042 /* Keep track of SSA names present in the IL. */
1043 size_t i;
1044 tree name;
1045 hash_map <void *, tree> ssa_info;
1047 FOR_EACH_SSA_NAME (i, name, cfun)
1049 gimple *stmt;
1050 TREE_VISITED (name) = 0;
1052 verify_ssa_name (name, virtual_operand_p (name));
1054 stmt = SSA_NAME_DEF_STMT (name);
1055 if (!gimple_nop_p (stmt))
1057 basic_block bb = gimple_bb (stmt);
1058 if (verify_def (bb, definition_block,
1059 name, stmt, virtual_operand_p (name)))
1060 goto err;
1063 void *info = NULL;
1064 if (POINTER_TYPE_P (TREE_TYPE (name)))
1065 info = SSA_NAME_PTR_INFO (name);
1066 else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
1067 info = SSA_NAME_RANGE_INFO (name);
1068 if (info)
1070 bool existed;
1071 tree &val = ssa_info.get_or_insert (info, &existed);
1072 if (existed)
1074 error ("shared SSA name info");
1075 print_generic_expr (stderr, val);
1076 fprintf (stderr, " and ");
1077 print_generic_expr (stderr, name);
1078 fprintf (stderr, "\n");
1079 goto err;
1081 else
1082 val = name;
1087 calculate_dominance_info (CDI_DOMINATORS);
1089 /* Now verify all the uses and make sure they agree with the definitions
1090 found in the previous pass. */
1091 FOR_EACH_BB_FN (bb, cfun)
1093 edge e;
1094 edge_iterator ei;
1096 /* Make sure that all edges have a clear 'aux' field. */
1097 FOR_EACH_EDGE (e, ei, bb->preds)
1099 if (e->aux)
1101 error ("AUX pointer initialized for edge %d->%d", e->src->index,
1102 e->dest->index);
1103 goto err;
1107 /* Verify the arguments for every PHI node in the block. */
1108 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1110 gphi *phi = gsi.phi ();
1111 if (verify_phi_args (phi, bb, definition_block))
1112 goto err;
1114 bitmap_set_bit (names_defined_in_bb,
1115 SSA_NAME_VERSION (gimple_phi_result (phi)));
1118 /* Now verify all the uses and vuses in every statement of the block. */
1119 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1120 gsi_next (&gsi))
1122 gimple *stmt = gsi_stmt (gsi);
1123 use_operand_p use_p;
1125 if (check_modified_stmt && gimple_modified_p (stmt))
1127 error ("stmt (%p) marked modified after optimization pass: ",
1128 (void *)stmt);
1129 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1130 goto err;
1133 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1135 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1136 goto err;
1139 if (gimple_debug_bind_p (stmt)
1140 && !gimple_debug_bind_has_value_p (stmt))
1141 continue;
1143 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1145 op = USE_FROM_PTR (use_p);
1146 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1147 use_p, stmt, false, names_defined_in_bb))
1148 goto err;
1151 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1153 if (SSA_NAME_DEF_STMT (op) != stmt)
1155 error ("SSA_NAME_DEF_STMT is wrong");
1156 fprintf (stderr, "Expected definition statement:\n");
1157 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1158 fprintf (stderr, "\nActual definition statement:\n");
1159 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1160 4, TDF_VOPS);
1161 goto err;
1163 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1167 bitmap_clear (names_defined_in_bb);
1170 free (definition_block);
1172 if (gimple_vop (cfun)
1173 && ssa_default_def (cfun, gimple_vop (cfun)))
1175 auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
1176 bitmap_clear (visited);
1177 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1178 ssa_default_def (cfun, gimple_vop (cfun)), visited))
1179 goto err;
1182 /* Restore the dominance information to its prior known state, so
1183 that we do not perturb the compiler's subsequent behavior. */
1184 if (orig_dom_state == DOM_NONE)
1185 free_dominance_info (CDI_DOMINATORS);
1186 else
1187 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1189 timevar_pop (TV_TREE_SSA_VERIFY);
1190 return;
1192 err:
1193 internal_error ("verify_ssa failed");
1197 /* Initialize global DFA and SSA structures. */
1199 void
1200 init_tree_ssa (struct function *fn)
1202 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1203 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1204 pt_solution_reset (&fn->gimple_df->escaped);
1205 init_ssanames (fn, 0);
1208 /* Deallocate memory associated with SSA data structures for FNDECL. */
1210 void
1211 delete_tree_ssa (struct function *fn)
1213 fini_ssanames (fn);
1215 /* We no longer maintain the SSA operand cache at this point. */
1216 if (ssa_operands_active (fn))
1217 fini_ssa_operands (fn);
1219 fn->gimple_df->default_defs->empty ();
1220 fn->gimple_df->default_defs = NULL;
1221 pt_solution_reset (&fn->gimple_df->escaped);
1222 if (fn->gimple_df->decls_to_pointers != NULL)
1223 delete fn->gimple_df->decls_to_pointers;
1224 fn->gimple_df->decls_to_pointers = NULL;
1225 fn->gimple_df = NULL;
1227 /* We no longer need the edge variable maps. */
1228 redirect_edge_var_map_empty ();
1231 /* Return true if EXPR is a useless type conversion, otherwise return
1232 false. */
1234 bool
1235 tree_ssa_useless_type_conversion (tree expr)
1237 /* If we have an assignment that merely uses a NOP_EXPR to change
1238 the top of the RHS to the type of the LHS and the type conversion
1239 is "safe", then strip away the type conversion so that we can
1240 enter LHS = RHS into the const_and_copies table. */
1241 if (CONVERT_EXPR_P (expr)
1242 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1243 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1244 return useless_type_conversion_p
1245 (TREE_TYPE (expr),
1246 TREE_TYPE (TREE_OPERAND (expr, 0)));
1248 return false;
1251 /* Strip conversions from EXP according to
1252 tree_ssa_useless_type_conversion and return the resulting
1253 expression. */
1255 tree
1256 tree_ssa_strip_useless_type_conversions (tree exp)
1258 while (tree_ssa_useless_type_conversion (exp))
1259 exp = TREE_OPERAND (exp, 0);
1260 return exp;
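/* For instance, an expression such as  (int) (int) x_1  whose conversions
   all satisfy useless_type_conversion_p is stripped down to  x_1  here,
   while a narrowing or sign-changing conversion is left in place.  */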
1263 /* Return true if T, as SSA_NAME, has an implicit default defined value. */
1265 bool
1266 ssa_defined_default_def_p (tree t)
1268 tree var = SSA_NAME_VAR (t);
1270 if (!var)
1272 /* Parameters get their initial value from the function entry. */
1273 else if (TREE_CODE (var) == PARM_DECL)
1274 return true;
1275 /* When returning by reference the return address is actually a hidden
1276 parameter. */
1277 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1278 return true;
1279 /* Hard register variables get their initial value from the ether. */
1280 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1281 return true;
1283 return false;
1287 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1288 should be returned if the value is only partially undefined. */
1290 bool
1291 ssa_undefined_value_p (tree t, bool partial)
1293 gimple *def_stmt;
1295 if (ssa_defined_default_def_p (t))
1296 return false;
1298 /* The value is undefined iff its definition statement is empty. */
1299 def_stmt = SSA_NAME_DEF_STMT (t);
1300 if (gimple_nop_p (def_stmt))
1301 return true;
1303   /* Check whether the complex value is only partially defined.  */
1304 if (partial && is_gimple_assign (def_stmt)
1305 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1307 tree rhs1, rhs2;
1309 rhs1 = gimple_assign_rhs1 (def_stmt);
1310 rhs2 = gimple_assign_rhs2 (def_stmt);
1311 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1312 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1314 return false;
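/* As an illustration, for a complex value assembled as

     _3 = COMPLEX_EXPR <_1, x_2(D)>;

   where  x_2(D)  is the default definition of an uninitialized local,
   ssa_undefined_value_p (_3, true) returns true (partially undefined)
   while ssa_undefined_value_p (_3, false) returns false.  */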
1318 /* Return TRUE iff STMT, a gimple statement, references an undefined
1319 SSA name. */
1321 bool
1322 gimple_uses_undefined_value_p (gimple *stmt)
1324 ssa_op_iter iter;
1325 tree op;
1327 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1328 if (ssa_undefined_value_p (op))
1329 return true;
1331 return false;
1336 /* If necessary, rewrite the base of the reference tree *TP from
1337 a MEM_REF to a plain or converted symbol. */
1339 static void
1340 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1342 tree sym;
1344 while (handled_component_p (*tp))
1345 tp = &TREE_OPERAND (*tp, 0);
1346 if (TREE_CODE (*tp) == MEM_REF
1347 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1348 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1349 && DECL_P (sym)
1350 && !TREE_ADDRESSABLE (sym)
1351 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1352 && is_gimple_reg_type (TREE_TYPE (*tp))
1353 && ! VOID_TYPE_P (TREE_TYPE (*tp)))
1355 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1356 && useless_type_conversion_p (TREE_TYPE (*tp),
1357 TREE_TYPE (TREE_TYPE (sym)))
1358 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1359 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1361 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1362 TYPE_SIZE (TREE_TYPE (*tp)),
1363 int_const_binop (MULT_EXPR,
1364 bitsize_int (BITS_PER_UNIT),
1365 TREE_OPERAND (*tp, 1)));
1367 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1368 && useless_type_conversion_p (TREE_TYPE (*tp),
1369 TREE_TYPE (TREE_TYPE (sym))))
1371 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1372 ? REALPART_EXPR : IMAGPART_EXPR,
1373 TREE_TYPE (*tp), sym);
1375 else if (integer_zerop (TREE_OPERAND (*tp, 1))
1376 && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
1378 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1379 TREE_TYPE (sym)))
1380 *tp = build1 (VIEW_CONVERT_EXPR,
1381 TREE_TYPE (*tp), sym);
1382 else
1383 *tp = sym;
1385 else if (DECL_SIZE (sym)
1386 && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
1387 && (known_subrange_p
1388 (mem_ref_offset (*tp),
1389 wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
1390 0, wi::to_offset (DECL_SIZE_UNIT (sym))))
1391 && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
1392 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
1393 == TYPE_PRECISION (TREE_TYPE (*tp))))
1394 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
1395 BITS_PER_UNIT) == 0)
1397 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1398 TYPE_SIZE (TREE_TYPE (*tp)),
1399 wide_int_to_tree (bitsizetype,
1400 mem_ref_offset (*tp)
1401 << LOG2_BITS_PER_UNIT));
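/* Assuming a non-addressable  v4si v  that is marked suitable for
   renaming, a load like  MEM[(int *)&v + 8B]  is rewritten here to
   BIT_FIELD_REF <v, 32, 64>,  and for a complex decl  c  the access
   MEM[(float *)&c]  becomes  REALPART_EXPR <c>,  exposing the bare
   symbol so it can be rewritten into SSA form.  */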
1406 /* For a tree REF return its base if it is the base of a MEM_REF
1407 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1409 static tree
1410 non_rewritable_mem_ref_base (tree ref)
1412 tree base;
1414 /* A plain decl does not need it set. */
1415 if (DECL_P (ref))
1416 return NULL_TREE;
1418 if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
1420 base = get_base_address (ref);
1421 if (DECL_P (base))
1422 return base;
1423 return NULL_TREE;
1426 /* But watch out for MEM_REFs we cannot lower to a
1427 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1428 if (TREE_CODE (base) == MEM_REF
1429 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1431 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1432 if (! DECL_P (decl))
1433 return NULL_TREE;
1434 if (! is_gimple_reg_type (TREE_TYPE (base))
1435 || VOID_TYPE_P (TREE_TYPE (base))
1436 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
1437 return decl;
1438 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1439 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1440 && useless_type_conversion_p (TREE_TYPE (base),
1441 TREE_TYPE (TREE_TYPE (decl)))
1442 && known_ge (mem_ref_offset (base), 0)
1443 && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1444 mem_ref_offset (base))
1445 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1446 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1447 return NULL_TREE;
1448 /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR. */
1449 if (integer_zerop (TREE_OPERAND (base, 1))
1450 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
1451 return NULL_TREE;
1452 /* For integral typed extracts we can use a BIT_FIELD_REF. */
1453 if (DECL_SIZE (decl)
1454 && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
1455 && (known_subrange_p
1456 (mem_ref_offset (base),
1457 wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
1458 0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
1459 /* ??? We can't handle bitfield precision extracts without
1460 either using an alternate type for the BIT_FIELD_REF and
1461 then doing a conversion or possibly adjusting the offset
1462 according to endianness. */
1463 && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
1464 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
1465 == TYPE_PRECISION (TREE_TYPE (base))))
1466 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
1467 BITS_PER_UNIT) == 0)
1468 return NULL_TREE;
1469 return decl;
1472 return NULL_TREE;
1475 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1476    Otherwise return false.  */
1478 static bool
1479 non_rewritable_lvalue_p (tree lhs)
1481 /* A plain decl is always rewritable. */
1482 if (DECL_P (lhs))
1483 return false;
1485 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1486 a reasonably efficient manner... */
1487 if ((TREE_CODE (lhs) == REALPART_EXPR
1488 || TREE_CODE (lhs) == IMAGPART_EXPR)
1489 && DECL_P (TREE_OPERAND (lhs, 0)))
1490 return false;
1492 /* ??? The following could be relaxed allowing component
1493 references that do not change the access size. */
1494 if (TREE_CODE (lhs) == MEM_REF
1495 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
1497 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1499 /* A decl that is wrapped inside a MEM-REF that covers
1500          it in full is also rewritable.  */
1501 if (integer_zerop (TREE_OPERAND (lhs, 1))
1502 && DECL_P (decl)
1503 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1504 /* If the dynamic type of the decl has larger precision than
1505 the decl itself we can't use the decls type for SSA rewriting. */
1506 && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
1507 || compare_tree_int (DECL_SIZE (decl),
1508 TYPE_PRECISION (TREE_TYPE (decl))) == 0)
1509 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1510 && (TYPE_PRECISION (TREE_TYPE (decl))
1511 >= TYPE_PRECISION (TREE_TYPE (lhs)))))
1512 /* Make sure we are not re-writing non-float copying into float
1513 copying as that can incur normalization. */
1514 && (! FLOAT_TYPE_P (TREE_TYPE (decl))
1515 || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
1516 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1517 return false;
1519 /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1520 using a BIT_INSERT_EXPR. */
1521 if (DECL_P (decl)
1522 && VECTOR_TYPE_P (TREE_TYPE (decl))
1523 && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
1524 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1525 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))), 0)
1526 && known_ge (mem_ref_offset (lhs), 0)
1527 && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1528 mem_ref_offset (lhs))
1529 && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
1530 TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
1531 return false;
1534 /* A vector-insert using a BIT_FIELD_REF is rewritable using
1535 BIT_INSERT_EXPR. */
1536 if (TREE_CODE (lhs) == BIT_FIELD_REF
1537 && DECL_P (TREE_OPERAND (lhs, 0))
1538 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1539 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1540 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1541 TYPE_SIZE_UNIT
1542 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
1543 && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1544 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
1545 return false;
1547 return true;
1550 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1551    mark the variable VAR for conversion into SSA.  Record VAR in
1552    SUITABLE_FOR_RENAMING when updating stmts will be required.  */
1554 static void
1555 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1556 bitmap suitable_for_renaming)
1558 /* Global Variables, result decls cannot be changed. */
1559 if (is_global_var (var)
1560 || TREE_CODE (var) == RESULT_DECL
1561 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1562 return;
1564 if (TREE_ADDRESSABLE (var)
1565 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1566 a non-register. Otherwise we are confused and forget to
1567 add virtual operands for it. */
1568 && (!is_gimple_reg_type (TREE_TYPE (var))
1569 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1570 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1571 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1573 TREE_ADDRESSABLE (var) = 0;
1574 /* If we cleared TREE_ADDRESSABLE make sure DECL_GIMPLE_REG_P
1575 is unset if we cannot rewrite the var into SSA. */
1576 if ((TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1577 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
1578 && bitmap_bit_p (not_reg_needs, DECL_UID (var)))
1579 DECL_GIMPLE_REG_P (var) = 0;
1580 if (is_gimple_reg (var))
1581 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1582 if (dump_file)
1584 fprintf (dump_file, "No longer having address taken: ");
1585 print_generic_expr (dump_file, var);
1586 fprintf (dump_file, "\n");
1590 if (!DECL_GIMPLE_REG_P (var)
1591 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1592 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1593 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1594 && !TREE_THIS_VOLATILE (var)
1595 && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
1597 DECL_GIMPLE_REG_P (var) = 1;
1598 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1599 if (dump_file)
1601 fprintf (dump_file, "Now a gimple register: ");
1602 print_generic_expr (dump_file, var);
1603 fprintf (dump_file, "\n");
1608 /* Return true when STMT is an ASAN_MARK call whose second argument is the
1609    address of a local variable that can be turned into a register.  */
1611 static bool
1612 is_asan_mark_p (gimple *stmt)
1614 if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1615 return false;
1617 tree addr = get_base_address (gimple_call_arg (stmt, 1));
1618 if (TREE_CODE (addr) == ADDR_EXPR
1619 && VAR_P (TREE_OPERAND (addr, 0)))
1621 tree var = TREE_OPERAND (addr, 0);
1622 if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1623 DECL_ATTRIBUTES (var)))
1624 return false;
1626 unsigned addressable = TREE_ADDRESSABLE (var);
1627 TREE_ADDRESSABLE (var) = 0;
1628 bool r = is_gimple_reg (var);
1629 TREE_ADDRESSABLE (var) = addressable;
1630 return r;
1633 return false;
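/* As a minimal illustration: for  ASAN_MARK (UNPOISON, &x, 4)  where  x
   is a plain local  int  that is only addressable because of the
   instrumentation itself, this returns true; it returns false when  x
   carries the use-after-scope attribute or could never be a gimple
   register (e.g. an aggregate).  */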
1636 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1638 void
1639 execute_update_addresses_taken (void)
1641 basic_block bb;
1642 auto_bitmap addresses_taken;
1643 auto_bitmap not_reg_needs;
1644 auto_bitmap suitable_for_renaming;
1645 tree var;
1646 unsigned i;
1648 timevar_push (TV_ADDRESS_TAKEN);
1650 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1651 the function body. */
1652 FOR_EACH_BB_FN (bb, cfun)
1654 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1655 gsi_next (&gsi))
1657 gimple *stmt = gsi_stmt (gsi);
1658 enum gimple_code code = gimple_code (stmt);
1659 tree decl;
1661 if (code == GIMPLE_CALL)
1663 if (optimize_atomic_compare_exchange_p (stmt))
1665 /* For __atomic_compare_exchange_N if the second argument
1666 is &var, don't mark var addressable;
1667 if it becomes non-addressable, we'll rewrite it into
1668 ATOMIC_COMPARE_EXCHANGE call. */
1669 tree arg = gimple_call_arg (stmt, 1);
1670 gimple_call_set_arg (stmt, 1, null_pointer_node);
1671 gimple_ior_addresses_taken (addresses_taken, stmt);
1672 gimple_call_set_arg (stmt, 1, arg);
1674 else if (is_asan_mark_p (stmt)
1675 || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1677 else
1678 gimple_ior_addresses_taken (addresses_taken, stmt);
1680 else
1681 /* Note all addresses taken by the stmt. */
1682 gimple_ior_addresses_taken (addresses_taken, stmt);
1684 /* If we have a call or an assignment, see if the lhs contains
1685 	     a local decl that must not be a gimple register.  */
1686 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1688 tree lhs = gimple_get_lhs (stmt);
1689 if (lhs
1690 && TREE_CODE (lhs) != SSA_NAME
1691 && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1692 || non_rewritable_lvalue_p (lhs)))
1694 decl = get_base_address (lhs);
1695 if (DECL_P (decl))
1696 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1700 if (gimple_assign_single_p (stmt))
1702 tree rhs = gimple_assign_rhs1 (stmt);
1703 if ((decl = non_rewritable_mem_ref_base (rhs)))
1704 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1707 else if (code == GIMPLE_CALL)
1709 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1711 tree arg = gimple_call_arg (stmt, i);
1712 if ((decl = non_rewritable_mem_ref_base (arg)))
1713 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1717 else if (code == GIMPLE_ASM)
1719 gasm *asm_stmt = as_a <gasm *> (stmt);
1720 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1722 tree link = gimple_asm_output_op (asm_stmt, i);
1723 tree lhs = TREE_VALUE (link);
1724 if (TREE_CODE (lhs) != SSA_NAME)
1726 decl = get_base_address (lhs);
1727 if (DECL_P (decl)
1728 && (non_rewritable_lvalue_p (lhs)
1729 /* We cannot move required conversions from
1730 the lhs to the rhs in asm statements, so
1731 require we do not need any. */
1732 || !useless_type_conversion_p
1733 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1734 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1737 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1739 tree link = gimple_asm_input_op (asm_stmt, i);
1740 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1741 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1746 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1747 gsi_next (&gsi))
1749 size_t i;
1750 gphi *phi = gsi.phi ();
1752 for (i = 0; i < gimple_phi_num_args (phi); i++)
1754 tree op = PHI_ARG_DEF (phi, i), var;
1755 if (TREE_CODE (op) == ADDR_EXPR
1756 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1757 && DECL_P (var))
1758 bitmap_set_bit (addresses_taken, DECL_UID (var));
1763   /* We cannot iterate over all referenced vars because that set can contain
1764 unused vars from BLOCK trees, which causes code generation differences
1765 for -g vs. -g0. */
1766 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1767 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1768 suitable_for_renaming);
1770 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1771 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1772 suitable_for_renaming);
1774 /* Operand caches need to be recomputed for operands referencing the updated
1775 variables and operands need to be rewritten to expose bare symbols. */
1776 if (!bitmap_empty_p (suitable_for_renaming))
1778 FOR_EACH_BB_FN (bb, cfun)
1779 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1781 gimple *stmt = gsi_stmt (gsi);
1783 /* Re-write TARGET_MEM_REFs of symbols we want to
1784 rewrite into SSA form. */
1785 if (gimple_assign_single_p (stmt))
1787 tree lhs = gimple_assign_lhs (stmt);
1788 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1789 tree sym;
1791 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1792 gimplify_modify_expr_complex_part. */
1793 if ((TREE_CODE (lhs) == IMAGPART_EXPR
1794 || TREE_CODE (lhs) == REALPART_EXPR)
1795 && DECL_P (TREE_OPERAND (lhs, 0))
1796 && bitmap_bit_p (suitable_for_renaming,
1797 DECL_UID (TREE_OPERAND (lhs, 0))))
1799 tree other = make_ssa_name (TREE_TYPE (lhs));
1800 tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1801 ? REALPART_EXPR : IMAGPART_EXPR,
1802 TREE_TYPE (other),
1803 TREE_OPERAND (lhs, 0));
1804 gimple *load = gimple_build_assign (other, lrhs);
1805 location_t loc = gimple_location (stmt);
1806 gimple_set_location (load, loc);
1807 gimple_set_vuse (load, gimple_vuse (stmt));
1808 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1809 gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1810 gimple_assign_set_rhs_with_ops
1811 (&gsi, COMPLEX_EXPR,
1812 TREE_CODE (lhs) == IMAGPART_EXPR
1813 ? other : gimple_assign_rhs1 (stmt),
1814 TREE_CODE (lhs) == IMAGPART_EXPR
1815 ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1816 stmt = gsi_stmt (gsi);
1817 unlink_stmt_vdef (stmt);
1818 update_stmt (stmt);
1819 continue;
1822 /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1823 into a BIT_INSERT_EXPR. */
1824 if (TREE_CODE (lhs) == BIT_FIELD_REF
1825 && DECL_P (TREE_OPERAND (lhs, 0))
1826 && bitmap_bit_p (suitable_for_renaming,
1827 DECL_UID (TREE_OPERAND (lhs, 0)))
1828 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1829 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1830 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1831 TYPE_SIZE_UNIT (TREE_TYPE
1832 (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
1834 && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1835 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1837 tree var = TREE_OPERAND (lhs, 0);
1838 tree val = gimple_assign_rhs1 (stmt);
1839 if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
1840 TREE_TYPE (val)))
1842 tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
1843 gimple *pun
1844 = gimple_build_assign (tem,
1845 build1 (VIEW_CONVERT_EXPR,
1846 TREE_TYPE (tem), val));
1847 gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1848 val = tem;
1850 tree bitpos = TREE_OPERAND (lhs, 2);
1851 gimple_assign_set_lhs (stmt, var);
1852 gimple_assign_set_rhs_with_ops
1853 (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1854 stmt = gsi_stmt (gsi);
1855 unlink_stmt_vdef (stmt);
1856 update_stmt (stmt);
1857 continue;
1860 /* Rewrite a vector insert using a MEM_REF on the LHS
1861 into a BIT_INSERT_EXPR. */
1862 if (TREE_CODE (lhs) == MEM_REF
1863 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1864 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1865 && DECL_P (sym)
1866 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1867 && VECTOR_TYPE_P (TREE_TYPE (sym))
1868 && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1869 && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1870 TYPE_SIZE_UNIT
1871 (TREE_TYPE (TREE_TYPE (sym))), 0)
1872 && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1873 && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1874 TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1875 && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1876 % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1878 tree val = gimple_assign_rhs1 (stmt);
1879 if (! types_compatible_p (TREE_TYPE (val),
1880 TREE_TYPE (TREE_TYPE (sym))))
1882 tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (sym)));
1883 gimple *pun
1884 = gimple_build_assign (tem,
1885 build1 (VIEW_CONVERT_EXPR,
1886 TREE_TYPE (tem), val));
1887 gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1888 val = tem;
1890 tree bitpos
1891 = wide_int_to_tree (bitsizetype,
1892 mem_ref_offset (lhs) * BITS_PER_UNIT);
1893 gimple_assign_set_lhs (stmt, sym);
1894 gimple_assign_set_rhs_with_ops
1895 (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1896 stmt = gsi_stmt (gsi);
1897 unlink_stmt_vdef (stmt);
1898 update_stmt (stmt);
1899 continue;
1902 /* We shouldn't have any fancy wrapping of
1903 component-refs on the LHS, but look through
1904 VIEW_CONVERT_EXPRs as that is easy. */
1905 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1906 lhs = TREE_OPERAND (lhs, 0);
1907 if (TREE_CODE (lhs) == MEM_REF
1908 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1909 && integer_zerop (TREE_OPERAND (lhs, 1))
1910 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1911 && DECL_P (sym)
1912 && !TREE_ADDRESSABLE (sym)
1913 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1914 lhs = sym;
1915 else
1916 lhs = gimple_assign_lhs (stmt);
1918 /* Rewrite the RHS and make sure the resulting assignment
1919 is validly typed. */
1920 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1921 rhs = gimple_assign_rhs1 (stmt);
1922 if (gimple_assign_lhs (stmt) != lhs
1923 && !useless_type_conversion_p (TREE_TYPE (lhs),
1924 TREE_TYPE (rhs)))
1926 if (gimple_clobber_p (stmt))
1928 rhs = build_constructor (TREE_TYPE (lhs), NULL);
1929 TREE_THIS_VOLATILE (rhs) = 1;
1931 else
1932 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1933 TREE_TYPE (lhs), rhs);
1935 if (gimple_assign_lhs (stmt) != lhs)
1936 gimple_assign_set_lhs (stmt, lhs);
1938 if (gimple_assign_rhs1 (stmt) != rhs)
1940 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1941 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1945 else if (gimple_code (stmt) == GIMPLE_CALL)
1947 unsigned i;
1948 if (optimize_atomic_compare_exchange_p (stmt))
1950 tree expected = gimple_call_arg (stmt, 1);
1951 if (bitmap_bit_p (suitable_for_renaming,
1952 DECL_UID (TREE_OPERAND (expected, 0))))
1954 fold_builtin_atomic_compare_exchange (&gsi);
1955 continue;
1958 else if (is_asan_mark_p (stmt))
1960 tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1961 if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1963 unlink_stmt_vdef (stmt);
1964 if (asan_mark_p (stmt, ASAN_MARK_POISON))
1966 gcall *call
1967 = gimple_build_call_internal (IFN_ASAN_POISON, 0);
1968 gimple_call_set_lhs (call, var);
1969 gsi_replace (&gsi, call, GSI_SAME_STMT);
1971 else
1973 /* In ASAN_MARK (UNPOISON, &b, ...) the variable
1974 is uninitialized. Avoid dependencies on
1975 previous out of scope value. */
1976 tree clobber
1977 = build_constructor (TREE_TYPE (var), NULL);
1978 TREE_THIS_VOLATILE (clobber) = 1;
1979 gimple *g = gimple_build_assign (var, clobber);
1980 gsi_replace (&gsi, g, GSI_SAME_STMT);
1982 continue;
1985 else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1986 for (i = 1; i < gimple_call_num_args (stmt); i++)
1988 tree *argp = gimple_call_arg_ptr (stmt, i);
1989 if (*argp == null_pointer_node)
1990 continue;
1991 gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
1992 && VAR_P (TREE_OPERAND (*argp, 0)));
1993 tree var = TREE_OPERAND (*argp, 0);
1994 if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1995 *argp = null_pointer_node;
1997 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1999 tree *argp = gimple_call_arg_ptr (stmt, i);
2000 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
2004 else if (gimple_code (stmt) == GIMPLE_ASM)
2006 gasm *asm_stmt = as_a <gasm *> (stmt);
2007 unsigned i;
2008 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
2010 tree link = gimple_asm_output_op (asm_stmt, i);
2011 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2012 suitable_for_renaming);
2014 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
2016 tree link = gimple_asm_input_op (asm_stmt, i);
2017 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2018 suitable_for_renaming);
2022 else if (gimple_debug_bind_p (stmt)
2023 && gimple_debug_bind_has_value_p (stmt))
2025 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2026 tree decl;
2027 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
2028 decl = non_rewritable_mem_ref_base (*valuep);
2029 if (decl
2030 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
2031 gimple_debug_bind_reset_value (stmt);
2034 if (gimple_references_memory_p (stmt)
2035 || is_gimple_debug (stmt))
2036 update_stmt (stmt);
2038 gsi_next (&gsi);
2041       /* Update SSA form here; we are also called outside of the pass manager.  */
2042 if (number_of_loops (cfun) > 1
2043 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2044 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2045 else
2046 update_ssa (TODO_update_ssa);
2049 timevar_pop (TV_ADDRESS_TAKEN);
2052 namespace {
2054 const pass_data pass_data_update_address_taken =
2056 GIMPLE_PASS, /* type */
2057 "addressables", /* name */
2058 OPTGROUP_NONE, /* optinfo_flags */
2059 TV_ADDRESS_TAKEN, /* tv_id */
2060 PROP_ssa, /* properties_required */
2061 0, /* properties_provided */
2062 0, /* properties_destroyed */
2063 0, /* todo_flags_start */
2064 TODO_update_address_taken, /* todo_flags_finish */
2067 class pass_update_address_taken : public gimple_opt_pass
2069 public:
2070 pass_update_address_taken (gcc::context *ctxt)
2071 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
2074 /* opt_pass methods: */
2076 }; // class pass_update_address_taken
2078 } // anon namespace
2080 gimple_opt_pass *
2081 make_pass_update_address_taken (gcc::context *ctxt)
2083 return new pass_update_address_taken (ctxt);