gcc/tree-ssa.c
/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "hash-map.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "cfgloop.h"
#include "cfgexpand.h"
/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;


/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}


/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  delete edge_var_maps;
  edge_var_maps = NULL;
}
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi.phi ();
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map *vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
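
/* Illustrative sketch (not part of the original file): a typical caller
   pattern for the helpers above when redirecting an edge whose PHI
   arguments must be preserved.  The names E, E2 and DEST are hypothetical.

     edge e2 = ssa_redirect_edge (e, dest);  // queues PHI args in edge_var_maps
     ...                                     // set up PHI nodes in DEST
     flush_pending_stmts (e2);               // re-adds the queued arguments

   flush_pending_stmts walks the per-edge vector recorded by
   redirect_edge_var_map_add and clears it when done.  */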
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((TREE_CODE (var) != VAR_DECL
       || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple stmt;
  gimple def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             <bb X>:
             v_1 = foo;

             <bb Y>:
             w_2 = v_1 + bar;
             # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover their
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;
      else
        {
          gdebug *def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            DECL_MODE (vexpr) = DECL_MODE (value);
          else
            DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
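
/* Illustrative sketch (not part of the original file): given IL such as

     x_3 = a_1 + b_2;
     # DEBUG y => x_3

   removing the definition of x_3 through the routine above introduces a
   debug temporary so the binding of the user variable survives:

     # DEBUG D#1 => a_1 + b_2
     # DEBUG y => D#1

   The names x_3, a_1, b_2, y and D#1 are hypothetical; whether the RHS is
   propagated directly or a debug temp is created follows the conditions
   spelled out in insert_debug_temp_for_var_def.  */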
/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple use_stmt;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
        bool remove_now = true;
        tree var = ssa_name (j);
        gimple stmt;
        imm_use_iterator uit;

        FOR_EACH_IMM_USE_STMT (stmt, uit, var)
          {
            ssa_op_iter dit;
            def_operand_p def_p;

            /* We can't propagate PHI nodes into debug stmts.  */
            if (gimple_code (stmt) == GIMPLE_PHI
                || is_gimple_debug (stmt))
              continue;

            /* If we find another definition to remove that uses
               the one we're looking at, defer the removal of this
               one, so that it can be propagated into debug stmts
               after the other is.  */
            FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
              {
                tree odef = DEF_FROM_PTR (def_p);

                if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                  {
                    remove_now = false;
                    break;
                  }
              }

            if (!remove_now)
              BREAK_FROM_IMM_USE_STMT (uit);
          }

        if (remove_now)
          {
            gimple def = SSA_NAME_DEF_STMT (var);
            gimple_stmt_iterator gsi = gsi_for_stmt (def);

            if (gimple_code (def) == GIMPLE_PHI)
              remove_phi_node (&gsi, true);
            else
              {
                gsi_remove (&gsi, true);
                release_defs (def);
              }

            bitmap_clear_bit (toremove, j);
          }
      }
}
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((TREE_CODE (base) == VAR_DECL
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          gimple stmt;
          TREE_VISITED (name) = 0;

          verify_ssa_name (name, virtual_operand_p (name));

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              if (verify_def (bb, definition_block,
                              name, stmt, virtual_operand_p (name)))
                goto err;
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
            {
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (gimple_debug_bind_p (stmt)
              && !gimple_debug_bind_has_value_p (stmt))
            continue;

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  gcc_assert (!cfun->gimple_df);
  init_tree_ssa (cfun);
  return 0;
}
namespace {

const pass_data pass_data_init_datastructures =
{
  GIMPLE_PASS, /* type */
  "*init_datastructures", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_init_datastructures : public gimple_opt_pass
{
public:
  pass_init_datastructures (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_init_datastructures, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* Do nothing for functions that were already produced in SSA form.  */
      return !(fun->curr_properties & PROP_ssa);
    }

  virtual unsigned int execute (function *)
    {
      return execute_init_datastructures ();
    }

}; // class pass_init_datastructures

} // anon namespace

gimple_opt_pass *
make_pass_init_datastructures (gcc::context *ctxt)
{
  return new pass_init_datastructures (ctxt);
}
/* Deallocate memory associated with SSA data structures for the current
   function.  */

void
delete_tree_ssa (void)
{
  fini_ssanames ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (cfun))
    fini_ssa_operands (cfun);

  cfun->gimple_df->default_defs->empty ();
  cfun->gimple_df->default_defs = NULL;
  pt_solution_reset (&cfun->gimple_df->escaped);
  if (cfun->gimple_df->decls_to_pointers != NULL)
    delete cfun->gimple_df->decls_to_pointers;
  cfun->gimple_df->decls_to_pointers = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}
/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
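
/* Illustrative sketch (not part of the original file): in GIMPLE,
   conversions that do not change the value representation are considered
   useless and can be stripped.  For instance, a NOP_EXPR converting
   between two pointer types in the same address space satisfies
   useless_type_conversion_p, so

     tree inner = tree_ssa_strip_useless_type_conversions (expr);

   yields the unconverted operand.  EXPR and INNER are hypothetical names;
   whether a particular conversion is useless is decided entirely by
   useless_type_conversion_p.  */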
/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple def_stmt;
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return false;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return false;
  /* Hard register variables get their initial value from the ether.  */
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check if the complex was not only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
             || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
    }
}
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base = ref;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && wi::fits_uhwi_p (mem_ref_offset (base))
          && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                        mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      if (DECL_P (decl)
          && (!integer_zerop (TREE_OPERAND (base, 1))
              || (DECL_SIZE (decl)
                  != TYPE_SIZE (TREE_TYPE (base)))
              || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
        return decl;
    }

  return NULL_TREE;
}
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* A decl that is wrapped inside a MEM-REF that covers
     it in full is also rewritable.
     ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (lhs, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
      if (DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;
    }

  return true;
}
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables that become
   suitable for renaming are recorded in SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register.  Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }
}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          /* Note all addresses taken by the stmt.  */
          gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && non_rewritable_lvalue_p (lhs))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              gasm *asm_stmt = as_a <gasm *> (stmt);
              for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_output_op (asm_stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_input_op (asm_stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          size_t i;
          gphi *phi = gsi.phi ();

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
        for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* Rewrite LHS IMAG/REALPART_EXPR similar to
                   gimplify_modify_expr_complex_part.  */
                if ((TREE_CODE (lhs) == IMAGPART_EXPR
                     || TREE_CODE (lhs) == REALPART_EXPR)
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0))))
                  {
                    tree other = make_ssa_name (TREE_TYPE (lhs));
                    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
                                        ? REALPART_EXPR : IMAGPART_EXPR,
                                        TREE_TYPE (other),
                                        TREE_OPERAND (lhs, 0));
                    gimple load = gimple_build_assign (other, lrhs);
                    location_t loc = gimple_location (stmt);
                    gimple_set_location (load, loc);
                    gimple_set_vuse (load, gimple_vuse (stmt));
                    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
                    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
                    gimple_assign_set_rhs_with_ops
                      (&gsi, COMPLEX_EXPR,
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? other : gimple_assign_rhs1 (stmt),
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                     TREE_TYPE (lhs), rhs);

                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                unsigned i;
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                unsigned i;
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}