2016-09-25 François Dumont <fdumont@gcc.gnu.org>
[official-gcc.git] / gcc / tree-ssa.c
blobd442a5f89e1cf05bc6495e2d51f9db58bd7beedb
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
45 /* Pointer map of variable mappings, keyed by edge. */
46 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
49 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
51 void
52 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
54 edge_var_map new_node;
56 if (edge_var_maps == NULL)
57 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
59 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
60 new_node.def = def;
61 new_node.result = result;
62 new_node.locus = locus;
64 slot.safe_push (new_node);
68 /* Clear the var mappings in edge E. */
70 void
71 redirect_edge_var_map_clear (edge e)
73 if (!edge_var_maps)
74 return;
76 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
78 if (head)
79 head->release ();
83 /* Duplicate the redirected var mappings in OLDE in NEWE.
85 This assumes a hash_map can have multiple edges mapping to the same
86 var_map (many to one mapping), since we don't remove the previous mappings.
89 void
90 redirect_edge_var_map_dup (edge newe, edge olde)
92 if (!edge_var_maps)
93 return;
95 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
96 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
97 if (!old_head)
98 return;
100 new_head->safe_splice (*old_head);
104 /* Return the variable mappings for a given edge. If there is none, return
105 NULL. */
107 vec<edge_var_map> *
108 redirect_edge_var_map_vector (edge e)
110 /* Hey, what kind of idiot would... you'd be surprised. */
111 if (!edge_var_maps)
112 return NULL;
114 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
115 if (!slot)
116 return NULL;
118 return slot;
121 /* Clear the edge variable mappings. */
123 void
124 redirect_edge_var_map_empty (void)
126 if (edge_var_maps)
127 edge_var_maps->empty ();
131 /* Remove the corresponding arguments from the PHI nodes in E's
132 destination block and redirect it to DEST. Return redirected edge.
133 The list of removed arguments is stored in a vector accessed
134 through edge_var_maps. */
136 edge
137 ssa_redirect_edge (edge e, basic_block dest)
139 gphi_iterator gsi;
140 gphi *phi;
142 redirect_edge_var_map_clear (e);
144 /* Remove the appropriate PHI arguments in E's destination block. */
145 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
147 tree def;
148 source_location locus ;
150 phi = gsi.phi ();
151 def = gimple_phi_arg_def (phi, e->dest_idx);
152 locus = gimple_phi_arg_location (phi, e->dest_idx);
154 if (def == NULL_TREE)
155 continue;
157 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
160 e = redirect_edge_succ_nodup (e, dest);
162 return e;
166 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
167 E->dest. */
169 void
170 flush_pending_stmts (edge e)
172 gphi *phi;
173 edge_var_map *vm;
174 int i;
175 gphi_iterator gsi;
177 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
178 if (!v)
179 return;
181 for (gsi = gsi_start_phis (e->dest), i = 0;
182 !gsi_end_p (gsi) && v->iterate (i, &vm);
183 gsi_next (&gsi), i++)
185 tree def;
187 phi = gsi.phi ();
188 def = redirect_edge_var_map_def (vm);
189 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
192 redirect_edge_var_map_clear (e);
195 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
196 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
197 expression with a different value.
199 This will update any annotations (say debug bind stmts) referring
200 to the original LHS, so that they use the RHS instead. This is
201 done even if NLHS and LHS are the same, for it is understood that
202 the RHS will be modified afterwards, and NLHS will not be assigned
203 an equivalent value.
205 Adjusting any non-annotation uses of the LHS, if needed, is a
206 responsibility of the caller.
208 The effect of this call should be pretty much the same as that of
209 inserting a copy of STMT before STMT, and then removing the
210 original stmt, at which time gsi_remove() would have update
211 annotations, but using this function saves all the inserting,
212 copying and removing. */
214 void
215 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
217 if (MAY_HAVE_DEBUG_STMTS)
219 tree lhs = gimple_get_lhs (stmt);
221 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
223 insert_debug_temp_for_var_def (NULL, lhs);
226 gimple_set_lhs (stmt, nlhs);
230 /* Given a tree for an expression for which we might want to emit
231 locations or values in debug information (generally a variable, but
232 we might deal with other kinds of trees in the future), return the
233 tree that should be used as the variable of a DEBUG_BIND STMT or
234 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
236 tree
237 target_for_debug_bind (tree var)
239 if (!MAY_HAVE_DEBUG_STMTS)
240 return NULL_TREE;
242 if (TREE_CODE (var) == SSA_NAME)
244 var = SSA_NAME_VAR (var);
245 if (var == NULL_TREE)
246 return NULL_TREE;
249 if ((TREE_CODE (var) != VAR_DECL
250 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
251 && TREE_CODE (var) != PARM_DECL)
252 return NULL_TREE;
254 if (DECL_HAS_VALUE_EXPR_P (var))
255 return target_for_debug_bind (DECL_VALUE_EXPR (var));
257 if (DECL_IGNORED_P (var))
258 return NULL_TREE;
260 /* var-tracking only tracks registers. */
261 if (!is_gimple_reg_type (TREE_TYPE (var)))
262 return NULL_TREE;
264 return var;
267 /* Called via walk_tree, look for SSA_NAMEs that have already been
268 released. */
270 static tree
271 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
273 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
275 if (wi && wi->is_lhs)
276 return NULL_TREE;
278 if (TREE_CODE (*tp) == SSA_NAME)
280 if (SSA_NAME_IN_FREE_LIST (*tp))
281 return *tp;
283 *walk_subtrees = 0;
285 else if (IS_TYPE_OR_DECL_P (*tp))
286 *walk_subtrees = 0;
288 return NULL_TREE;
291 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
292 by other DEBUG stmts, and replace uses of the DEF with the
293 newly-created debug temp. */
295 void
296 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
298 imm_use_iterator imm_iter;
299 use_operand_p use_p;
300 gimple *stmt;
301 gimple *def_stmt = NULL;
302 int usecount = 0;
303 tree value = NULL;
305 if (!MAY_HAVE_DEBUG_STMTS)
306 return;
308 /* If this name has already been registered for replacement, do nothing
309 as anything that uses this name isn't in SSA form. */
310 if (name_registered_for_update_p (var))
311 return;
313 /* Check whether there are debug stmts that reference this variable and,
314 if there are, decide whether we should use a debug temp. */
315 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
317 stmt = USE_STMT (use_p);
319 if (!gimple_debug_bind_p (stmt))
320 continue;
322 if (usecount++)
323 break;
325 if (gimple_debug_bind_get_value (stmt) != var)
327 /* Count this as an additional use, so as to make sure we
328 use a temp unless VAR's definition has a SINGLE_RHS that
329 can be shared. */
330 usecount++;
331 break;
335 if (!usecount)
336 return;
338 if (gsi)
339 def_stmt = gsi_stmt (*gsi);
340 else
341 def_stmt = SSA_NAME_DEF_STMT (var);
343 /* If we didn't get an insertion point, and the stmt has already
344 been removed, we won't be able to insert the debug bind stmt, so
345 we'll have to drop debug information. */
346 if (gimple_code (def_stmt) == GIMPLE_PHI)
348 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
349 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
350 value = NULL;
351 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
352 to. */
353 else if (value == error_mark_node)
354 value = NULL;
356 else if (is_gimple_assign (def_stmt))
358 bool no_value = false;
360 if (!dom_info_available_p (CDI_DOMINATORS))
362 struct walk_stmt_info wi;
364 memset (&wi, 0, sizeof (wi));
366 /* When removing blocks without following reverse dominance
367 order, we may sometimes encounter SSA_NAMEs that have
368 already been released, referenced in other SSA_DEFs that
369 we're about to release. Consider:
371 <bb X>:
372 v_1 = foo;
374 <bb Y>:
375 w_2 = v_1 + bar;
376 # DEBUG w => w_2
378 If we deleted BB X first, propagating the value of w_2
379 won't do us any good. It's too late to recover their
380 original definition of v_1: when it was deleted, it was
381 only referenced in other DEFs, it couldn't possibly know
382 it should have been retained, and propagating every
383 single DEF just in case it might have to be propagated
384 into a DEBUG STMT would probably be too wasteful.
386 When dominator information is not readily available, we
387 check for and accept some loss of debug information. But
388 if it is available, there's no excuse for us to remove
389 blocks in the wrong order, so we don't even check for
390 dead SSA NAMEs. SSA verification shall catch any
391 errors. */
392 if ((!gsi && !gimple_bb (def_stmt))
393 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
394 no_value = true;
397 if (!no_value)
398 value = gimple_assign_rhs_to_tree (def_stmt);
401 if (value)
403 /* If there's a single use of VAR, and VAR is the entire debug
404 expression (usecount would have been incremented again
405 otherwise), and the definition involves only constants and
406 SSA names, then we can propagate VALUE into this single use,
407 avoiding the temp.
409 We can also avoid using a temp if VALUE can be shared and
410 propagated into all uses, without generating expressions that
411 wouldn't be valid gimple RHSs.
413 Other cases that would require unsharing or non-gimple RHSs
414 are deferred to a debug temp, although we could avoid temps
415 at the expense of duplication of expressions. */
417 if (CONSTANT_CLASS_P (value)
418 || gimple_code (def_stmt) == GIMPLE_PHI
419 || (usecount == 1
420 && (!gimple_assign_single_p (def_stmt)
421 || is_gimple_min_invariant (value)))
422 || is_gimple_reg (value))
424 else
426 gdebug *def_temp;
427 tree vexpr = make_node (DEBUG_EXPR_DECL);
429 def_temp = gimple_build_debug_bind (vexpr,
430 unshare_expr (value),
431 def_stmt);
433 DECL_ARTIFICIAL (vexpr) = 1;
434 TREE_TYPE (vexpr) = TREE_TYPE (value);
435 if (DECL_P (value))
436 DECL_MODE (vexpr) = DECL_MODE (value);
437 else
438 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
440 if (gsi)
441 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
442 else
444 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
445 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
448 value = vexpr;
452 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
454 if (!gimple_debug_bind_p (stmt))
455 continue;
457 if (value)
459 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
460 /* unshare_expr is not needed here. vexpr is either a
461 SINGLE_RHS, that can be safely shared, some other RHS
462 that was unshared when we found it had a single debug
463 use, or a DEBUG_EXPR_DECL, that can be safely
464 shared. */
465 SET_USE (use_p, unshare_expr (value));
466 /* If we didn't replace uses with a debug decl fold the
467 resulting expression. Otherwise we end up with invalid IL. */
468 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
470 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
471 fold_stmt_inplace (&gsi);
474 else
475 gimple_debug_bind_reset_value (stmt);
477 update_stmt (stmt);
482 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
483 other DEBUG stmts, and replace uses of the DEF with the
484 newly-created debug temp. */
486 void
487 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
489 gimple *stmt;
490 ssa_op_iter op_iter;
491 def_operand_p def_p;
493 if (!MAY_HAVE_DEBUG_STMTS)
494 return;
496 stmt = gsi_stmt (*gsi);
498 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
500 tree var = DEF_FROM_PTR (def_p);
502 if (TREE_CODE (var) != SSA_NAME)
503 continue;
505 insert_debug_temp_for_var_def (gsi, var);
509 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
511 void
512 reset_debug_uses (gimple *stmt)
514 ssa_op_iter op_iter;
515 def_operand_p def_p;
516 imm_use_iterator imm_iter;
517 gimple *use_stmt;
519 if (!MAY_HAVE_DEBUG_STMTS)
520 return;
522 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
524 tree var = DEF_FROM_PTR (def_p);
526 if (TREE_CODE (var) != SSA_NAME)
527 continue;
529 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
531 if (!gimple_debug_bind_p (use_stmt))
532 continue;
534 gimple_debug_bind_reset_value (use_stmt);
535 update_stmt (use_stmt);
540 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
541 dominated stmts before their dominators, so that release_ssa_defs
542 stands a chance of propagating DEFs into debug bind stmts. */
544 void
545 release_defs_bitset (bitmap toremove)
547 unsigned j;
548 bitmap_iterator bi;
550 /* Performing a topological sort is probably overkill, this will
551 most likely run in slightly superlinear time, rather than the
552 pathological quadratic worst case. */
553 while (!bitmap_empty_p (toremove))
554 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
556 bool remove_now = true;
557 tree var = ssa_name (j);
558 gimple *stmt;
559 imm_use_iterator uit;
561 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
563 ssa_op_iter dit;
564 def_operand_p def_p;
566 /* We can't propagate PHI nodes into debug stmts. */
567 if (gimple_code (stmt) == GIMPLE_PHI
568 || is_gimple_debug (stmt))
569 continue;
571 /* If we find another definition to remove that uses
572 the one we're looking at, defer the removal of this
573 one, so that it can be propagated into debug stmts
574 after the other is. */
575 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
577 tree odef = DEF_FROM_PTR (def_p);
579 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
581 remove_now = false;
582 break;
586 if (!remove_now)
587 BREAK_FROM_IMM_USE_STMT (uit);
590 if (remove_now)
592 gimple *def = SSA_NAME_DEF_STMT (var);
593 gimple_stmt_iterator gsi = gsi_for_stmt (def);
595 if (gimple_code (def) == GIMPLE_PHI)
596 remove_phi_node (&gsi, true);
597 else
599 gsi_remove (&gsi, true);
600 release_defs (def);
603 bitmap_clear_bit (toremove, j);
608 /* Verify virtual SSA form. */
610 bool
611 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
613 bool err = false;
615 if (bitmap_bit_p (visited, bb->index))
616 return false;
618 bitmap_set_bit (visited, bb->index);
620 /* Pick up the single virtual PHI def. */
621 gphi *phi = NULL;
622 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
623 gsi_next (&si))
625 tree res = gimple_phi_result (si.phi ());
626 if (virtual_operand_p (res))
628 if (phi)
630 error ("multiple virtual PHI nodes in BB %d", bb->index);
631 print_gimple_stmt (stderr, phi, 0, 0);
632 print_gimple_stmt (stderr, si.phi (), 0, 0);
633 err = true;
635 else
636 phi = si.phi ();
639 if (phi)
641 current_vdef = gimple_phi_result (phi);
642 if (TREE_CODE (current_vdef) != SSA_NAME)
644 error ("virtual definition is not an SSA name");
645 print_gimple_stmt (stderr, phi, 0, 0);
646 err = true;
650 /* Verify stmts. */
651 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
652 gsi_next (&gsi))
654 gimple *stmt = gsi_stmt (gsi);
655 tree vuse = gimple_vuse (stmt);
656 if (vuse)
658 if (vuse != current_vdef)
660 error ("stmt with wrong VUSE");
661 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
662 fprintf (stderr, "expected ");
663 print_generic_expr (stderr, current_vdef, 0);
664 fprintf (stderr, "\n");
665 err = true;
667 tree vdef = gimple_vdef (stmt);
668 if (vdef)
670 current_vdef = vdef;
671 if (TREE_CODE (current_vdef) != SSA_NAME)
673 error ("virtual definition is not an SSA name");
674 print_gimple_stmt (stderr, phi, 0, 0);
675 err = true;
681 /* Verify destination PHI uses and recurse. */
682 edge_iterator ei;
683 edge e;
684 FOR_EACH_EDGE (e, ei, bb->succs)
686 gphi *phi = get_virtual_phi (e->dest);
687 if (phi
688 && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
690 error ("PHI node with wrong VUSE on edge from BB %d",
691 e->src->index);
692 print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
693 fprintf (stderr, "expected ");
694 print_generic_expr (stderr, current_vdef, 0);
695 fprintf (stderr, "\n");
696 err = true;
699 /* Recurse. */
700 err |= verify_vssa (e->dest, current_vdef, visited);
703 return err;
706 /* Return true if SSA_NAME is malformed and mark it visited.
708 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
709 operand. */
711 static bool
712 verify_ssa_name (tree ssa_name, bool is_virtual)
714 if (TREE_CODE (ssa_name) != SSA_NAME)
716 error ("expected an SSA_NAME object");
717 return true;
720 if (SSA_NAME_IN_FREE_LIST (ssa_name))
722 error ("found an SSA_NAME that had been released into the free pool");
723 return true;
726 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
727 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
729 error ("type mismatch between an SSA_NAME and its symbol");
730 return true;
733 if (is_virtual && !virtual_operand_p (ssa_name))
735 error ("found a virtual definition for a GIMPLE register");
736 return true;
739 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
741 error ("virtual SSA name for non-VOP decl");
742 return true;
745 if (!is_virtual && virtual_operand_p (ssa_name))
747 error ("found a real definition for a non-register");
748 return true;
751 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
752 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
754 error ("found a default name with a non-empty defining statement");
755 return true;
758 return false;
762 /* Return true if the definition of SSA_NAME at block BB is malformed.
764 STMT is the statement where SSA_NAME is created.
766 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
767 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
768 it means that the block in that array slot contains the
769 definition of SSA_NAME.
771 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
773 static bool
774 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
775 gimple *stmt, bool is_virtual)
777 if (verify_ssa_name (ssa_name, is_virtual))
778 goto err;
780 if (SSA_NAME_VAR (ssa_name)
781 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
782 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
784 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
785 goto err;
788 if (definition_block[SSA_NAME_VERSION (ssa_name)])
790 error ("SSA_NAME created in two different blocks %i and %i",
791 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
792 goto err;
795 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
797 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
799 error ("SSA_NAME_DEF_STMT is wrong");
800 fprintf (stderr, "Expected definition statement:\n");
801 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
802 fprintf (stderr, "\nActual definition statement:\n");
803 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
804 goto err;
807 return false;
809 err:
810 fprintf (stderr, "while verifying SSA_NAME ");
811 print_generic_expr (stderr, ssa_name, 0);
812 fprintf (stderr, " in statement\n");
813 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
815 return true;
819 /* Return true if the use of SSA_NAME at statement STMT in block BB is
820 malformed.
822 DEF_BB is the block where SSA_NAME was found to be created.
824 IDOM contains immediate dominator information for the flowgraph.
826 CHECK_ABNORMAL is true if the caller wants to check whether this use
827 is flowing through an abnormal edge (only used when checking PHI
828 arguments).
830 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
831 that are defined before STMT in basic block BB. */
833 static bool
834 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
835 gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
837 bool err = false;
838 tree ssa_name = USE_FROM_PTR (use_p);
840 if (!TREE_VISITED (ssa_name))
841 if (verify_imm_links (stderr, ssa_name))
842 err = true;
844 TREE_VISITED (ssa_name) = 1;
846 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
847 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
848 ; /* Default definitions have empty statements. Nothing to do. */
849 else if (!def_bb)
851 error ("missing definition");
852 err = true;
854 else if (bb != def_bb
855 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
857 error ("definition in block %i does not dominate use in block %i",
858 def_bb->index, bb->index);
859 err = true;
861 else if (bb == def_bb
862 && names_defined_in_bb != NULL
863 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
865 error ("definition in block %i follows the use", def_bb->index);
866 err = true;
869 if (check_abnormal
870 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
872 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
873 err = true;
876 /* Make sure the use is in an appropriate list by checking the previous
877 element to make sure it's the same. */
878 if (use_p->prev == NULL)
880 error ("no immediate_use list");
881 err = true;
883 else
885 tree listvar;
886 if (use_p->prev->use == NULL)
887 listvar = use_p->prev->loc.ssa_name;
888 else
889 listvar = USE_FROM_PTR (use_p->prev);
890 if (listvar != ssa_name)
892 error ("wrong immediate use list");
893 err = true;
897 if (err)
899 fprintf (stderr, "for SSA_NAME: ");
900 print_generic_expr (stderr, ssa_name, TDF_VOPS);
901 fprintf (stderr, " in statement:\n");
902 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
905 return err;
909 /* Return true if any of the arguments for PHI node PHI at block BB is
910 malformed.
912 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
913 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
914 it means that the block in that array slot contains the
915 definition of SSA_NAME. */
917 static bool
918 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
920 edge e;
921 bool err = false;
922 size_t i, phi_num_args = gimple_phi_num_args (phi);
924 if (EDGE_COUNT (bb->preds) != phi_num_args)
926 error ("incoming edge count does not match number of PHI arguments");
927 err = true;
928 goto error;
931 for (i = 0; i < phi_num_args; i++)
933 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
934 tree op = USE_FROM_PTR (op_p);
936 e = EDGE_PRED (bb, i);
938 if (op == NULL_TREE)
940 error ("PHI argument is missing for edge %d->%d",
941 e->src->index,
942 e->dest->index);
943 err = true;
944 goto error;
947 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
949 error ("PHI argument is not SSA_NAME, or invariant");
950 err = true;
953 if (TREE_CODE (op) == SSA_NAME)
955 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
956 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
957 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
960 if (TREE_CODE (op) == ADDR_EXPR)
962 tree base = TREE_OPERAND (op, 0);
963 while (handled_component_p (base))
964 base = TREE_OPERAND (base, 0);
965 if ((TREE_CODE (base) == VAR_DECL
966 || TREE_CODE (base) == PARM_DECL
967 || TREE_CODE (base) == RESULT_DECL)
968 && !TREE_ADDRESSABLE (base))
970 error ("address taken, but ADDRESSABLE bit not set");
971 err = true;
975 if (e->dest != bb)
977 error ("wrong edge %d->%d for PHI argument",
978 e->src->index, e->dest->index);
979 err = true;
982 if (err)
984 fprintf (stderr, "PHI argument\n");
985 print_generic_stmt (stderr, op, TDF_VOPS);
986 goto error;
990 error:
991 if (err)
993 fprintf (stderr, "for PHI node\n");
994 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
998 return err;
1002 /* Verify common invariants in the SSA web.
1003 TODO: verify the variable annotations. */
1005 DEBUG_FUNCTION void
1006 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
1008 basic_block bb;
1009 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1010 ssa_op_iter iter;
1011 tree op;
1012 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1013 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
1015 gcc_assert (!need_ssa_update_p (cfun));
1017 timevar_push (TV_TREE_SSA_VERIFY);
1019 /* Keep track of SSA names present in the IL. */
1020 size_t i;
1021 tree name;
1023 FOR_EACH_SSA_NAME (i, name, cfun)
1025 gimple *stmt;
1026 TREE_VISITED (name) = 0;
1028 verify_ssa_name (name, virtual_operand_p (name));
1030 stmt = SSA_NAME_DEF_STMT (name);
1031 if (!gimple_nop_p (stmt))
1033 basic_block bb = gimple_bb (stmt);
1034 if (verify_def (bb, definition_block,
1035 name, stmt, virtual_operand_p (name)))
1036 goto err;
1040 calculate_dominance_info (CDI_DOMINATORS);
1042 /* Now verify all the uses and make sure they agree with the definitions
1043 found in the previous pass. */
1044 FOR_EACH_BB_FN (bb, cfun)
1046 edge e;
1047 edge_iterator ei;
1049 /* Make sure that all edges have a clear 'aux' field. */
1050 FOR_EACH_EDGE (e, ei, bb->preds)
1052 if (e->aux)
1054 error ("AUX pointer initialized for edge %d->%d", e->src->index,
1055 e->dest->index);
1056 goto err;
1060 /* Verify the arguments for every PHI node in the block. */
1061 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1063 gphi *phi = gsi.phi ();
1064 if (verify_phi_args (phi, bb, definition_block))
1065 goto err;
1067 bitmap_set_bit (names_defined_in_bb,
1068 SSA_NAME_VERSION (gimple_phi_result (phi)));
1071 /* Now verify all the uses and vuses in every statement of the block. */
1072 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1073 gsi_next (&gsi))
1075 gimple *stmt = gsi_stmt (gsi);
1076 use_operand_p use_p;
1078 if (check_modified_stmt && gimple_modified_p (stmt))
1080 error ("stmt (%p) marked modified after optimization pass: ",
1081 (void *)stmt);
1082 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1083 goto err;
1086 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1088 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1089 goto err;
1092 if (gimple_debug_bind_p (stmt)
1093 && !gimple_debug_bind_has_value_p (stmt))
1094 continue;
1096 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1098 op = USE_FROM_PTR (use_p);
1099 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1100 use_p, stmt, false, names_defined_in_bb))
1101 goto err;
1104 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1106 if (SSA_NAME_DEF_STMT (op) != stmt)
1108 error ("SSA_NAME_DEF_STMT is wrong");
1109 fprintf (stderr, "Expected definition statement:\n");
1110 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1111 fprintf (stderr, "\nActual definition statement:\n");
1112 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1113 4, TDF_VOPS);
1114 goto err;
1116 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1120 bitmap_clear (names_defined_in_bb);
1123 free (definition_block);
1125 if (gimple_vop (cfun)
1126 && ssa_default_def (cfun, gimple_vop (cfun)))
1128 auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
1129 bitmap_clear (visited);
1130 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1131 ssa_default_def (cfun, gimple_vop (cfun)), visited))
1132 goto err;
1135 /* Restore the dominance information to its prior known state, so
1136 that we do not perturb the compiler's subsequent behavior. */
1137 if (orig_dom_state == DOM_NONE)
1138 free_dominance_info (CDI_DOMINATORS);
1139 else
1140 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1142 BITMAP_FREE (names_defined_in_bb);
1143 timevar_pop (TV_TREE_SSA_VERIFY);
1144 return;
1146 err:
1147 internal_error ("verify_ssa failed");
1151 /* Initialize global DFA and SSA structures. */
1153 void
1154 init_tree_ssa (struct function *fn)
1156 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1157 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1158 pt_solution_reset (&fn->gimple_df->escaped);
1159 init_ssanames (fn, 0);
1162 /* Deallocate memory associated with SSA data structures for FNDECL. */
1164 void
1165 delete_tree_ssa (struct function *fn)
1167 fini_ssanames (fn);
1169 /* We no longer maintain the SSA operand cache at this point. */
1170 if (ssa_operands_active (fn))
1171 fini_ssa_operands (fn);
1173 fn->gimple_df->default_defs->empty ();
1174 fn->gimple_df->default_defs = NULL;
1175 pt_solution_reset (&fn->gimple_df->escaped);
1176 if (fn->gimple_df->decls_to_pointers != NULL)
1177 delete fn->gimple_df->decls_to_pointers;
1178 fn->gimple_df->decls_to_pointers = NULL;
1179 fn->gimple_df = NULL;
1181 /* We no longer need the edge variable maps. */
1182 redirect_edge_var_map_empty ();
1185 /* Return true if EXPR is a useless type conversion, otherwise return
1186 false. */
1188 bool
1189 tree_ssa_useless_type_conversion (tree expr)
1191 /* If we have an assignment that merely uses a NOP_EXPR to change
1192 the top of the RHS to the type of the LHS and the type conversion
1193 is "safe", then strip away the type conversion so that we can
1194 enter LHS = RHS into the const_and_copies table. */
1195 if (CONVERT_EXPR_P (expr)
1196 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1197 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1198 return useless_type_conversion_p
1199 (TREE_TYPE (expr),
1200 TREE_TYPE (TREE_OPERAND (expr, 0)));
1202 return false;
1205 /* Strip conversions from EXP according to
1206 tree_ssa_useless_type_conversion and return the resulting
1207 expression. */
1209 tree
1210 tree_ssa_strip_useless_type_conversions (tree exp)
1212 while (tree_ssa_useless_type_conversion (exp))
1213 exp = TREE_OPERAND (exp, 0);
1214 return exp;
1218 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1219 should be returned if the value is only partially undefined. */
1221 bool
1222 ssa_undefined_value_p (tree t, bool partial)
1224 gimple *def_stmt;
1225 tree var = SSA_NAME_VAR (t);
1227 if (!var)
1229 /* Parameters get their initial value from the function entry. */
1230 else if (TREE_CODE (var) == PARM_DECL)
1231 return false;
1232 /* When returning by reference the return address is actually a hidden
1233 parameter. */
1234 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1235 return false;
1236 /* Hard register variables get their initial value from the ether. */
1237 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1238 return false;
1240 /* The value is undefined iff its definition statement is empty. */
1241 def_stmt = SSA_NAME_DEF_STMT (t);
1242 if (gimple_nop_p (def_stmt))
1243 return true;
1245 /* Check if the complex was not only partially defined. */
1246 if (partial && is_gimple_assign (def_stmt)
1247 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1249 tree rhs1, rhs2;
1251 rhs1 = gimple_assign_rhs1 (def_stmt);
1252 rhs2 = gimple_assign_rhs2 (def_stmt);
1253 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1254 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1256 return false;
1260 /* Return TRUE iff STMT, a gimple statement, references an undefined
1261 SSA name. */
1263 bool
1264 gimple_uses_undefined_value_p (gimple *stmt)
1266 ssa_op_iter iter;
1267 tree op;
1269 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1270 if (ssa_undefined_value_p (op))
1271 return true;
1273 return false;
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  SUITABLE_FOR_RENAMING
   holds the DECL_UIDs of variables that may be rewritten into SSA
   form; only bases of such decls are touched.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Dive to the innermost base of a handled-component chain; the
     rewrite only applies to the base itself.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only rewrite MEM_REF (&sym + off) where SYM is a non-addressable
     decl marked suitable for renaming and the access has a usable
     register type.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* Element extraction from a vector at an element-aligned offset
	 becomes a BIT_FIELD_REF (offset converted from bytes to bits).  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* Part extraction from a complex value becomes REALPART_EXPR
	 (offset zero) or IMAGPART_EXPR (any other offset).  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* A full-size access at offset zero is the decl itself, possibly
	 wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* Otherwise an in-bounds, byte-aligned, non-bitfield-precision
	 integral (or non-integral) extract becomes a BIT_FIELD_REF at
	 the byte offset scaled to bits.  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && mem_ref_offset (*tp) >= 0
	       && wi::leu_p (mem_ref_offset (*tp)
			     + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
			     wi::to_offset (DECL_SIZE_UNIT (sym)))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.
   The cases rejected here mirror the rewrites maybe_rewrite_mem_ref_base
   is able to perform.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      /* Non-register or void-typed accesses cannot be rewritten at
	 all; DECL has to stay a memory object.  */
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base)))
	return decl;
      /* Element/part extraction from a vector or complex decl can be
	 rewritten (cf. the BIT_FIELD_REF / REAL-IMAGPART cases in
	 maybe_rewrite_mem_ref_base) when the offset is in range and
	 element aligned.  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && wi::fits_uhwi_p (mem_ref_offset (base))
	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
			mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
	  && mem_ref_offset (base) >= 0
	  && wi::leu_p (mem_ref_offset (base)
			+ wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
			wi::to_offset (DECL_SIZE_UNIT (decl)))
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianess.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      /* No lowering is possible; DECL must stay addressable memory.  */
      return decl;
    }

  return NULL_TREE;
}
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner...  */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  The offset must be an in-bounds,
	 element-aligned constant.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && types_compatible_p (TREE_TYPE (lhs),
				 TREE_TYPE (TREE_TYPE (decl)))
	  && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
	  && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
			      TYPE_SIZE_UNIT (TREE_TYPE (decl)))
	  && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
	      % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
	return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  The bit position must be element aligned.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
			     TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA by recording its DECL_UID
   in SUITABLE_FOR_RENAMING.  ADDRESSES_TAKEN holds the UIDs of decls whose
   address is taken; NOT_REG_NEEDS holds the UIDs of decls that must remain
   non-registers.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register. Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Complex and vector decls can additionally be promoted to GIMPLE
     registers when nothing requires them to stay in memory.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.
   Runs in two phases: first scan all statements and PHIs to collect
   which decls have their address taken and which must stay non-registers,
   then clear/set the flags via maybe_optimize_var and rewrite statements
   referencing the newly renamable decls so SSA form can be updated.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);
	  tree decl;

	  if (code == GIMPLE_CALL
	      && optimize_atomic_compare_exchange_p (stmt))
	    {
	      /* For __atomic_compare_exchange_N if the second argument
		 is &var, don't mark var addressable;
		 if it becomes non-addressable, we'll rewrite it into
		 ATOMIC_COMPARE_EXCHANGE call.
		 Temporarily replace the argument so the generic address
		 collection below does not see it.  */
	      tree arg = gimple_call_arg (stmt, 1);
	      gimple_call_set_arg (stmt, 1, null_pointer_node);
	      gimple_ior_addresses_taken (addresses_taken, stmt);
	      gimple_call_set_arg (stmt, 1, arg);
	    }
	  else
	    /* Note all addresses taken by the stmt.  */
	    gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that requires not to be a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
		      || non_rewritable_lvalue_p (lhs)))
		{
		  decl = get_base_address (lhs);
		  if (DECL_P (decl))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  if (gimple_assign_single_p (stmt))
	    {
	      /* A non-rewritable MEM_REF base on the RHS keeps the decl
		 a non-register as well.  */
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if ((decl = non_rewritable_mem_ref_base (rhs)))
		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
	    }

	  else if (code == GIMPLE_CALL)
	    {
	      /* Likewise for every call argument.  */
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (arg)))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  else if (code == GIMPLE_ASM)
	    {
	      gasm *asm_stmt = as_a <gasm *> (stmt);
	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_output_op (asm_stmt, i);
		  tree lhs = TREE_VALUE (link);
		  if (TREE_CODE (lhs) != SSA_NAME)
		    {
		      decl = get_base_address (lhs);
		      if (DECL_P (decl)
			  && (non_rewritable_lvalue_p (lhs)
			      /* We cannot move required conversions from
				 the lhs to the rhs in asm statements, so
				 require we do not need any.  */
			      || !useless_type_conversion_p
				    (TREE_TYPE (lhs), TREE_TYPE (decl))))
			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		    }
		}
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_input_op (asm_stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }
	}

      /* PHI arguments can also take addresses via ADDR_EXPR.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  size_t i;
	  gphi *phi = gsi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
	/* NB: gsi_next is done at the bottom of the loop body because
	   several rewrite cases below `continue' after replacing the
	   statement in place.  */
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);

	    /* Re-write TARGET_MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
	    if (gimple_assign_single_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
		tree sym;

		/* Rewrite LHS IMAG/REALPART_EXPR similar to
		   gimplify_modify_expr_complex_part.  */
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    /* Load the untouched half and recombine with the
		       stored value into a full COMPLEX_EXPR store.  */
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    gimple *load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    /* Re-fetch: the rhs replacement may have allocated a
		       new statement.  */
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == BIT_FIELD_REF
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0)))
		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
		    && types_compatible_p (TREE_TYPE (lhs),
					   TREE_TYPE (TREE_TYPE
						       (TREE_OPERAND (lhs, 0))))
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
		  {
		    tree var = TREE_OPERAND (lhs, 0);
		    tree val = gimple_assign_rhs1 (stmt);
		    tree bitpos = TREE_OPERAND (lhs, 2);
		    gimple_assign_set_lhs (stmt, var);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert using a MEM_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
		    && VECTOR_TYPE_P (TREE_TYPE (sym))
		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
		    && types_compatible_p (TREE_TYPE (lhs),
					   TREE_TYPE (TREE_TYPE (sym)))
		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
		  {
		    tree val = gimple_assign_rhs1 (stmt);
		    /* Byte offset converted to a bit position.  */
		    tree bitpos
		      = wide_int_to_tree (bitsizetype,
					  mem_ref_offset (lhs) * BITS_PER_UNIT);
		    gimple_assign_set_lhs (stmt, sym);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  {
		    if (gimple_clobber_p (stmt))
		      {
			/* Rebuild the clobber with the new LHS type.  */
			rhs = build_constructor (TREE_TYPE (lhs), NULL);
			TREE_THIS_VOLATILE (rhs) = 1;
		      }
		    else
		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
					 TREE_TYPE (lhs), rhs);
		  }
		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		if (optimize_atomic_compare_exchange_p (stmt))
		  {
		    /* The "expected" argument &var became rewritable;
		       lower the call to the internal function form.  */
		    tree expected = gimple_call_arg (stmt, 1);
		    if (bitmap_bit_p (suitable_for_renaming,
				      DECL_UID (TREE_OPERAND (expected, 0))))
		      {
			fold_builtin_atomic_compare_exchange (&gsi);
			continue;
		      }
		  }
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
namespace {

/* Pass descriptor for the "addressables" pass, which recomputes
   TREE_ADDRESSABLE/DECL_GIMPLE_REG_P via TODO_update_address_taken.  */

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* The pass has no execute hook of its own; all work is done by the
   pass manager through the todo_flags_finish above.  */

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
1919 gimple_opt_pass *
1920 make_pass_update_address_taken (gcc::context *ctxt)
1922 return new pass_update_address_taken (ctxt);