2016-10-07 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / tree-ssa.c
blob261d9b0c90b18506e3a476238c8896ef11dd61bd
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
45 /* Pointer map of variable mappings, keyed by edge. */
46 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
49 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
51 void
52 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
54 edge_var_map new_node;
56 if (edge_var_maps == NULL)
57 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
59 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
60 new_node.def = def;
61 new_node.result = result;
62 new_node.locus = locus;
64 slot.safe_push (new_node);
68 /* Clear the var mappings in edge E. */
70 void
71 redirect_edge_var_map_clear (edge e)
73 if (!edge_var_maps)
74 return;
76 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
78 if (head)
79 head->release ();
83 /* Duplicate the redirected var mappings in OLDE in NEWE.
85 This assumes a hash_map can have multiple edges mapping to the same
86 var_map (many to one mapping), since we don't remove the previous mappings.
89 void
90 redirect_edge_var_map_dup (edge newe, edge olde)
92 if (!edge_var_maps)
93 return;
95 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
96 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
97 if (!old_head)
98 return;
100 new_head->safe_splice (*old_head);
104 /* Return the variable mappings for a given edge. If there is none, return
105 NULL. */
107 vec<edge_var_map> *
108 redirect_edge_var_map_vector (edge e)
110 /* Hey, what kind of idiot would... you'd be surprised. */
111 if (!edge_var_maps)
112 return NULL;
114 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
115 if (!slot)
116 return NULL;
118 return slot;
121 /* Clear the edge variable mappings. */
123 void
124 redirect_edge_var_map_empty (void)
126 if (edge_var_maps)
127 edge_var_maps->empty ();
131 /* Remove the corresponding arguments from the PHI nodes in E's
132 destination block and redirect it to DEST. Return redirected edge.
133 The list of removed arguments is stored in a vector accessed
134 through edge_var_maps. */
136 edge
137 ssa_redirect_edge (edge e, basic_block dest)
139 gphi_iterator gsi;
140 gphi *phi;
142 redirect_edge_var_map_clear (e);
144 /* Remove the appropriate PHI arguments in E's destination block. */
145 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
147 tree def;
148 source_location locus ;
150 phi = gsi.phi ();
151 def = gimple_phi_arg_def (phi, e->dest_idx);
152 locus = gimple_phi_arg_location (phi, e->dest_idx);
154 if (def == NULL_TREE)
155 continue;
157 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
160 e = redirect_edge_succ_nodup (e, dest);
162 return e;
166 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
167 E->dest. */
169 void
170 flush_pending_stmts (edge e)
172 gphi *phi;
173 edge_var_map *vm;
174 int i;
175 gphi_iterator gsi;
177 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
178 if (!v)
179 return;
181 for (gsi = gsi_start_phis (e->dest), i = 0;
182 !gsi_end_p (gsi) && v->iterate (i, &vm);
183 gsi_next (&gsi), i++)
185 tree def;
187 phi = gsi.phi ();
188 def = redirect_edge_var_map_def (vm);
189 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
192 redirect_edge_var_map_clear (e);
195 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
196 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
197 expression with a different value.
199 This will update any annotations (say debug bind stmts) referring
200 to the original LHS, so that they use the RHS instead. This is
201 done even if NLHS and LHS are the same, for it is understood that
202 the RHS will be modified afterwards, and NLHS will not be assigned
203 an equivalent value.
205 Adjusting any non-annotation uses of the LHS, if needed, is a
206 responsibility of the caller.
208 The effect of this call should be pretty much the same as that of
209 inserting a copy of STMT before STMT, and then removing the
210 original stmt, at which time gsi_remove() would have update
211 annotations, but using this function saves all the inserting,
212 copying and removing. */
214 void
215 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
217 if (MAY_HAVE_DEBUG_STMTS)
219 tree lhs = gimple_get_lhs (stmt);
221 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
223 insert_debug_temp_for_var_def (NULL, lhs);
226 gimple_set_lhs (stmt, nlhs);
230 /* Given a tree for an expression for which we might want to emit
231 locations or values in debug information (generally a variable, but
232 we might deal with other kinds of trees in the future), return the
233 tree that should be used as the variable of a DEBUG_BIND STMT or
234 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
236 tree
237 target_for_debug_bind (tree var)
239 if (!MAY_HAVE_DEBUG_STMTS)
240 return NULL_TREE;
242 if (TREE_CODE (var) == SSA_NAME)
244 var = SSA_NAME_VAR (var);
245 if (var == NULL_TREE)
246 return NULL_TREE;
249 if ((TREE_CODE (var) != VAR_DECL
250 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
251 && TREE_CODE (var) != PARM_DECL)
252 return NULL_TREE;
254 if (DECL_HAS_VALUE_EXPR_P (var))
255 return target_for_debug_bind (DECL_VALUE_EXPR (var));
257 if (DECL_IGNORED_P (var))
258 return NULL_TREE;
260 /* var-tracking only tracks registers. */
261 if (!is_gimple_reg_type (TREE_TYPE (var)))
262 return NULL_TREE;
264 return var;
267 /* Called via walk_tree, look for SSA_NAMEs that have already been
268 released. */
270 static tree
271 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
273 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
275 if (wi && wi->is_lhs)
276 return NULL_TREE;
278 if (TREE_CODE (*tp) == SSA_NAME)
280 if (SSA_NAME_IN_FREE_LIST (*tp))
281 return *tp;
283 *walk_subtrees = 0;
285 else if (IS_TYPE_OR_DECL_P (*tp))
286 *walk_subtrees = 0;
288 return NULL_TREE;
291 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
292 by other DEBUG stmts, and replace uses of the DEF with the
293 newly-created debug temp. */
295 void
296 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
298 imm_use_iterator imm_iter;
299 use_operand_p use_p;
300 gimple *stmt;
301 gimple *def_stmt = NULL;
302 int usecount = 0;
303 tree value = NULL;
305 if (!MAY_HAVE_DEBUG_STMTS)
306 return;
308 /* If this name has already been registered for replacement, do nothing
309 as anything that uses this name isn't in SSA form. */
310 if (name_registered_for_update_p (var))
311 return;
313 /* Check whether there are debug stmts that reference this variable and,
314 if there are, decide whether we should use a debug temp. */
315 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
317 stmt = USE_STMT (use_p);
319 if (!gimple_debug_bind_p (stmt))
320 continue;
322 if (usecount++)
323 break;
325 if (gimple_debug_bind_get_value (stmt) != var)
327 /* Count this as an additional use, so as to make sure we
328 use a temp unless VAR's definition has a SINGLE_RHS that
329 can be shared. */
330 usecount++;
331 break;
335 if (!usecount)
336 return;
338 if (gsi)
339 def_stmt = gsi_stmt (*gsi);
340 else
341 def_stmt = SSA_NAME_DEF_STMT (var);
343 /* If we didn't get an insertion point, and the stmt has already
344 been removed, we won't be able to insert the debug bind stmt, so
345 we'll have to drop debug information. */
346 if (gimple_code (def_stmt) == GIMPLE_PHI)
348 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
349 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
350 value = NULL;
351 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
352 to. */
353 else if (value == error_mark_node)
354 value = NULL;
356 else if (is_gimple_assign (def_stmt))
358 bool no_value = false;
360 if (!dom_info_available_p (CDI_DOMINATORS))
362 struct walk_stmt_info wi;
364 memset (&wi, 0, sizeof (wi));
366 /* When removing blocks without following reverse dominance
367 order, we may sometimes encounter SSA_NAMEs that have
368 already been released, referenced in other SSA_DEFs that
369 we're about to release. Consider:
371 <bb X>:
372 v_1 = foo;
374 <bb Y>:
375 w_2 = v_1 + bar;
376 # DEBUG w => w_2
378 If we deleted BB X first, propagating the value of w_2
379 won't do us any good. It's too late to recover their
380 original definition of v_1: when it was deleted, it was
381 only referenced in other DEFs, it couldn't possibly know
382 it should have been retained, and propagating every
383 single DEF just in case it might have to be propagated
384 into a DEBUG STMT would probably be too wasteful.
386 When dominator information is not readily available, we
387 check for and accept some loss of debug information. But
388 if it is available, there's no excuse for us to remove
389 blocks in the wrong order, so we don't even check for
390 dead SSA NAMEs. SSA verification shall catch any
391 errors. */
392 if ((!gsi && !gimple_bb (def_stmt))
393 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
394 no_value = true;
397 if (!no_value)
398 value = gimple_assign_rhs_to_tree (def_stmt);
401 if (value)
403 /* If there's a single use of VAR, and VAR is the entire debug
404 expression (usecount would have been incremented again
405 otherwise), and the definition involves only constants and
406 SSA names, then we can propagate VALUE into this single use,
407 avoiding the temp.
409 We can also avoid using a temp if VALUE can be shared and
410 propagated into all uses, without generating expressions that
411 wouldn't be valid gimple RHSs.
413 Other cases that would require unsharing or non-gimple RHSs
414 are deferred to a debug temp, although we could avoid temps
415 at the expense of duplication of expressions. */
417 if (CONSTANT_CLASS_P (value)
418 || gimple_code (def_stmt) == GIMPLE_PHI
419 || (usecount == 1
420 && (!gimple_assign_single_p (def_stmt)
421 || is_gimple_min_invariant (value)))
422 || is_gimple_reg (value))
424 else
426 gdebug *def_temp;
427 tree vexpr = make_node (DEBUG_EXPR_DECL);
429 def_temp = gimple_build_debug_bind (vexpr,
430 unshare_expr (value),
431 def_stmt);
433 DECL_ARTIFICIAL (vexpr) = 1;
434 TREE_TYPE (vexpr) = TREE_TYPE (value);
435 if (DECL_P (value))
436 DECL_MODE (vexpr) = DECL_MODE (value);
437 else
438 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
440 if (gsi)
441 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
442 else
444 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
445 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
448 value = vexpr;
452 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
454 if (!gimple_debug_bind_p (stmt))
455 continue;
457 if (value)
459 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
460 /* unshare_expr is not needed here. vexpr is either a
461 SINGLE_RHS, that can be safely shared, some other RHS
462 that was unshared when we found it had a single debug
463 use, or a DEBUG_EXPR_DECL, that can be safely
464 shared. */
465 SET_USE (use_p, unshare_expr (value));
466 /* If we didn't replace uses with a debug decl fold the
467 resulting expression. Otherwise we end up with invalid IL. */
468 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
470 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
471 fold_stmt_inplace (&gsi);
474 else
475 gimple_debug_bind_reset_value (stmt);
477 update_stmt (stmt);
482 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
483 other DEBUG stmts, and replace uses of the DEF with the
484 newly-created debug temp. */
486 void
487 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
489 gimple *stmt;
490 ssa_op_iter op_iter;
491 def_operand_p def_p;
493 if (!MAY_HAVE_DEBUG_STMTS)
494 return;
496 stmt = gsi_stmt (*gsi);
498 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
500 tree var = DEF_FROM_PTR (def_p);
502 if (TREE_CODE (var) != SSA_NAME)
503 continue;
505 insert_debug_temp_for_var_def (gsi, var);
509 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
511 void
512 reset_debug_uses (gimple *stmt)
514 ssa_op_iter op_iter;
515 def_operand_p def_p;
516 imm_use_iterator imm_iter;
517 gimple *use_stmt;
519 if (!MAY_HAVE_DEBUG_STMTS)
520 return;
522 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
524 tree var = DEF_FROM_PTR (def_p);
526 if (TREE_CODE (var) != SSA_NAME)
527 continue;
529 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
531 if (!gimple_debug_bind_p (use_stmt))
532 continue;
534 gimple_debug_bind_reset_value (use_stmt);
535 update_stmt (use_stmt);
540 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
541 dominated stmts before their dominators, so that release_ssa_defs
542 stands a chance of propagating DEFs into debug bind stmts. */
544 void
545 release_defs_bitset (bitmap toremove)
547 unsigned j;
548 bitmap_iterator bi;
550 /* Performing a topological sort is probably overkill, this will
551 most likely run in slightly superlinear time, rather than the
552 pathological quadratic worst case. */
553 while (!bitmap_empty_p (toremove))
555 unsigned to_remove_bit = -1U;
556 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
558 if (to_remove_bit != -1U)
560 bitmap_clear_bit (toremove, to_remove_bit);
561 to_remove_bit = -1U;
564 bool remove_now = true;
565 tree var = ssa_name (j);
566 gimple *stmt;
567 imm_use_iterator uit;
569 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
571 ssa_op_iter dit;
572 def_operand_p def_p;
574 /* We can't propagate PHI nodes into debug stmts. */
575 if (gimple_code (stmt) == GIMPLE_PHI
576 || is_gimple_debug (stmt))
577 continue;
579 /* If we find another definition to remove that uses
580 the one we're looking at, defer the removal of this
581 one, so that it can be propagated into debug stmts
582 after the other is. */
583 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
585 tree odef = DEF_FROM_PTR (def_p);
587 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
589 remove_now = false;
590 break;
594 if (!remove_now)
595 BREAK_FROM_IMM_USE_STMT (uit);
598 if (remove_now)
600 gimple *def = SSA_NAME_DEF_STMT (var);
601 gimple_stmt_iterator gsi = gsi_for_stmt (def);
603 if (gimple_code (def) == GIMPLE_PHI)
604 remove_phi_node (&gsi, true);
605 else
607 gsi_remove (&gsi, true);
608 release_defs (def);
611 to_remove_bit = j;
614 if (to_remove_bit != -1U)
615 bitmap_clear_bit (toremove, to_remove_bit);
620 /* Verify virtual SSA form. */
622 bool
623 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
625 bool err = false;
627 if (bitmap_bit_p (visited, bb->index))
628 return false;
630 bitmap_set_bit (visited, bb->index);
632 /* Pick up the single virtual PHI def. */
633 gphi *phi = NULL;
634 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
635 gsi_next (&si))
637 tree res = gimple_phi_result (si.phi ());
638 if (virtual_operand_p (res))
640 if (phi)
642 error ("multiple virtual PHI nodes in BB %d", bb->index);
643 print_gimple_stmt (stderr, phi, 0, 0);
644 print_gimple_stmt (stderr, si.phi (), 0, 0);
645 err = true;
647 else
648 phi = si.phi ();
651 if (phi)
653 current_vdef = gimple_phi_result (phi);
654 if (TREE_CODE (current_vdef) != SSA_NAME)
656 error ("virtual definition is not an SSA name");
657 print_gimple_stmt (stderr, phi, 0, 0);
658 err = true;
662 /* Verify stmts. */
663 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
664 gsi_next (&gsi))
666 gimple *stmt = gsi_stmt (gsi);
667 tree vuse = gimple_vuse (stmt);
668 if (vuse)
670 if (vuse != current_vdef)
672 error ("stmt with wrong VUSE");
673 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
674 fprintf (stderr, "expected ");
675 print_generic_expr (stderr, current_vdef, 0);
676 fprintf (stderr, "\n");
677 err = true;
679 tree vdef = gimple_vdef (stmt);
680 if (vdef)
682 current_vdef = vdef;
683 if (TREE_CODE (current_vdef) != SSA_NAME)
685 error ("virtual definition is not an SSA name");
686 print_gimple_stmt (stderr, phi, 0, 0);
687 err = true;
693 /* Verify destination PHI uses and recurse. */
694 edge_iterator ei;
695 edge e;
696 FOR_EACH_EDGE (e, ei, bb->succs)
698 gphi *phi = get_virtual_phi (e->dest);
699 if (phi
700 && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
702 error ("PHI node with wrong VUSE on edge from BB %d",
703 e->src->index);
704 print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
705 fprintf (stderr, "expected ");
706 print_generic_expr (stderr, current_vdef, 0);
707 fprintf (stderr, "\n");
708 err = true;
711 /* Recurse. */
712 err |= verify_vssa (e->dest, current_vdef, visited);
715 return err;
718 /* Return true if SSA_NAME is malformed and mark it visited.
720 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
721 operand. */
723 static bool
724 verify_ssa_name (tree ssa_name, bool is_virtual)
726 if (TREE_CODE (ssa_name) != SSA_NAME)
728 error ("expected an SSA_NAME object");
729 return true;
732 if (SSA_NAME_IN_FREE_LIST (ssa_name))
734 error ("found an SSA_NAME that had been released into the free pool");
735 return true;
738 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
739 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
741 error ("type mismatch between an SSA_NAME and its symbol");
742 return true;
745 if (is_virtual && !virtual_operand_p (ssa_name))
747 error ("found a virtual definition for a GIMPLE register");
748 return true;
751 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
753 error ("virtual SSA name for non-VOP decl");
754 return true;
757 if (!is_virtual && virtual_operand_p (ssa_name))
759 error ("found a real definition for a non-register");
760 return true;
763 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
764 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
766 error ("found a default name with a non-empty defining statement");
767 return true;
770 return false;
774 /* Return true if the definition of SSA_NAME at block BB is malformed.
776 STMT is the statement where SSA_NAME is created.
778 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
779 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
780 it means that the block in that array slot contains the
781 definition of SSA_NAME.
783 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
785 static bool
786 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
787 gimple *stmt, bool is_virtual)
789 if (verify_ssa_name (ssa_name, is_virtual))
790 goto err;
792 if (SSA_NAME_VAR (ssa_name)
793 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
794 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
796 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
797 goto err;
800 if (definition_block[SSA_NAME_VERSION (ssa_name)])
802 error ("SSA_NAME created in two different blocks %i and %i",
803 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
804 goto err;
807 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
809 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
811 error ("SSA_NAME_DEF_STMT is wrong");
812 fprintf (stderr, "Expected definition statement:\n");
813 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
814 fprintf (stderr, "\nActual definition statement:\n");
815 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
816 goto err;
819 return false;
821 err:
822 fprintf (stderr, "while verifying SSA_NAME ");
823 print_generic_expr (stderr, ssa_name, 0);
824 fprintf (stderr, " in statement\n");
825 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
827 return true;
831 /* Return true if the use of SSA_NAME at statement STMT in block BB is
832 malformed.
834 DEF_BB is the block where SSA_NAME was found to be created.
836 IDOM contains immediate dominator information for the flowgraph.
838 CHECK_ABNORMAL is true if the caller wants to check whether this use
839 is flowing through an abnormal edge (only used when checking PHI
840 arguments).
842 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
843 that are defined before STMT in basic block BB. */
845 static bool
846 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
847 gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
849 bool err = false;
850 tree ssa_name = USE_FROM_PTR (use_p);
852 if (!TREE_VISITED (ssa_name))
853 if (verify_imm_links (stderr, ssa_name))
854 err = true;
856 TREE_VISITED (ssa_name) = 1;
858 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
859 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
860 ; /* Default definitions have empty statements. Nothing to do. */
861 else if (!def_bb)
863 error ("missing definition");
864 err = true;
866 else if (bb != def_bb
867 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
869 error ("definition in block %i does not dominate use in block %i",
870 def_bb->index, bb->index);
871 err = true;
873 else if (bb == def_bb
874 && names_defined_in_bb != NULL
875 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
877 error ("definition in block %i follows the use", def_bb->index);
878 err = true;
881 if (check_abnormal
882 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
884 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
885 err = true;
888 /* Make sure the use is in an appropriate list by checking the previous
889 element to make sure it's the same. */
890 if (use_p->prev == NULL)
892 error ("no immediate_use list");
893 err = true;
895 else
897 tree listvar;
898 if (use_p->prev->use == NULL)
899 listvar = use_p->prev->loc.ssa_name;
900 else
901 listvar = USE_FROM_PTR (use_p->prev);
902 if (listvar != ssa_name)
904 error ("wrong immediate use list");
905 err = true;
909 if (err)
911 fprintf (stderr, "for SSA_NAME: ");
912 print_generic_expr (stderr, ssa_name, TDF_VOPS);
913 fprintf (stderr, " in statement:\n");
914 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
917 return err;
921 /* Return true if any of the arguments for PHI node PHI at block BB is
922 malformed.
924 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
925 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
926 it means that the block in that array slot contains the
927 definition of SSA_NAME. */
929 static bool
930 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
932 edge e;
933 bool err = false;
934 size_t i, phi_num_args = gimple_phi_num_args (phi);
936 if (EDGE_COUNT (bb->preds) != phi_num_args)
938 error ("incoming edge count does not match number of PHI arguments");
939 err = true;
940 goto error;
943 for (i = 0; i < phi_num_args; i++)
945 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
946 tree op = USE_FROM_PTR (op_p);
948 e = EDGE_PRED (bb, i);
950 if (op == NULL_TREE)
952 error ("PHI argument is missing for edge %d->%d",
953 e->src->index,
954 e->dest->index);
955 err = true;
956 goto error;
959 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
961 error ("PHI argument is not SSA_NAME, or invariant");
962 err = true;
965 if (TREE_CODE (op) == SSA_NAME)
967 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
968 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
969 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
972 if (TREE_CODE (op) == ADDR_EXPR)
974 tree base = TREE_OPERAND (op, 0);
975 while (handled_component_p (base))
976 base = TREE_OPERAND (base, 0);
977 if ((TREE_CODE (base) == VAR_DECL
978 || TREE_CODE (base) == PARM_DECL
979 || TREE_CODE (base) == RESULT_DECL)
980 && !TREE_ADDRESSABLE (base))
982 error ("address taken, but ADDRESSABLE bit not set");
983 err = true;
987 if (e->dest != bb)
989 error ("wrong edge %d->%d for PHI argument",
990 e->src->index, e->dest->index);
991 err = true;
994 if (err)
996 fprintf (stderr, "PHI argument\n");
997 print_generic_stmt (stderr, op, TDF_VOPS);
998 goto error;
1002 error:
1003 if (err)
1005 fprintf (stderr, "for PHI node\n");
1006 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1010 return err;
1014 /* Verify common invariants in the SSA web.
1015 TODO: verify the variable annotations. */
1017 DEBUG_FUNCTION void
1018 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
1020 basic_block bb;
1021 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1022 ssa_op_iter iter;
1023 tree op;
1024 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1025 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
1027 gcc_assert (!need_ssa_update_p (cfun));
1029 timevar_push (TV_TREE_SSA_VERIFY);
1031 /* Keep track of SSA names present in the IL. */
1032 size_t i;
1033 tree name;
1035 FOR_EACH_SSA_NAME (i, name, cfun)
1037 gimple *stmt;
1038 TREE_VISITED (name) = 0;
1040 verify_ssa_name (name, virtual_operand_p (name));
1042 stmt = SSA_NAME_DEF_STMT (name);
1043 if (!gimple_nop_p (stmt))
1045 basic_block bb = gimple_bb (stmt);
1046 if (verify_def (bb, definition_block,
1047 name, stmt, virtual_operand_p (name)))
1048 goto err;
1052 calculate_dominance_info (CDI_DOMINATORS);
1054 /* Now verify all the uses and make sure they agree with the definitions
1055 found in the previous pass. */
1056 FOR_EACH_BB_FN (bb, cfun)
1058 edge e;
1059 edge_iterator ei;
1061 /* Make sure that all edges have a clear 'aux' field. */
1062 FOR_EACH_EDGE (e, ei, bb->preds)
1064 if (e->aux)
1066 error ("AUX pointer initialized for edge %d->%d", e->src->index,
1067 e->dest->index);
1068 goto err;
1072 /* Verify the arguments for every PHI node in the block. */
1073 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1075 gphi *phi = gsi.phi ();
1076 if (verify_phi_args (phi, bb, definition_block))
1077 goto err;
1079 bitmap_set_bit (names_defined_in_bb,
1080 SSA_NAME_VERSION (gimple_phi_result (phi)));
1083 /* Now verify all the uses and vuses in every statement of the block. */
1084 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1085 gsi_next (&gsi))
1087 gimple *stmt = gsi_stmt (gsi);
1088 use_operand_p use_p;
1090 if (check_modified_stmt && gimple_modified_p (stmt))
1092 error ("stmt (%p) marked modified after optimization pass: ",
1093 (void *)stmt);
1094 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1095 goto err;
1098 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1100 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1101 goto err;
1104 if (gimple_debug_bind_p (stmt)
1105 && !gimple_debug_bind_has_value_p (stmt))
1106 continue;
1108 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1110 op = USE_FROM_PTR (use_p);
1111 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1112 use_p, stmt, false, names_defined_in_bb))
1113 goto err;
1116 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1118 if (SSA_NAME_DEF_STMT (op) != stmt)
1120 error ("SSA_NAME_DEF_STMT is wrong");
1121 fprintf (stderr, "Expected definition statement:\n");
1122 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1123 fprintf (stderr, "\nActual definition statement:\n");
1124 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1125 4, TDF_VOPS);
1126 goto err;
1128 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1132 bitmap_clear (names_defined_in_bb);
1135 free (definition_block);
1137 if (gimple_vop (cfun)
1138 && ssa_default_def (cfun, gimple_vop (cfun)))
1140 auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
1141 bitmap_clear (visited);
1142 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1143 ssa_default_def (cfun, gimple_vop (cfun)), visited))
1144 goto err;
1147 /* Restore the dominance information to its prior known state, so
1148 that we do not perturb the compiler's subsequent behavior. */
1149 if (orig_dom_state == DOM_NONE)
1150 free_dominance_info (CDI_DOMINATORS);
1151 else
1152 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1154 BITMAP_FREE (names_defined_in_bb);
1155 timevar_pop (TV_TREE_SSA_VERIFY);
1156 return;
1158 err:
1159 internal_error ("verify_ssa failed");
1163 /* Initialize global DFA and SSA structures. */
1165 void
1166 init_tree_ssa (struct function *fn)
1168 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1169 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1170 pt_solution_reset (&fn->gimple_df->escaped);
1171 init_ssanames (fn, 0);
1174 /* Deallocate memory associated with SSA data structures for FNDECL. */
1176 void
1177 delete_tree_ssa (struct function *fn)
1179 fini_ssanames (fn);
1181 /* We no longer maintain the SSA operand cache at this point. */
1182 if (ssa_operands_active (fn))
1183 fini_ssa_operands (fn);
1185 fn->gimple_df->default_defs->empty ();
1186 fn->gimple_df->default_defs = NULL;
1187 pt_solution_reset (&fn->gimple_df->escaped);
1188 if (fn->gimple_df->decls_to_pointers != NULL)
1189 delete fn->gimple_df->decls_to_pointers;
1190 fn->gimple_df->decls_to_pointers = NULL;
1191 fn->gimple_df = NULL;
1193 /* We no longer need the edge variable maps. */
1194 redirect_edge_var_map_empty ();
1197 /* Return true if EXPR is a useless type conversion, otherwise return
1198 false. */
1200 bool
1201 tree_ssa_useless_type_conversion (tree expr)
1203 /* If we have an assignment that merely uses a NOP_EXPR to change
1204 the top of the RHS to the type of the LHS and the type conversion
1205 is "safe", then strip away the type conversion so that we can
1206 enter LHS = RHS into the const_and_copies table. */
1207 if (CONVERT_EXPR_P (expr)
1208 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1209 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1210 return useless_type_conversion_p
1211 (TREE_TYPE (expr),
1212 TREE_TYPE (TREE_OPERAND (expr, 0)));
1214 return false;
1217 /* Strip conversions from EXP according to
1218 tree_ssa_useless_type_conversion and return the resulting
1219 expression. */
1221 tree
1222 tree_ssa_strip_useless_type_conversions (tree exp)
1224 while (tree_ssa_useless_type_conversion (exp))
1225 exp = TREE_OPERAND (exp, 0);
1226 return exp;
/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined (i.e. one
   operand of a defining COMPLEX_EXPR is undefined; the default for
   PARTIAL is declared with the prototype).  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return false;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return false;
  /* Hard register variables get their initial value from the ether.  */
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check if the complex was not only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      /* Recurse with the default PARTIAL on each operand: the complex
	 value is (partially) undefined if either part is.  */
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}
1272 /* Return TRUE iff STMT, a gimple statement, references an undefined
1273 SSA name. */
1275 bool
1276 gimple_uses_undefined_value_p (gimple *stmt)
1278 ssa_op_iter iter;
1279 tree op;
1281 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1282 if (ssa_undefined_value_p (op))
1283 return true;
1285 return false;
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  Only decls whose UID is
   set in SUITABLE_FOR_RENAMING are touched.  Depending on how the
   MEM_REF overlays the decl this produces a BIT_FIELD_REF, a
   REALPART/IMAGPART_EXPR, a VIEW_CONVERT_EXPR or the bare decl.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Walk down to the innermost reference; the MEM_REF, if any, sits at
     the base of the component-reference chain.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* Element extract from a vector decl becomes a BIT_FIELD_REF at
	 the byte offset scaled to bits.  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* Part extract from a complex decl: zero offset is the real part,
	 any other (valid) offset the imaginary part.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* Full-size access at offset zero: the decl itself, possibly
	 wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* Otherwise an in-bounds, byte-aligned, non-bitfield-precision
	 extract becomes a BIT_FIELD_REF (mirrors the conditions in
	 non_rewritable_mem_ref_base).  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && mem_ref_offset (*tp) >= 0
	       && wi::leu_p (mem_ref_offset (*tp)
			     + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
			     wi::to_offset (DECL_SIZE_UNIT (sym)))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.
   The accepted (rewritable) cases deliberately mirror the rewrites
   performed by maybe_rewrite_mem_ref_base.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      /* Not an invariant reference chain; the bare base decl, if any,
	 is what would block SSA rewriting.  */
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base)))
	return decl;
      /* In-bounds, element-aligned extracts from vector/complex decls
	 can be lowered, so they do not block rewriting.  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && wi::fits_uhwi_p (mem_ref_offset (base))
	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
			mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
	  && mem_ref_offset (base) >= 0
	  && wi::leu_p (mem_ref_offset (base)
			+ wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
			wi::to_offset (DECL_SIZE_UNIT (decl)))
	  /* ??? We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianess.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      /* Anything else wrapping DECL cannot be lowered; DECL must stay
	 in memory.  */
      return decl;
    }

  return NULL_TREE;
}
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && types_compatible_p (TREE_TYPE (lhs),
				 TREE_TYPE (TREE_TYPE (decl)))
	  && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
	  && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
			      TYPE_SIZE_UNIT (TREE_TYPE (decl)))
	  && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
	      % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
	return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
			     TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  /* Everything else keeps the base in memory.  */
  return true;
}
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA by recording its UID in
   SUITABLE_FOR_RENAMING.  ADDRESSES_TAKEN and NOT_REG_NEEDS are the bitmaps
   computed over the function body that veto the respective change.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Complex and vector decls that are never required to live in memory
     can additionally be promoted to GIMPLE registers.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.
   Pass 1 scans the IL collecting which decls have their address taken
   and which must stay non-registers; pass 2 (only when something became
   renamable) rewrites statements to expose the bare symbols and updates
   SSA form.  Also callable outside the pass manager.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);
	  tree decl;

	  if (code == GIMPLE_CALL
	      && optimize_atomic_compare_exchange_p (stmt))
	    {
	      /* For __atomic_compare_exchange_N if the second argument
		 is &var, don't mark var addressable;
		 if it becomes non-addressable, we'll rewrite it into
		 ATOMIC_COMPARE_EXCHANGE call.  */
	      tree arg = gimple_call_arg (stmt, 1);
	      /* Temporarily hide the &var argument so the generic
		 address-collection below does not see it.  */
	      gimple_call_set_arg (stmt, 1, null_pointer_node);
	      gimple_ior_addresses_taken (addresses_taken, stmt);
	      gimple_call_set_arg (stmt, 1, arg);
	    }
	  else
	    /* Note all addresses taken by the stmt.  */
	    gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that requires not to be a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
		      || non_rewritable_lvalue_p (lhs)))
		{
		  decl = get_base_address (lhs);
		  if (DECL_P (decl))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if ((decl = non_rewritable_mem_ref_base (rhs)))
		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
	    }

	  else if (code == GIMPLE_CALL)
	    {
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (arg)))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  else if (code == GIMPLE_ASM)
	    {
	      gasm *asm_stmt = as_a <gasm *> (stmt);
	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_output_op (asm_stmt, i);
		  tree lhs = TREE_VALUE (link);
		  if (TREE_CODE (lhs) != SSA_NAME)
		    {
		      decl = get_base_address (lhs);
		      if (DECL_P (decl)
			  && (non_rewritable_lvalue_p (lhs)
			      /* We cannot move required conversions from
				 the lhs to the rhs in asm statements, so
				 require we do not need any.  */
			      || !useless_type_conversion_p
				    (TREE_TYPE (lhs), TREE_TYPE (decl))))
			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		    }
		}
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_input_op (asm_stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }
	}

      /* PHI arguments can also take addresses (via ADDR_EXPR operands).  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  size_t i;
	  gphi *phi = gsi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);

	    /* Re-write TARGET_MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
	    if (gimple_assign_single_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
		tree sym;

		/* Rewrite LHS IMAG/REALPART_EXPR similar to
		   gimplify_modify_expr_complex_part.  */
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    /* Load the other half, then store the whole complex
		       value via a COMPLEX_EXPR.  */
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    gimple *load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == BIT_FIELD_REF
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0)))
		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
		    && types_compatible_p (TREE_TYPE (lhs),
					   TREE_TYPE (TREE_TYPE
						       (TREE_OPERAND (lhs, 0))))
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
		  {
		    tree var = TREE_OPERAND (lhs, 0);
		    tree val = gimple_assign_rhs1 (stmt);
		    tree bitpos = TREE_OPERAND (lhs, 2);
		    gimple_assign_set_lhs (stmt, var);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert using a MEM_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
		    && VECTOR_TYPE_P (TREE_TYPE (sym))
		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
		    && types_compatible_p (TREE_TYPE (lhs),
					   TREE_TYPE (TREE_TYPE (sym)))
		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
		  {
		    tree val = gimple_assign_rhs1 (stmt);
		    tree bitpos
		      = wide_int_to_tree (bitsizetype,
					  mem_ref_offset (lhs) * BITS_PER_UNIT);
		    gimple_assign_set_lhs (stmt, sym);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  {
		    if (gimple_clobber_p (stmt))
		      {
			/* Re-materialize the clobber for the new LHS type
			   rather than view-converting it.  */
			rhs = build_constructor (TREE_TYPE (lhs), NULL);
			TREE_THIS_VOLATILE (rhs) = 1;
		      }
		    else
		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
					 TREE_TYPE (lhs), rhs);
		  }
		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		if (optimize_atomic_compare_exchange_p (stmt))
		  {
		    tree expected = gimple_call_arg (stmt, 1);
		    if (bitmap_bit_p (suitable_for_renaming,
				      DECL_UID (TREE_OPERAND (expected, 0))))
		      {
			fold_builtin_atomic_compare_exchange (&gsi);
			continue;
		      }
		  }
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		/* A renamed decl we could not rewrite leaves the debug
		   bind meaningless; drop its value.  */
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
namespace {

/* Pass descriptor for the "addressables" pass.  All of its work is
   driven by the TODO_update_address_taken finish flag; the class below
   overrides no execute method.  */

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
/* Factory for the addressables pass; invoked by the pass manager.
   Ownership of the returned pass object transfers to the caller.  */

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}